├── .formatter.exs ├── .github └── workflows │ ├── latest_conformance.yml │ └── main.yml ├── .gitignore ├── CHANGELOG.md ├── LICENSE ├── README.md ├── bench ├── .gitignore ├── README.md ├── data │ ├── dataset.google_message1_proto2.pb │ ├── dataset.google_message1_proto3.pb │ ├── dataset.google_message2.pb │ └── download.sh ├── lib │ ├── benchmarks.pb.ex │ ├── datasets │ │ ├── google_message1 │ │ │ ├── proto2 │ │ │ │ └── benchmark_message1_proto2.pb.ex │ │ │ └── proto3 │ │ │ │ └── benchmark_message1_proto3.pb.ex │ │ ├── google_message2 │ │ │ └── benchmark_message2.pb.ex │ │ ├── google_message3 │ │ │ ├── benchmark_message3.pb.ex │ │ │ ├── benchmark_message3_1.pb.ex │ │ │ ├── benchmark_message3_2.pb.ex │ │ │ ├── benchmark_message3_3.pb.ex │ │ │ ├── benchmark_message3_4.pb.ex │ │ │ ├── benchmark_message3_5.pb.ex │ │ │ ├── benchmark_message3_6.pb.ex │ │ │ ├── benchmark_message3_7.pb.ex │ │ │ └── benchmark_message3_8.pb.ex │ │ └── google_message4 │ │ │ ├── benchmark_message4.pb.ex │ │ │ ├── benchmark_message4_1.pb.ex │ │ │ ├── benchmark_message4_2.pb.ex │ │ │ └── benchmark_message4_3.pb.ex │ └── proto_bench.ex ├── mix.exs ├── mix.lock └── script │ ├── bench.exs │ ├── load.exs │ └── standard_bench.exs ├── conformance ├── README.md ├── exemptions.txt ├── protobuf │ └── runner.ex └── runner.sh ├── coveralls.json ├── lib ├── elixirpb.pb.ex ├── elixirpb │ └── pb_extension.pb.ex ├── google │ ├── protobuf.ex │ └── protobuf │ │ ├── any.pb.ex │ │ ├── compiler │ │ └── plugin.pb.ex │ │ ├── descriptor.pb.ex │ │ ├── duration.pb.ex │ │ ├── empty.pb.ex │ │ ├── field_mask.pb.ex │ │ ├── struct.pb.ex │ │ ├── timestamp.pb.ex │ │ └── wrappers.pb.ex ├── protobuf.ex └── protobuf │ ├── any.ex │ ├── decoder.ex │ ├── dsl.ex │ ├── dsl │ ├── enum.ex │ └── typespecs.ex │ ├── encoder.ex │ ├── errors.ex │ ├── extension.ex │ ├── extension │ └── props.ex │ ├── field_props.ex │ ├── json.ex │ ├── json │ ├── decode.ex │ ├── decode_error.ex │ ├── encode.ex │ ├── encode_error.ex │ ├── json_library.ex │ ├── rfc3339.ex │ └── utils.ex │ ├── message_props.ex │ ├── protoc │ ├── cli.ex │ ├── context.ex │ ├── generator.ex │ └── generator │ │ ├── comment.ex │ │ ├── enum.ex │ │ ├── extension.ex │ │ ├── message.ex │ │ ├── service.ex │ │ └── util.ex │ ├── transform_module.ex │ ├── wire.ex │ └── wire │ ├── types.ex │ ├── varint.ex │ └── zigzag.ex ├── mix.exs ├── mix.lock ├── priv └── templates │ ├── enum.ex.eex │ ├── extension.ex.eex │ ├── message.ex.eex │ └── service.ex.eex ├── src └── elixirpb.proto └── test ├── google └── protobuf_test.exs ├── pbt ├── encode_decode_type_test.exs ├── encode_decode_varint_test.exs └── unknown_fields_test.exs ├── protobuf ├── any_test.exs ├── builder_test.exs ├── conformance_regressions_test.exs ├── decoder_test.exs ├── dsl │ └── typespecs_test.exs ├── dsl_test.exs ├── encoder_test.exs ├── encoder_validation_test.exs ├── extension_test.exs ├── json │ ├── decode_test.exs │ ├── encode_test.exs │ ├── rfc3339_test.exs │ └── utils_test.exs ├── json_test.exs ├── message_merge_test.exs ├── protobuf_test.exs ├── protoc │ ├── cli_integration_test.exs │ ├── cli_test.exs │ ├── generator │ │ ├── enum_test.exs │ │ ├── extension_test.exs │ │ ├── message_test.exs │ │ ├── service_test.exs │ │ └── util_test.exs │ ├── generator_integration_test.exs │ ├── generator_test.exs │ └── proto │ │ ├── custom_options.proto │ │ ├── extension.proto │ │ ├── no_package.proto │ │ ├── service.proto │ │ └── test.proto ├── wire │ └── varint_test.exs └── wire_test.exs ├── support ├── doctest.ex └── test_msg.ex └── test_helper.exs 
/.formatter.exs: -------------------------------------------------------------------------------- 1 | locals_without_parens = [field: 2, field: 3, oneof: 2, extend: 4, extensions: 1] 2 | 3 | [ 4 | inputs: ["{mix,.formatter}.exs", "{config,lib,conformance,test}/**/*.{ex,exs}"], 5 | locals_without_parens: locals_without_parens, 6 | export: [locals_without_parens: locals_without_parens], 7 | import_deps: [:stream_data] 8 | ] 9 | -------------------------------------------------------------------------------- /.github/workflows/latest_conformance.yml: -------------------------------------------------------------------------------- 1 | name: Latest conformance 2 | 3 | # This workflow *only* tries to run the conformance checks. It is triggered periodically on a cron 4 | # to catch when new conformance tests are added and they don't pass. It uses the latest version of 5 | # protoc to catch any changes in conformance test. 6 | 7 | on: 8 | # This is needed to trigger the workflow manually from the "Actions" tab in the repo. 9 | workflow_dispatch: 10 | inputs: {} 11 | # Every day at 9am. 12 | schedule: 13 | - cron: "0 9 * * *" 14 | 15 | jobs: 16 | latest-conformance-test: 17 | name: Run conformance tests on latest protoc 18 | runs-on: ubuntu-22.04 19 | strategy: 20 | matrix: 21 | include: 22 | - otp: 24.2 23 | elixir: 1.14 24 | 25 | env: 26 | MIX_ENV: test 27 | 28 | steps: 29 | - name: Checkout this repo 30 | uses: actions/checkout@v2 31 | 32 | - name: Update and install dependencies to build protoc locally 33 | # Dependencies from https://github.com/protocolbuffers/protobuf/blob/main/src/README.md 34 | run: sudo apt-get update && sudo apt-get install -y git cmake curl make g++ jq 35 | 36 | - name: Get SHA of Protobuf repo's main branch 37 | id: get-protobuf-sha 38 | run: | 39 | echo sha="$( curl -u "u:${{github.token}}" https://api.github.com/repos/protocolbuffers/protobuf/git/ref/heads/main | jq .object.sha | tr -d '"' )" >> $GITHUB_OUTPUT 40 | 41 | - name: Checkout Protobuf repo 42 | uses: actions/checkout@v2 43 | with: 44 | ref: ${{ steps.get-protobuf-sha.outputs.sha }} 45 | repository: protocolbuffers/protobuf 46 | submodules: true 47 | path: protobuf 48 | 49 | - name: Cache built Protobuf source 50 | id: cache-protobuf-source 51 | uses: actions/cache@v4 52 | with: 53 | path: protobuf 54 | key: ${{ runner.os }}-protobuf-${{ steps.get-protobuf-sha.outputs.sha }} 55 | 56 | - name: Build Protobuf and the conformance test runner 57 | if: steps.cache-protobuf-source.outputs.cache-hit != 'true' 58 | working-directory: protobuf 59 | run: | 60 | cmake \ 61 | -DCMAKE_BUILD_TYPE=Release \ 62 | -Dprotobuf_BUILD_TESTS=OFF \ 63 | -Dprotobuf_INSTALL=OFF \ 64 | -Dprotobuf_BUILD_CONFORMANCE=ON \ 65 | . 
66 | NUM_CPUS=$(getconf _NPROCESSORS_ONLN) 67 | make -j "${NUM_CPUS}" protoc conformance_test_runner 68 | 69 | - name: Add protoc to $GITHUB_PATH 70 | run: echo "$PWD/protobuf" >> $GITHUB_PATH 71 | 72 | - name: Install OTP and Elixir 73 | uses: erlef/setup-beam@v1 74 | with: 75 | otp-version: ${{ matrix.otp }} 76 | elixir-version: ${{ matrix.elixir }} 77 | 78 | - name: Get and compile dependencies 79 | run: mix do deps.get, deps.compile 80 | 81 | - name: Compile project 82 | run: mix compile 83 | 84 | - name: Run mix protobuf.conformance with the runner from Protobuf's main branch 85 | run: mix conformance_test --verbose 86 | env: 87 | PROTOBUF_ROOT: ./protobuf 88 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | 9 | jobs: 10 | test: 11 | name: Test (Elixir ${{ matrix.elixir }} | Erlang/OTP ${{ matrix.otp }}) 12 | runs-on: ubuntu-22.04 13 | strategy: 14 | fail-fast: false 15 | matrix: 16 | include: 17 | - otp: 27.0 18 | elixir: 1.18.1 19 | - otp: 27.0 20 | elixir: 1.17.3 21 | - otp: 24.3 22 | elixir: 1.12.3 23 | 24 | env: 25 | MIX_ENV: test 26 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 27 | 28 | steps: 29 | - name: Checkout this repo 30 | uses: actions/checkout@v2 31 | 32 | - name: Install Protoc 33 | uses: arduino/setup-protoc@v1 34 | with: 35 | version: "3.17.3" 36 | repo-token: ${{ secrets.GITHUB_TOKEN }} # to avoid rate limiting 37 | 38 | - name: Install OTP and Elixir 39 | uses: erlef/setup-beam@v1 40 | with: 41 | otp-version: ${{ matrix.otp }} 42 | elixir-version: ${{ matrix.elixir }} 43 | 44 | - name: Cache dependencies 45 | id: cache-deps 46 | uses: actions/cache@v4 47 | with: 48 | path: deps 49 | key: ${{ runner.os }}-mix-${{ hashFiles('**/mix.lock') }}-elixir-${{ matrix.elixir }}-erlang-${{ matrix.otp }} 50 | 51 | - name: Install dependencies 52 | if: steps.cache-deps.outputs.cache-hit != 'true' 53 | run: mix deps.get 54 | 55 | # Don't cache PLTs based on mix.lock hash, as Dialyzer can incrementally update even old ones 56 | # Cache key based on Elixir & Erlang version (also useful when running in matrix) 57 | - name: Cache Dialyzer's PLT 58 | uses: actions/cache@v4 59 | id: cache-plt 60 | with: 61 | path: priv/plts 62 | key: ${{ runner.os }}-otp${{ matrix.otp }}-elixir${{ matrix.elixir }}-plt 63 | 64 | # Create PLTs if no cache was found 65 | - name: Create PLTs 66 | if: steps.cache-plt.outputs.cache-hit != 'true' 67 | run: mix dialyzer --plt 68 | 69 | - name: Check for unused dependencies 70 | run: mix deps.unlock --check-unused 71 | if: ${{ matrix.lint }} 72 | 73 | - name: Compile with --warnings-as-errors 74 | run: mix compile --warnings-as-errors 75 | if: ${{ matrix.lint }} 76 | 77 | - name: Check mix format 78 | run: mix format --check-formatted 79 | if: ${{ matrix.lint }} 80 | 81 | - name: Run tests with coverage 82 | run: mix coveralls.github 83 | if: ${{ matrix.coverage }} 84 | 85 | - name: Run tests without coverage 86 | run: mix test --trace 87 | if: ${{ !matrix.coverage }} 88 | 89 | - name: Run Dialyzer 90 | run: mix dialyzer 91 | 92 | - name: Compile .proto files to Elixir with protoc 93 | if: ${{ matrix.integration }} 94 | env: 95 | PROTO_BENCH: ./deps/google_protobuf/benchmarks 96 | run: | 97 | mix gen_test_protos 98 | mix gen_bootstrap_protos 99 | 100 | - name: Run integration tests 101 | if: ${{ matrix.integration }} 102 | run: mix test 
--only integration 103 | 104 | - name: Check that generated files did not change 105 | if: ${{ matrix.integration }} 106 | run: | 107 | rm -rf ./google_protobuf 108 | if [[ -n "$(git status -uno --porcelain)" ]]; then 109 | echo "CHANGES TO THE CODE SEEM TO CHANGE THE GENERATED .pb.ex FILES." 110 | echo "MAKE SURE TO RUN THESE TASKS BEFORE MERGING:" 111 | echo "" 112 | echo " mix do gen_bootstrap_protos, gen_test_protos" 113 | echo "" 114 | echo "Run the above commands and `git diff` to verify the changes" 115 | exit 1 116 | else 117 | echo "Generated .pb.ex are up to date" 118 | fi 119 | 120 | conformance-test: 121 | name: Conformance test (Elixir ${{matrix.elixir}} | Erlang/OTP ${{matrix.otp}}) 122 | runs-on: ubuntu-22.04 123 | strategy: 124 | fail-fast: false 125 | matrix: 126 | otp: [24.2] 127 | elixir: [1.14] 128 | 129 | env: 130 | MIX_ENV: test 131 | 132 | steps: 133 | - uses: actions/checkout@v2 134 | 135 | - name: Install OTP and Elixir 136 | uses: erlef/setup-beam@v1 137 | with: 138 | otp-version: ${{ matrix.otp }} 139 | elixir-version: ${{ matrix.elixir }} 140 | 141 | - name: Cache Elixir dependencies with compiled protoc 142 | id: cache-deps-with-built-protoc 143 | uses: actions/cache@v4 144 | with: 145 | path: deps 146 | key: ${{ runner.os }}-mix-deps-with-protoc-${{ hashFiles('**/mix.lock') }} 147 | 148 | - name: Install dependencies 149 | if: steps.cache-deps-with-built-protoc.outputs.cache-hit != 'true' 150 | run: mix deps.get 151 | 152 | - name: Update and install OS dependencies to build protoc locally 153 | if: steps.cache-deps-with-built-protoc.outputs.cache-hit != 'true' 154 | # Dependencies from https://github.com/protocolbuffers/protobuf/blob/master/src/README.md 155 | run: sudo apt-get update && sudo apt-get install -y git cmake curl make g++ 156 | 157 | # This builds protoc inside deps (not inside _build), so we can cache that whole directory. 158 | - name: Build protoc and the conformance runner from the local Protobuf dependency 159 | if: steps.cache-deps-with-built-protoc.outputs.cache-hit != 'true' 160 | run: mix build_conformance_runner 161 | 162 | # We always need to do this, even if the cache hits. 163 | - name: Add protoc to the PATH 164 | run: echo "$PWD/deps/google_protobuf" >> $GITHUB_PATH 165 | 166 | - name: Run conformance tests 167 | run: | 168 | echo "Using local protoc: $(protoc --version)" 169 | mix conformance_test 170 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # The directory Mix will write compiled artifacts to. 2 | /_build 3 | 4 | # If you run "mix test --cover", coverage assets end up here. 5 | /cover 6 | 7 | # The directory Mix downloads your dependencies sources to. 8 | /deps 9 | 10 | # Where 3rd-party dependencies like ExDoc output generated docs. 11 | /doc 12 | 13 | # Ignore .fetch files in case you like to edit your project deps locally. 14 | /.fetch 15 | 16 | # If the VM crashes, it generates a dump, let's ignore it too. 17 | erl_crash.dump 18 | 19 | # Also ignore archive artifacts (built via "mix archive.build"). 20 | *.ez 21 | 22 | # Ignore package tarball (built via "mix hex.build"). 23 | protobuf-*.tar 24 | 25 | # Misc. 26 | protoc-gen-elixir 27 | .eqc-info 28 | *.eqc 29 | 30 | # ExUnit's :tmp_dir tag. 31 | /tmp 32 | 33 | # Files generated from .proto source files for testing. 34 | generated 35 | 36 | # Generated artifacts for conformance testing. 
37 | failing_tests.txt 38 | succeeding_tests.txt 39 | conformance_client 40 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Bing Han 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /bench/.gitignore: -------------------------------------------------------------------------------- 1 | /_build 2 | /deps 3 | erl_crash.dump 4 | benchmarks 5 | /data/datasets.tar.gz 6 | /data/dataset.google_message3*.pb 7 | /data/dataset.google_message4.pb 8 | -------------------------------------------------------------------------------- /bench/data/dataset.google_message1_proto2.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/elixir-protobuf/protobuf/68b8a2ee022340172793d5c3ee8eddd0300ff790/bench/data/dataset.google_message1_proto2.pb -------------------------------------------------------------------------------- /bench/data/dataset.google_message1_proto3.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/elixir-protobuf/protobuf/68b8a2ee022340172793d5c3ee8eddd0300ff790/bench/data/dataset.google_message1_proto3.pb -------------------------------------------------------------------------------- /bench/data/dataset.google_message2.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/elixir-protobuf/protobuf/68b8a2ee022340172793d5c3ee8eddd0300ff790/bench/data/dataset.google_message2.pb -------------------------------------------------------------------------------- /bench/data/download.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -ev 3 | 4 | cd -P -- "$(dirname -- "$0")" 5 | 6 | curl -O https://storage.googleapis.com/protobuf_opensource_benchmark_data/datasets.tar.gz 7 | tar -zvxf datasets.tar.gz 8 | -------------------------------------------------------------------------------- /bench/lib/benchmarks.pb.ex: -------------------------------------------------------------------------------- 1 | defmodule Benchmarks.BenchmarkDataset do 2 | @moduledoc false 3 | use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto3 4 | 5 | field :name, 1, type: :string 
6 | field :message_name, 2, type: :string, json_name: "messageName" 7 | field :payload, 3, repeated: true, type: :bytes 8 | end 9 | -------------------------------------------------------------------------------- /bench/lib/datasets/google_message1/proto2/benchmark_message1_proto2.pb.ex: -------------------------------------------------------------------------------- 1 | defmodule Benchmarks.Proto2.GoogleMessage1 do 2 | @moduledoc false 3 | use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2 4 | 5 | field :field1, 1, required: true, type: :string 6 | field :field9, 9, optional: true, type: :string 7 | field :field18, 18, optional: true, type: :string 8 | field :field80, 80, optional: true, type: :bool, default: false 9 | field :field81, 81, optional: true, type: :bool, default: true 10 | field :field2, 2, required: true, type: :int32 11 | field :field3, 3, required: true, type: :int32 12 | field :field280, 280, optional: true, type: :int32 13 | field :field6, 6, optional: true, type: :int32, default: 0 14 | field :field22, 22, optional: true, type: :int64 15 | field :field4, 4, optional: true, type: :string 16 | field :field5, 5, repeated: true, type: :fixed64 17 | field :field59, 59, optional: true, type: :bool, default: false 18 | field :field7, 7, optional: true, type: :string 19 | field :field16, 16, optional: true, type: :int32 20 | field :field130, 130, optional: true, type: :int32, default: 0 21 | field :field12, 12, optional: true, type: :bool, default: true 22 | field :field17, 17, optional: true, type: :bool, default: true 23 | field :field13, 13, optional: true, type: :bool, default: true 24 | field :field14, 14, optional: true, type: :bool, default: true 25 | field :field104, 104, optional: true, type: :int32, default: 0 26 | field :field100, 100, optional: true, type: :int32, default: 0 27 | field :field101, 101, optional: true, type: :int32, default: 0 28 | field :field102, 102, optional: true, type: :string 29 | field :field103, 103, optional: true, type: :string 30 | field :field29, 29, optional: true, type: :int32, default: 0 31 | field :field30, 30, optional: true, type: :bool, default: false 32 | field :field60, 60, optional: true, type: :int32, default: -1 33 | field :field271, 271, optional: true, type: :int32, default: -1 34 | field :field272, 272, optional: true, type: :int32, default: -1 35 | field :field150, 150, optional: true, type: :int32 36 | field :field23, 23, optional: true, type: :int32, default: 0 37 | field :field24, 24, optional: true, type: :bool, default: false 38 | field :field25, 25, optional: true, type: :int32, default: 0 39 | field :field15, 15, optional: true, type: Benchmarks.Proto2.GoogleMessage1SubMessage 40 | field :field78, 78, optional: true, type: :bool 41 | field :field67, 67, optional: true, type: :int32, default: 0 42 | field :field68, 68, optional: true, type: :int32 43 | field :field128, 128, optional: true, type: :int32, default: 0 44 | field :field129, 129, optional: true, type: :string, default: "xxxxxxxxxxxxxxxxxxxxx" 45 | field :field131, 131, optional: true, type: :int32, default: 0 46 | end 47 | 48 | defmodule Benchmarks.Proto2.GoogleMessage1SubMessage do 49 | @moduledoc false 50 | use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2 51 | 52 | field :field1, 1, optional: true, type: :int32, default: 0 53 | field :field2, 2, optional: true, type: :int32, default: 0 54 | field :field3, 3, optional: true, type: :int32, default: 0 55 | field :field15, 15, optional: true, type: :string 56 | field 
:field12, 12, optional: true, type: :bool, default: true 57 | field :field13, 13, optional: true, type: :int64 58 | field :field14, 14, optional: true, type: :int64 59 | field :field16, 16, optional: true, type: :int32 60 | field :field19, 19, optional: true, type: :int32, default: 2 61 | field :field20, 20, optional: true, type: :bool, default: true 62 | field :field28, 28, optional: true, type: :bool, default: true 63 | field :field21, 21, optional: true, type: :fixed64 64 | field :field22, 22, optional: true, type: :int32 65 | field :field23, 23, optional: true, type: :bool, default: false 66 | field :field206, 206, optional: true, type: :bool, default: false 67 | field :field203, 203, optional: true, type: :fixed32 68 | field :field204, 204, optional: true, type: :int32 69 | field :field205, 205, optional: true, type: :string 70 | field :field207, 207, optional: true, type: :uint64 71 | field :field300, 300, optional: true, type: :uint64 72 | end 73 | -------------------------------------------------------------------------------- /bench/lib/datasets/google_message1/proto3/benchmark_message1_proto3.pb.ex: -------------------------------------------------------------------------------- 1 | defmodule Benchmarks.Proto3.GoogleMessage1 do 2 | @moduledoc false 3 | use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto3 4 | 5 | field :field1, 1, type: :string 6 | field :field9, 9, type: :string 7 | field :field18, 18, type: :string 8 | field :field80, 80, type: :bool 9 | field :field81, 81, type: :bool 10 | field :field2, 2, type: :int32 11 | field :field3, 3, type: :int32 12 | field :field280, 280, type: :int32 13 | field :field6, 6, type: :int32 14 | field :field22, 22, type: :int64 15 | field :field4, 4, type: :string 16 | field :field5, 5, repeated: true, type: :fixed64 17 | field :field59, 59, type: :bool 18 | field :field7, 7, type: :string 19 | field :field16, 16, type: :int32 20 | field :field130, 130, type: :int32 21 | field :field12, 12, type: :bool 22 | field :field17, 17, type: :bool 23 | field :field13, 13, type: :bool 24 | field :field14, 14, type: :bool 25 | field :field104, 104, type: :int32 26 | field :field100, 100, type: :int32 27 | field :field101, 101, type: :int32 28 | field :field102, 102, type: :string 29 | field :field103, 103, type: :string 30 | field :field29, 29, type: :int32 31 | field :field30, 30, type: :bool 32 | field :field60, 60, type: :int32 33 | field :field271, 271, type: :int32 34 | field :field272, 272, type: :int32 35 | field :field150, 150, type: :int32 36 | field :field23, 23, type: :int32 37 | field :field24, 24, type: :bool 38 | field :field25, 25, type: :int32 39 | field :field15, 15, type: Benchmarks.Proto3.GoogleMessage1SubMessage 40 | field :field78, 78, type: :bool 41 | field :field67, 67, type: :int32 42 | field :field68, 68, type: :int32 43 | field :field128, 128, type: :int32 44 | field :field129, 129, type: :string 45 | field :field131, 131, type: :int32 46 | end 47 | 48 | defmodule Benchmarks.Proto3.GoogleMessage1SubMessage do 49 | @moduledoc false 50 | use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto3 51 | 52 | field :field1, 1, type: :int32 53 | field :field2, 2, type: :int32 54 | field :field3, 3, type: :int32 55 | field :field15, 15, type: :string 56 | field :field12, 12, type: :bool 57 | field :field13, 13, type: :int64 58 | field :field14, 14, type: :int64 59 | field :field16, 16, type: :int32 60 | field :field19, 19, type: :int32 61 | field :field20, 20, type: :bool 62 | field :field28, 28, type: 
:bool 63 | field :field21, 21, type: :fixed64 64 | field :field22, 22, type: :int32 65 | field :field23, 23, type: :bool 66 | field :field206, 206, type: :bool 67 | field :field203, 203, type: :fixed32 68 | field :field204, 204, type: :int32 69 | field :field205, 205, type: :string 70 | field :field207, 207, type: :uint64 71 | field :field300, 300, type: :uint64 72 | end 73 | -------------------------------------------------------------------------------- /bench/lib/datasets/google_message2/benchmark_message2.pb.ex: -------------------------------------------------------------------------------- 1 | defmodule Benchmarks.Proto2.GoogleMessage2.Group1 do 2 | @moduledoc false 3 | use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2 4 | 5 | field :field11, 11, required: true, type: :float 6 | field :field26, 26, optional: true, type: :float 7 | field :field12, 12, optional: true, type: :string 8 | field :field13, 13, optional: true, type: :string 9 | field :field14, 14, repeated: true, type: :string 10 | field :field15, 15, required: true, type: :uint64 11 | field :field5, 5, optional: true, type: :int32 12 | field :field27, 27, optional: true, type: :string 13 | field :field28, 28, optional: true, type: :int32 14 | field :field29, 29, optional: true, type: :string 15 | field :field16, 16, optional: true, type: :string 16 | field :field22, 22, repeated: true, type: :string 17 | field :field73, 73, repeated: true, type: :int32 18 | field :field20, 20, optional: true, type: :int32, default: 0 19 | field :field24, 24, optional: true, type: :string 20 | field :field31, 31, optional: true, type: Benchmarks.Proto2.GoogleMessage2GroupedMessage 21 | end 22 | 23 | defmodule Benchmarks.Proto2.GoogleMessage2 do 24 | @moduledoc false 25 | use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2 26 | 27 | field :field1, 1, optional: true, type: :string 28 | field :field3, 3, optional: true, type: :int64 29 | field :field4, 4, optional: true, type: :int64 30 | field :field30, 30, optional: true, type: :int64 31 | field :field75, 75, optional: true, type: :bool, default: false 32 | field :field6, 6, optional: true, type: :string 33 | field :field2, 2, optional: true, type: :bytes 34 | field :field21, 21, optional: true, type: :int32, default: 0 35 | field :field71, 71, optional: true, type: :int32 36 | field :field25, 25, optional: true, type: :float 37 | field :field109, 109, optional: true, type: :int32, default: 0 38 | field :field210, 210, optional: true, type: :int32, default: 0 39 | field :field211, 211, optional: true, type: :int32, default: 0 40 | field :field212, 212, optional: true, type: :int32, default: 0 41 | field :field213, 213, optional: true, type: :int32, default: 0 42 | field :field216, 216, optional: true, type: :int32, default: 0 43 | field :field217, 217, optional: true, type: :int32, default: 0 44 | field :field218, 218, optional: true, type: :int32, default: 0 45 | field :field220, 220, optional: true, type: :int32, default: 0 46 | field :field221, 221, optional: true, type: :int32, default: 0 47 | field :field222, 222, optional: true, type: :float, default: 0.0 48 | field :field63, 63, optional: true, type: :int32 49 | field :group1, 10, repeated: true, type: :group 50 | field :field128, 128, repeated: true, type: :string 51 | field :field131, 131, optional: true, type: :int64 52 | field :field127, 127, repeated: true, type: :string 53 | field :field129, 129, optional: true, type: :int32 54 | field :field130, 130, repeated: true, type: :int64 55 | field 
:field205, 205, optional: true, type: :bool, default: false 56 | field :field206, 206, optional: true, type: :bool, default: false 57 | end 58 | 59 | defmodule Benchmarks.Proto2.GoogleMessage2GroupedMessage do 60 | @moduledoc false 61 | use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2 62 | 63 | field :field1, 1, optional: true, type: :float 64 | field :field2, 2, optional: true, type: :float 65 | field :field3, 3, optional: true, type: :float, default: 0.0 66 | field :field4, 4, optional: true, type: :bool 67 | field :field5, 5, optional: true, type: :bool 68 | field :field6, 6, optional: true, type: :bool, default: true 69 | field :field7, 7, optional: true, type: :bool, default: false 70 | field :field8, 8, optional: true, type: :float 71 | field :field9, 9, optional: true, type: :bool 72 | field :field10, 10, optional: true, type: :float 73 | field :field11, 11, optional: true, type: :int64 74 | end 75 | -------------------------------------------------------------------------------- /bench/lib/datasets/google_message3/benchmark_message3_7.pb.ex: -------------------------------------------------------------------------------- 1 | defmodule Benchmarks.GoogleMessage3.Message11018 do 2 | @moduledoc false 3 | use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2 4 | end 5 | 6 | defmodule Benchmarks.GoogleMessage3.Message10800 do 7 | @moduledoc false 8 | use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2 9 | 10 | field :field10808, 1, optional: true, type: :string 11 | field :field10809, 2, optional: true, type: :int64 12 | field :field10810, 3, optional: true, type: :bool 13 | field :field10811, 4, optional: true, type: :float 14 | end 15 | 16 | defmodule Benchmarks.GoogleMessage3.Message10802 do 17 | @moduledoc false 18 | use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2 19 | end 20 | 21 | defmodule Benchmarks.GoogleMessage3.Message10748 do 22 | @moduledoc false 23 | use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2 24 | 25 | field :field10750, 1, optional: true, type: :string 26 | field :field10751, 2, optional: true, type: :int32 27 | field :field10752, 3, optional: true, type: :int32 28 | field :field10753, 4, optional: true, type: :int32 29 | end 30 | 31 | defmodule Benchmarks.GoogleMessage3.Message7966 do 32 | @moduledoc false 33 | use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2 34 | 35 | field :field7969, 1, optional: true, type: :string 36 | field :field7970, 2, optional: true, type: :bool 37 | end 38 | 39 | defmodule Benchmarks.GoogleMessage3.Message708 do 40 | @moduledoc false 41 | use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2 42 | 43 | field :field823, 1, optional: true, type: Benchmarks.GoogleMessage3.Message741 44 | field :field824, 6, repeated: true, type: :string 45 | field :field825, 2, optional: true, type: :string 46 | field :field826, 3, optional: true, type: :string 47 | field :field827, 4, repeated: true, type: :string 48 | field :field828, 5, repeated: true, type: :string 49 | end 50 | 51 | defmodule Benchmarks.GoogleMessage3.Message8942 do 52 | @moduledoc false 53 | use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2 54 | end 55 | 56 | defmodule Benchmarks.GoogleMessage3.Message11011 do 57 | @moduledoc false 58 | use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2 59 | 60 | field :field11752, 1, required: true, type: :bytes 61 | field :field11753, 2, required: true, type: :bytes 62 | 
end 63 | 64 | defmodule Benchmarks.GoogleMessage3.UnusedEmptyMessage do 65 | @moduledoc false 66 | use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2 67 | end 68 | 69 | defmodule Benchmarks.GoogleMessage3.Message741 do 70 | @moduledoc false 71 | use Protobuf, protoc_gen_elixir_version: "0.10.1-dev", syntax: :proto2 72 | 73 | field :field936, 1, repeated: true, type: :string 74 | end 75 | -------------------------------------------------------------------------------- /bench/lib/proto_bench.ex: -------------------------------------------------------------------------------- 1 | defmodule ProtoBench do 2 | def load(pb_path) do 3 | pb_path 4 | |> File.read!() 5 | |> Benchmarks.BenchmarkDataset.decode() 6 | end 7 | 8 | def mod_name(name) do 9 | name 10 | |> String.split(".") 11 | |> Enum.map(&Macro.camelize/1) 12 | |> Module.safe_concat() 13 | end 14 | end 15 | -------------------------------------------------------------------------------- /bench/mix.exs: -------------------------------------------------------------------------------- 1 | defmodule ProtoBench.MixProject do 2 | use Mix.Project 3 | 4 | def project do 5 | [ 6 | app: :proto_bench, 7 | version: "0.1.0", 8 | elixir: "~> 1.4", 9 | start_permanent: true, 10 | deps: deps() 11 | ] 12 | end 13 | 14 | def application do 15 | [ 16 | extra_applications: [:logger] 17 | ] 18 | end 19 | 20 | defp deps do 21 | [ 22 | {:protobuf, path: ".."}, 23 | {:benchee, "~> 1.0", only: :dev}, 24 | {:benchee_html, "~> 1.0", only: :dev} 25 | ] 26 | end 27 | end 28 | -------------------------------------------------------------------------------- /bench/mix.lock: -------------------------------------------------------------------------------- 1 | %{ 2 | "benchee": {:hex, :benchee, "1.0.1", "66b211f9bfd84bd97e6d1beaddf8fc2312aaabe192f776e8931cb0c16f53a521", [:mix], [{:deep_merge, "~> 1.0", [hex: :deep_merge, repo: "hexpm", optional: false]}], "hexpm", "3ad58ae787e9c7c94dd7ceda3b587ec2c64604563e049b2a0e8baafae832addb"}, 3 | "benchee_html": {:hex, :benchee_html, "1.0.0", "5b4d24effebd060f466fb460ec06576e7b34a00fc26b234fe4f12c4f05c95947", [:mix], [{:benchee, ">= 0.99.0 and < 2.0.0", [hex: :benchee, repo: "hexpm", optional: false]}, {:benchee_json, "~> 1.0", [hex: :benchee_json, repo: "hexpm", optional: false]}], "hexpm", "5280af9aac432ff5ca4216d03e8a93f32209510e925b60e7f27c33796f69e699"}, 4 | "benchee_json": {:hex, :benchee_json, "1.0.0", "cc661f4454d5995c08fe10dd1f2f72f229c8f0fb1c96f6b327a8c8fc96a91fe5", [:mix], [{:benchee, ">= 0.99.0 and < 2.0.0", [hex: :benchee, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "da05d813f9123505f870344d68fb7c86a4f0f9074df7d7b7e2bb011a63ec231c"}, 5 | "deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm", "ce708e5f094b9cd4e8f2be4f00d2f4250c4095be93f8cd6d018c753894885430"}, 6 | "jason": {:hex, :jason, "1.2.1", "12b22825e22f468c02eb3e4b9985f3d0cb8dc40b9bd704730efa11abd2708c44", [:mix], [{:decimal, "~> 1.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "b659b8571deedf60f79c5a608e15414085fa141344e2716fbd6988a084b5f993"}, 7 | } 8 | -------------------------------------------------------------------------------- /bench/script/bench.exs: -------------------------------------------------------------------------------- 1 | {head, 0} = System.cmd("git", ["symbolic-ref", "--short", "HEAD"]) 2 | {hash, 0} = System.cmd("git", ["rev-parse", "--short", "HEAD"]) 3 | 4 | tag = 
"#{String.trim(head)}-#{String.trim(hash)}" 5 | 6 | opts = fn name, inputs -> 7 | [ 8 | inputs: inputs, 9 | time: 20, 10 | memory_time: 5, 11 | save: [path: "benchmarks/#{tag}-#{name}.benchee", tag: "#{tag}-#{name}"], 12 | formatters: [Benchee.Formatters.Console] 13 | ] 14 | end 15 | 16 | benches = 17 | for path <- Path.wildcard("data/*.pb"), 18 | bench = ProtoBench.load(path), 19 | payload = Enum.max_by(bench.payload, &byte_size/1), 20 | module = ProtoBench.mod_name(bench.message_name), 21 | do: {bench.name, module, payload} 22 | 23 | decode = 24 | for {name, module, payload} <- benches, 25 | into: %{}, 26 | do: {name, [payload, module]} 27 | 28 | Benchee.run(%{"decode" => &apply(Protobuf.Decoder, :decode, &1)}, opts.("decode", decode)) 29 | 30 | IO.puts("\n") 31 | 32 | encode = 33 | for {name, module, payload} <- benches, 34 | into: %{}, 35 | do: {name, module.decode(payload)} 36 | 37 | Benchee.run(%{"encode" => &Protobuf.Encoder.encode(&1, _opts = [])}, opts.("encode", encode)) 38 | -------------------------------------------------------------------------------- /bench/script/load.exs: -------------------------------------------------------------------------------- 1 | Benchee.run( 2 | %{}, 3 | load: "benchmarks/*-decode.benchee", 4 | print: [configuration: false], 5 | formatters: [ 6 | Benchee.Formatters.Console, 7 | {Benchee.Formatters.HTML, file: "benchmarks/output/decode.html"} 8 | ] 9 | ) 10 | 11 | Benchee.run( 12 | %{}, 13 | load: "benchmarks/*-encode.benchee", 14 | print: [configuration: false], 15 | formatters: [ 16 | Benchee.Formatters.Console, 17 | {Benchee.Formatters.HTML, file: "benchmarks/output/encode.html"} 18 | ] 19 | ) 20 | -------------------------------------------------------------------------------- /bench/script/standard_bench.exs: -------------------------------------------------------------------------------- 1 | # Standard benchmark. Its output is compatible with the built-in benchmarks from 2 | # protobuf for official language implementations, including encoding and decoding 3 | # throughput on each dataset. 
4 | # 5 | # Based on Python's implementation: 6 | # https://github.com/protocolbuffers/protobuf/blob/master/benchmarks/python/py_benchmark.py 7 | 8 | single = fn fun, inputs -> 9 | Enum.reduce(inputs, 0, fn input, total -> 10 | {time, _result} = :timer.tc(fun, [input]) 11 | total + time 12 | end) 13 | end 14 | 15 | repeat = fn fun, inputs, reps -> 16 | Enum.reduce(1..reps, 0, fn _, total -> 17 | total + single.(fun, inputs) 18 | end) 19 | end 20 | 21 | run = fn fun, inputs -> 22 | target_run_time = 3_000_000 23 | single_run_time = single.(fun, inputs) 24 | 25 | with true <- single_run_time < target_run_time, 26 | reps when reps > 1 <- trunc(ceil(target_run_time / single_run_time)) do 27 | repeat.(fun, inputs, reps) / reps 28 | else 29 | _ -> single_run_time 30 | end 31 | end 32 | 33 | throughput = fn bytes, microseconds -> 34 | megabytes = bytes / 1_048_576 35 | seconds = microseconds / 1_000_000 36 | Float.round(megabytes / seconds, 2) 37 | end 38 | 39 | for file <- Path.wildcard("data/*.pb") do 40 | %{payload: payloads, message_name: mod_name} = ProtoBench.load(file) 41 | module = ProtoBench.mod_name(mod_name) 42 | 43 | IO.puts("Message #{mod_name} of dataset file #{file}") 44 | 45 | bytes = Enum.reduce(payloads, 0, &(byte_size(&1) + &2)) 46 | messages = Enum.map(payloads, &module.decode/1) 47 | 48 | parse = throughput.(bytes, run.(&module.decode/1, payloads)) 49 | 50 | IO.puts("Average throughput for parse_from_benchmark: #{parse} MB/s") 51 | 52 | serialize = throughput.(bytes, run.(&module.encode/1, messages)) 53 | 54 | IO.puts("Average throughput for serialize_to_benchmark: #{serialize} MB/s") 55 | IO.puts("") 56 | end 57 | -------------------------------------------------------------------------------- /conformance/README.md: -------------------------------------------------------------------------------- 1 | # Protobuf Conformance Tests 2 | 3 | ## Prerequisites 4 | 5 | You'll need to compile the Protobuf conformance test runner binary. The 6 | instructions can be found [in the Protobuf 7 | repository][conformance-instructions]. 8 | 9 | ## Running the Tests 10 | 11 | Once you've successfully compiled the runner, you can run the tests with the 12 | following command: 13 | 14 | ```sh 15 | mix conformance_test --runner=$PATH_TO_RUNNER 16 | ``` 17 | 18 | You should expect to see an output similar to this: 19 | 20 | ```sh 21 | CONFORMANCE TEST BEGIN ==================================== 22 | 23 | CONFORMANCE SUITE PASSED: 1179 successes, 705 skipped, 123 expected failures, 0 unexpected failures. 24 | 25 | 26 | CONFORMANCE TEST BEGIN ==================================== 27 | 28 | CONFORMANCE SUITE PASSED: 0 successes, 69 skipped, 0 expected failures, 0 unexpected failures. 29 | ``` 30 | 31 | ### Debugging a Conformance Test 32 | 33 | Add the `--verbose` flag to the above command to get detailed messages which can 34 | help aid in fixing conformance issues. 35 | 36 | ```sh 37 | mix conformance_test --runner=$PATH_TO_RUNNER --verbose 38 | ``` 39 | 40 | ## Exemptions 41 | 42 | [`conformance/exemptions.txt`][exemptions-file] contains a list of 43 | tests that are currently failing conformance tests. When fixing issues 44 | identified by the conformance test, remove the corresponding test lines from 45 | this file to ensure test regressions do not occur. 
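For example, after fixing one of these failures you might prune its line from the exemptions file and re-run the suite to confirm it now passes and nothing else regressed. A minimal sketch (the test name shown is simply the first entry currently in the file):

```sh
# Drop the now-passing test from the exemptions list.
grep -v 'Recommended.Proto2.JsonInput.FieldNameExtension.Validator' \
  conformance/exemptions.txt > conformance/exemptions.txt.new
mv conformance/exemptions.txt.new conformance/exemptions.txt

# Re-run the conformance suite to verify.
mix conformance_test --runner=$PATH_TO_RUNNER
```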
46 | 47 | [exemptions-file]: ./conformance/exemptions.txt 48 | [conformance-instructions]: https://github.com/protocolbuffers/protobuf/blob/master/conformance/README.md 49 | [test-message-protobuf-schema-source]: https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/test_messages_proto2.proto 50 | -------------------------------------------------------------------------------- /conformance/exemptions.txt: -------------------------------------------------------------------------------- 1 | Recommended.Proto2.JsonInput.FieldNameExtension.Validator 2 | Recommended.Proto3.JsonInput.IgnoreUnknownEnumStringValueInMapValue.ProtobufOutput 3 | Recommended.Proto3.JsonInput.IgnoreUnknownEnumStringValueInOptionalField.ProtobufOutput 4 | Recommended.Proto3.JsonInput.IgnoreUnknownEnumStringValueInRepeatedField.ProtobufOutput 5 | Recommended.Proto2.JsonInput.IgnoreUnknownEnumStringValueInMapPart.ProtobufOutput 6 | Recommended.Proto2.JsonInput.IgnoreUnknownEnumStringValueInMapValue.ProtobufOutput 7 | Recommended.Proto2.JsonInput.IgnoreUnknownEnumStringValueInOptionalField.ProtobufOutput 8 | Recommended.Proto2.JsonInput.IgnoreUnknownEnumStringValueInRepeatedField.ProtobufOutput 9 | Recommended.Proto2.JsonInput.IgnoreUnknownEnumStringValueInRepeatedPart.ProtobufOutput 10 | Recommended.Proto3.JsonInput.IgnoreUnknownEnumStringValueInMapPart.ProtobufOutput 11 | Recommended.Proto3.JsonInput.IgnoreUnknownEnumStringValueInRepeatedPart.ProtobufOutput 12 | -------------------------------------------------------------------------------- /conformance/protobuf/runner.ex: -------------------------------------------------------------------------------- 1 | defmodule Conformance.Protobuf.Runner do 2 | @moduledoc false 3 | 4 | @stdin_read_timeout 3000 5 | 6 | @spec main() :: :ok 7 | def main() do 8 | # Log things to stderr so that they don't interfere with the stdin/stdout interface 9 | # of the conformance runner. 10 | Logger.configure_backend(:console, device: :standard_error) 11 | 12 | # Force encoding on stdio. 
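# (Binary mode with :latin1 keeps stdio as raw bytes rather than UTF-8-translated text, which the length-prefixed payload exchange below relies on.)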
13 | :ok = :io.setopts(:standard_io, binary: true, encoding: :latin1) 14 | 15 | loop() 16 | end 17 | 18 | defp loop() do 19 | case read_bytes(:stdio, 4, @stdin_read_timeout) do 20 | :eof -> 21 | :ok 22 | 23 | {:error, reason} -> 24 | raise "failed to read 4-bytes header: #{inspect(reason)}" 25 | 26 | <<len::32-unsigned-little>> -> 27 | case read_bytes(:stdio, len, @stdin_read_timeout) do 28 | :eof -> 29 | raise "received unexpected EOF when expecting #{len} bytes" 30 | 31 | {:error, reason} -> 32 | raise "failed to read #{len} bytes from stdio: #{inspect(reason)}" 33 | 34 | encoded_request when byte_size(encoded_request) == len -> 35 | response = 36 | struct!(Conformance.ConformanceResponse, 37 | result: handle_encoded_request(encoded_request) 38 | ) 39 | 40 | encoded_response = Protobuf.encode(response) 41 | 42 | iodata_to_write = [ 43 | <<byte_size(encoded_response)::32-unsigned-little>>, 44 | encoded_response 45 | ] 46 | 47 | :ok = IO.binwrite(:stdio, iodata_to_write) 48 | 49 | loop() 50 | end 51 | end 52 | end 53 | 54 | defp read_bytes(device, count, timeout) do 55 | task = Task.async(fn -> IO.binread(device, count) end) 56 | 57 | case Task.yield(task, timeout) || Task.shutdown(task) do 58 | {:ok, bytes} -> bytes 59 | {:exit, reason} -> raise "failed to read bytes from stdio: #{inspect(reason)}" 60 | nil -> raise "failed to read bytes from stdio, timed out" 61 | end 62 | end 63 | 64 | defp handle_encoded_request(encoded_request) do 65 | case safe_decode(encoded_request, Conformance.ConformanceRequest) do 66 | {:ok, request} -> 67 | handle_conformance_request(request) 68 | 69 | {:error, exception, stacktrace} -> 70 | message = Exception.format(:error, exception, stacktrace) 71 | {:runtime_error, "failed to decode conformance request: #{message}"} 72 | end 73 | end 74 | 75 | # We need to keep "mod" dynamic here because we generate the .pb.ex files for 76 | # Conformance.ConformanceRequest after compiling this file. 
77 | defp handle_conformance_request(%mod{ 78 | requested_output_format: requested_output_format, 79 | message_type: message_type, 80 | payload: {payload_kind, msg} 81 | }) 82 | when mod == Conformance.ConformanceRequest and 83 | requested_output_format in [:PROTOBUF, :JSON] and 84 | payload_kind in [:protobuf_payload, :json_payload] do 85 | test_proto_type = to_test_proto_type(message_type) 86 | 87 | decode_fun = 88 | case payload_kind do 89 | :protobuf_payload -> &safe_decode/2 90 | :json_payload -> &Protobuf.JSON.decode/2 91 | end 92 | 93 | {encode_fun, output_payload_kind} = 94 | case requested_output_format do 95 | :PROTOBUF -> {&safe_encode/1, :protobuf_payload} 96 | :JSON -> {&Protobuf.JSON.encode/1, :json_payload} 97 | end 98 | 99 | with {:decode, {:ok, decoded_msg}} <- {:decode, decode_fun.(msg, test_proto_type)}, 100 | {:encode, {:ok, encoded_msg}} <- {:encode, encode_fun.(decoded_msg)} do 101 | {output_payload_kind, encoded_msg} 102 | else 103 | {:decode, {:error, exception, stacktrace}} -> 104 | {:parse_error, Exception.format(:error, exception, stacktrace)} 105 | 106 | {:encode, {:error, exception, stacktrace}} -> 107 | {:serialize_error, Exception.format(:error, exception, stacktrace)} 108 | 109 | {:decode, {:error, exception}} -> 110 | {:parse_error, Exception.format(:error, exception, [])} 111 | 112 | {:encode, {:error, exception}} -> 113 | {:serialize_error, Exception.format(:error, exception, [])} 114 | end 115 | end 116 | 117 | defp handle_conformance_request(_request) do 118 | {:skipped, "unsupported conformance test"} 119 | end 120 | 121 | defp to_test_proto_type("protobuf_test_messages.proto3.TestAllTypesProto3"), 122 | do: ProtobufTestMessages.Proto3.TestAllTypesProto3 123 | 124 | defp to_test_proto_type("protobuf_test_messages.proto2.TestAllTypesProto2"), 125 | do: ProtobufTestMessages.Proto2.TestAllTypesProto2 126 | 127 | defp to_test_proto_type("conformance.FailureSet"), do: Conformance.FailureSet 128 | defp to_test_proto_type(""), do: ProtobufTestMessages.Proto3.TestAllTypesProto3 129 | 130 | defp safe_decode(binary, proto_type) do 131 | {:ok, proto_type.decode(binary)} 132 | rescue 133 | exception -> {:error, exception, __STACKTRACE__} 134 | end 135 | 136 | defp safe_encode(%mod{} = struct) do 137 | {:ok, mod.encode(struct)} 138 | rescue 139 | exception -> {:error, exception, __STACKTRACE__} 140 | end 141 | end 142 | -------------------------------------------------------------------------------- /conformance/runner.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | MIX_ENV="test" mix run -e "Conformance.Protobuf.Runner.main()" 4 | -------------------------------------------------------------------------------- /coveralls.json: -------------------------------------------------------------------------------- 1 | { 2 | "skip_files": [ 3 | "_build", 4 | "deps", 5 | "test", 6 | "generated", 7 | "conformance", 8 | "lib/elixirpb.pb.ex", 9 | "lib/google/protobuf/compiler/plugin.pb.ex", 10 | "lib/google/protobuf/descriptor.pb.ex" 11 | ] 12 | } 13 | -------------------------------------------------------------------------------- /lib/elixirpb.pb.ex: -------------------------------------------------------------------------------- 1 | defmodule Elixirpb.FileOptions do 2 | @moduledoc false 3 | 4 | use Protobuf, protoc_gen_elixir_version: "0.14.0", syntax: :proto2 5 | 6 | field :module_prefix, 1, optional: true, type: :string, json_name: "modulePrefix" 7 | end 8 | 
-------------------------------------------------------------------------------- /lib/elixirpb/pb_extension.pb.ex: -------------------------------------------------------------------------------- 1 | defmodule Elixirpb.PbExtension do 2 | @moduledoc false 3 | 4 | use Protobuf, protoc_gen_elixir_version: "0.14.0" 5 | 6 | extend Google.Protobuf.FileOptions, :file, 1047, optional: true, type: Elixirpb.FileOptions 7 | end 8 | -------------------------------------------------------------------------------- /lib/google/protobuf.ex: -------------------------------------------------------------------------------- 1 | defmodule Google.Protobuf do 2 | @moduledoc """ 3 | Utility functions for working with Google Protobuf structs. 4 | """ 5 | 6 | @doc """ 7 | Converts a `Google.Protobuf.Struct` struct to a `t:map()` recursively 8 | converting values to their Elixir equivalents. 9 | 10 | ## Examples 11 | 12 | iex> to_map(%Google.Protobuf.Struct{}) 13 | %{} 14 | 15 | iex> to_map(%Google.Protobuf.Struct{ 16 | ...> fields: %{ 17 | ...> "key_one" => %Google.Protobuf.Value{ 18 | ...> kind: {:string_value, "value_one"}, 19 | ...> }, 20 | ...> "key_two" => %Google.Protobuf.Value{ 21 | ...> kind: {:number_value, 1234.0}, 22 | ...> } 23 | ...> }, 24 | ...> }) 25 | %{"key_one" => "value_one", "key_two" => 1234.0} 26 | 27 | """ 28 | @spec to_map(Google.Protobuf.Struct.t()) :: map() 29 | def to_map(struct) do 30 | Map.new(struct.fields, fn {k, v} -> 31 | {k, to_map_value(v)} 32 | end) 33 | end 34 | 35 | defp to_map_value(%{kind: {:null_value, :NULL_VALUE}}), do: nil 36 | defp to_map_value(%{kind: {:number_value, value}}), do: value 37 | defp to_map_value(%{kind: {:string_value, value}}), do: value 38 | defp to_map_value(%{kind: {:bool_value, value}}), do: value 39 | 40 | defp to_map_value(%{kind: {:struct_value, struct}}), 41 | do: to_map(struct) 42 | 43 | defp to_map_value(%{kind: {:list_value, %{values: values}}}), 44 | do: Enum.map(values, &to_map_value/1) 45 | 46 | @doc """ 47 | Converts a `t:map()` to a `Google.Protobuf.Struct` struct recursively 48 | wrapping values in their `Google.Protobuf.Value` equivalents. 49 | 50 | ## Examples 51 | 52 | iex> from_map(%{}) 53 | %Google.Protobuf.Struct{} 54 | 55 | """ 56 | @spec from_map(map()) :: Google.Protobuf.Struct.t() 57 | def from_map(map) do 58 | struct(Google.Protobuf.Struct, %{ 59 | fields: 60 | Map.new(map, fn {k, v} -> 61 | {to_string(k), from_map_value(v)} 62 | end) 63 | }) 64 | end 65 | 66 | defp from_map_value(nil) do 67 | struct(Google.Protobuf.Value, %{kind: {:null_value, :NULL_VALUE}}) 68 | end 69 | 70 | defp from_map_value(value) when is_number(value) do 71 | struct(Google.Protobuf.Value, %{kind: {:number_value, value}}) 72 | end 73 | 74 | defp from_map_value(value) when is_binary(value) do 75 | struct(Google.Protobuf.Value, %{kind: {:string_value, value}}) 76 | end 77 | 78 | defp from_map_value(value) when is_boolean(value) do 79 | struct(Google.Protobuf.Value, %{kind: {:bool_value, value}}) 80 | end 81 | 82 | defp from_map_value(value) when is_map(value) do 83 | struct(Google.Protobuf.Value, %{kind: {:struct_value, from_map(value)}}) 84 | end 85 | 86 | defp from_map_value(value) when is_list(value) do 87 | struct(Google.Protobuf.Value, %{ 88 | kind: 89 | {:list_value, 90 | struct(Google.Protobuf.ListValue, %{ 91 | values: Enum.map(value, &from_map_value/1) 92 | })} 93 | }) 94 | end 95 | 96 | @doc """ 97 | Converts a `DateTime` struct to a `Google.Protobuf.Timestamp` struct. 
98 | 99 | Note: Elixir `DateTime.from_unix!/2` will convert units to 100 | microseconds internally. Nanosecond precision is not guaranteed. 101 | See examples for details. 102 | 103 | ## Examples 104 | 105 | iex> to_datetime(%Google.Protobuf.Timestamp{seconds: 5, nanos: 0}) 106 | ~U[1970-01-01 00:00:05.000000Z] 107 | 108 | iex> one = to_datetime(%Google.Protobuf.Timestamp{seconds: 10, nanos: 100}) 109 | ...> two = to_datetime(%Google.Protobuf.Timestamp{seconds: 10, nanos: 105}) 110 | ...> DateTime.diff(one, two, :nanosecond) 111 | 0 112 | 113 | """ 114 | @spec to_datetime(Google.Protobuf.Timestamp.t()) :: DateTime.t() 115 | def to_datetime(%{seconds: seconds, nanos: nanos}) do 116 | DateTime.from_unix!(seconds * 1_000_000_000 + nanos, :nanosecond) 117 | end 118 | 119 | @doc """ 120 | Converts a `Google.Protobuf.Timestamp` struct to a `DateTime` struct. 121 | 122 | ## Examples 123 | 124 | iex> from_datetime(~U[1970-01-01 00:00:05.000000Z]) 125 | %Google.Protobuf.Timestamp{seconds: 5, nanos: 0} 126 | 127 | """ 128 | @spec from_datetime(DateTime.t()) :: Google.Protobuf.Timestamp.t() 129 | def from_datetime(%DateTime{} = datetime) do 130 | nanoseconds = DateTime.to_unix(datetime, :nanosecond) 131 | 132 | struct(Google.Protobuf.Timestamp, %{ 133 | seconds: div(nanoseconds, 1_000_000_000), 134 | nanos: rem(nanoseconds, 1_000_000_000) 135 | }) 136 | end 137 | 138 | if Code.ensure_loaded?(Duration) do 139 | @doc """ 140 | Converts a `Google.Protobuf.Duration` struct to a `Duration` struct. 141 | 142 | > #### Requires `Duration` {: .warning} 143 | > This function requires `Duration`, which was introduced in Elixir 1.17. 144 | 145 | ## Examples 146 | 147 | iex> to_duration(%Google.Protobuf.Duration{seconds: 1, nanos: 500_000_000}) 148 | Duration.new!(second: 1, microsecond: {500_000, 6}) 149 | """ 150 | @doc since: "0.15.0" 151 | @spec to_duration(Google.Protobuf.Duration.t()) :: Duration.t() 152 | def to_duration(%Google.Protobuf.Duration{} = duration) do 153 | Duration.new!(second: duration.seconds, microsecond: {div(duration.nanos, 1_000), 6}) 154 | end 155 | 156 | @doc """ 157 | Converts a `Duration` struct to a `Google.Protobuf.Duration` struct. 158 | 159 | > #### Requires `Duration` {: .warning} 160 | > This function requires `Duration`, which was introduced in Elixir 1.17. 
161 | 162 | ## Examples 163 | 164 | iex> from_duration(Duration.new!(second: 1, microsecond: {500_000, 6})) 165 | %Google.Protobuf.Duration{seconds: 1, nanos: 500_000_000} 166 | 167 | iex> from_duration(Duration.new!(hour: 1, minute: 2, microsecond: {500_000, 6})) 168 | %Google.Protobuf.Duration{seconds: 3720, nanos: 500_000_000} 169 | 170 | iex> from_duration(Duration.new!(minute: 2, microsecond: {6_500_000, 6})) 171 | %Google.Protobuf.Duration{seconds: 126, nanos: 500_000_000} 172 | """ 173 | @doc since: "0.15.0" 174 | @spec from_duration(Duration.t()) :: Google.Protobuf.Duration.t() 175 | def from_duration(%Duration{} = duration) do 176 | {microsecond, _precision} = duration.microsecond 177 | seconds = div(to_timeout(duration), 1000) 178 | 179 | struct(Google.Protobuf.Duration, %{ 180 | seconds: seconds, 181 | nanos: rem(microsecond, 1_000_000) * 1_000 182 | }) 183 | end 184 | end 185 | end 186 | -------------------------------------------------------------------------------- /lib/google/protobuf/any.pb.ex: -------------------------------------------------------------------------------- 1 | defmodule Google.Protobuf.Any do 2 | @moduledoc false 3 | 4 | use Protobuf, protoc_gen_elixir_version: "0.14.0", syntax: :proto3 5 | 6 | def descriptor do 7 | # credo:disable-for-next-line 8 | %Google.Protobuf.DescriptorProto{ 9 | name: "Any", 10 | field: [ 11 | %Google.Protobuf.FieldDescriptorProto{ 12 | name: "type_url", 13 | extendee: nil, 14 | number: 1, 15 | label: :LABEL_OPTIONAL, 16 | type: :TYPE_STRING, 17 | type_name: nil, 18 | default_value: nil, 19 | options: nil, 20 | oneof_index: nil, 21 | json_name: "typeUrl", 22 | proto3_optional: nil, 23 | __unknown_fields__: [] 24 | }, 25 | %Google.Protobuf.FieldDescriptorProto{ 26 | name: "value", 27 | extendee: nil, 28 | number: 2, 29 | label: :LABEL_OPTIONAL, 30 | type: :TYPE_BYTES, 31 | type_name: nil, 32 | default_value: nil, 33 | options: nil, 34 | oneof_index: nil, 35 | json_name: "value", 36 | proto3_optional: nil, 37 | __unknown_fields__: [] 38 | } 39 | ], 40 | nested_type: [], 41 | enum_type: [], 42 | extension_range: [], 43 | extension: [], 44 | options: nil, 45 | oneof_decl: [], 46 | reserved_range: [], 47 | reserved_name: [], 48 | __unknown_fields__: [] 49 | } 50 | end 51 | 52 | field :type_url, 1, type: :string, json_name: "typeUrl" 53 | field :value, 2, type: :bytes 54 | end 55 | -------------------------------------------------------------------------------- /lib/google/protobuf/compiler/plugin.pb.ex: -------------------------------------------------------------------------------- 1 | defmodule Google.Protobuf.Compiler.CodeGeneratorResponse.Feature do 2 | @moduledoc false 3 | 4 | use Protobuf, enum: true, protoc_gen_elixir_version: "0.14.0", syntax: :proto2 5 | 6 | field :FEATURE_NONE, 0 7 | field :FEATURE_PROTO3_OPTIONAL, 1 8 | field :FEATURE_SUPPORTS_EDITIONS, 2 9 | end 10 | 11 | defmodule Google.Protobuf.Compiler.Version do 12 | @moduledoc false 13 | 14 | use Protobuf, protoc_gen_elixir_version: "0.14.0", syntax: :proto2 15 | 16 | field :major, 1, optional: true, type: :int32 17 | field :minor, 2, optional: true, type: :int32 18 | field :patch, 3, optional: true, type: :int32 19 | field :suffix, 4, optional: true, type: :string 20 | end 21 | 22 | defmodule Google.Protobuf.Compiler.CodeGeneratorRequest do 23 | @moduledoc false 24 | 25 | use Protobuf, protoc_gen_elixir_version: "0.14.0", syntax: :proto2 26 | 27 | field :file_to_generate, 1, repeated: true, type: :string, json_name: "fileToGenerate" 28 | field :parameter, 2, optional: 
true, type: :string 29 | 30 | field :proto_file, 15, 31 | repeated: true, 32 | type: Google.Protobuf.FileDescriptorProto, 33 | json_name: "protoFile" 34 | 35 | field :source_file_descriptors, 17, 36 | repeated: true, 37 | type: Google.Protobuf.FileDescriptorProto, 38 | json_name: "sourceFileDescriptors" 39 | 40 | field :compiler_version, 3, 41 | optional: true, 42 | type: Google.Protobuf.Compiler.Version, 43 | json_name: "compilerVersion" 44 | end 45 | 46 | defmodule Google.Protobuf.Compiler.CodeGeneratorResponse.File do 47 | @moduledoc false 48 | 49 | use Protobuf, protoc_gen_elixir_version: "0.14.0", syntax: :proto2 50 | 51 | field :name, 1, optional: true, type: :string 52 | field :insertion_point, 2, optional: true, type: :string, json_name: "insertionPoint" 53 | field :content, 15, optional: true, type: :string 54 | 55 | field :generated_code_info, 16, 56 | optional: true, 57 | type: Google.Protobuf.GeneratedCodeInfo, 58 | json_name: "generatedCodeInfo" 59 | end 60 | 61 | defmodule Google.Protobuf.Compiler.CodeGeneratorResponse do 62 | @moduledoc false 63 | 64 | use Protobuf, protoc_gen_elixir_version: "0.14.0", syntax: :proto2 65 | 66 | field :error, 1, optional: true, type: :string 67 | field :supported_features, 2, optional: true, type: :uint64, json_name: "supportedFeatures" 68 | field :minimum_edition, 3, optional: true, type: :int32, json_name: "minimumEdition" 69 | field :maximum_edition, 4, optional: true, type: :int32, json_name: "maximumEdition" 70 | field :file, 15, repeated: true, type: Google.Protobuf.Compiler.CodeGeneratorResponse.File 71 | end 72 | -------------------------------------------------------------------------------- /lib/google/protobuf/duration.pb.ex: -------------------------------------------------------------------------------- 1 | defmodule Google.Protobuf.Duration do 2 | @moduledoc false 3 | 4 | use Protobuf, protoc_gen_elixir_version: "0.14.0", syntax: :proto3 5 | 6 | def descriptor do 7 | # credo:disable-for-next-line 8 | %Google.Protobuf.DescriptorProto{ 9 | name: "Duration", 10 | field: [ 11 | %Google.Protobuf.FieldDescriptorProto{ 12 | name: "seconds", 13 | extendee: nil, 14 | number: 1, 15 | label: :LABEL_OPTIONAL, 16 | type: :TYPE_INT64, 17 | type_name: nil, 18 | default_value: nil, 19 | options: nil, 20 | oneof_index: nil, 21 | json_name: "seconds", 22 | proto3_optional: nil, 23 | __unknown_fields__: [] 24 | }, 25 | %Google.Protobuf.FieldDescriptorProto{ 26 | name: "nanos", 27 | extendee: nil, 28 | number: 2, 29 | label: :LABEL_OPTIONAL, 30 | type: :TYPE_INT32, 31 | type_name: nil, 32 | default_value: nil, 33 | options: nil, 34 | oneof_index: nil, 35 | json_name: "nanos", 36 | proto3_optional: nil, 37 | __unknown_fields__: [] 38 | } 39 | ], 40 | nested_type: [], 41 | enum_type: [], 42 | extension_range: [], 43 | extension: [], 44 | options: nil, 45 | oneof_decl: [], 46 | reserved_range: [], 47 | reserved_name: [], 48 | __unknown_fields__: [] 49 | } 50 | end 51 | 52 | field :seconds, 1, type: :int64 53 | field :nanos, 2, type: :int32 54 | end 55 | -------------------------------------------------------------------------------- /lib/google/protobuf/empty.pb.ex: -------------------------------------------------------------------------------- 1 | defmodule Google.Protobuf.Empty do 2 | @moduledoc false 3 | 4 | use Protobuf, protoc_gen_elixir_version: "0.14.0", syntax: :proto3 5 | 6 | def descriptor do 7 | # credo:disable-for-next-line 8 | %Google.Protobuf.DescriptorProto{ 9 | name: "Empty", 10 | field: [], 11 | nested_type: [], 12 | enum_type: [], 
13 | extension_range: [], 14 | extension: [], 15 | options: nil, 16 | oneof_decl: [], 17 | reserved_range: [], 18 | reserved_name: [], 19 | __unknown_fields__: [] 20 | } 21 | end 22 | end 23 | -------------------------------------------------------------------------------- /lib/google/protobuf/field_mask.pb.ex: -------------------------------------------------------------------------------- 1 | defmodule Google.Protobuf.FieldMask do 2 | @moduledoc false 3 | 4 | use Protobuf, protoc_gen_elixir_version: "0.14.0", syntax: :proto3 5 | 6 | def descriptor do 7 | # credo:disable-for-next-line 8 | %Google.Protobuf.DescriptorProto{ 9 | name: "FieldMask", 10 | field: [ 11 | %Google.Protobuf.FieldDescriptorProto{ 12 | name: "paths", 13 | extendee: nil, 14 | number: 1, 15 | label: :LABEL_REPEATED, 16 | type: :TYPE_STRING, 17 | type_name: nil, 18 | default_value: nil, 19 | options: nil, 20 | oneof_index: nil, 21 | json_name: "paths", 22 | proto3_optional: nil, 23 | __unknown_fields__: [] 24 | } 25 | ], 26 | nested_type: [], 27 | enum_type: [], 28 | extension_range: [], 29 | extension: [], 30 | options: nil, 31 | oneof_decl: [], 32 | reserved_range: [], 33 | reserved_name: [], 34 | __unknown_fields__: [] 35 | } 36 | end 37 | 38 | field :paths, 1, repeated: true, type: :string 39 | end 40 | -------------------------------------------------------------------------------- /lib/google/protobuf/timestamp.pb.ex: -------------------------------------------------------------------------------- 1 | defmodule Google.Protobuf.Timestamp do 2 | @moduledoc false 3 | 4 | use Protobuf, protoc_gen_elixir_version: "0.14.0", syntax: :proto3 5 | 6 | def descriptor do 7 | # credo:disable-for-next-line 8 | %Google.Protobuf.DescriptorProto{ 9 | name: "Timestamp", 10 | field: [ 11 | %Google.Protobuf.FieldDescriptorProto{ 12 | name: "seconds", 13 | extendee: nil, 14 | number: 1, 15 | label: :LABEL_OPTIONAL, 16 | type: :TYPE_INT64, 17 | type_name: nil, 18 | default_value: nil, 19 | options: nil, 20 | oneof_index: nil, 21 | json_name: "seconds", 22 | proto3_optional: nil, 23 | __unknown_fields__: [] 24 | }, 25 | %Google.Protobuf.FieldDescriptorProto{ 26 | name: "nanos", 27 | extendee: nil, 28 | number: 2, 29 | label: :LABEL_OPTIONAL, 30 | type: :TYPE_INT32, 31 | type_name: nil, 32 | default_value: nil, 33 | options: nil, 34 | oneof_index: nil, 35 | json_name: "nanos", 36 | proto3_optional: nil, 37 | __unknown_fields__: [] 38 | } 39 | ], 40 | nested_type: [], 41 | enum_type: [], 42 | extension_range: [], 43 | extension: [], 44 | options: nil, 45 | oneof_decl: [], 46 | reserved_range: [], 47 | reserved_name: [], 48 | __unknown_fields__: [] 49 | } 50 | end 51 | 52 | field :seconds, 1, type: :int64 53 | field :nanos, 2, type: :int32 54 | end 55 | -------------------------------------------------------------------------------- /lib/protobuf/any.ex: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.Any do 2 | @moduledoc false 3 | 4 | @spec type_url_to_module(String.t()) :: module() 5 | def type_url_to_module(type_url) when is_binary(type_url) do 6 | case type_url do 7 | "type.googleapis.com/" <> package_and_message -> 8 | package_and_message 9 | |> String.split(".") 10 | |> Enum.map(&Macro.camelize/1) 11 | |> Module.safe_concat() 12 | 13 | _other -> 14 | raise ArgumentError, 15 | "type_url must be in the form: type.googleapis.com/., " <> 16 | "got: #{inspect(type_url)}" 17 | end 18 | end 19 | end 20 | -------------------------------------------------------------------------------- 
/lib/protobuf/dsl/enum.ex: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.DSL.Enum do 2 | @moduledoc false 3 | 4 | alias Protobuf.{FieldProps, MessageProps} 5 | 6 | @callback value(atom()) :: integer() 7 | @callback value(tag) :: tag when tag: integer() 8 | 9 | @callback key(integer()) :: atom() | integer() 10 | 11 | @callback mapping() :: %{optional(atom()) => tag} when tag: integer() 12 | 13 | @callback __reverse_mapping__() :: %{optional(tag) => atom()} when tag: integer() 14 | 15 | @spec quoted_enum_functions(MessageProps.t()) :: Macro.t() 16 | def quoted_enum_functions(%MessageProps{enum?: true} = message_props) do 17 | if message_props.syntax == :proto3 and not Map.has_key?(message_props.field_props, 0) do 18 | first_enum_tag = message_props.field_props |> Map.keys() |> Enum.min() 19 | raise "the first enum value must have tag 0 in proto3, got: #{first_enum_tag}" 20 | end 21 | 22 | [quote(do: @behaviour(unquote(__MODULE__)))] ++ 23 | defp_value_callback(message_props) ++ 24 | defp_key_callback(message_props) ++ 25 | defp_mapping_callbacks(message_props) 26 | end 27 | 28 | defp defp_value_callback(message_props) do 29 | bodiless_clause = 30 | quote do 31 | @impl unquote(__MODULE__) 32 | def value(atom) 33 | end 34 | 35 | atom_clauses = 36 | for {atom, tag} <- message_props.field_tags do 37 | quote do 38 | def value(unquote(atom)), do: unquote(tag) 39 | end 40 | end 41 | 42 | int_clause = 43 | quote do 44 | def value(tag) when is_integer(tag) and tag >= 0, do: tag 45 | end 46 | 47 | [bodiless_clause] ++ atom_clauses ++ [int_clause] 48 | end 49 | 50 | defp defp_key_callback(message_props) do 51 | bodiless_clause = 52 | quote do 53 | @impl unquote(__MODULE__) 54 | def key(atom) 55 | end 56 | 57 | int_clauses = 58 | for {tag, %FieldProps{name_atom: atom}} <- message_props.field_props do 59 | quote do 60 | def key(unquote(tag)), do: unquote(atom) 61 | end 62 | end 63 | 64 | catchall_clause = 65 | quote do 66 | def key(tag) when is_integer(tag) and tag >= 0, do: tag 67 | end 68 | 69 | [bodiless_clause] ++ int_clauses ++ [catchall_clause] 70 | end 71 | 72 | defp defp_mapping_callbacks(message_props) do 73 | reverse_mapping = 74 | for {atom, tag} <- message_props.field_tags, 75 | key <- [tag, Atom.to_string(atom)], 76 | into: %{}, 77 | do: {key, atom} 78 | 79 | [ 80 | quote do 81 | @impl unquote(__MODULE__) 82 | def mapping(), do: unquote(Macro.escape(message_props.field_tags)) 83 | end 84 | ] ++ 85 | [ 86 | quote do 87 | @impl unquote(__MODULE__) 88 | def __reverse_mapping__(), do: unquote(Macro.escape(reverse_mapping)) 89 | end 90 | ] 91 | end 92 | end 93 | -------------------------------------------------------------------------------- /lib/protobuf/dsl/typespecs.ex: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.DSL.Typespecs do 2 | @moduledoc false 3 | 4 | alias Protobuf.{FieldProps, MessageProps} 5 | 6 | @spec quoted_enum_typespec(MessageProps.t()) :: Macro.t() 7 | def quoted_enum_typespec(%MessageProps{field_props: field_props}) do 8 | atom_specs = 9 | field_props 10 | |> Enum.sort_by(fn {fnum, _prop} -> fnum end) 11 | |> Enum.map(fn {_fnum, %FieldProps{name_atom: name}} -> name end) 12 | |> union_specs() 13 | 14 | quote do 15 | integer() | unquote(atom_specs) 16 | end 17 | end 18 | 19 | @spec quoted_message_typespec(MessageProps.t()) :: Macro.t() 20 | def quoted_message_typespec(%MessageProps{syntax: syntax} = message_props) do 21 | regular_fields = 22 | for {_fnum, 
%FieldProps{oneof: nil} = prop} <- message_props.field_props, 23 | do: {prop.name_atom, field_prop_to_spec(syntax, prop)} 24 | 25 | oneof_fields = 26 | for {field_name, fnum} <- message_props.oneof do 27 | possible_fields = 28 | for {_fnum, %FieldProps{oneof: ^fnum} = prop} <- message_props.field_props, do: prop 29 | 30 | {field_name, oneof_spec(syntax, possible_fields)} 31 | end 32 | 33 | extension_fields = 34 | case message_props.extension_range do 35 | [_ | _] -> [{:__pb_extensions__, quote(do: map())}] 36 | _other -> [] 37 | end 38 | 39 | unknown_fields = [ 40 | {:__unknown_fields__, 41 | quote( 42 | do: [ 43 | Protobuf.unknown_field() 44 | ] 45 | )} 46 | ] 47 | 48 | field_specs = regular_fields ++ oneof_fields ++ extension_fields ++ unknown_fields 49 | 50 | quote do: %__MODULE__{unquote_splicing(field_specs)} 51 | end 52 | 53 | defp oneof_spec(syntax, possible_oneof_fields) do 54 | possible_oneof_fields 55 | |> Enum.map(fn prop -> {prop.name_atom, field_prop_to_spec(syntax, prop)} end) 56 | |> Kernel.++([nil]) 57 | |> union_specs() 58 | end 59 | 60 | defp field_prop_to_spec(_syntax, %FieldProps{map?: true, type: map_mod} = prop) do 61 | Code.ensure_compiled!(map_mod) 62 | map_props = map_mod.__message_props__() 63 | 64 | key_spec = scalar_type_to_spec(map_props.field_props[map_props.field_tags.key].type) 65 | value_prop = map_props.field_props[map_props.field_tags.value] 66 | 67 | value_spec = type_to_spec(value_prop.type, value_prop) 68 | 69 | value_spec = if prop.embedded?, do: quote(do: unquote(value_spec) | nil), else: value_spec 70 | quote do: %{optional(unquote(key_spec)) => unquote(value_spec)} 71 | end 72 | 73 | defp field_prop_to_spec(syntax, %FieldProps{type: type} = prop) do 74 | spec = type_to_spec(type, prop) 75 | 76 | cond do 77 | prop.repeated? -> 78 | quote do: [unquote(spec)] 79 | 80 | prop.embedded? or (prop.optional? and is_nil(prop.oneof) and syntax != :proto3) or 81 | prop.proto3_optional? -> 82 | quote do: unquote(spec) | nil 83 | 84 | true -> 85 | spec 86 | end 87 | end 88 | 89 | defp type_to_spec({:enum, enum_mod}, _prop), do: quote(do: unquote(enum_mod).t()) 90 | defp type_to_spec(mod, %FieldProps{embedded?: true}), do: quote(do: unquote(mod).t()) 91 | defp type_to_spec(:group, _prop), do: quote(do: term()) 92 | defp type_to_spec(type, _prop), do: scalar_type_to_spec(type) 93 | 94 | defp scalar_type_to_spec(:string), do: quote(do: String.t()) 95 | defp scalar_type_to_spec(:bytes), do: quote(do: binary()) 96 | defp scalar_type_to_spec(:bool), do: quote(do: boolean()) 97 | 98 | defp scalar_type_to_spec(type) 99 | when type in [:int32, :int64, :sint32, :sint64, :sfixed32, :sfixed64], 100 | do: quote(do: integer()) 101 | 102 | defp scalar_type_to_spec(type) 103 | when type in [:uint32, :uint64, :fixed32, :fixed64], 104 | do: quote(do: non_neg_integer()) 105 | 106 | defp scalar_type_to_spec(type) when type in [:float, :double], 107 | do: quote(do: float() | :infinity | :negative_infinity | :nan) 108 | 109 | # We do this because the :| operator is left-associative, so if we just map and build "acc | 110 | # spec" then we end up with "((foo | bar) | baz) | bong". By building it from right to left, it 111 | # works just fine. 
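# Illustration (derived from the reduce below): union_specs([quote(do: :a), quote(do: :b), quote(do: :c)]) builds the AST for `:a | (:b | :c)`, which prints as `:a | :b | :c` in the emitted typespec.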
112 | defp union_specs(specs) do 113 | Enum.reduce(Enum.reverse(specs), fn spec, acc -> 114 | quote do: unquote(spec) | unquote(acc) 115 | end) 116 | end 117 | end 118 | -------------------------------------------------------------------------------- /lib/protobuf/errors.ex: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.DecodeError do 2 | @moduledoc """ 3 | An error for when decoding a Protobuf message fails. 4 | """ 5 | defexception message: "something wrong when decoding" 6 | end 7 | 8 | defmodule Protobuf.EncodeError do 9 | @moduledoc """ 10 | An error for when encoding a Protobuf message fails. 11 | """ 12 | defexception message: "something wrong when encoding" 13 | end 14 | 15 | defmodule Protobuf.InvalidError do 16 | defexception [:message] 17 | end 18 | 19 | defmodule Protobuf.ExtensionNotFound do 20 | defexception message: "extension for the field is not found" 21 | end 22 | -------------------------------------------------------------------------------- /lib/protobuf/extension.ex: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.Extension do 2 | @moduledoc """ 3 | [Extensions](https://developers.google.com/protocol-buffers/docs/proto#extensions) 4 | let you set extra fields for previously defined messages(even for messages in other packages) 5 | without changing the original message. 6 | 7 | To load extensions you should call `Protobuf.load_extensions/0` when your application starts: 8 | 9 | def start(_type, _args) do 10 | Protobuf.load_extensions() 11 | Supervisor.start_link([], strategy: :one_for_one) 12 | end 13 | 14 | ## Examples 15 | 16 | # protoc should be used to generate the code instead of writing by hand. 17 | defmodule Foo do 18 | use Protobuf, syntax: :proto2 19 | 20 | extensions([{100, 101}, {1000, 536_870_912}]) 21 | end 22 | 23 | # This module is generated for all "extend" calls in one file. 24 | # This module is needed in `*_extension` function because the field name is scoped 25 | # in the proto file. 26 | defmodule Ext.PbExtension do 27 | use Protobuf, syntax: :proto2 28 | 29 | extend Foo, :my_custom, 1047, optional: true, type: :string 30 | end 31 | 32 | foo = %Foo{} 33 | Foo.put_extension(foo, Ext.PbExtension, :my_custom, "Custom field") 34 | Foo.get_extension(foo, Ext.PbExtension, :my_custom) 35 | 36 | """ 37 | 38 | import Bitwise, only: [<<<: 2] 39 | 40 | # TODO: replace bitshift with Integer.pow/2 when we depend on Elixir 1.12+. 41 | # 2^29, see https://developers.google.com/protocol-buffers/docs/proto#extensions 42 | @max 1 <<< 29 43 | 44 | @doc """ 45 | Returns the maximum extension number. 
46 | 47 | ## Examples 48 | 49 | iex> Protobuf.Extension.max() 50 | #{inspect(@max)} 51 | 52 | """ 53 | @doc since: "0.12.0" 54 | @spec max() :: pos_integer() 55 | def max, do: @max 56 | 57 | @doc "The actual function for `put_extension`" 58 | @spec put(module, map, module, atom, any) :: map 59 | def put(mod, struct, extension_mod, field, value) do 60 | key = {mod, field} 61 | 62 | case extension_mod.__protobuf_info__(:extension_props) do 63 | %{name_to_tag: %{^key => _}} -> 64 | case struct do 65 | %{__pb_extensions__: es} -> 66 | Map.put(struct, :__pb_extensions__, Map.put(es, {extension_mod, field}, value)) 67 | 68 | _ -> 69 | Map.put(struct, :__pb_extensions__, %{{extension_mod, field} => value}) 70 | end 71 | 72 | _ -> 73 | raise Protobuf.ExtensionNotFound, 74 | message: "Extension #{extension_mod}##{field} is not found" 75 | end 76 | end 77 | 78 | @doc "The actual function for `get_extension`" 79 | @spec get(map, module, atom, any) :: any 80 | def get(struct, extension_mod, field, default) do 81 | key = {extension_mod, field} 82 | 83 | case struct do 84 | %{__pb_extensions__: %{^key => val}} -> 85 | val 86 | 87 | %{} -> 88 | default 89 | end 90 | end 91 | 92 | @doc false 93 | def get_extension_props(extendee, ext_mod, field) do 94 | index = {extendee, field} 95 | ext_props = ext_mod.__protobuf_info__(:extension_props) 96 | 97 | case ext_props.name_to_tag do 98 | %{^index => tag_idx} -> 99 | case ext_props.extensions do 100 | %{^tag_idx => props} -> 101 | props 102 | 103 | _ -> 104 | nil 105 | end 106 | 107 | _ -> 108 | nil 109 | end 110 | end 111 | 112 | @doc false 113 | def get_extension_props_by_tag(extendee, tag) do 114 | case :persistent_term.get({Protobuf.Extension, extendee, tag}, nil) do 115 | nil -> 116 | nil 117 | 118 | mod -> 119 | index = {extendee, tag} 120 | 121 | case mod.__protobuf_info__(:extension_props).extensions do 122 | %{^index => props} -> 123 | {mod, props} 124 | 125 | _ -> 126 | nil 127 | end 128 | end 129 | end 130 | end 131 | -------------------------------------------------------------------------------- /lib/protobuf/extension/props.ex: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.Extension.Props do 2 | @moduledoc false 3 | 4 | defmodule Extension do 5 | @moduledoc false 6 | @type t :: %__MODULE__{ 7 | extendee: module, 8 | field_props: FieldProps.T 9 | } 10 | defstruct extendee: nil, 11 | field_props: nil 12 | end 13 | 14 | @type t :: %__MODULE__{ 15 | extensions: %{{module, integer} => Extension.t()}, 16 | name_to_tag: %{{module, atom} => {module, integer}} 17 | } 18 | defstruct extensions: %{}, name_to_tag: %{} 19 | end 20 | -------------------------------------------------------------------------------- /lib/protobuf/field_props.ex: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.FieldProps do 2 | @moduledoc false 3 | 4 | # A struct containing information about a field in a Protobuf message. 
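# For instance, `field :name, 1, type: :string` in a proto3 message ends up with props roughly like %FieldProps{fnum: 1, name: "name", name_atom: :name, type: :string, wire_type: 2} (the remaining keys keep the defaults below); the exact values are filled in by Protobuf.DSL at compile time.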
5 | 6 | @type t :: %__MODULE__{ 7 | fnum: integer, 8 | name: String.t(), 9 | name_atom: atom, 10 | json_name: String.t(), 11 | wire_type: Protobuf.wire_type(), 12 | type: atom | {:enum, atom}, 13 | default: any, 14 | oneof: non_neg_integer | nil, 15 | required?: boolean, 16 | optional?: boolean, 17 | proto3_optional?: boolean, 18 | repeated?: boolean, 19 | enum?: boolean, 20 | embedded?: boolean, 21 | packed?: boolean, 22 | map?: boolean, 23 | deprecated?: boolean, 24 | encoded_fnum: iodata 25 | } 26 | defstruct fnum: nil, 27 | name: nil, 28 | name_atom: nil, 29 | json_name: nil, 30 | wire_type: nil, 31 | type: nil, 32 | default: nil, 33 | oneof: nil, 34 | required?: false, 35 | optional?: false, 36 | proto3_optional?: false, 37 | repeated?: false, 38 | enum?: false, 39 | embedded?: false, 40 | packed?: nil, 41 | map?: false, 42 | deprecated?: false, 43 | encoded_fnum: nil 44 | end 45 | -------------------------------------------------------------------------------- /lib/protobuf/json/decode_error.ex: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.JSON.DecodeError do 2 | defexception [:message] 3 | 4 | @type t :: %__MODULE__{message: String.t()} 5 | 6 | def new({:unsupported_syntax, syntax}) do 7 | %__MODULE__{message: "JSON encoding of '#{syntax}' syntax is unsupported, try proto3"} 8 | end 9 | 10 | def new(:no_json_lib) do 11 | %__MODULE__{message: "JSON library not loaded, make sure to add :jason to your mix.exs file"} 12 | end 13 | 14 | def new({:bad_message, data, module}) do 15 | %__MODULE__{message: "JSON map expected for module #{inspect(module)}, got: #{inspect(data)}"} 16 | end 17 | 18 | def new({:bad_duration, string, error}) do 19 | %__MODULE__{message: "bad JSON value for duration #{inspect(string)}, got: #{inspect(error)}"} 20 | end 21 | 22 | def new({:bad_timestamp, string, reason}) do 23 | %__MODULE__{ 24 | message: 25 | "bad JSON value for timestamp #{inspect(string)}, failed to parse: #{inspect(reason)}" 26 | } 27 | end 28 | 29 | def new({:bad_field_mask, string}) do 30 | %__MODULE__{message: "invalid characters in field mask: #{inspect(string)}"} 31 | end 32 | 33 | def new({:bad_string, field, value}) do 34 | %__MODULE__{message: "Field '#{field}' has an invalid string (#{inspect(value)})"} 35 | end 36 | 37 | def new({:bad_bool, field, value}) do 38 | %__MODULE__{message: "Field '#{field}' has an invalid boolean (#{inspect(value)})"} 39 | end 40 | 41 | def new({:bad_int, field, value}) do 42 | %__MODULE__{message: "Field '#{field}' has an invalid integer (#{inspect(value)})"} 43 | end 44 | 45 | def new({:bad_float, field, value}) do 46 | %__MODULE__{message: "Field '#{field}' has an invalid floating point (#{inspect(value)})"} 47 | end 48 | 49 | def new({:bad_bytes, field}) do 50 | %__MODULE__{message: "Field '#{field}' has an invalid Base64-encoded byte sequence"} 51 | end 52 | 53 | def new({:bad_enum, field, value}) do 54 | %__MODULE__{message: "Field '#{field}' has an invalid enum value (#{inspect(value)})"} 55 | end 56 | 57 | def new({:bad_map, field, value}) do 58 | %__MODULE__{message: "Field '#{field}' has an invalid map (#{inspect(value)})"} 59 | end 60 | 61 | def new({:bad_map_key, field, type, value}) do 62 | %__MODULE__{message: "Field '#{field}' has an invalid map key (#{type}: #{inspect(value)})"} 63 | end 64 | 65 | def new({:duplicated_oneof, oneof}) do 66 | %__MODULE__{message: "Oneof field '#{oneof}' cannot be set twice"} 67 | end 68 | 69 | def new({:bad_repeated, field, value}) do 70 | 
%__MODULE__{message: "Repeated field '#{field}' expected a list, got #{inspect(value)}"} 71 | end 72 | 73 | def new({:unexpected_end, position}) do 74 | %__MODULE__{message: "Unexpected end at position #{inspect(position)}"} 75 | end 76 | 77 | def new({:invalid_byte, position, byte}) do 78 | %__MODULE__{message: "Invalid byte at position #{inspect(position)}, byte: #{inspect(byte)}"} 79 | end 80 | 81 | def new({:unexpected_sequence, position, sequence}) do 82 | %__MODULE__{ 83 | message: 84 | "Unexpected sequence at position #{inspect(position)}, sequence: #{inspect(sequence)}" 85 | } 86 | end 87 | end 88 | -------------------------------------------------------------------------------- /lib/protobuf/json/encode_error.ex: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.JSON.EncodeError do 2 | defexception [:message] 3 | 4 | @type t :: %__MODULE__{message: String.t()} 5 | 6 | def new({:unsupported_syntax, syntax}) do 7 | %__MODULE__{message: "JSON encoding of '#{syntax}' syntax is unsupported, try proto3"} 8 | end 9 | 10 | def new(:no_json_lib) do 11 | %__MODULE__{message: "JSON library not loaded, make sure to add :jason to your mix.exs file"} 12 | end 13 | 14 | def new({:bad_duration, :seconds_outside_of_range, seconds}) do 15 | %__MODULE__{ 16 | message: "invalid Google.Protobuf.Duration, seconds are outside of range: #{seconds}" 17 | } 18 | end 19 | 20 | def new({:invalid_timestamp, timestamp, reason}) do 21 | %__MODULE__{ 22 | message: 23 | "invalid Google.Protobuf.Timestamp value #{inspect(timestamp)}, reason: #{inspect(reason)}" 24 | } 25 | end 26 | 27 | def new({:unknown_enum_value, key, enum_mod}) when is_atom(key) and is_atom(enum_mod) do 28 | %__MODULE__{ 29 | message: "unknown value #{key} for enum #{inspect(enum_mod)}" 30 | } 31 | end 32 | 33 | def new({:invalid_type, type, value}) when is_atom(type) do 34 | %__MODULE__{ 35 | message: "invalid value for type #{type}: #{inspect(value)}" 36 | } 37 | end 38 | 39 | def new({:non_numeric_float, nan}) when nan in [:nan, :infinity, :negative_infinity] do 40 | message = """ 41 | cannot encode non-numeric float/double for Google.Protobuf.Value: #{inspect(nan)} 42 | See https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.Value 43 | """ 44 | 45 | %__MODULE__{message: message} 46 | end 47 | 48 | def new({:bad_field_mask, mask}) do 49 | %__MODULE__{message: "unencodable field mask: #{inspect(mask)}"} 50 | end 51 | 52 | def new({:bad_encoding, term}) do 53 | %__MODULE__{message: "bad encoding: #{inspect(term)}"} 54 | end 55 | end 56 | -------------------------------------------------------------------------------- /lib/protobuf/json/json_library.ex: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.JSON.JSONLibrary do 2 | @moduledoc false 3 | # Uses `JSON` for Elixir >= 1.18, Jason if Elixir < 1.18 and Jason available, 4 | # or returns error otherwise 5 | 6 | cond do 7 | Code.ensure_loaded?(JSON) -> 8 | def encode_to_iodata(encodable) do 9 | try do 10 | {:ok, JSON.encode_to_iodata!(encodable)} 11 | rescue 12 | exception -> 13 | {:error, exception} 14 | end 15 | end 16 | 17 | def decode(data) do 18 | case JSON.decode(data) do 19 | {:ok, decoded} -> {:ok, decoded} 20 | {:error, error} -> {:error, Protobuf.JSON.DecodeError.new(error)} 21 | end 22 | end 23 | 24 | Code.ensure_loaded?(Jason) -> 25 | def encode_to_iodata(encodable), do: Jason.encode_to_iodata(encodable) 26 | def decode(data), do: 
Jason.decode(data) 27 | 28 | true -> 29 | def encode_to_iodata(_), do: {:error, EncodeError.new(:no_json_lib)} 30 | def decode(_), do: {:error, EncodeError.new(:no_json_lib)} 31 | end 32 | end 33 | -------------------------------------------------------------------------------- /lib/protobuf/json/rfc3339.ex: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.JSON.RFC3339 do 2 | @moduledoc false 3 | 4 | # The JSON mapping with Google.Protobuf.Timestamp is tricky. They use RFC3339. Elixir's built-in 5 | # datetime support uses ISO8601. The two are very similar but with a few small differences, 6 | # namely: 7 | # 1. RFC3339 supports nanoseconds, while the Elixir implementation goes up to microseconds. 8 | # 2. RFC3339 seems to be slightly stricter 9 | 10 | # To avoid implementing calendar awareness in here, we use a little trick: we do actually 11 | # manually *parse* datetimes and validate that they are correct and we also read nanoseconds out 12 | # of them. Then, we use the ISO8601-based Elixir functions to actually parse the datetimes with 13 | # calendar awareness, we throw away anything after the seconds, and replace them with the 14 | # nanoseconds we parsed. It seems to work very well, as proved by the conformance tests! 15 | # For encoding, we have to use a "dirtier" trick. We encode using Elixir's ISO8601 without 16 | # anything after the seconds, then we split the string and shove the nanoseconds in it. It works 17 | # because the DD-MM-YYYYTHH:MM:SS part has always the same size so we always know where to 18 | # inject the nanoseconds. Again, a bit dirty? Yes. Does it pass conformance tests? Yes! 19 | 20 | # The grammar for RFC3339 dates is taken straight out of the RFC 21 | # (https://datatracker.ietf.org/doc/html/rfc3339#section-5.6) and is reported here for ease of 22 | # reference: 23 | 24 | # date-fullyear = 4DIGIT 25 | # date-month = 2DIGIT ; 01-12 26 | # date-mday = 2DIGIT ; 01-28, 01-29, 01-30, 01-31 based on 27 | # ; month/year 28 | # time-hour = 2DIGIT ; 00-23 29 | # time-minute = 2DIGIT ; 00-59 30 | # time-second = 2DIGIT ; 00-58, 00-59, 00-60 based on leap second 31 | # ; rules 32 | # time-secfrac = "." 1*DIGIT 33 | # time-numoffset = ("+" / "-") time-hour ":" time-minute 34 | # time-offset = "Z" / time-numoffset 35 | # 36 | # partial-time = time-hour ":" time-minute ":" time-second 37 | # [time-secfrac] 38 | # full-date = date-fullyear "-" date-month "-" date-mday 39 | # full-time = partial-time time-offset 40 | # 41 | # date-time = full-date "T" full-time 42 | # 43 | # date-time = full-date "T" full-time 44 | 45 | # Most of the functions in this module are called "eat_*": these functions consume bytes but 46 | # don't return them. They are just used for "skipping ahead". The "parse_*" functions are the 47 | # only ones that return data. 
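# As a rough illustration of the round trip described above (values picked for this comment, not taken from the conformance suite): decode("1970-01-01T00:00:01.5Z") is expected to yield {:ok, 1, 500_000_000}, and encode(1, 500_000_000) to yield {:ok, "1970-01-01T00:00:01.500Z"}.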
48 | 49 | alias Protobuf.JSON.Utils 50 | 51 | @spec decode(String.t()) :: 52 | {:ok, seconds :: integer(), nanos :: non_neg_integer()} | {:error, String.t()} 53 | def decode(string) when is_binary(string) do 54 | rest = eat_full_date(string) 55 | rest = eat_literal(rest, "T") 56 | {time_secfrac_nano, rest} = parse_full_time(rest) 57 | ensure_empty(rest) 58 | 59 | case DateTime.from_iso8601(string) do 60 | {:ok, datetime, _offset_in_seconds} -> 61 | if datetime_in_allowed_range?(datetime) do 62 | seconds = 63 | datetime 64 | |> DateTime.truncate(:second) 65 | |> DateTime.to_unix(:second) 66 | 67 | {:ok, seconds, time_secfrac_nano} 68 | else 69 | {:error, "timestamp is outside of allowed range"} 70 | end 71 | 72 | {:error, reason} -> 73 | {:error, Atom.to_string(reason)} 74 | end 75 | catch 76 | :throw, reason -> {:error, reason} 77 | end 78 | 79 | @spec encode(integer(), non_neg_integer()) :: {:ok, String.t()} | {:error, String.t()} 80 | def encode(seconds, nanos) 81 | 82 | def encode(seconds, nanos) 83 | when is_integer(seconds) and is_integer(nanos) and nanos >= 1_000_000_000 do 84 | {:error, "nanos can't be bigger than 1000000000, got: #{nanos}"} 85 | end 86 | 87 | def encode(seconds, nanos) when is_integer(seconds) and is_integer(nanos) and nanos >= 0 do 88 | case DateTime.from_unix(seconds, :second) do 89 | {:ok, datetime} -> 90 | if datetime_in_allowed_range?(datetime) do 91 | string = DateTime.to_iso8601(datetime) 92 | 93 | if nanos > 0 do 94 | bytes_before_time_secfrac = unquote(byte_size("1970-01-01T00:00:00")) 95 | {before_secfrac, after_secfrac} = String.split_at(string, bytes_before_time_secfrac) 96 | {:ok, before_secfrac <> "." <> Utils.format_nanoseconds(nanos) <> after_secfrac} 97 | else 98 | {:ok, string} 99 | end 100 | else 101 | {:error, "timestamp is outside of allowed range"} 102 | end 103 | 104 | {:error, reason} -> 105 | {:error, inspect(reason)} 106 | end 107 | end 108 | 109 | ## Parsing functions 110 | 111 | # Grammar: 112 | # full-date = date-fullyear "-" date-month "-" date-mday 113 | defp eat_full_date(rest) do 114 | rest 115 | |> eat_digits(_fullyear = 4) 116 | |> eat_literal("-") 117 | |> eat_digits(_month = 2) 118 | |> eat_literal("-") 119 | |> eat_digits(_mday = 2) 120 | end 121 | 122 | # Grammar: 123 | # full-time = partial-time time-offset 124 | defp parse_full_time(string) do 125 | rest = eat_partial_time(string) 126 | {time_secfrac, rest} = parse_time_secfrac(rest) 127 | rest = eat_time_offset(rest) 128 | {time_secfrac, rest} 129 | end 130 | 131 | # Grammar: 132 | # partial-time = time-hour ":" time-minute ":" time-second 133 | defp eat_partial_time(rest) do 134 | rest 135 | |> eat_digits(2) 136 | |> eat_literal(":") 137 | |> eat_digits(2) 138 | |> eat_literal(":") 139 | |> eat_digits(2) 140 | end 141 | 142 | # Grammar: 143 | # time-secfrac = "." 1*DIGIT 144 | defp parse_time_secfrac("." 
<> rest) do 145 | case Utils.parse_nanoseconds(rest) do 146 | {secfrac_nano, rest} -> {secfrac_nano, rest} 147 | :error -> throw("bad time secfrac after \".\", got: #{inspect(rest)}") 148 | end 149 | end 150 | 151 | defp parse_time_secfrac(rest), do: {0, rest} 152 | 153 | defp eat_literal(string, literal) do 154 | literal_size = byte_size(literal) 155 | 156 | case string do 157 | <<^literal::bytes-size(literal_size), rest::binary>> -> rest 158 | other -> throw("expected literal #{inspect(literal)}, got: #{inspect(other)}") 159 | end 160 | end 161 | 162 | # Grammar: 163 | # time-numoffset = ("+" / "-") time-hour ":" time-minute 164 | defp eat_time_offset(<<z, rest::binary>>) when z in [?z, ?Z], do: rest 165 | 166 | defp eat_time_offset(<<sign, rest::binary>>) when sign in [?+, ?-] do 167 | rest 168 | |> eat_digits(2) 169 | |> eat_literal(":") 170 | |> eat_digits(2) 171 | end 172 | 173 | defp eat_time_offset("") do 174 | throw("expected time offset, but it's missing") 175 | end 176 | 177 | defp ensure_empty(""), do: :ok 178 | defp ensure_empty(other), do: throw("expected empty string, got: #{inspect(other)}") 179 | 180 | defp eat_digits(string, count) do 181 | case string do 182 | <<digits::bytes-size(count), rest::binary>> -> 183 | case Integer.parse(digits) do 184 | {_digits, ""} -> 185 | rest 186 | 187 | _other -> 188 | throw("expected #{count} digits but got unparsable integer: #{inspect(digits)}") 189 | end 190 | 191 | _other -> 192 | throw("expected #{count} digits, got: #{inspect(string)}") 193 | end 194 | end 195 | 196 | {:ok, min_datetime, 0} = DateTime.from_iso8601("0001-01-01T00:00:00Z") 197 | {:ok, max_datetime, 0} = DateTime.from_iso8601("9999-12-31T23:59:59Z") 198 | 199 | defp datetime_in_allowed_range?(datetime) do 200 | truncated = DateTime.truncate(datetime, :second) 201 | 202 | DateTime.compare(truncated, unquote(Macro.escape(min_datetime))) in [:gt, :eq] and 203 | DateTime.compare(truncated, unquote(Macro.escape(max_datetime))) in [:lt, :eq] 204 | end 205 | end 206 | -------------------------------------------------------------------------------- /lib/protobuf/json/utils.ex: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.JSON.Utils do 2 | @moduledoc false 3 | 4 | # Used to encode nanoseconds for Google.Protobuf.Duration and Google.Protobuf.Timestamp. 5 | # Examples: 6 | # 1 -> "000000001" 7 | # 1_000 -> "000001" 8 | # 1_000_000 -> "001" 9 | # 999_999_999 -> "999999999" 10 | @doc false 11 | @spec format_nanoseconds(integer()) :: String.t() 12 | def format_nanoseconds(nanoseconds) 13 | when nanoseconds >= -999_999_999 and nanoseconds <= 999_999_999 do 14 | nanoseconds 15 | |> abs() 16 | |> Integer.to_string() 17 | |> String.pad_leading(9, "0") 18 | |> String.trim_trailing("000") 19 | |> String.trim_trailing("000") 20 | end 21 | 22 | # Used to decode nanoseconds for Google.Protobuf.Duration and Google.Protobuf.Timestamp (in 23 | # Protobuf.JSON.RFC3339). Has the same spec as Integer.parse/2.
24 | # Examples: 25 | # "100" -> 100_000_000 (nanosec) 26 | # "000010" => 10_000 (nanosec) 27 | # "000000001" => 1 (nanosec) 28 | # "01" -> 10_000_000 (nanosec) 29 | @doc false 30 | @spec parse_nanoseconds(binary()) :: {nanoseconds :: integer(), rest :: binary()} | :error 31 | def parse_nanoseconds(binary) when is_binary(binary) do 32 | case parse_nanoseconds(binary, _acc = 0, _starting_power = 100_000_000) do 33 | :error -> :error 34 | {_, ^binary} -> :error 35 | {_nanoseconds, _rest} = result -> result 36 | end 37 | end 38 | 39 | defp parse_nanoseconds(<>, _acc, _power = 0) when digit in ?0..?9 do 40 | :error 41 | end 42 | 43 | defp parse_nanoseconds(<>, acc, power) when digit in ?0..?9 do 44 | digit = digit - ?0 45 | parse_nanoseconds(rest, acc + digit * power, div(power, 10)) 46 | end 47 | 48 | defp parse_nanoseconds(rest, acc, _power) do 49 | {acc, rest} 50 | end 51 | end 52 | -------------------------------------------------------------------------------- /lib/protobuf/message_props.ex: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.MessageProps do 2 | @moduledoc false 3 | 4 | alias Protobuf.FieldProps 5 | 6 | # A struct containing information about a Protobuf message 7 | 8 | # A "field number". 9 | @type tag() :: integer() 10 | 11 | @type field_name() :: atom() 12 | 13 | @type t :: %__MODULE__{ 14 | ordered_tags: [tag()], 15 | tags_map: %{tag() => tag()}, 16 | field_props: %{tag() => FieldProps.t()}, 17 | field_tags: %{field_name() => tag()}, 18 | repeated_fields: [field_name()], 19 | embedded_fields: [field_name()], 20 | syntax: atom(), 21 | oneof: [{field_name(), tag()}], 22 | enum?: boolean(), 23 | extendable?: boolean(), 24 | map?: boolean(), 25 | 26 | # See Protobuf.DSL.extensions/1. 27 | extension_range: [{non_neg_integer(), non_neg_integer()}] | nil 28 | } 29 | 30 | defstruct ordered_tags: [], 31 | tags_map: %{}, 32 | field_props: %{}, 33 | field_tags: %{}, 34 | repeated_fields: [], 35 | embedded_fields: [], 36 | syntax: :proto2, 37 | oneof: [], 38 | enum?: false, 39 | extendable?: false, 40 | map?: false, 41 | extension_range: [] 42 | end 43 | -------------------------------------------------------------------------------- /lib/protobuf/protoc/context.ex: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.Protoc.Context do 2 | @moduledoc false 3 | 4 | @type t() :: %__MODULE__{} 5 | 6 | # Plugins passed by options 7 | defstruct plugins: [], 8 | 9 | ### All files scope 10 | 11 | # All parsed comments from the source file (mapping from path to comment) 12 | # %{"1.4.2" => "this is a comment", "1.5.2.4.2" => "more comment\ndetails"} 13 | comments: %{}, 14 | 15 | # Mapping from file name to (mapping from type name to metadata, like elixir type name) 16 | # %{"example.proto" => %{".example.FooMsg" => %{type_name: "Example.FooMsg"}}} 17 | global_type_mapping: %{}, 18 | 19 | ### One file scope 20 | 21 | # Package name 22 | package: nil, 23 | package_prefix: nil, 24 | module_prefix: nil, 25 | syntax: nil, 26 | # Mapping from type_name to metadata. It's merged type mapping of dependencies files including itself 27 | # %{".example.FooMsg" => %{type_name: "Example.FooMsg"}} 28 | dep_type_mapping: %{}, 29 | 30 | # For a message 31 | # Nested namespace when generating nested messages. 
It should be joined to get the full namespace 32 | namespace: [], 33 | 34 | # Include binary descriptors in the generated protobuf modules 35 | # And expose them via the `descriptor/0` function 36 | gen_descriptors?: false, 37 | 38 | # Module to transform values before and after encode and decode 39 | transform_module: nil, 40 | 41 | # Generate one file per module with "proper" directory structure 42 | # (according to Elixir conventions) if this is true 43 | one_file_per_module?: false, 44 | 45 | # Include visible module docs in the generated protobuf modules 46 | include_docs?: false, 47 | 48 | # Elixirpb.FileOptions 49 | custom_file_options: %{}, 50 | 51 | # Current comment path. The locations are encoded into with a joining "." 52 | # character. E.g. "4.2.3.0" 53 | current_comment_path: "" 54 | 55 | @spec custom_file_options_from_file_desc(t(), Google.Protobuf.FileDescriptorProto.t()) :: t() 56 | def custom_file_options_from_file_desc(ctx, desc) 57 | 58 | def custom_file_options_from_file_desc( 59 | %__MODULE__{} = ctx, 60 | %Google.Protobuf.FileDescriptorProto{options: nil} 61 | ) do 62 | %__MODULE__{ctx | custom_file_options: %{}} 63 | end 64 | 65 | def custom_file_options_from_file_desc( 66 | %__MODULE__{} = ctx, 67 | %Google.Protobuf.FileDescriptorProto{options: options} 68 | ) do 69 | custom_file_opts = 70 | Google.Protobuf.FileOptions.get_extension(options, Elixirpb.PbExtension, :file) || 71 | %Elixirpb.FileOptions{} 72 | 73 | %__MODULE__{ 74 | ctx 75 | | custom_file_options: custom_file_opts, 76 | module_prefix: Map.get(custom_file_opts, :module_prefix) 77 | } 78 | end 79 | 80 | @doc """ 81 | Appends a comment path to the current comment path. 82 | 83 | ## Examples 84 | 85 | iex> append_comment_path(%{current_comment_path: "4"}, "2.4") 86 | %{current_comment_path: "4.2.4"} 87 | 88 | """ 89 | def append_comment_path(ctx, path) do 90 | %{ctx | current_comment_path: String.trim(ctx.current_comment_path <> "." <> path, ".")} 91 | end 92 | end 93 | -------------------------------------------------------------------------------- /lib/protobuf/protoc/generator.ex: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.Protoc.Generator do 2 | @moduledoc false 3 | 4 | alias Protobuf.Protoc.Context 5 | alias Protobuf.Protoc.Generator 6 | 7 | # TODO: improve spec 8 | @spec generate(Context.t(), %Google.Protobuf.FileDescriptorProto{}) :: 9 | {term(), [Google.Protobuf.Compiler.CodeGeneratorResponse.File.t()]} 10 | def generate(%Context{} = ctx, %Google.Protobuf.FileDescriptorProto{} = desc) do 11 | {package_level_extensions, module_definitions} = generate_module_definitions(ctx, desc) 12 | 13 | files = 14 | if ctx.one_file_per_module? do 15 | Enum.map(module_definitions, fn {mod_name, content} -> 16 | file_name = Macro.underscore(mod_name) <> ".pb.ex" 17 | %Google.Protobuf.Compiler.CodeGeneratorResponse.File{name: file_name, content: content} 18 | end) 19 | else 20 | # desc.name is the filename, ending in ".proto". 
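# For example, a descriptor named "my/pkg/example.proto" (hypothetical name) becomes "my/pkg/example.pb.ex" here.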
21 | file_name = Path.rootname(desc.name) <> ".pb.ex" 22 | 23 | content = 24 | module_definitions 25 | |> Enum.map(fn {_mod_name, contents} -> [contents, ?\n] end) 26 | |> IO.iodata_to_binary() 27 | |> Generator.Util.format() 28 | 29 | [%Google.Protobuf.Compiler.CodeGeneratorResponse.File{name: file_name, content: content}] 30 | end 31 | 32 | {package_level_extensions, files} 33 | end 34 | 35 | defp generate_module_definitions(ctx, %Google.Protobuf.FileDescriptorProto{} = desc) do 36 | ctx = 37 | %Context{ 38 | ctx 39 | | comments: Protobuf.Protoc.Generator.Comment.parse(desc), 40 | syntax: syntax(desc.syntax), 41 | package: desc.package, 42 | dep_type_mapping: get_dep_type_mapping(ctx, desc.dependency, desc.name) 43 | } 44 | |> Protobuf.Protoc.Context.custom_file_options_from_file_desc(desc) 45 | 46 | enum_defmodules = 47 | desc.enum_type 48 | |> Enum.with_index() 49 | |> Enum.map(fn {enum, index} -> 50 | {Context.append_comment_path(ctx, "5.#{index}"), enum} 51 | end) 52 | |> Enum.map(fn {ctx, enum} -> Generator.Enum.generate(ctx, enum) end) 53 | 54 | {nested_enum_defmodules, message_defmodules} = 55 | Generator.Message.generate_list(ctx, desc.message_type) 56 | 57 | {package_level_extensions, extension_defmodules} = Generator.Extension.generate(ctx, desc) 58 | 59 | service_defmodules = 60 | if "grpc" in ctx.plugins do 61 | desc.service 62 | |> Enum.with_index() 63 | |> Enum.map(fn {service, index} -> 64 | Generator.Service.generate( 65 | Context.append_comment_path(ctx, "6.#{index}"), 66 | service 67 | ) 68 | end) 69 | else 70 | [] 71 | end 72 | 73 | defmodules = 74 | List.flatten([ 75 | enum_defmodules, 76 | nested_enum_defmodules, 77 | message_defmodules, 78 | service_defmodules, 79 | extension_defmodules 80 | ]) 81 | 82 | {package_level_extensions, defmodules} 83 | end 84 | 85 | defp get_dep_type_mapping(%Context{global_type_mapping: global_mapping}, deps, file_name) do 86 | mapping = 87 | Enum.reduce(deps, %{}, fn dep, acc -> 88 | Map.merge(acc, global_mapping[dep]) 89 | end) 90 | 91 | Map.merge(mapping, global_mapping[file_name]) 92 | end 93 | 94 | defp syntax("proto3"), do: :proto3 95 | defp syntax("proto2"), do: :proto2 96 | defp syntax(nil), do: :proto2 97 | end 98 | -------------------------------------------------------------------------------- /lib/protobuf/protoc/generator/comment.ex: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.Protoc.Generator.Comment do 2 | @moduledoc false 3 | 4 | alias Protobuf.Protoc.Context 5 | 6 | @doc """ 7 | Parses comment information from `Google.Protobuf.FileDescriptorProto` 8 | into a map with path keys. 
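Keys are source-code-info paths joined with "."; for example, a comment attached to the first top-level message in a file typically ends up under the "4.0" key (4 is the `message_type` field of `FileDescriptorProto`, 0 is the message index).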
9 | """ 10 | @spec parse(Google.Protobuf.FileDescriptorProto.t()) :: %{optional(String.t()) => String.t()} 11 | def parse(file_descriptor_proto) do 12 | file_descriptor_proto 13 | |> get_locations() 14 | |> Enum.reject(&empty_comment?/1) 15 | |> Map.new(fn location -> 16 | {Enum.join(location.path, "."), format_comment(location)} 17 | end) 18 | end 19 | 20 | defp get_locations(%{source_code_info: %{location: value}}) when is_list(value), 21 | do: value 22 | 23 | defp get_locations(_value), do: [] 24 | 25 | defp empty_comment?(%{leading_comments: value}) when not is_nil(value) and value != "", 26 | do: false 27 | 28 | defp empty_comment?(%{trailing_comments: value}) when not is_nil(value) and value != "", 29 | do: false 30 | 31 | defp empty_comment?(%{leading_detached_comments: value}), do: Enum.empty?(value) 32 | 33 | defp format_comment(location) do 34 | [location.leading_comments, location.trailing_comments | location.leading_detached_comments] 35 | |> Enum.reject(&is_nil/1) 36 | |> Enum.map(&String.replace(&1, ~r/^\s*\*/, "", global: true)) 37 | |> Enum.join("\n\n") 38 | |> String.replace(~r/\n{3,}/, "\n") 39 | |> String.trim() 40 | end 41 | 42 | @doc """ 43 | Finds a comment via the context. Returns an empty string if the 44 | comment is not found or if `include_docs?` is set to false. 45 | """ 46 | @spec get(Context.t()) :: String.t() 47 | def get(%{include_docs?: false}), do: "" 48 | 49 | def get(%{comments: comments, current_comment_path: path}), 50 | do: get(comments, path) 51 | 52 | @doc """ 53 | Finds a comment via a map of comments and a path. Returns an 54 | empty string if the comment is not found 55 | """ 56 | @spec get(%{optional(String.t()) => String.t()}, String.t()) :: String.t() 57 | def get(comments, path), do: Map.get(comments, path, "") 58 | end 59 | -------------------------------------------------------------------------------- /lib/protobuf/protoc/generator/enum.ex: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.Protoc.Generator.Enum do 2 | @moduledoc false 3 | 4 | alias Protobuf.Protoc.Context 5 | alias Protobuf.Protoc.Generator.Comment 6 | alias Protobuf.Protoc.Generator.Util 7 | 8 | require EEx 9 | 10 | EEx.function_from_file( 11 | :defp, 12 | :enum_template, 13 | Path.expand("./templates/enum.ex.eex", :code.priv_dir(:protobuf)), 14 | [:assigns] 15 | ) 16 | 17 | @spec generate(Context.t(), Google.Protobuf.EnumDescriptorProto.t()) :: 18 | {module_name :: String.t(), file_contents :: String.t()} 19 | def generate(%Context{namespace: ns} = ctx, %Google.Protobuf.EnumDescriptorProto{} = desc) do 20 | msg_name = Util.mod_name(ctx, ns ++ [Macro.camelize(desc.name)]) 21 | 22 | use_options = 23 | Util.options_to_str(%{ 24 | syntax: ctx.syntax, 25 | enum: true, 26 | protoc_gen_elixir_version: "\"#{Util.version()}\"" 27 | }) 28 | 29 | descriptor_fun_body = 30 | if ctx.gen_descriptors? do 31 | Util.descriptor_fun_body(desc) 32 | else 33 | nil 34 | end 35 | 36 | content = 37 | enum_template( 38 | comment: Comment.get(ctx), 39 | module: msg_name, 40 | use_options: use_options, 41 | fields: desc.value, 42 | descriptor_fun_body: descriptor_fun_body, 43 | module_doc?: ctx.include_docs? 
44 | ) 45 | 46 | {msg_name, Util.format(content)} 47 | end 48 | end 49 | -------------------------------------------------------------------------------- /lib/protobuf/protoc/generator/extension.ex: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.Protoc.Generator.Extension do 2 | @moduledoc false 3 | 4 | alias Google.Protobuf.{DescriptorProto, FieldDescriptorProto, FileDescriptorProto} 5 | alias Protobuf.Protoc.Context 6 | alias Protobuf.Protoc.Generator.Comment 7 | alias Protobuf.Protoc.Generator.Util 8 | 9 | require EEx 10 | 11 | @ext_postfix "PbExtension" 12 | 13 | EEx.function_from_file( 14 | :defp, 15 | :extension_template, 16 | Path.expand("./templates/extension.ex.eex", :code.priv_dir(:protobuf)), 17 | [:assigns] 18 | ) 19 | 20 | # Returns a tuple of {module_name, module_contents} with all the given extensions. 21 | @spec generate_package_level(Context.t(), String.t(), [String.t()]) :: 22 | {module_name :: String.t(), contents :: String.t()} 23 | def generate_package_level(%Context{} = ctx, mod_name, extensions) 24 | when is_binary(mod_name) and is_list(extensions) do 25 | use_options = 26 | Util.options_to_str(%{ 27 | syntax: ctx.syntax, 28 | protoc_gen_elixir_version: "\"#{Util.version()}\"" 29 | }) 30 | 31 | module_contents = 32 | Util.format( 33 | extension_template( 34 | comment: Comment.get(ctx), 35 | use_options: use_options, 36 | module: mod_name, 37 | extends: extensions, 38 | module_doc?: ctx.include_docs? 39 | ) 40 | ) 41 | 42 | {mod_name, module_contents} 43 | end 44 | 45 | @spec generate(Context.t(), FileDescriptorProto.t()) :: {package_level_extensions, modules} 46 | when package_level_extensions: 47 | {module_name :: String.t(), extension_dsls :: [String.t()]} | nil, 48 | modules: [{module_name :: String.t(), contents :: String.t()}] 49 | def generate(%Context{} = ctx, %FileDescriptorProto{} = file_desc) do 50 | use_options = 51 | Util.options_to_str(%{ 52 | syntax: ctx.syntax, 53 | protoc_gen_elixir_version: "\"#{Util.version()}\"" 54 | }) 55 | 56 | nested_modules = get_extensions_from_messages(ctx, use_options, file_desc.message_type) 57 | 58 | {package_level_extensions(ctx, file_desc), nested_modules} 59 | end 60 | 61 | defp package_level_extensions(%Context{}, %FileDescriptorProto{extension: []}) do 62 | nil 63 | end 64 | 65 | defp package_level_extensions(%Context{} = ctx, %FileDescriptorProto{extension: extensions}) do 66 | namespace = Util.mod_name(ctx, ctx.namespace ++ [Macro.camelize(@ext_postfix)]) 67 | {namespace, Enum.map(extensions, &generate_extend_dsl(ctx, &1, _ns = ""))} 68 | end 69 | 70 | defp generate_extend_dsl(ctx, %FieldDescriptorProto{} = f, ns) do 71 | extendee = Util.type_from_type_name(ctx, f.extendee) 72 | f = Protobuf.Protoc.Generator.Message.get_field(ctx, f) 73 | 74 | name = 75 | if ns == "" do 76 | f.name 77 | else 78 | inspect("#{ns}.#{f.name}") 79 | end 80 | 81 | "#{extendee}, :#{name}, #{f.number}, #{f.label}: true, type: #{f.type}#{f.opts_str}" 82 | end 83 | 84 | defp get_extensions_from_messages(%Context{} = ctx, use_options, descs) do 85 | descs 86 | |> Enum.with_index() 87 | |> Enum.flat_map(fn {desc, index} -> 88 | generate_module(Context.append_comment_path(ctx, "7.#{index}"), use_options, desc) ++ 89 | get_extensions_from_messages( 90 | %Context{ 91 | Context.append_comment_path(ctx, "6.#{index}") 92 | | namespace: ctx.namespace ++ [Macro.camelize(desc.name)] 93 | }, 94 | use_options, 95 | desc.nested_type 96 | ) 97 | end) 98 | end 99 | 100 | defp 
generate_module(%Context{}, _use_options, %DescriptorProto{extension: []}) do 101 | [] 102 | end 103 | 104 | defp generate_module(%Context{} = ctx, use_options, %DescriptorProto{} = desc) do 105 | ns = ctx.namespace ++ [Macro.camelize(desc.name)] 106 | module_name = Util.mod_name(ctx, ns ++ [Macro.camelize(@ext_postfix)]) 107 | 108 | module_contents = 109 | Util.format( 110 | extension_template( 111 | comment: Comment.get(ctx), 112 | module: module_name, 113 | use_options: use_options, 114 | extends: Enum.map(desc.extension, &generate_extend_dsl(ctx, &1, _ns = "")), 115 | module_doc?: ctx.include_docs? 116 | ) 117 | ) 118 | 119 | [{module_name, module_contents}] 120 | end 121 | end 122 | -------------------------------------------------------------------------------- /lib/protobuf/protoc/generator/service.ex: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.Protoc.Generator.Service do 2 | @moduledoc false 3 | 4 | alias Protobuf.Protoc.Context 5 | alias Protobuf.Protoc.Generator.Comment 6 | alias Protobuf.Protoc.Generator.Util 7 | 8 | require EEx 9 | 10 | EEx.function_from_file( 11 | :defp, 12 | :service_template, 13 | Path.expand("./templates/service.ex.eex", :code.priv_dir(:protobuf)), 14 | [:assigns] 15 | ) 16 | 17 | @spec generate(Context.t(), Google.Protobuf.ServiceDescriptorProto.t()) :: 18 | {String.t(), String.t()} 19 | def generate(%Context{} = ctx, %Google.Protobuf.ServiceDescriptorProto{} = desc) do 20 | # service can't be nested 21 | mod_name = Util.mod_name(ctx, [Macro.camelize(desc.name)]) 22 | name = Util.prepend_package_prefix(ctx.package, desc.name) 23 | methods = Enum.map(desc.method, &generate_service_method(ctx, &1)) 24 | 25 | descriptor_fun_body = 26 | if ctx.gen_descriptors? do 27 | Util.descriptor_fun_body(desc) 28 | else 29 | nil 30 | end 31 | 32 | {mod_name, 33 | Util.format( 34 | service_template( 35 | comment: Comment.get(ctx), 36 | module: mod_name, 37 | service_name: name, 38 | package: ctx.package, 39 | methods: methods, 40 | descriptor_fun_body: descriptor_fun_body, 41 | version: Util.version(), 42 | module_doc?: ctx.include_docs? 43 | ) 44 | )} 45 | end 46 | 47 | defp generate_service_method(ctx, method) do 48 | input = service_arg(Util.type_from_type_name(ctx, method.input_type), method.client_streaming) 49 | 50 | output = 51 | service_arg(Util.type_from_type_name(ctx, method.output_type), method.server_streaming) 52 | 53 | {method.name, input, output} 54 | end 55 | 56 | defp service_arg(type, _streaming? 
= true), do: "stream(#{type})" 57 | defp service_arg(type, _streaming?), do: type 58 | end 59 | -------------------------------------------------------------------------------- /lib/protobuf/protoc/generator/util.ex: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.Protoc.Generator.Util do 2 | @moduledoc false 3 | 4 | alias Protobuf.Protoc.Context 5 | 6 | @locals_without_parens [field: 2, field: 3, oneof: 2, rpc: 3, extend: 4, extensions: 1] 7 | 8 | defguardp is_nil_or_nonempty_string(term) when is_nil(term) or (is_binary(term) and term != "") 9 | 10 | @spec mod_name(Context.t(), [String.t()]) :: String.t() 11 | def mod_name(%Context{} = ctx, ns) when is_list(ns) do 12 | ns = Enum.map(ns, &proto_name_to_module_name/1) 13 | 14 | parts = 15 | case camelcase_prefix(ctx) do 16 | "" -> ns 17 | prefix -> [prefix | ns] 18 | end 19 | 20 | Enum.join(parts, ".") 21 | end 22 | 23 | defp camelcase_prefix(%{package_prefix: nil, module_prefix: nil, package: nil} = _ctx), 24 | do: "" 25 | 26 | defp camelcase_prefix(%{package_prefix: prefix, module_prefix: nil, package: package} = _ctx), 27 | do: proto_name_to_module_name(prepend_package_prefix(prefix, package)) 28 | 29 | defp camelcase_prefix(%{module_prefix: module_prefix} = _ctx), 30 | do: proto_name_to_module_name(module_prefix) 31 | 32 | defp proto_name_to_module_name(name) when is_binary(name) do 33 | name 34 | |> String.split(".") 35 | |> Enum.map_join(".", &Macro.camelize/1) 36 | end 37 | 38 | @spec prepend_package_prefix(String.t() | nil, String.t() | nil) :: String.t() 39 | def prepend_package_prefix(prefix, package) 40 | when is_nil_or_nonempty_string(prefix) and is_nil_or_nonempty_string(package) do 41 | [prefix, package] 42 | |> Enum.reject(&is_nil/1) 43 | |> Enum.join(".") 44 | end 45 | 46 | @spec options_to_str(%{optional(atom()) => atom() | integer() | String.t()}) :: String.t() 47 | def options_to_str(opts) when is_map(opts) do 48 | opts 49 | |> Enum.reject(fn {_key, val} -> val in [nil, false] end) 50 | |> Enum.sort() 51 | |> Enum.map_join(", ", fn {key, val} -> "#{key}: #{print(val)}" end) 52 | end 53 | 54 | defp print(atom) when is_atom(atom), do: inspect(atom) 55 | defp print(val), do: val 56 | 57 | @spec type_from_type_name(Context.t(), String.t()) :: String.t() 58 | def type_from_type_name(%Context{dep_type_mapping: mapping}, type_name) 59 | when is_binary(type_name) do 60 | # The doc says there's a situation where type_name begins without a `.`, but I never got that. 61 | # Handle that later. 62 | metadata = 63 | mapping[type_name] || 64 | raise "There's something wrong to get #{type_name}'s type, please contact with the lib author." 65 | 66 | metadata[:type_name] 67 | end 68 | 69 | @spec descriptor_fun_body(desc :: struct()) :: String.t() 70 | def descriptor_fun_body(%mod{} = desc) do 71 | attributes = 72 | desc 73 | |> Map.from_struct() 74 | |> Enum.filter(fn {_key, val} -> not is_nil(val) end) 75 | 76 | struct!(mod, attributes) 77 | |> mod.encode() 78 | |> mod.decode() 79 | |> inspect(limit: :infinity) 80 | end 81 | 82 | @spec format(String.t()) :: String.t() 83 | def format(code) when is_binary(code) do 84 | formatted_code = 85 | code 86 | |> Code.format_string!(locals_without_parens: @locals_without_parens) 87 | |> IO.iodata_to_binary() 88 | 89 | # As neither Code.format_string!/2 nor protoc automatically adds a newline 90 | # at end of files, we must add ourselves if not present. 
91 | if String.ends_with?(formatted_code, "\n"), do: formatted_code, else: formatted_code <> "\n" 92 | end 93 | 94 | @spec pad_comment(String.t(), non_neg_integer()) :: String.t() 95 | def pad_comment(comment, size) do 96 | padding = String.duplicate(" ", size) 97 | 98 | comment 99 | |> String.split("\n") 100 | |> Enum.map(fn line -> 101 | trimmed = String.trim_leading(line, " ") 102 | padding <> trimmed 103 | end) 104 | |> Enum.join("\n") 105 | end 106 | 107 | @spec version() :: String.t() 108 | def version do 109 | {:ok, value} = :application.get_key(:protobuf, :vsn) 110 | List.to_string(value) 111 | end 112 | end 113 | -------------------------------------------------------------------------------- /lib/protobuf/transform_module.ex: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.TransformModule do 2 | @moduledoc """ 3 | Behaviour for transformer modules. 4 | 5 | By defining a `transform_module/0` function on your protobuf message module 6 | you can add custom encoding and decoding logic for your message. 7 | 8 | As an example we can use this to implement a message that will be decoded as a string value: 9 | 10 | defmodule StringMessage do 11 | use Protobuf, syntax: :proto3 12 | 13 | field :value, 1, type: :string 14 | 15 | def transform_module(), do: MyTransformModule 16 | end 17 | 18 | The transformer behaviour implementation: 19 | 20 | defmodule MyTransformModule do 21 | @behaviour Protobuf.TransformModule 22 | 23 | defmacro typespec(_default_ast) do 24 | quote do 25 | @type t() :: String.t() 26 | end 27 | end 28 | 29 | @impl true 30 | def encode(string, StringMessage) when is_binary(string), do: struct(StringMessage, value: string) 31 | 32 | @impl true 33 | def decode(%{value: string}, StringMessage), do: string 34 | end 35 | 36 | Notice that since the `c:typespec/1` macro was introduced, transform modules can't 37 | depend on the types that they transform anymore in compile time, meaning struct 38 | syntax can't be used. 39 | """ 40 | 41 | @type value() :: term() 42 | @type type() :: module() 43 | @type message() :: struct() 44 | 45 | @doc """ 46 | Takes any Elixir term and the protobuf message type and encodes it into 47 | that type. 48 | 49 | Called before a message is encoded. 50 | """ 51 | @callback encode(value(), type()) :: message() 52 | 53 | @doc """ 54 | Takes any protobuf message and the message type and encodes it into arbitrary 55 | Elixir term. 56 | 57 | Called after a message is decoded. 58 | """ 59 | @callback decode(message(), type()) :: value() 60 | 61 | @doc """ 62 | Transforms the typespec for modules using this transformer. 63 | 64 | If this callback is not present, the default typespec will be used. 
65 | """ 66 | @macrocallback typespec(default_typespec :: Macro.t()) :: Macro.t() 67 | 68 | @optional_callbacks [typespec: 1] 69 | end 70 | -------------------------------------------------------------------------------- /lib/protobuf/wire/types.ex: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.Wire.Types do 2 | @moduledoc false 3 | 4 | # From: https://developers.google.com/protocol-buffers/docs/encoding 5 | defmacro wire_varint, do: 0 6 | defmacro wire_64bits, do: 1 7 | defmacro wire_delimited, do: 2 8 | defmacro wire_start_group, do: 3 9 | defmacro wire_end_group, do: 4 10 | defmacro wire_32bits, do: 5 11 | end 12 | -------------------------------------------------------------------------------- /lib/protobuf/wire/varint.ex: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.Wire.Varint do 2 | @moduledoc false 3 | 4 | # Varint encoding and decoding utilities. 5 | # 6 | # https://developers.google.com/protocol-buffers/docs/encoding#varints 7 | # 8 | # For performance reasons, varint decoding must be built through a macro, so that binary 9 | # match contexts are reused and no new large binaries get allocated. You can define your 10 | # own varint decoders with the `decoder` macro, which generates function heads for up to 11 | # 10-bytes long varint-encoded data. 12 | # 13 | # defmodule VarintDecoders do 14 | # import Protobuf.Wire.Varint 15 | # 16 | # decoder :def, :decode_and_sum, [:plus] do 17 | # {:ok, value + plus, rest} 18 | # end 19 | # 20 | # def decode_all(<>), do: decode_all(bin, []) 21 | # 22 | # defp decode_all(<<>>, acc), do: acc 23 | # 24 | # defdecoderp decode_all(acc) do 25 | # decode_all(rest, [value | acc]) 26 | # end 27 | # end 28 | # 29 | # iex> VarintDecoders.decode_and_sum(<<35>>, 7) 30 | # {:ok, 42, ""} 31 | # 32 | # iex> VarintDecoders.decode_all("abcd asdf") 33 | # [102, 100, 115, 97, 32, 100, 99, 98, 97] 34 | # 35 | # Refer to [efficiency guide](http://www1.erlang.org/doc/efficiency_guide/binaryhandling.html) 36 | # for more on efficient binary handling. 37 | # 38 | # Encoding on the other hand is simpler. It takes an integer and returns an iolist with its 39 | # varint representation: 40 | # 41 | # iex> Protobuf.Wire.Varint.encode(35) 42 | # [35] 43 | # 44 | # iex> Protobuf.Wire.Varint.encode(1_234_567) 45 | # [<<135>>, <<173>>, 75] 46 | 47 | import Bitwise 48 | 49 | @max_bits 64 50 | @mask64 bsl(1, @max_bits) - 1 51 | 52 | # generated: true is required here to silence compilation warnings in Elixir 53 | # 1.10 and 1.11. 
OK to remove once we support only 1.12+ 54 | @varints [ 55 | { 56 | quote(do: <<0::1, value::7>>), 57 | quote(do: value) 58 | }, 59 | { 60 | quote(do: <<1::1, x0::7, 0::1, x1::7>>), 61 | quote(generated: true, do: x0 + bsl(x1, 7)) 62 | }, 63 | { 64 | quote(do: <<1::1, x0::7, 1::1, x1::7, 0::1, x2::7>>), 65 | quote(generated: true, do: x0 + bsl(x1, 7) + bsl(x2, 14)) 66 | }, 67 | { 68 | quote(do: <<1::1, x0::7, 1::1, x1::7, 1::1, x2::7, 0::1, x3::7>>), 69 | quote(generated: true, do: x0 + bsl(x1, 7) + bsl(x2, 14) + bsl(x3, 21)) 70 | }, 71 | { 72 | quote(do: <<1::1, x0::7, 1::1, x1::7, 1::1, x2::7, 1::1, x3::7, 0::1, x4::7>>), 73 | quote(generated: true, do: x0 + bsl(x1, 7) + bsl(x2, 14) + bsl(x3, 21) + bsl(x4, 28)) 74 | }, 75 | { 76 | quote do 77 | <<1::1, x0::7, 1::1, x1::7, 1::1, x2::7, 1::1, x3::7, 1::1, x4::7, 0::1, x5::7>> 78 | end, 79 | quote(generated: true) do 80 | x0 + 81 | bsl(x1, 7) + 82 | bsl(x2, 14) + 83 | bsl(x3, 21) + 84 | bsl(x4, 28) + 85 | bsl(x5, 35) 86 | end 87 | }, 88 | { 89 | quote do 90 | <<1::1, x0::7, 1::1, x1::7, 1::1, x2::7, 1::1, x3::7, 1::1, x4::7, 1::1, x5::7, 0::1, 91 | x6::7>> 92 | end, 93 | quote(generated: true) do 94 | x0 + 95 | bsl(x1, 7) + 96 | bsl(x2, 14) + 97 | bsl(x3, 21) + 98 | bsl(x4, 28) + 99 | bsl(x5, 35) + 100 | bsl(x6, 42) 101 | end 102 | }, 103 | { 104 | quote do 105 | <<1::1, x0::7, 1::1, x1::7, 1::1, x2::7, 1::1, x3::7, 1::1, x4::7, 1::1, x5::7, 1::1, 106 | x6::7, 0::1, x7::7>> 107 | end, 108 | quote(generated: true) do 109 | x0 + 110 | bsl(x1, 7) + 111 | bsl(x2, 14) + 112 | bsl(x3, 21) + 113 | bsl(x4, 28) + 114 | bsl(x5, 35) + 115 | bsl(x6, 42) + 116 | bsl(x7, 49) 117 | end 118 | }, 119 | { 120 | quote do 121 | <<1::1, x0::7, 1::1, x1::7, 1::1, x2::7, 1::1, x3::7, 1::1, x4::7, 1::1, x5::7, 1::1, 122 | x6::7, 1::1, x7::7, 0::1, x8::7>> 123 | end, 124 | quote(generated: true) do 125 | x0 + 126 | bsl(x1, 7) + 127 | bsl(x2, 14) + 128 | bsl(x3, 21) + 129 | bsl(x4, 28) + 130 | bsl(x5, 35) + 131 | bsl(x6, 42) + 132 | bsl(x7, 49) + 133 | bsl(x8, 56) 134 | end 135 | }, 136 | { 137 | quote do 138 | <<1::1, x0::7, 1::1, x1::7, 1::1, x2::7, 1::1, x3::7, 1::1, x4::7, 1::1, x5::7, 1::1, 139 | x6::7, 1::1, x7::7, 1::1, x8::7, 0::1, x9::7>> 140 | end, 141 | quote(generated: true) do 142 | v = 143 | x0 + 144 | bsl(x1, 7) + 145 | bsl(x2, 14) + 146 | bsl(x3, 21) + 147 | bsl(x4, 28) + 148 | bsl(x5, 35) + 149 | bsl(x6, 42) + 150 | bsl(x7, 49) + 151 | bsl(x8, 56) + 152 | bsl(x9, 63) 153 | 154 | _ = band(v, unquote(@mask64)) 155 | end 156 | } 157 | ] 158 | 159 | defmacro defdecoderp(name_and_args, do: body) do 160 | {name, args} = Macro.decompose_call(name_and_args) 161 | 162 | def_decoder_success_clauses(name, args, body) ++ [def_decoder_failure_clause(name, args)] 163 | end 164 | 165 | defp def_decoder_success_clauses(name, args, body) do 166 | for {pattern, expression} <- @varints do 167 | quote do 168 | defp unquote(name)(<<unquote(pattern), rest::bits>>, unquote_splicing(args)) do 169 | var!(value) = unquote(expression) 170 | var!(rest) = rest 171 | unquote(body) 172 | end 173 | end 174 | end 175 | end 176 | 177 | defp def_decoder_failure_clause(name, args) do 178 | args = 179 | Enum.map(args, fn 180 | {:_, _meta, _ctxt} = underscore -> underscore 181 | {name, meta, ctxt} when is_atom(name) and is_atom(ctxt) -> {:"_#{name}", meta, ctxt} 182 | other -> other 183 | end) 184 | 185 | quote do 186 | defp unquote(name)(<<_::bits>>, unquote_splicing(args)) do 187 | raise Protobuf.DecodeError, message: "cannot decode binary data" 188 | end 189 | end 190 | end 191 | 192 | @spec encode(integer) ::
iolist 193 | def encode(n) when n < 0 do 194 | <> = <> 195 | encode(n) 196 | end 197 | 198 | def encode(n) when n <= 127 do 199 | [n] 200 | end 201 | 202 | def encode(n) do 203 | [<<1::1, band(n, 127)::7>> | encode(bsr(n, 7))] 204 | end 205 | end 206 | -------------------------------------------------------------------------------- /lib/protobuf/wire/zigzag.ex: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.Wire.Zigzag do 2 | @moduledoc false 3 | 4 | import Bitwise 5 | 6 | @spec encode(integer) :: integer 7 | def encode(n) when n >= 0, do: n * 2 8 | def encode(n) when n < 0, do: n * -2 - 1 9 | 10 | @spec decode(integer) :: integer 11 | def decode(n) when band(n, 1) == 0, do: bsr(n, 1) 12 | def decode(n) when band(n, 1) == 1, do: -bsr(n + 1, 1) 13 | end 14 | -------------------------------------------------------------------------------- /mix.lock: -------------------------------------------------------------------------------- 1 | %{ 2 | "bunt": {:hex, :bunt, "0.2.1", "e2d4792f7bc0ced7583ab54922808919518d0e57ee162901a16a1b6664ef3b14", [:mix], [], "hexpm", "a330bfb4245239787b15005e66ae6845c9cd524a288f0d141c148b02603777a5"}, 3 | "certifi": {:hex, :certifi, "2.12.0", "2d1cca2ec95f59643862af91f001478c9863c2ac9cb6e2f89780bfd8de987329", [:rebar3], [], "hexpm", "ee68d85df22e554040cdb4be100f33873ac6051387baf6a8f6ce82272340ff1c"}, 4 | "credo": {:hex, :credo, "1.5.6", "e04cc0fdc236fefbb578e0c04bd01a471081616e741d386909e527ac146016c6", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2.8", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "4b52a3e558bd64e30de62a648518a5ea2b6e3e5d2b164ef5296244753fc7eb17"}, 5 | "dialyxir": {:hex, :dialyxir, "1.4.3", "edd0124f358f0b9e95bfe53a9fcf806d615d8f838e2202a9f430d59566b6b53b", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "bf2cfb75cd5c5006bec30141b131663299c661a864ec7fbbc72dfa557487a986"}, 6 | "earmark_parser": {:hex, :earmark_parser, "1.4.41", "ab34711c9dc6212dda44fcd20ecb87ac3f3fce6f0ca2f28d4a00e4154f8cd599", [:mix], [], "hexpm", "a81a04c7e34b6617c2792e291b5a2e57ab316365c2644ddc553bb9ed863ebefa"}, 7 | "erlex": {:hex, :erlex, "0.2.7", "810e8725f96ab74d17aac676e748627a07bc87eb950d2b83acd29dc047a30595", [:mix], [], "hexpm", "3ed95f79d1a844c3f6bf0cea61e0d5612a42ce56da9c03f01df538685365efb0"}, 8 | "ex_doc": {:hex, :ex_doc, "0.34.2", "13eedf3844ccdce25cfd837b99bea9ad92c4e511233199440488d217c92571e8", [:mix], [{:earmark_parser, "~> 1.4.39", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.0", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14 or ~> 1.0", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1 or ~> 1.0", [hex: :makeup_erlang, repo: "hexpm", optional: false]}, {:makeup_html, ">= 0.1.0", [hex: :makeup_html, repo: "hexpm", optional: true]}], "hexpm", "5ce5f16b41208a50106afed3de6a2ed34f4acfd65715b82a0b84b49d995f95c1"}, 9 | "excoveralls": {:hex, :excoveralls, "0.14.6", "610e921e25b180a8538229ef547957f7e04bd3d3e9a55c7c5b7d24354abbba70", [:mix], [{:hackney, "~> 1.16", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "0eceddaa9785cfcefbf3cd37812705f9d8ad34a758e513bb975b081dce4eb11e"}, 10 | "file_system": {:hex, :file_system, "0.2.10", 
"fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"}, 11 | "google_protobuf": {:git, "https://github.com/protocolbuffers/protobuf.git", "b407e8416e3893036aee5af9a12bd9b6a0e2b2e6", [ref: "b407e8416e3893036aee5af9a12bd9b6a0e2b2e6", submodules: true]}, 12 | "hackney": {:hex, :hackney, "1.20.1", "8d97aec62ddddd757d128bfd1df6c5861093419f8f7a4223823537bad5d064e2", [:rebar3], [{:certifi, "~> 2.12.0", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~> 6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~> 1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~> 1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.4.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~> 1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~> 0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "fe9094e5f1a2a2c0a7d10918fee36bfec0ec2a979994cff8cfe8058cd9af38e3"}, 13 | "idna": {:hex, :idna, "6.1.1", "8a63070e9f7d0c62eb9d9fcb360a7de382448200fbbd1b106cc96d3d8099df8d", [:rebar3], [{:unicode_util_compat, "~> 0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "92376eb7894412ed19ac475e4a86f7b413c1b9fbb5bd16dccd57934157944cea"}, 14 | "jason": {:hex, :jason, "1.4.4", "b9226785a9aa77b6857ca22832cffa5d5011a667207eb2a0ad56adb5db443b8a", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "c5eb0cab91f094599f94d55bc63409236a8ec69a21a67814529e8d5f6cc90b3b"}, 15 | "makeup": {:hex, :makeup, "1.1.2", "9ba8837913bdf757787e71c1581c21f9d2455f4dd04cfca785c70bbfff1a76a3", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "cce1566b81fbcbd21eca8ffe808f33b221f9eee2cbc7a1706fc3da9ff18e6cac"}, 16 | "makeup_elixir": {:hex, :makeup_elixir, "0.16.2", "627e84b8e8bf22e60a2579dad15067c755531fea049ae26ef1020cad58fe9578", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "41193978704763f6bbe6cc2758b84909e62984c7752b3784bd3c218bb341706b"}, 17 | "makeup_erlang": {:hex, :makeup_erlang, "1.0.1", "c7f58c120b2b5aa5fd80d540a89fdf866ed42f1f3994e4fe189abebeab610839", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "8a89a1eeccc2d798d6ea15496a6e4870b75e014d1af514b1b71fa33134f57814"}, 18 | "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm", "69b09adddc4f74a40716ae54d140f93beb0fb8978d8636eaded0c31b6f099f16"}, 19 | "mimerl": {:hex, :mimerl, "1.3.0", "d0cd9fc04b9061f82490f6581e0128379830e78535e017f7780f37fea7545726", [:rebar3], [], "hexpm", "a1e15a50d1887217de95f0b9b0793e32853f7c258a5cd227650889b38839fe9d"}, 20 | "nimble_parsec": {:hex, :nimble_parsec, "1.4.0", "51f9b613ea62cfa97b25ccc2c1b4216e81df970acd8e16e8d1bdc58fef21370d", [:mix], [], "hexpm", "9c565862810fb383e9838c1dd2d7d2c437b3d13b267414ba6af33e50d2d1cf28"}, 21 | "parse_trans": {:hex, :parse_trans, "3.4.1", "6e6aa8167cb44cc8f39441d05193be6e6f4e7c2946cb2759f015f8c56b76e5ff", [:rebar3], [], "hexpm", "620a406ce75dada827b82e453c19cf06776be266f5a67cff34e1ef2cbb60e49a"}, 22 | "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.7", 
"354c321cf377240c7b8716899e182ce4890c5938111a1296add3ec74cf1715df", [:make, :mix, :rebar3], [], "hexpm", "fe4c190e8f37401d30167c8c405eda19469f34577987c76dde613e838bbc67f8"}, 23 | "stream_data": {:hex, :stream_data, "1.1.2", "05499eaec0443349ff877aaabc6e194e82bda6799b9ce6aaa1aadac15a9fdb4d", [:mix], [], "hexpm", "129558d2c77cbc1eb2f4747acbbea79e181a5da51108457000020a906813a1a9"}, 24 | "telemetry": {:hex, :telemetry, "1.2.1", "68fdfe8d8f05a8428483a97d7aab2f268aaff24b49e0f599faa091f1d4e7f61c", [:rebar3], [], "hexpm", "dad9ce9d8effc621708f99eac538ef1cbe05d6a874dd741de2e689c47feafed5"}, 25 | "unicode_util_compat": {:hex, :unicode_util_compat, "0.7.0", "bc84380c9ab48177092f43ac89e4dfa2c6d62b40b8bd132b1059ecc7232f9a78", [:rebar3], [], "hexpm", "25eee6d67df61960cf6a794239566599b09e17e668d3700247bc498638152521"}, 26 | } 27 | -------------------------------------------------------------------------------- /priv/templates/enum.ex.eex: -------------------------------------------------------------------------------- 1 | defmodule <%= @module %> do 2 | <%= if @module_doc? do %> 3 | <%= if @comment != "" do %> 4 | @moduledoc """ 5 | <%= Protobuf.Protoc.Generator.Util.pad_comment(@comment, 2) %> 6 | """ 7 | <% end %> 8 | <% else %> 9 | @moduledoc false 10 | <% end %> 11 | 12 | use Protobuf, <%= @use_options %> 13 | 14 | <%= if @descriptor_fun_body do %> 15 | def descriptor do 16 | # credo:disable-for-next-line 17 | <%= @descriptor_fun_body %> 18 | end 19 | <% end %> 20 | 21 | <%= for %Google.Protobuf.EnumValueDescriptorProto{name: name, number: number} <- @fields do %> 22 | field :<%= name %>, <%= number %><% end %> 23 | end 24 | -------------------------------------------------------------------------------- /priv/templates/extension.ex.eex: -------------------------------------------------------------------------------- 1 | defmodule <%= @module %> do 2 | <%= if @module_doc? do %> 3 | <%= if @comment != "" do %> 4 | @moduledoc """ 5 | <%= Protobuf.Protoc.Generator.Util.pad_comment(@comment, 2) %> 6 | """ 7 | <% end %> 8 | <% else %> 9 | @moduledoc false 10 | <% end %> 11 | 12 | use Protobuf, <%= @use_options %> 13 | 14 | <% if @extends == [], do: raise("Fuck! #{@module}") %> 15 | 16 | <%= for ext <- @extends do %> 17 | extend <%= ext %> 18 | <% end %> 19 | end 20 | -------------------------------------------------------------------------------- /priv/templates/message.ex.eex: -------------------------------------------------------------------------------- 1 | defmodule <%= @module %> do 2 | <%= if @module_doc? 
do %> 3 | <%= if @comment != "" do %> 4 | @moduledoc """ 5 | <%= Protobuf.Protoc.Generator.Util.pad_comment(@comment, 2) %> 6 | """ 7 | <% end %> 8 | <% else %> 9 | @moduledoc false 10 | <% end %> 11 | 12 | use Protobuf<%= @use_options %> 13 | 14 | <%= if @descriptor_fun_body do %> 15 | def descriptor do 16 | # credo:disable-for-next-line 17 | <%= @descriptor_fun_body %> 18 | end 19 | <% end %> 20 | 21 | <%= for {%Google.Protobuf.OneofDescriptorProto{name: name}, index} <- Enum.with_index(@oneofs) do %> 22 | <%= if !String.starts_with?(name, "_") do %> 23 | oneof :<%= name %>, <%= index %><% end %> 24 | <% end %> 25 | 26 | <%= for f <- @fields do %> 27 | field <%= f %><% end %> 28 | 29 | <%= if @transform_module do %> 30 | def transform_module(), do: <%= inspect(@transform_module) %> 31 | <% end %> 32 | 33 | <%= if @extensions != [] do %> 34 | extensions [<%= Enum.map_join(@extensions, ", ", fn {start, end_} -> "{#{start}, #{end_}}" end) %>] 35 | <% end %> 36 | end 37 | -------------------------------------------------------------------------------- /priv/templates/service.ex.eex: -------------------------------------------------------------------------------- 1 | defmodule <%= @module %>.Service do 2 | <%= if @module_doc? do %> 3 | <%= if @comment != "" do %> 4 | @moduledoc """ 5 | <%= Protobuf.Protoc.Generator.Util.pad_comment(@comment, 2) %> 6 | """ 7 | <% end %> 8 | <% else %> 9 | @moduledoc false 10 | <% end %> 11 | 12 | use GRPC.Service, name: <%= inspect(@service_name) %>, protoc_gen_elixir_version: "<%= @version %>" 13 | 14 | <%= if @descriptor_fun_body do %> 15 | def descriptor do 16 | # credo:disable-for-next-line 17 | <%= @descriptor_fun_body %> 18 | end 19 | <% end %> 20 | 21 | <%= for {method_name, input, output} <- @methods do %> 22 | rpc :<%= method_name %>, <%= input %>, <%= output %> 23 | <% end %> 24 | end 25 | 26 | defmodule <%= @module %>.Stub do 27 | <%= unless @module_doc? do %> 28 | @moduledoc false 29 | <% end %> 30 | use GRPC.Stub, service: <%= @module %>.Service 31 | end 32 | -------------------------------------------------------------------------------- /src/elixirpb.proto: -------------------------------------------------------------------------------- 1 | // Put this file to elixirpb.proto under PROTO_PATH. See `protoc -h` for `--proto_path` option. 2 | // Then import it 3 | // ````proto 4 | // import "elixirpb.proto"; 5 | // ```` 6 | // 7 | // 1047 is allocated to this project by Protobuf 8 | // https://github.com/protocolbuffers/protobuf/blob/master/docs/options.md 9 | 10 | syntax = "proto2"; 11 | 12 | package elixirpb; 13 | 14 | import "google/protobuf/descriptor.proto"; 15 | 16 | // File level options 17 | // 18 | // For example, 19 | // option (elixirpb.file).module_prefix = "Foo"; 20 | message FileOptions { 21 | // Specify a module prefix. This will override package name. 22 | // For example, the package is "hello" and a message is "Request", the message 23 | // will be "Hello.Request". 
But with module_prefix "Foo", the message will be 24 | // "Foo.Request" 25 | optional string module_prefix = 1; 26 | } 27 | 28 | extend google.protobuf.FileOptions { 29 | optional FileOptions file = 1047; 30 | } 31 | -------------------------------------------------------------------------------- /test/google/protobuf_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Google.ProtobufTest do 2 | use ExUnit.Case, async: true 3 | 4 | import Google.Protobuf 5 | 6 | alias Google.Protobuf.{Struct, Timestamp} 7 | 8 | doctest Google.Protobuf 9 | 10 | @basic_json """ 11 | { 12 | "key_one": "value_one", 13 | "key_two": 1234, 14 | "key_three": null, 15 | "key_four": true 16 | } 17 | """ 18 | 19 | @basic_elixir %{ 20 | "key_one" => "value_one", 21 | "key_two" => 1234, 22 | "key_three" => nil, 23 | "key_four" => true 24 | } 25 | 26 | @advanced_json """ 27 | { 28 | "key_two": [1, 2, 3, null, true, "value"], 29 | "key_three": { 30 | "key_four": "value_four", 31 | "key_five": { 32 | "key_six": 99, 33 | "key_seven": { 34 | "key_eight": "value_eight" 35 | } 36 | } 37 | } 38 | } 39 | """ 40 | 41 | @advanced_elixir %{ 42 | "key_two" => [1, 2, 3, nil, true, "value"], 43 | "key_three" => %{ 44 | "key_four" => "value_four", 45 | "key_five" => %{ 46 | "key_six" => 99, 47 | "key_seven" => %{ 48 | "key_eight" => "value_eight" 49 | } 50 | } 51 | } 52 | } 53 | 54 | describe "to_map/1" do 55 | test "converts nil values to empty map" do 56 | assert %{} == to_map(%Struct{}) 57 | end 58 | 59 | test "converts basic json to map" do 60 | assert @basic_elixir == to_map(Protobuf.JSON.decode!(@basic_json, Struct)) 61 | end 62 | 63 | test "converts advanced json to map" do 64 | assert @advanced_elixir == to_map(Protobuf.JSON.decode!(@advanced_json, Struct)) 65 | end 66 | end 67 | 68 | describe "from_map/1" do 69 | test "converts basic elixir to struct" do 70 | assert Protobuf.JSON.decode!(@basic_json, Struct) == from_map(@basic_elixir) 71 | end 72 | 73 | test "converts advanced elixir to struct" do 74 | assert Protobuf.JSON.decode!(@advanced_json, Struct) == from_map(@advanced_elixir) 75 | end 76 | end 77 | 78 | describe "to_datetime/1" do 79 | # This matches golang behaviour 80 | # https://github.com/golang/protobuf/blob/5d5e8c018a13017f9d5b8bf4fad64aaa42a87308/ptypes/timestamp.go#L43 81 | test "converts nil values to unix time start" do 82 | assert ~U[1970-01-01 00:00:00.000000Z] == to_datetime(%Timestamp{}) 83 | end 84 | 85 | test "converts to DateTime" do 86 | assert ~U[1970-01-01 00:00:05.000000Z] == 87 | to_datetime(%Timestamp{seconds: 5, nanos: 0}) 88 | end 89 | 90 | test "nanosecond precision" do 91 | one = to_datetime(%Timestamp{seconds: 10, nanos: 100}) 92 | two = to_datetime(%Timestamp{seconds: 10, nanos: 105}) 93 | assert 0 == DateTime.diff(one, two, :nanosecond) 94 | end 95 | end 96 | 97 | describe "from_datetime/1" do 98 | test "converts from DateTime" do 99 | assert %Timestamp{seconds: 5, nanos: 0} == 100 | from_datetime(~U[1970-01-01 00:00:05.000000Z]) 101 | end 102 | end 103 | end 104 | -------------------------------------------------------------------------------- /test/pbt/encode_decode_type_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.EncodeDecodeTypeTest.PropertyGenerator do 2 | def decode(type, bin) do 3 | bin 4 | |> TestMsg.Scalars.decode() 5 | |> Map.fetch!(type) 6 | end 7 | 8 | def encode(type, val) do 9 | TestMsg.Scalars 10 | |> struct!([{type, val}]) 11 | |> Protobuf.encode() 12 
| end 13 | 14 | defmacro make_properties(gen_func, field_type) do 15 | quote do 16 | property "#{unquote(field_type)} roundtrip" do 17 | check all n <- unquote(gen_func) do 18 | field_type = unquote(field_type) 19 | bin = encode(field_type, n) 20 | 21 | assert n == decode(field_type, bin) 22 | end 23 | end 24 | 25 | property "repeated #{unquote(field_type)} roundtrip" do 26 | check all n <- unquote(gen_func) do 27 | field_type = :"repeated_#{unquote(field_type)}" 28 | bin = encode(field_type, [n, n]) 29 | 30 | assert [n, n] == decode(field_type, bin) 31 | end 32 | end 33 | end 34 | end 35 | 36 | # Since float point is not precise, make canonical value before doing PBT 37 | # ref: http://hypothesis.works/articles/canonical-serialization/ 38 | # and try 0.2 here: https://www.h-schmidt.net/FloatConverter/IEEE754.html 39 | defmacro make_canonical_properties(gen_func, field_type) do 40 | quote do 41 | property "#{unquote(field_type)} canonical roundtrip" do 42 | check all n <- unquote(gen_func) do 43 | field_type = unquote(field_type) 44 | encoded_val = encode(field_type, n) 45 | canonical_val = decode(field_type, encoded_val) 46 | bin = encode(field_type, canonical_val) 47 | 48 | assert canonical_val == decode(field_type, bin) 49 | end 50 | end 51 | 52 | property "repeated #{unquote(field_type)} canonical roundtrip" do 53 | check all n <- unquote(gen_func) do 54 | field_type = :"repeated_#{unquote(field_type)}" 55 | encoded_vals = encode(field_type, [n, n]) 56 | canonical_vals = decode(field_type, encoded_vals) 57 | bin = encode(field_type, canonical_vals) 58 | 59 | assert canonical_vals == decode(field_type, bin) 60 | end 61 | end 62 | end 63 | end 64 | end 65 | 66 | defmodule Protobuf.EncodeDecodeTypeTest do 67 | use ExUnit.Case, async: true 68 | use ExUnitProperties 69 | 70 | import Protobuf.EncodeDecodeTypeTest.PropertyGenerator 71 | 72 | defp uint32_gen do 73 | map(binary(length: 4), fn <> -> x end) 74 | end 75 | 76 | defp uint64_gen do 77 | map(binary(length: 8), fn <> -> x end) 78 | end 79 | 80 | defp large_integer do 81 | scale(integer(), &(&1 * 10_000)) 82 | end 83 | 84 | defp natural_number do 85 | map(integer(), &abs/1) 86 | end 87 | 88 | make_properties(integer(), :int32) 89 | make_properties(large_integer(), :int64) 90 | make_properties(uint32_gen(), :uint32) 91 | make_properties(uint64_gen(), :uint64) 92 | make_properties(integer(), :sint32) 93 | make_properties(large_integer(), :sint64) 94 | 95 | make_properties(boolean(), :bool) 96 | 97 | make_properties(natural_number(), :fixed64) 98 | make_properties(large_integer(), :sfixed64) 99 | 100 | make_canonical_properties(float(), :double) 101 | make_canonical_properties(float(), :float) 102 | end 103 | -------------------------------------------------------------------------------- /test/pbt/encode_decode_varint_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.EncodeDecodeVarintTest do 2 | use ExUnit.Case, async: true 3 | use ExUnitProperties 4 | 5 | import Protobuf.Wire.Varint 6 | 7 | defdecoderp decode() do 8 | "" = rest 9 | value 10 | end 11 | 12 | property "varint roundtrip" do 13 | check all n <- large_integer_gen() do 14 | iodata = encode(n) 15 | bin = IO.iodata_to_binary(iodata) 16 | n = decode(bin) 17 | assert <> == <> 18 | end 19 | end 20 | 21 | property "encode_varint for negative integers should always be 10 bytes" do 22 | negative_large_integer_gen = map(large_integer_gen(), &(-abs(&1))) 23 | 24 | check all n <- negative_large_integer_gen do 25 | assert 
IO.iodata_length(encode(n)) == 10 26 | end 27 | end 28 | 29 | defp large_integer_gen do 30 | scale(integer(), &(&1 * 10_000)) 31 | end 32 | end 33 | -------------------------------------------------------------------------------- /test/pbt/unknown_fields_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.UnknownFieldsTest do 2 | use ExUnit.Case, async: true 3 | use ExUnitProperties 4 | 5 | import Protobuf.Wire.Types 6 | 7 | alias ProtobufTestMessages.Proto3.TestAllTypesProto3 8 | 9 | property "round-trip encoding and decoding of unknown fields" do 10 | unknown_fields_generator = list_of(unknown_field_generator()) 11 | 12 | check all unknown_fields <- unknown_fields_generator, max_runs: 20 do 13 | decoded = 14 | %TestAllTypesProto3{} 15 | |> Map.put(:__unknown_fields__, unknown_fields) 16 | |> Protobuf.encode() 17 | |> Protobuf.decode(TestAllTypesProto3) 18 | 19 | assert Protobuf.get_unknown_fields(decoded) == unknown_fields 20 | end 21 | end 22 | 23 | defp unknown_field_generator() do 24 | value_generator = 25 | one_of([delimited_generator(), varint_generator(), bits32_generator(), bits64_generator()]) 26 | 27 | gen all field_number <- integer(_unknown_fields_range = 100_000..200_000), 28 | {wire_type, value} <- value_generator do 29 | {field_number, wire_type, value} 30 | end 31 | end 32 | 33 | defp delimited_generator(), do: map(binary(), &{wire_delimited(), &1}) 34 | defp varint_generator(), do: map(integer(0..10_000), &{wire_varint(), &1}) 35 | defp bits64_generator(), do: map(binary(length: 8), &{wire_64bits(), &1}) 36 | defp bits32_generator(), do: map(binary(length: 4), &{wire_32bits(), &1}) 37 | end 38 | -------------------------------------------------------------------------------- /test/protobuf/any_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.AnyTest do 2 | use ExUnit.Case, async: true 3 | 4 | describe "type_url_to_module/1" do 5 | test "returns the module for a valid type_url" do 6 | assert Protobuf.Any.type_url_to_module("type.googleapis.com/google.protobuf.Duration") == 7 | Google.Protobuf.Duration 8 | end 9 | 10 | test "raises an error for an invalid type_url" do 11 | assert_raise ArgumentError, ~r/type_url must be in the form/, fn -> 12 | Protobuf.Any.type_url_to_module("invalid") 13 | end 14 | end 15 | end 16 | end 17 | -------------------------------------------------------------------------------- /test/protobuf/builder_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.BuilderTest do 2 | use ExUnit.Case, async: true 3 | 4 | alias TestMsg.{Foo, Foo2, Proto3Optional} 5 | 6 | describe "default values for structs" do 7 | test "for proto3" do 8 | assert %Foo{}.a == 0 9 | assert %Foo{}.c == "" 10 | assert %Foo{}.e == nil 11 | end 12 | 13 | test "uses nil for proto3 optional field" do 14 | assert %Proto3Optional{}.a == nil 15 | assert %Proto3Optional{}.b == "" 16 | end 17 | 18 | test "uses nil for proto2" do 19 | assert %Foo2{}.a == nil 20 | assert %Foo2{}.c == nil 21 | assert %Foo2{}.e == nil 22 | end 23 | 24 | test "works for circular reference" do 25 | assert %TestMsg.Parent{}.child == nil 26 | end 27 | 28 | test "builds embedded messages" do 29 | assert %Foo{e: %Foo.Bar{a: 1}}.e == %Foo.Bar{a: 1} 30 | assert %Foo{h: [%Foo.Bar{a: 1}]}.h == [%Foo.Bar{a: 1}] 31 | end 32 | end 33 | end 34 | -------------------------------------------------------------------------------- 
/test/protobuf/conformance_regressions_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.ConformanceRegressionsTest do 2 | use ExUnit.Case, async: true 3 | 4 | describe "proto3" do 5 | setup :url_to_message 6 | setup :decode_conformance_input 7 | 8 | @describetag message_type: "protobuf_test_messages.proto3.TestAllTypesProto3" 9 | 10 | # Issue #218 11 | @tag conformance_input: ~S(\250\037\001) 12 | test "Required.Proto3.ProtobufInput.UnknownVarint.ProtobufOutput", 13 | %{proto_input: proto_input, message_mod: message_mod} do 14 | assert proto_input |> message_mod.decode() |> message_mod.encode() == proto_input 15 | end 16 | 17 | @tag conformance_input: ~S(\001DEADBEEF) 18 | test "Required.Proto3.ProtobufInput.IllegalZeroFieldNum_Case_0", 19 | %{proto_input: proto_input, message_mod: message_mod} do 20 | assert_raise Protobuf.DecodeError, fn -> 21 | message_mod.decode(proto_input) 22 | end 23 | end 24 | 25 | @tag conformance_input: ~S{(\202\200\200\200\020} 26 | test "Required.Proto3.ProtobufInput.ValidDataScalar.SINT32[4].JsonOutput", 27 | %{proto_input: proto_input, message_mod: message_mod} do 28 | # Value gets "cast" back to an int32. 29 | assert message_mod.decode(proto_input).optional_sint32 == 1 30 | end 31 | 32 | @tag conformance_input: 33 | ~S{\222\001\014\022\n\010\322\t\020\322\t\370\001\322\t\222\001\014\022\n\010\341!\030\341!\370\001\341!} 34 | test "Required.Proto3.ProtobufInput.RepeatedScalarMessageMerge.ProtobufOutput", 35 | %{proto_input: proto_input, message_mod: message_mod} do 36 | decoded = message_mod.decode(proto_input) 37 | 38 | assert decoded.optional_nested_message.corecursive.repeated_int32 == [1234, 4321] 39 | assert decoded.optional_nested_message.corecursive.optional_int64 == 1234 40 | end 41 | 42 | @tag conformance_input: ~S(\202\007\014\022\012\010\001\020\001\310\005\001\310\005\001) 43 | test "Recommended.Proto3.ProtobufInput.ValidDataOneofBinary.MESSAGE.Merge.ProtobufOutput", 44 | %{proto_input: proto_input, message_mod: message_mod} do 45 | assert proto_input |> message_mod.decode() |> message_mod.encode() == proto_input 46 | end 47 | 48 | @tag conformance_input: ~S(\222\023\t\021\000\000\000\000\000\000\370\177) 49 | test "Recommended.Proto3.ValueRejectInfNumberValue.JsonOutput", 50 | %{proto_input: proto_input, message_mod: message_mod} do 51 | decoded = message_mod.decode(proto_input) 52 | assert decoded.optional_value == %Google.Protobuf.Value{kind: {:number_value, :nan}} 53 | assert {:error, error} = Protobuf.JSON.to_encodable(decoded) 54 | 55 | assert Exception.message(error) =~ 56 | "cannot encode non-numeric float/double for Google.Protobuf.Value: :nan" 57 | end 58 | end 59 | 60 | test "Required.Proto2.ProtobufInput.ValidDataMap.INT32.INT32.MissingDefault.JsonOutput" do 61 | mod = ProtobufTestMessages.Proto2.TestAllTypesProto2 62 | problematic_payload = <<194, 3, 0>> 63 | assert %{map_int32_int32: %{0 => 0}} = mod.decode(problematic_payload) 64 | end 65 | 66 | test "Required.Proto3.JsonInput.Int32FieldQuotedExponentialValue.JsonOutput" do 67 | mod = ProtobufTestMessages.Proto3.TestAllTypesProto3 68 | problematic_payload = ~S({"optionalInt32": "1e5"}) 69 | assert %{optional_int32: 100_000} = Protobuf.JSON.decode!(problematic_payload, mod) 70 | end 71 | 72 | test "Required.Proto2.JsonInput.AllFieldAcceptNull.ProtobufOutput" do 73 | mod = ProtobufTestMessages.Proto2.TestAllTypesProto2 74 | problematic_payload = ~S({ 75 | "map_bool_bool": null, 76 | "repeated_int32": null 77 | }) 78 | 79 
| assert %{ 80 | map_bool_bool: map_bool_bool, 81 | repeated_int32: [] 82 | } = Protobuf.JSON.decode!(problematic_payload, mod) 83 | 84 | assert is_map(map_bool_bool) and map_size(map_bool_bool) == 0 85 | end 86 | 87 | describe "proto2" do 88 | setup :url_to_message 89 | setup :decode_conformance_input 90 | 91 | @describetag message_type: "protobuf_test_messages.proto2.TestAllTypesProto2" 92 | @tag conformance_input: 93 | ~S{\332\002(\000\001\377\377\377\377\377\377\377\377\377\001\316\302\361\005\200\200\200\200 \377\377\377\377\377\377\377\377\177\200\200\200\200\200\200\200\200\200\001} 94 | test "Recommended.Proto2.ProtobufInput.ValidDataRepeated.BOOL.PackedInput.DefaultOutput.ProtobufOutput", 95 | %{proto_input: proto_input, message_mod: message_mod} do 96 | assert message_mod.decode(proto_input).repeated_bool == [ 97 | false, 98 | true, 99 | true, 100 | true, 101 | true, 102 | true, 103 | true 104 | ] 105 | end 106 | end 107 | 108 | describe "JSON" do 109 | setup :url_to_message 110 | 111 | @describetag message_type: "protobuf_test_messages.proto3.TestAllTypesProto3" 112 | 113 | test "Recommended.Proto3.JsonInput.NullValueInOtherOneofNewFormat.Validator", 114 | %{message_mod: message_mod} do 115 | json = "{\"oneofNullValue\": null}" 116 | 117 | assert json 118 | |> Protobuf.JSON.decode!(message_mod) 119 | |> Protobuf.JSON.encode!() 120 | |> Jason.decode!() == Jason.decode!(json) 121 | end 122 | 123 | test "Recommended.Proto3.JsonInput.NullValueInOtherOneofOldFormat.Validator", 124 | %{message_mod: message_mod} do 125 | json = "{\"oneofNullValue\": \"NULL_VALUE\"}" 126 | 127 | assert json 128 | |> Protobuf.JSON.decode!(message_mod) 129 | |> Protobuf.JSON.encode!() 130 | |> Jason.decode!() == %{"oneofNullValue" => nil} 131 | end 132 | end 133 | 134 | test "memory leak and infinite loop regression" do 135 | mod = ProtobufTestMessages.Proto2.TestAllTypesProto2 136 | 137 | problematic_payload = 138 | <<224, 4, 0, 224, 4, 185, 96, 224, 4, 255, 255, 255, 255, 255, 255, 255, 255, 127, 224, 4, 139 | 128, 128, 128, 128, 128, 128, 128, 128, 128, 1>> 140 | 141 | assert %^mod{} = mod.decode(problematic_payload) 142 | end 143 | 144 | defp url_to_message(%{message_type: type_url}) do 145 | case type_url do 146 | "protobuf_test_messages.proto3.TestAllTypesProto3" -> 147 | %{message_mod: ProtobufTestMessages.Proto3.TestAllTypesProto3} 148 | 149 | "protobuf_test_messages.proto2.TestAllTypesProto2" -> 150 | %{message_mod: ProtobufTestMessages.Proto2.TestAllTypesProto2} 151 | end 152 | end 153 | 154 | defp decode_conformance_input(%{conformance_input: conformance_input}) do 155 | %{proto_input: conformance_input_to_binary(conformance_input)} 156 | end 157 | 158 | defp decode_conformance_input(context) do 159 | context 160 | end 161 | 162 | defp conformance_input_to_binary(<>) 163 | when d1 in ?0..?9 and d2 in ?0..?9 and d3 in ?0..?9 do 164 | integer = Integer.undigits([d1 - ?0, d2 - ?0, d3 - ?0], 8) 165 | <> 166 | end 167 | 168 | defp conformance_input_to_binary(<>) when char in [?n, ?t, ?r, ?\\] do 169 | <> 170 | end 171 | 172 | defp conformance_input_to_binary(<>) do 173 | <> 174 | end 175 | 176 | defp conformance_input_to_binary(<<>>) do 177 | <<>> 178 | end 179 | 180 | defp escape_char(?n), do: ?\n 181 | defp escape_char(?t), do: ?\t 182 | defp escape_char(?r), do: ?\r 183 | defp escape_char(?\\), do: ?\\ 184 | end 185 | -------------------------------------------------------------------------------- /test/protobuf/dsl/typespecs_test.exs: 
-------------------------------------------------------------------------------- 1 | defmodule Protobuf.DSL.TypespecsTest do 2 | use ExUnit.Case, async: true 3 | use ExUnitProperties 4 | 5 | alias Protobuf.{FieldProps, MessageProps} 6 | alias Protobuf.DSL.Typespecs 7 | 8 | @unknown_fields_spec quote( 9 | do: [ 10 | __unknown_fields__: [Protobuf.unknown_field()] 11 | ] 12 | ) 13 | 14 | describe "quoted_enum_typespec/1" do 15 | test "returns integer() | ..." do 16 | message_props = %MessageProps{ 17 | field_props: %{ 18 | 1 => %FieldProps{name_atom: :FOO}, 19 | 2 => %FieldProps{name_atom: :BAR}, 20 | 3 => %FieldProps{name_atom: :BAZ} 21 | } 22 | } 23 | 24 | expected = 25 | quote do 26 | integer() | :FOO | :BAR | :BAZ 27 | end 28 | 29 | assert Typespecs.quoted_enum_typespec(message_props) == clean_meta(expected) 30 | end 31 | 32 | property "works for any number of possible enum values" do 33 | check all field_names <- list_of(atom(:alphanumeric), min_length: 1), max_runs: 10 do 34 | field_props = 35 | field_names 36 | |> Enum.with_index(1) 37 | |> Map.new(fn {name, index} -> {index, %FieldProps{name_atom: name}} end) 38 | 39 | message_props = %MessageProps{field_props: field_props} 40 | 41 | expected = 42 | [quote(do: integer()) | field_names] 43 | |> Enum.reverse() 44 | |> Enum.reduce(&{:|, [], [&1, &2]}) 45 | 46 | assert message_props |> Typespecs.quoted_enum_typespec() |> Macro.to_string() == 47 | Macro.to_string(expected) 48 | end 49 | end 50 | end 51 | 52 | describe "quoted_message_typespec/1" do 53 | test "with an empty message" do 54 | message_props = %MessageProps{field_props: %{}, oneof: []} 55 | quoted = Typespecs.quoted_message_typespec(message_props) 56 | 57 | assert Macro.to_string(quoted) == 58 | Macro.to_string(quote(do: %__MODULE__{unquote_splicing(@unknown_fields_spec)})) 59 | end 60 | 61 | test "with a field" do 62 | message_props = %MessageProps{ 63 | field_props: %{1 => %FieldProps{name_atom: :foo, type: :int32}}, 64 | oneof: [] 65 | } 66 | 67 | quoted = Typespecs.quoted_message_typespec(message_props) 68 | fields = quote(do: [foo: integer()]) ++ @unknown_fields_spec 69 | 70 | assert Macro.to_string(quoted) == 71 | Macro.to_string(quote(do: %__MODULE__{unquote_splicing(fields)})) 72 | end 73 | 74 | test "with a oneof field" do 75 | message_props = %MessageProps{ 76 | field_props: %{ 77 | 1 => %FieldProps{name_atom: :foo, type: :int32, oneof: 0}, 78 | 2 => %FieldProps{name_atom: :bar, type: :bool, oneof: 0} 79 | }, 80 | oneof: [my_oneof_field: 0] 81 | } 82 | 83 | quoted = Typespecs.quoted_message_typespec(message_props) 84 | 85 | fields = 86 | quote(do: [my_oneof_field: {:foo, integer()} | {:bar, boolean()} | nil]) ++ 87 | @unknown_fields_spec 88 | 89 | assert Macro.to_string(quoted) == 90 | Macro.to_string(quote(do: %__MODULE__{unquote_splicing(fields)})) 91 | end 92 | 93 | test "with fields of scalar types" do 94 | mappings = [ 95 | {:string, quote(do: String.t())}, 96 | {:bytes, quote(do: binary())}, 97 | {:bool, quote(do: boolean())}, 98 | {:int32, quote(do: integer())}, 99 | {:int64, quote(do: integer())}, 100 | {:sint32, quote(do: integer())}, 101 | {:sint64, quote(do: integer())}, 102 | {:sfixed32, quote(do: integer())}, 103 | {:sfixed64, quote(do: integer())}, 104 | {:uint32, quote(do: non_neg_integer())}, 105 | {:uint64, quote(do: non_neg_integer())}, 106 | {:fixed32, quote(do: non_neg_integer())}, 107 | {:fixed64, quote(do: non_neg_integer())}, 108 | {:float, quote(do: float() | :infinity | :negative_infinity | :nan)}, 109 | {:double, quote(do: float() | :infinity 
| :negative_infinity | :nan)} 110 | ] 111 | 112 | for {proto_type, quoted_spec} <- mappings do 113 | message_props = %MessageProps{ 114 | field_props: %{1 => %FieldProps{name_atom: :foo, type: proto_type}}, 115 | oneof: [] 116 | } 117 | 118 | quoted = Typespecs.quoted_message_typespec(message_props) 119 | fields = quote(do: [foo: unquote(quoted_spec)]) ++ @unknown_fields_spec 120 | 121 | assert Macro.to_string(quoted) == 122 | Macro.to_string(quote(do: %__MODULE__{unquote_splicing(fields)})) 123 | end 124 | end 125 | 126 | test "with an enum field" do 127 | message_props = %MessageProps{ 128 | field_props: %{1 => %FieldProps{name_atom: :foo, type: {:enum, Foo}}}, 129 | oneof: [] 130 | } 131 | 132 | quoted = Typespecs.quoted_message_typespec(message_props) 133 | fields = quote(do: [foo: Foo.t()]) ++ @unknown_fields_spec 134 | 135 | assert Macro.to_string(quoted) == 136 | Macro.to_string(quote(do: %__MODULE__{unquote_splicing(fields)})) 137 | end 138 | 139 | test "with a group field" do 140 | message_props = %MessageProps{ 141 | field_props: %{1 => %FieldProps{name_atom: :foo, type: :group}}, 142 | oneof: [] 143 | } 144 | 145 | quoted = Typespecs.quoted_message_typespec(message_props) 146 | fields = quote(do: [foo: term()]) ++ @unknown_fields_spec 147 | 148 | assert Macro.to_string(quoted) == 149 | Macro.to_string(quote(do: %__MODULE__{unquote_splicing(fields)})) 150 | end 151 | 152 | test "with an embedded field" do 153 | message_props = %MessageProps{ 154 | field_props: %{1 => %FieldProps{name_atom: :foo, type: EmbeddedFoo, embedded?: true}}, 155 | oneof: [] 156 | } 157 | 158 | quoted = Typespecs.quoted_message_typespec(message_props) 159 | fields = quote(do: [foo: EmbeddedFoo.t() | nil]) ++ @unknown_fields_spec 160 | 161 | assert Macro.to_string(quoted) == 162 | Macro.to_string(quote(do: %__MODULE__{unquote_splicing(fields)})) 163 | end 164 | 165 | test "with an optional field" do 166 | message_props = %MessageProps{ 167 | field_props: %{1 => %FieldProps{name_atom: :foo, type: :int32, optional?: true}}, 168 | oneof: [] 169 | } 170 | 171 | quoted = Typespecs.quoted_message_typespec(message_props) 172 | fields = quote(do: [foo: integer() | nil]) ++ @unknown_fields_spec 173 | 174 | assert Macro.to_string(quoted) == 175 | Macro.to_string(quote(do: %__MODULE__{unquote_splicing(fields)})) 176 | end 177 | 178 | test "with a proto3_optional field" do 179 | message_props = %MessageProps{ 180 | field_props: %{1 => %FieldProps{name_atom: :foo, type: :int32, proto3_optional?: true}}, 181 | syntax: :proto3 182 | } 183 | 184 | quoted = Typespecs.quoted_message_typespec(message_props) 185 | 186 | fields = quote(do: [foo: integer() | nil]) ++ @unknown_fields_spec 187 | 188 | assert Macro.to_string(quoted) == 189 | Macro.to_string(quote(do: %__MODULE__{unquote_splicing(fields)})) 190 | end 191 | 192 | test "with extensions" do 193 | message_props = %MessageProps{ 194 | field_props: %{}, 195 | oneof: [], 196 | extension_range: [{1, 10}] 197 | } 198 | 199 | quoted = Typespecs.quoted_message_typespec(message_props) 200 | fields = quote(do: [__pb_extensions__: map()]) ++ @unknown_fields_spec 201 | 202 | assert Macro.to_string(quoted) == 203 | Macro.to_string(quote(do: %__MODULE__{unquote_splicing(fields)})) 204 | end 205 | end 206 | 207 | defp clean_meta(expr) do 208 | Macro.prewalk(expr, &Macro.update_meta(&1, fn _ -> [] end)) 209 | end 210 | end 211 | -------------------------------------------------------------------------------- /test/protobuf/encoder_validation_test.exs: 
-------------------------------------------------------------------------------- 1 | defmodule Protobuf.EncoderTest.Validation do 2 | use ExUnit.Case, async: true 3 | 4 | @valid_vals %{ 5 | int32: -32, 6 | int64: -64, 7 | uint32: 32, 8 | uint64: 64, 9 | sint32: 32, 10 | sint64: 64, 11 | bool: true, 12 | enum: 3, 13 | fixed64: 164, 14 | sfixed64: 264, 15 | double: 2.23, 16 | bytes: <<1, 2, 3>>, 17 | string: "str", 18 | fixed32: 132, 19 | sfixed32: 232, 20 | float: 1.23, 21 | nil: nil 22 | } 23 | 24 | def other_types(white_list) do 25 | keys = List.wrap(white_list) 26 | 27 | @valid_vals |> Map.take(keys) |> Map.values() 28 | end 29 | 30 | test "encode_type/2 is invalid" do 31 | assert_invalid = fn type, others -> 32 | Enum.each(other_types(others), fn {invalid, err_type} -> 33 | assert_raise err_type, fn -> 34 | Protobuf.Wire.encode(type, invalid) 35 | end 36 | end) 37 | end 38 | 39 | int_list = [ 40 | bool: ArgumentError, 41 | double: ArithmeticError, 42 | bytes: ArgumentError, 43 | string: ArgumentError, 44 | float: ArithmeticError, 45 | nil: ArgumentError 46 | ] 47 | 48 | assert_invalid.(:int32, int_list) 49 | assert_invalid.(:int64, int_list) 50 | assert_invalid.(:uint32, int_list) 51 | assert_invalid.(:uint64, int_list) 52 | assert_invalid.(:sint32, int_list) 53 | assert_invalid.(:sint64, int_list) 54 | assert_invalid.(:enum, int_list) 55 | assert_invalid.(:fixed64, int_list) 56 | assert_invalid.(:sfixed64, int_list) 57 | assert_invalid.(:fixed32, int_list) 58 | assert_invalid.(:fixed64, int_list) 59 | 60 | float_list = [ 61 | bool: ArgumentError, 62 | bytes: ArgumentError, 63 | string: ArgumentError, 64 | nil: ArgumentError 65 | ] 66 | 67 | assert_invalid.(:double, float_list) 68 | assert_invalid.(:float, float_list) 69 | 70 | bytes_list = [ 71 | int32: ArgumentError, 72 | bool: ArgumentError, 73 | double: ArgumentError, 74 | nil: ArgumentError 75 | ] 76 | 77 | assert_invalid.(:bytes, bytes_list) 78 | assert_invalid.(:string, bytes_list) 79 | end 80 | 81 | test "field is invalid" do 82 | msg = %TestMsg.Foo{a: "abc"} 83 | 84 | assert_raise Protobuf.EncodeError, ~r/TestMsg.Foo#a.*Protobuf.EncodeError/, fn -> 85 | Protobuf.Encoder.encode(msg) 86 | end 87 | end 88 | 89 | test "proto2 invalid when required field is nil" do 90 | msg = %TestMsg.Foo2{a: nil} 91 | 92 | assert_raise Protobuf.EncodeError, ~r/TestMsg.Foo2#a.*Protobuf.EncodeError/, fn -> 93 | Protobuf.Encoder.encode(msg) 94 | end 95 | end 96 | 97 | test "proto2 valid optional field is nil" do 98 | msg = %TestMsg.Foo2{a: 1, c: nil} 99 | 100 | assert Protobuf.Encoder.encode(msg) 101 | end 102 | 103 | test "oneof invalid format" do 104 | msg = %TestMsg.Oneof{first: 1} 105 | 106 | assert_raise Protobuf.EncodeError, ~r/TestMsg.Oneof#first should be {key, val}/, fn -> 107 | Protobuf.Encoder.encode(msg) 108 | end 109 | end 110 | 111 | test "oneof field doesn't match" do 112 | msg = %TestMsg.Oneof{first: {:c, 42}} 113 | 114 | assert_raise Protobuf.EncodeError, ~r/:c doesn't belong to TestMsg.Oneof#first/, fn -> 115 | Protobuf.Encoder.encode(msg) 116 | end 117 | end 118 | 119 | test "oneof field is invalid" do 120 | msg = %TestMsg.Oneof{first: {:a, "abc"}} 121 | 122 | assert_raise Protobuf.EncodeError, ~r/TestMsg.Oneof#a.*Protobuf.EncodeError/, fn -> 123 | Protobuf.Encoder.encode(msg) 124 | end 125 | end 126 | 127 | test "oneof field is non-existent" do 128 | msg = %TestMsg.OneofProto3{first: {:x, "foo"}} 129 | 130 | assert_raise Protobuf.EncodeError, ~r/:x wasn't found in TestMsg.OneofProto3#first/, fn -> 131 | Protobuf.Encoder.encode(msg) 
132 | end 133 | end 134 | 135 | test "repeated field is not list" do 136 | msg = %TestMsg.Foo{g: 1} 137 | 138 | assert_raise Protobuf.EncodeError, ~r/TestMsg.Foo#g.*Protocol.UndefinedError/, fn -> 139 | Protobuf.Encoder.encode(msg) 140 | end 141 | 142 | msg = %TestMsg.Foo{} 143 | msg = %{msg | h: %TestMsg.Foo.Bar{}} 144 | 145 | assert_raise Protobuf.EncodeError, ~r/TestMsg.Foo#h.*Protocol.UndefinedError/, fn -> 146 | Protobuf.Encoder.encode(msg) 147 | end 148 | end 149 | 150 | test "build embedded field map when encode" do 151 | msg = %TestMsg.Foo{} 152 | msg = %TestMsg.Foo{msg | e: %{a: 1}} 153 | msg1 = %TestMsg.Foo{e: %{a: 1}} 154 | 155 | assert Protobuf.Encoder.encode(msg) == Protobuf.Encoder.encode(msg1) 156 | end 157 | end 158 | -------------------------------------------------------------------------------- /test/protobuf/extension_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.ExtensionTest do 2 | use ExUnit.Case, async: true 3 | 4 | alias TestMsg.Ext 5 | 6 | doctest Protobuf.Extension 7 | 8 | test "extension persistent works" do 9 | assert Ext.PbExtension == :persistent_term.get({Protobuf.Extension, Ext.Foo1, 1047}) 10 | assert Ext.PbExtension == :persistent_term.get({Protobuf.Extension, Ext.Foo1, 1048}) 11 | assert Ext.PbExtension == :persistent_term.get({Protobuf.Extension, Ext.Foo1, 1049}) 12 | assert Ext.PbExtension == :persistent_term.get({Protobuf.Extension, Ext.Foo2, 1047}) 13 | end 14 | 15 | test "extension get/put work" do 16 | assert %{__pb_extensions__: %{{Ext.PbExtension, :foo} => "foo"}} = 17 | Ext.Foo1.put_extension(%{}, Ext.PbExtension, :foo, "foo") 18 | 19 | assert %{__pb_extensions__: %{{Ext.PbExtension, :bar} => "bar"}} = 20 | Ext.Foo2.put_extension(%{}, Ext.PbExtension, :bar, "bar") 21 | 22 | assert %{__pb_extensions__: %{{Ext.PbExtension, :"Parent.foo"} => "nested.foo"}} = 23 | Ext.Foo1.put_extension(%{}, Ext.PbExtension, :"Parent.foo", "nested.foo") 24 | end 25 | 26 | test "extension put not existed key" do 27 | assert_raise Protobuf.ExtensionNotFound, fn -> 28 | Ext.Foo1.put_extension(%{}, Ext.PbExtension, :not_exist, "foo") 29 | end 30 | end 31 | 32 | test "simple types work" do 33 | bin = <<186, 65, 3, 97, 98, 99>> 34 | msg = %Ext.Foo2{} 35 | msg = Ext.Foo2.put_extension(msg, Ext.PbExtension, :bar, "abc") 36 | assert bin == Ext.Foo2.encode(msg) 37 | 38 | msg = Ext.Foo2.decode(bin) 39 | assert %{__pb_extensions__: %{{Ext.PbExtension, :bar} => "abc"}} = msg 40 | assert "abc" == Ext.Foo2.get_extension(msg, Ext.PbExtension, :bar) 41 | end 42 | 43 | test "nested types work" do 44 | bin = <<186, 65, 5, 10, 3, 97, 98, 99>> 45 | msg = %Ext.Foo1{} 46 | ext_msg = %Ext.Options{a: "abc"} 47 | msg = Ext.Foo1.put_extension(msg, Ext.PbExtension, :foo, ext_msg) 48 | assert bin == Ext.Foo2.encode(msg) 49 | 50 | msg = Ext.Foo1.decode(bin) 51 | assert %{__pb_extensions__: %{{Ext.PbExtension, :foo} => %Ext.Options{a: "abc"}}} = msg 52 | assert %Ext.Options{a: "abc"} == Ext.Foo2.get_extension(msg, Ext.PbExtension, :foo) 53 | end 54 | 55 | test "enum types work" do 56 | bin = <<192, 65, 2>> 57 | msg = %Ext.Foo1{} 58 | msg = Ext.Foo1.put_extension(msg, Ext.PbExtension, :"Parent.foo", :B) 59 | assert bin == Ext.Foo1.encode(msg) 60 | 61 | msg = Ext.Foo1.decode(bin) 62 | assert %{__pb_extensions__: %{{Ext.PbExtension, :"Parent.foo"} => :B}} = msg 63 | assert :B == Ext.Foo1.get_extension(msg, Ext.PbExtension, :"Parent.foo") 64 | end 65 | end 66 | 
-------------------------------------------------------------------------------- /test/protobuf/json/rfc3339_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.JSON.RFC3339Test do 2 | use ExUnit.Case, async: true 3 | use ExUnitProperties 4 | 5 | alias Protobuf.JSON.RFC3339 6 | 7 | describe "decode/1" do 8 | test "returns {:ok, seconds, nanos} with the right nanos and seconds" do 9 | assert {:ok, seconds, nanos} = RFC3339.decode("2021-11-26T16:19:13.310017Z") 10 | 11 | {:ok, dt, _offset} = DateTime.from_iso8601("2021-11-26T16:19:13Z") 12 | 13 | assert seconds == DateTime.to_unix(dt, :second) 14 | assert nanos == 310_017_000 15 | end 16 | 17 | test "returns {:ok, seconds, nanos} with a time offset" do 18 | assert {:ok, seconds, nanos} = RFC3339.decode("2021-11-26T16:19:13.310017+01:00") 19 | 20 | {:ok, dt, _offset} = DateTime.from_iso8601("2021-11-26T16:19:13+01:00") 21 | 22 | assert seconds == DateTime.to_unix(dt, :second) 23 | assert nanos == 310_017_000 24 | end 25 | 26 | test "returns {:error, reason} if the timestamp is outside of the allowed range" do 27 | assert {:error, reason} = RFC3339.decode("0000-01-01T00:00:00Z") 28 | assert reason == "timestamp is outside of allowed range" 29 | end 30 | 31 | test "returns {:error, reason} if the timestamp has cruft after" do 32 | assert {:error, reason} = RFC3339.decode("0000-01-01T00:00:00Zand the rest") 33 | assert reason == ~s(expected empty string, got: "and the rest") 34 | end 35 | 36 | test "returns {:error, reason} if the timestamp is missing the offset" do 37 | assert {:error, reason} = RFC3339.decode("0000-01-01T00:00:00") 38 | assert reason == "expected time offset, but it's missing" 39 | end 40 | 41 | test "returns {:error, reason} with bad nanoseconds" do 42 | assert {:error, reason} = RFC3339.decode("0000-01-01T00:00:00.nonanoZ") 43 | assert reason == ~s(bad time secfrac after ".", got: "nonanoZ") 44 | end 45 | 46 | test "returns {:error, reason} with bad digit something" do 47 | assert {:error, reason} = RFC3339.decode("000-01-01T00:00:00Z") 48 | assert reason == ~s(expected 4 digits but got unparsable integer: "000-") 49 | end 50 | 51 | test "returns {:error, reason} with missing T to separate date and time" do 52 | assert {:error, reason} = RFC3339.decode("0000-01-01.00:00:00Z") 53 | assert reason == ~s(expected literal "T", got: ".00:00:00Z") 54 | end 55 | 56 | test "returns {:error, reason} with not enough digits" do 57 | assert {:error, reason} = RFC3339.decode("000") 58 | assert reason == ~s(expected 4 digits, got: "000") 59 | end 60 | 61 | test "returns {:ok, seconds, nanos} for the latest possible timestamp" do 62 | assert {:ok, _seconds, 999_999_999} = RFC3339.decode("9999-12-31T23:59:59.999999999Z") 63 | end 64 | 65 | property "returns the right nanoseconds regardless of how many digits" do 66 | check all digits_count <- member_of([3, 6, 9]), 67 | max_range = String.to_integer(String.duplicate("9", digits_count)), 68 | nanos <- integer(1..max_range), 69 | nanos = nanos * round(:math.pow(10, 9 - digits_count)) do 70 | real_nanos = String.to_integer(String.pad_trailing(Integer.to_string(nanos), 9, "0")) 71 | nanos_str = String.pad_leading(Integer.to_string(real_nanos), digits_count, "0") 72 | timestamp_str = "1970-01-01T00:00:00.#{nanos_str}Z" 73 | 74 | assert {:ok, 0, ^real_nanos} = RFC3339.decode(timestamp_str) 75 | end 76 | end 77 | end 78 | 79 | describe "encode/1" do 80 | test "returns {:ok, formatted_string} with the right nanos and seconds" do 81 | {:ok, 
dt, _offset} = DateTime.from_iso8601("2021-11-26T16:19:13Z") 82 | unix_sec = dt |> DateTime.truncate(:second) |> DateTime.to_unix(:second) 83 | 84 | assert RFC3339.encode(unix_sec, 123_000) == {:ok, "2021-11-26T16:19:13.000123Z"} 85 | end 86 | 87 | test "returns {:error, reason} with too big nanos" do 88 | assert {:error, message} = RFC3339.encode(_unix_sec = 0, _nanos = 10_000_000_000) 89 | assert message == "nanos can't be bigger than 1000000000, got: 10000000000" 90 | end 91 | 92 | test "returns {:error, reason} with invalid seconds" do 93 | assert {:error, message} = RFC3339.encode(10_000_000_000_000_000, _nanos = 0) 94 | assert message == ":invalid_unix_time" 95 | end 96 | 97 | property "injects the right nanoseconds regardless of how many digits" do 98 | check all digits_count <- member_of([3, 6, 9]), 99 | max_range = String.to_integer(String.duplicate("9", digits_count)), 100 | nanos <- integer(1..max_range), 101 | nanos = nanos * round(:math.pow(10, 9 - digits_count)) do 102 | real_nanos = String.to_integer(String.pad_trailing(Integer.to_string(nanos), 9, "0")) 103 | 104 | nanos_str = 105 | String.pad_leading(Integer.to_string(real_nanos), digits_count, "0") 106 | |> String.trim_trailing("000") 107 | |> String.trim_trailing("000") 108 | 109 | timestamp_str = "1970-01-01T00:00:00.#{nanos_str}Z" 110 | 111 | assert RFC3339.encode(0, real_nanos) == {:ok, timestamp_str} 112 | end 113 | end 114 | end 115 | end 116 | -------------------------------------------------------------------------------- /test/protobuf/json/utils_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.JSON.UtilsTest do 2 | use ExUnit.Case, async: true 3 | use ExUnitProperties 4 | 5 | alias Protobuf.JSON.Utils 6 | 7 | @max_nanosec 999_999_999 8 | 9 | property "format_nanoseconds/1 and parse_nanoseconds/1 are circular" do 10 | check all nanoseconds <- integer(0..@max_nanosec) do 11 | formatted = Utils.format_nanoseconds(nanoseconds) 12 | assert {parsed, ""} = Utils.parse_nanoseconds(formatted) 13 | assert parsed == nanoseconds 14 | end 15 | end 16 | 17 | describe "format_nanoseconds/1" do 18 | test "correctly pads the string and removes trailing zeros" do 19 | assert Utils.format_nanoseconds(1) == "000000001" 20 | assert Utils.format_nanoseconds(1_000) == "000001" 21 | assert Utils.format_nanoseconds(1_000_000) == "001" 22 | assert Utils.format_nanoseconds(999_999_999) == "999999999" 23 | end 24 | 25 | property "always formats nanoseconds as 3, 6, or 9 digits" do 26 | check all nanoseconds <- integer(0..@max_nanosec), max_runs: 100 do 27 | formatted = Utils.format_nanoseconds(nanoseconds) 28 | assert byte_size(formatted) in [3, 6, 9] 29 | end 30 | end 31 | end 32 | 33 | describe "parse_nanoseconds/1" do 34 | test "returns :error if no digits are present" do 35 | assert Utils.parse_nanoseconds("foo") == :error 36 | end 37 | 38 | test "returns :error if more than 9 digits are passed" do 39 | assert Utils.parse_nanoseconds("1234567899") == :error 40 | end 41 | 42 | property "returns whatever's left after parsing the digits" do 43 | assert Utils.parse_nanoseconds("123456789foo") == {123_456_789, "foo"} 44 | assert Utils.parse_nanoseconds("123456789") == {123_456_789, ""} 45 | 46 | check all nanos <- string(?0..?9, min_length: 1, max_length: 9), 47 | rest <- string([?a..?z, ?A..?Z], min_length: 0, max_length: 5), 48 | max_runs: 10 do 49 | assert {parsed_nanos, ^rest} = Utils.parse_nanoseconds(nanos <> rest) 50 | assert parsed_nanos in 0..@max_nanosec 51 | end 52 
| end 53 | end 54 | end 55 | -------------------------------------------------------------------------------- /test/protobuf/json_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.JSONTest do 2 | use ExUnit.Case, async: true 3 | doctest Protobuf.JSON 4 | 5 | alias TestMsg.Scalars 6 | 7 | test "encodes proto2 structs" do 8 | assert Protobuf.JSON.encode!(%TestMsg.Foo2{b: 10}) == ~S|{"b":"10"}| 9 | end 10 | 11 | test "encode_to_iodata variants encode to iodata" do 12 | assert iodata = Protobuf.JSON.encode_to_iodata!(%TestMsg.Foo2{b: 10}) 13 | assert {:ok, ^iodata} = Protobuf.JSON.encode_to_iodata(%TestMsg.Foo2{b: 10}) 14 | assert IO.iodata_to_binary(iodata) == ~S|{"b":"10"}| 15 | end 16 | 17 | test "encoding string field with invalid UTF-8 data" do 18 | message = %Scalars{string: " \xff "} 19 | assert {:error, exception} = Protobuf.JSON.encode(message) 20 | assert is_exception(exception) 21 | end 22 | 23 | test "decoding string field with invalid UTF-8 data" do 24 | json = ~S|{"string":" \xff "}| 25 | assert {:error, exception} = Protobuf.JSON.decode(json, Scalars) 26 | assert is_exception(exception) 27 | end 28 | 29 | describe "bang variants of encode and decode" do 30 | # TODO: remove Jason when we require Elixir 1.18 31 | if Code.ensure_loaded?(JSON) do 32 | test "decode!/2" do 33 | json = ~S|{"string":" \xff "}| 34 | 35 | assert_raise Protobuf.JSON.DecodeError, fn -> 36 | Protobuf.JSON.decode!(json, Scalars) 37 | end 38 | end 39 | else 40 | test "decode!/2" do 41 | json = ~S|{"string":" \xff "}| 42 | 43 | assert_raise Jason.DecodeError, fn -> 44 | Protobuf.JSON.decode!(json, Scalars) 45 | end 46 | end 47 | end 48 | end 49 | 50 | describe "to_encodable/2" do 51 | test "validates options" do 52 | assert_raise ArgumentError, ~r"option :use_proto_names must be a boolean", fn -> 53 | Protobuf.JSON.to_encodable(%TestMsg.Foo2{b: 10}, use_proto_names: :no_bool) 54 | end 55 | 56 | assert_raise ArgumentError, "unknown option: :unknown_opt", fn -> 57 | Protobuf.JSON.to_encodable(%TestMsg.Foo2{b: 10}, unknown_opt: 1) 58 | end 59 | 60 | assert_raise ArgumentError, "invalid element in options list: :bad_value", fn -> 61 | Protobuf.JSON.to_encodable(%TestMsg.Foo2{b: 10}, [:bad_value]) 62 | end 63 | end 64 | end 65 | 66 | test "going back and forth with the Any type" do 67 | data = """ 68 | { 69 | "optionalAny": { 70 | "@type": "type.googleapis.com/protobuf_test_messages.proto3.TestAllTypesProto3", 71 | "optionalInt32": 12345 72 | } 73 | } 74 | """ 75 | 76 | assert {:ok, decoded} = 77 | Protobuf.JSON.decode(data, ProtobufTestMessages.Proto3.TestAllTypesProto3) 78 | 79 | assert %Google.Protobuf.Any{} = decoded.optional_any 80 | assert decoded.optional_any.type_url =~ "TestAllTypesProto3" 81 | 82 | assert Protobuf.JSON.to_encodable(decoded) == {:ok, Jason.decode!(data)} 83 | end 84 | end 85 | -------------------------------------------------------------------------------- /test/protobuf/message_merge_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.MessageMergeTest do 2 | use ExUnit.Case, async: true 3 | use ExUnitProperties 4 | 5 | alias ProtobufTestMessages.Proto3.TestAllTypesProto3 6 | 7 | @max_runs 5 8 | 9 | property "the latest scalar value takes precedence if it's not the default" do 10 | fields_with_gen = [ 11 | {:optional_int32, integer(-10..10), _default = 0}, 12 | {:optional_double, float(), _default = 0.0}, 13 | {:optional_string, string(:printable), _default = ""}, 14 
| {:optional_bytes, binary(), _default = <<>>} 15 | ] 16 | 17 | for {field, gen, default} <- fields_with_gen do 18 | check all val1 <- gen, 19 | val2 <- gen, 20 | val2 != default, 21 | max_runs: @max_runs do 22 | msg1 = struct!(TestAllTypesProto3, [{field, val1}]) 23 | msg2 = struct!(TestAllTypesProto3, [{field, val2}]) 24 | 25 | decoded = concat_and_decode([msg1, msg2]) 26 | 27 | assert Map.fetch!(decoded, field) == val2 28 | end 29 | end 30 | end 31 | 32 | property "repeated fields are concatenated" do 33 | check all list1 <- list_of(integer()), 34 | list2 <- list_of(integer()), 35 | max_runs: @max_runs do 36 | msg1 = %TestAllTypesProto3{repeated_int32: list1} 37 | msg2 = %TestAllTypesProto3{repeated_int32: list2} 38 | 39 | decoded = concat_and_decode([msg1, msg2]) 40 | 41 | assert decoded.repeated_int32 == list1 ++ list2 42 | end 43 | end 44 | 45 | # TODO 46 | # property "map fields are merged" 47 | 48 | @tag :skip 49 | test "oneof fields with the same tag are merged" 50 | 51 | test "the latest oneof field takes precedence if the two have different tags" do 52 | msg1 = %TestAllTypesProto3{ 53 | oneof_field: {:oneof_nested_message, %TestAllTypesProto3.NestedMessage{}} 54 | } 55 | 56 | msg2 = %TestAllTypesProto3{oneof_field: {:oneof_string, "foo"}} 57 | 58 | decoded = concat_and_decode([msg1, msg2]) 59 | 60 | assert decoded.oneof_field == {:oneof_string, "foo"} 61 | end 62 | 63 | describe "nested messages" do 64 | end 65 | 66 | defp concat_and_decode(messages) do 67 | messages 68 | |> Enum.map_join("", &Protobuf.encode/1) 69 | |> Protobuf.decode(TestAllTypesProto3) 70 | end 71 | end 72 | -------------------------------------------------------------------------------- /test/protobuf/protobuf_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.ProtobufTest do 2 | use ExUnit.Case, async: false 3 | 4 | test "load_extensions/0 is a noop" do 5 | assert loaded_extensions() == 18 6 | Protobuf.load_extensions() 7 | assert loaded_extensions() == 18 8 | end 9 | 10 | describe "encode/1" do 11 | test "encodes a struct" do 12 | bin = Protobuf.encode(%TestMsg.Foo{a: 42}) 13 | assert bin == <<8, 42>> 14 | end 15 | 16 | test "encodes a struct with proto3 optional field" do 17 | bin = Protobuf.encode(%TestMsg.Proto3Optional{b: "A"}) 18 | assert bin == <<18, 1, 65>> 19 | end 20 | end 21 | 22 | describe "encode_to_iodata/1" do 23 | test "encodes a struct as iodata" do 24 | iodata = Protobuf.encode_to_iodata(%TestMsg.Foo{a: 42}) 25 | assert IO.iodata_to_binary(iodata) == <<8, 42>> 26 | end 27 | end 28 | 29 | describe "decode/2" do 30 | test "decodes a struct" do 31 | struct = Protobuf.decode(<<8, 42>>, TestMsg.Foo) 32 | assert struct == %TestMsg.Foo{a: 42} 33 | end 34 | end 35 | 36 | describe "get_unknown_fields/1" do 37 | test "returns a list of decoded unknown varints" do 38 | input = <<168, 31, 1>> 39 | message = ProtobufTestMessages.Proto3.TestAllTypesProto3.decode(input) 40 | assert Protobuf.get_unknown_fields(message) == [{501, _wire_varint = 0, 1}] 41 | end 42 | 43 | test "raises if the given struct doesn't have an :__unknown_fields__ field" do 44 | assert_raise ArgumentError, ~r/can't retrieve unknown fields for struct URI/, fn -> 45 | Protobuf.get_unknown_fields(%URI{}) 46 | end 47 | end 48 | end 49 | 50 | defp loaded_extensions do 51 | Enum.count(:persistent_term.get(), &match?({{Protobuf.Extension, _, _}, _}, &1)) 52 | end 53 | end 54 | -------------------------------------------------------------------------------- 
/test/protobuf/protoc/generator/enum_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.Protoc.Generator.EnumTest do 2 | use ExUnit.Case, async: true 3 | 4 | alias Protobuf.Protoc.Context 5 | alias Protobuf.Protoc.Generator.Enum, as: Generator 6 | alias Protobuf.Protoc.Generator.Util 7 | alias Protobuf.TestHelpers 8 | 9 | test "generate/2 generates enum type messages" do 10 | ctx = %Context{} 11 | module = Module.concat(__MODULE__, "EnumFoo") |> inspect() |> String.replace(".", "") 12 | 13 | desc = %Google.Protobuf.EnumDescriptorProto{ 14 | name: module, 15 | options: nil, 16 | value: [ 17 | %Google.Protobuf.EnumValueDescriptorProto{name: "A", number: 0}, 18 | %Google.Protobuf.EnumValueDescriptorProto{name: "B", number: 1}, 19 | %Google.Protobuf.EnumValueDescriptorProto{name: "HAS_UNDERSCORES", number: 2}, 20 | %Google.Protobuf.EnumValueDescriptorProto{name: "HAS_UNDERSCORES_X", number: 3}, 21 | %Google.Protobuf.EnumValueDescriptorProto{name: "HAS_UNDERSCORES_", number: 4} 22 | ] 23 | } 24 | 25 | assert {^module, msg} = Generator.generate(ctx, desc) 26 | 27 | # Make sure the generated file is compilable. 28 | assert [{compiled_mod, bytecode}] = Code.compile_string(msg) 29 | assert inspect(compiled_mod) == module 30 | 31 | assert msg =~ "defmodule #{module} do\n" 32 | assert msg =~ "use Protobuf, enum: true, protoc_gen_elixir_version: \"#{Util.version()}\"\n" 33 | 34 | refute msg =~ "defstruct " 35 | 36 | assert msg =~ """ 37 | field :A, 0 38 | field :B, 1 39 | field :HAS_UNDERSCORES, 2 40 | field :HAS_UNDERSCORES_X, 3 41 | field :HAS_UNDERSCORES_, 4 42 | """ 43 | 44 | assert TestHelpers.get_type_spec_as_string(compiled_mod, bytecode, :t) == 45 | "t() :: integer() | :A | :B | :HAS_UNDERSCORES | :HAS_UNDERSCORES_X | :HAS_UNDERSCORES_" 46 | end 47 | 48 | test "generate/2 generates enum type messages with descriptor" do 49 | ctx = %Context{gen_descriptors?: true} 50 | module = Module.concat(__MODULE__, "EnumFooDesc") |> inspect() |> String.replace(".", "") 51 | 52 | desc = %Google.Protobuf.EnumDescriptorProto{ 53 | name: module, 54 | options: nil, 55 | value: [ 56 | %Google.Protobuf.EnumValueDescriptorProto{name: "A", number: 0}, 57 | %Google.Protobuf.EnumValueDescriptorProto{name: "B", number: 1}, 58 | %Google.Protobuf.EnumValueDescriptorProto{name: "HAS_UNDERSCORES", number: 2}, 59 | %Google.Protobuf.EnumValueDescriptorProto{name: "HAS_UNDERSCORES_X", number: 3}, 60 | %Google.Protobuf.EnumValueDescriptorProto{name: "HAS_UNDERSCORES_", number: 4} 61 | ] 62 | } 63 | 64 | assert {^module, msg} = Generator.generate(ctx, desc) 65 | 66 | # Make sure the generated file is compilable. 
67 | assert [{compiled_mod, bytecode}] = Code.compile_string(msg) 68 | assert inspect(compiled_mod) == module 69 | 70 | assert msg =~ "defmodule #{module} do\n" 71 | assert msg =~ "use Protobuf, enum: true, protoc_gen_elixir_version: \"#{Util.version()}\"\n" 72 | 73 | refute msg =~ "defstruct " 74 | 75 | assert msg =~ """ 76 | field :A, 0 77 | field :B, 1 78 | field :HAS_UNDERSCORES, 2 79 | field :HAS_UNDERSCORES_X, 3 80 | field :HAS_UNDERSCORES_, 4 81 | """ 82 | 83 | assert %Google.Protobuf.EnumDescriptorProto{} = desc = compiled_mod.descriptor() 84 | assert desc.name == module 85 | 86 | assert msg =~ """ 87 | def descriptor do 88 | # credo:disable-for-next-line 89 | """ 90 | 91 | assert TestHelpers.get_type_spec_as_string(compiled_mod, bytecode, :t) == 92 | "t() :: integer() | :A | :B | :HAS_UNDERSCORES | :HAS_UNDERSCORES_X | :HAS_UNDERSCORES_" 93 | end 94 | 95 | test "generate/2 generates the right code when the enum name starts with lowercase" do 96 | ctx = %Context{} 97 | 98 | desc = %Google.Protobuf.EnumDescriptorProto{ 99 | name: "valueType", 100 | options: nil, 101 | value: [ 102 | %Google.Protobuf.EnumValueDescriptorProto{name: "VALUE_TYPE_UNDEFINED", number: 0}, 103 | %Google.Protobuf.EnumValueDescriptorProto{name: "VALUE_TYPE_INTEGER", number: 1} 104 | ] 105 | } 106 | 107 | assert {module, msg} = Generator.generate(ctx, desc) 108 | 109 | assert module == "ValueType" 110 | assert msg =~ "defmodule ValueType do" 111 | end 112 | 113 | describe "generate/2 include_docs" do 114 | test "includes enum comment for `@moduledoc` when flag is true" do 115 | test_pb = TestHelpers.read_generated_file("test.pb.ex") 116 | 117 | assert test_pb =~ """ 118 | defmodule My.Test.Days do 119 | @moduledoc \"\"\" 120 | This enum represents days of the week. 121 | \"\"\" 122 | """ 123 | 124 | assert test_pb =~ """ 125 | defmodule My.Test.HatType do 126 | @moduledoc \"\"\" 127 | This enum represents different kinds of hats. 128 | \"\"\" 129 | """ 130 | 131 | assert test_pb =~ """ 132 | defmodule My.Test.Request.Color do 133 | @moduledoc \"\"\" 134 | This enum represents three different colors. 
135 | \"\"\" 136 | """ 137 | end 138 | 139 | test "includes `@moduledoc false` by default" do 140 | ctx = %Context{include_docs?: false} 141 | desc = %Google.Protobuf.EnumDescriptorProto{name: "valueType"} 142 | 143 | {_module, msg} = Generator.generate(ctx, desc) 144 | 145 | assert msg =~ "@moduledoc false\n" 146 | end 147 | end 148 | end 149 | -------------------------------------------------------------------------------- /test/protobuf/protoc/generator/extension_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.Protoc.Generator.ExtensionTest do 2 | use ExUnit.Case, async: true 3 | 4 | alias Google.Protobuf.{DescriptorProto, FieldDescriptorProto, FileDescriptorProto} 5 | alias Protobuf.Protoc.Context 6 | alias Protobuf.Protoc.Generator.Extension, as: Generator 7 | 8 | describe "generate/3" do 9 | test "doesn't generate any modules if the given file has no messages and no extensions" do 10 | ctx = %Context{namespace: [""]} 11 | desc = %FileDescriptorProto{extension: []} 12 | 13 | assert Generator.generate(ctx, desc) == {nil, []} 14 | end 15 | 16 | test "generates file-level extensions using the file's module" do 17 | ctx = %Context{ 18 | module_prefix: "ext", 19 | dep_type_mapping: %{ 20 | ".ext.Foo1" => %{type_name: "Ext.Foo1"}, 21 | ".ext.Bar1" => %{type_name: "Ext.Bar1"}, 22 | ".ext.Options" => %{type_name: "Ext.Options"} 23 | }, 24 | syntax: :proto2 25 | } 26 | 27 | desc = %FileDescriptorProto{ 28 | extension: [ 29 | %FieldDescriptorProto{ 30 | extendee: ".ext.Foo1", 31 | name: "foo", 32 | json_name: "foo", 33 | number: 1047, 34 | label: :LABEL_OPTIONAL, 35 | type: :TYPE_MESSAGE, 36 | type_name: ".ext.Options" 37 | }, 38 | %FieldDescriptorProto{ 39 | extendee: ".ext.Bar1", 40 | name: "bar", 41 | json_name: "bar", 42 | number: 1048, 43 | label: :LABEL_OPTIONAL, 44 | type: :TYPE_MESSAGE, 45 | type_name: ".ext.Options" 46 | } 47 | ] 48 | } 49 | 50 | assert {{mod_name, msg}, []} = Generator.generate(ctx, desc) 51 | assert mod_name == "Ext.PbExtension" 52 | 53 | assert Enum.join(msg, "\n") =~ 54 | "Ext.Foo1, :foo, 1047, optional: true, type: Ext.Options" 55 | 56 | assert Enum.join(msg, "\n") =~ 57 | "Ext.Bar1, :bar, 1048, optional: true, type: Ext.Options" 58 | end 59 | 60 | test "resolves type names" do 61 | ctx = %Context{ 62 | module_prefix: "ext", 63 | dep_type_mapping: %{ 64 | ".ext.Foo1" => %{type_name: "Ext.Foo1"}, 65 | ".ext.Options" => %{type_name: "Ext.Options"}, 66 | ".ext.Foo2" => %{type_name: "Ext.Foo2"} 67 | }, 68 | syntax: :proto2 69 | } 70 | 71 | desc = %Google.Protobuf.FileDescriptorProto{ 72 | extension: [ 73 | %Google.Protobuf.FieldDescriptorProto{ 74 | extendee: ".ext.Foo1", 75 | name: "foo", 76 | json_name: "foo", 77 | number: 1047, 78 | label: :LABEL_OPTIONAL, 79 | type: :TYPE_MESSAGE, 80 | type_name: ".ext.Options" 81 | }, 82 | %Google.Protobuf.FieldDescriptorProto{ 83 | extendee: ".ext.Foo1", 84 | name: "foo2", 85 | json_name: "foo2", 86 | number: 1049, 87 | label: :LABEL_REPEATED, 88 | type: :TYPE_UINT32 89 | }, 90 | %Google.Protobuf.FieldDescriptorProto{ 91 | extendee: ".ext.Foo2", 92 | name: "bar", 93 | json_name: "bar", 94 | number: 1047, 95 | label: :LABEL_OPTIONAL, 96 | type: :TYPE_STRING 97 | } 98 | ] 99 | } 100 | 101 | assert {{"Ext.PbExtension", msg}, []} = Generator.generate(ctx, desc) 102 | msg = Enum.join(msg, "\n") 103 | assert msg =~ "Ext.Foo1, :foo, 1047, optional: true, type: Ext.Options" 104 | assert msg =~ "Ext.Foo1, :foo2, 1049, repeated: true, type: :uint32" 105 | assert msg =~ 
"Ext.Foo2, :bar, 1047, optional: true, type: :string" 106 | end 107 | 108 | test "generates nested extensions when given" do 109 | ctx = %Context{ 110 | module_prefix: "ext", 111 | dep_type_mapping: %{ 112 | ".ext.Foo" => %{type_name: "Ext.Foo"} 113 | }, 114 | syntax: :proto2 115 | } 116 | 117 | desc = %Google.Protobuf.FileDescriptorProto{ 118 | extension: [ 119 | %Google.Protobuf.FieldDescriptorProto{ 120 | extendee: ".ext.Foo", 121 | label: :LABEL_OPTIONAL, 122 | name: "file_level", 123 | json_name: "file_level", 124 | number: 1048, 125 | type: :TYPE_STRING 126 | } 127 | ], 128 | message_type: [ 129 | %DescriptorProto{ 130 | name: "MyMessage", 131 | extension: [ 132 | %Google.Protobuf.FieldDescriptorProto{ 133 | extendee: ".ext.Foo", 134 | label: :LABEL_OPTIONAL, 135 | name: "in_msg", 136 | json_name: "in_msg", 137 | number: 1049, 138 | type: :TYPE_STRING 139 | } 140 | ], 141 | nested_type: [ 142 | %DescriptorProto{ 143 | name: "NestedMessage", 144 | extension: [ 145 | %Google.Protobuf.FieldDescriptorProto{ 146 | extendee: ".ext.Foo", 147 | label: :LABEL_OPTIONAL, 148 | name: "in_nested", 149 | json_name: "in_nested", 150 | number: 1050, 151 | type: :TYPE_STRING 152 | } 153 | ] 154 | } 155 | ] 156 | } 157 | ] 158 | } 159 | 160 | assert {{"Ext.PbExtension", file_message}, 161 | [ 162 | {"Ext.MyMessage.PbExtension", my_message}, 163 | {"Ext.MyMessage.NestedMessage.PbExtension", nested_message} 164 | ]} = Generator.generate(ctx, desc) 165 | 166 | assert Enum.join(file_message, "\n") =~ 167 | "Ext.Foo, :file_level, 1048, optional: true, type: :string" 168 | 169 | assert my_message =~ "Ext.Foo, :in_msg, 1049, optional: true, type: :string" 170 | assert nested_message =~ "Ext.Foo, :in_nested, 1050, optional: true, type: :string" 171 | end 172 | end 173 | end 174 | -------------------------------------------------------------------------------- /test/protobuf/protoc/generator/service_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.Protoc.Generator.ServiceTest do 2 | use ExUnit.Case, async: true 3 | 4 | alias Protobuf.Protoc.Context 5 | alias Protobuf.Protoc.Generator.Service, as: Generator 6 | alias Protobuf.Protoc.Generator.Util 7 | 8 | test "generate/2 generates services" do 9 | ctx = %Context{ 10 | package: "foo", 11 | dep_type_mapping: %{ 12 | ".foo.Input0" => %{type_name: "Foo.Input0"}, 13 | ".foo.Input1" => %{type_name: "Foo.Input1"}, 14 | ".foo.Input2" => %{type_name: "Foo.Input2"}, 15 | ".foo.Input3" => %{type_name: "Foo.Input3"}, 16 | ".foo.Output0" => %{type_name: "Foo.Output0"}, 17 | ".foo.Output1" => %{type_name: "Foo.Output1"}, 18 | ".foo.Output2" => %{type_name: "Foo.Output2"}, 19 | ".foo.Output3" => %{type_name: "Foo.Output3"} 20 | }, 21 | module_prefix: "Foo" 22 | } 23 | 24 | desc = %Google.Protobuf.ServiceDescriptorProto{ 25 | name: "ServiceFoo", 26 | method: [ 27 | %Google.Protobuf.MethodDescriptorProto{ 28 | name: "MethodA", 29 | input_type: ".foo.Input0", 30 | output_type: ".foo.Output0" 31 | }, 32 | %Google.Protobuf.MethodDescriptorProto{ 33 | name: "MethodB", 34 | input_type: ".foo.Input1", 35 | output_type: ".foo.Output1", 36 | client_streaming: true 37 | }, 38 | %Google.Protobuf.MethodDescriptorProto{ 39 | name: "MethodC", 40 | input_type: ".foo.Input2", 41 | output_type: ".foo.Output2", 42 | server_streaming: true 43 | }, 44 | %Google.Protobuf.MethodDescriptorProto{ 45 | name: "MethodD", 46 | input_type: ".foo.Input3", 47 | output_type: ".foo.Output3", 48 | client_streaming: true, 49 | server_streaming: 
true 50 | } 51 | ] 52 | } 53 | 54 | assert {"Foo.ServiceFoo", msg} = Generator.generate(ctx, desc) 55 | assert msg =~ "defmodule Foo.ServiceFoo.Service do\n" 56 | 57 | assert msg =~ 58 | "use GRPC.Service, name: \"foo.ServiceFoo\", protoc_gen_elixir_version: \"#{Util.version()}\"\n" 59 | 60 | assert msg =~ "rpc :MethodA, Foo.Input0, Foo.Output0\n" 61 | assert msg =~ "rpc :MethodB, stream(Foo.Input1), Foo.Output1\n" 62 | assert msg =~ "rpc :MethodC, Foo.Input2, stream(Foo.Output2)\n" 63 | assert msg =~ "rpc :MethodD, stream(Foo.Input3), stream(Foo.Output3)\n" 64 | end 65 | 66 | describe "generate/2 include_docs" do 67 | test "includes service comment for `@moduledoc` when flag is true" do 68 | ctx = %Context{ 69 | package: "foo", 70 | include_docs?: true, 71 | comments: %{ 72 | "" => 73 | "An example test service that has\n" <> 74 | "a test method. It expects a Request\n" <> 75 | "and returns a Reply." 76 | }, 77 | dep_type_mapping: %{ 78 | ".foo.Input0" => %{type_name: "Foo.Input0"}, 79 | ".foo.Output0" => %{type_name: "Foo.Output0"} 80 | }, 81 | module_prefix: "Foo" 82 | } 83 | 84 | desc = %Google.Protobuf.ServiceDescriptorProto{ 85 | name: "ServiceFoo", 86 | method: [ 87 | %Google.Protobuf.MethodDescriptorProto{ 88 | name: "MethodA", 89 | input_type: ".foo.Input0", 90 | output_type: ".foo.Output0" 91 | } 92 | ] 93 | } 94 | 95 | assert {"Foo.ServiceFoo", msg} = Generator.generate(ctx, desc) 96 | 97 | assert msg =~ """ 98 | @moduledoc \"\"\" 99 | An example test service that has 100 | a test method. It expects a Request 101 | and returns a Reply. 102 | \"\"\" 103 | """ 104 | end 105 | 106 | test "includes `@moduledoc false` by default" do 107 | ctx = %Context{include_docs?: false} 108 | desc = %Google.Protobuf.ServiceDescriptorProto{name: "ServiceFoo"} 109 | 110 | {_module, msg} = Generator.generate(ctx, desc) 111 | 112 | assert msg =~ "@moduledoc false\n" 113 | end 114 | end 115 | end 116 | -------------------------------------------------------------------------------- /test/protobuf/protoc/generator/util_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.Protoc.Generator.UtilTest do 2 | use ExUnit.Case, async: true 3 | 4 | import Protobuf.Protoc.Generator.Util 5 | 6 | alias Protobuf.Protoc.Context 7 | 8 | describe "mod_name/2" do 9 | test "camelizes components" do 10 | assert mod_name(%Context{}, ["lowercaseName"]) == "LowercaseName" 11 | assert mod_name(%Context{}, ["lowercase", "name"]) == "Lowercase.Name" 12 | assert mod_name(%Context{}, ["Upper", "lower"]) == "Upper.Lower" 13 | end 14 | 15 | test "can handle nil prefix" do 16 | assert mod_name(%Context{module_prefix: nil}, ["Foo", "Bar"]) == "Foo.Bar" 17 | end 18 | 19 | test "can handle empty package" do 20 | assert mod_name(%Context{module_prefix: ""}, ["Foo", "Bar"]) == "Foo.Bar" 21 | end 22 | 23 | test "can handle non-empty module prefix" do 24 | assert mod_name(%Context{module_prefix: "custom.prefix"}, ["Foo", "Bar"]) == 25 | "Custom.Prefix.Foo.Bar" 26 | end 27 | 28 | test "returns prefixed package name" do 29 | ctx = %Context{package_prefix: "custom.prefix", package: "pkg", module_prefix: nil} 30 | assert mod_name(ctx, ["Foo", "Bar"]) == "Custom.Prefix.Pkg.Foo.Bar" 31 | end 32 | 33 | test "returns module prefix when package prefix is present" do 34 | ctx = %Context{module_prefix: "overrides", package_prefix: "custom.prefix", package: "pkg"} 35 | assert mod_name(ctx, ["Foo", "Bar"]) == "Overrides.Foo.Bar" 36 | end 37 | 38 | test "ensure all components of namespace 
are camel-case'd" do 39 | assert mod_name(%Context{module_prefix: nil}, ["foo", "Bar"]) == "Foo.Bar" 40 | end 41 | end 42 | 43 | describe "options_to_str/1" do 44 | test "stringifies a map of options" do 45 | assert options_to_str(%{}) == "" 46 | assert options_to_str(%{enum: true, syntax: nil}) == "enum: true" 47 | assert options_to_str(%{syntax: :proto2}) == "syntax: :proto2" 48 | assert options_to_str(%{default: nil, enum: false}) == "" 49 | assert options_to_str(%{deprecated: nil, map: nil, syntax: nil}) == "" 50 | assert options_to_str(%{default: "42", enum: false}) == "default: 42" 51 | assert options_to_str(%{json_name: "\"theFieldName\""}) == "json_name: \"theFieldName\"" 52 | end 53 | 54 | test "keep options string in alphabetical order" do 55 | opts = %{ 56 | syntax: :proto3, 57 | map: true, 58 | deprecated: true, 59 | protoc_gen_elixir_version: "1.2.3" 60 | } 61 | 62 | sorted_str = 63 | "deprecated: true, map: true, protoc_gen_elixir_version: 1.2.3, syntax: :proto3" 64 | 65 | assert options_to_str(opts) == sorted_str 66 | end 67 | end 68 | 69 | describe "type_from_type_name/2" do 70 | test "fetches the right type" do 71 | ctx = %Context{ 72 | dep_type_mapping: %{ 73 | ".Bar" => %{type_name: "Bar"}, 74 | ".Baz" => %{type_name: "Baz"} 75 | } 76 | } 77 | 78 | assert type_from_type_name(ctx, ".Baz") == "Baz" 79 | 80 | ctx = %Context{ 81 | dep_type_mapping: %{".foo_bar.ab_cd.Bar" => %{type_name: "FooBar.AbCd.Bar"}} 82 | } 83 | 84 | assert type_from_type_name(ctx, ".foo_bar.ab_cd.Bar") 85 | end 86 | end 87 | 88 | describe "prepend_package_prefix/2" do 89 | test "ignores nils" do 90 | assert prepend_package_prefix("foo", nil) == "foo" 91 | assert prepend_package_prefix(nil, "foo") == "foo" 92 | assert prepend_package_prefix("foo", "bar") == "foo.bar" 93 | end 94 | end 95 | end 96 | -------------------------------------------------------------------------------- /test/protobuf/protoc/generator_integration_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.Protoc.GeneratorIntegrationTest do 2 | use ExUnit.Case, async: true 3 | @moduletag :integration 4 | 5 | test "encode and decode My.Test.Request" do 6 | entry = %My.Test.Reply.Entry{ 7 | key_that_needs_1234camel_CasIng: 1, 8 | value: -12_345, 9 | _my_field_name_2: 21 10 | } 11 | 12 | reply = %My.Test.Reply{found: [entry], compact_keys: [1, 2, 3]} 13 | 14 | input = %My.Test.Request{ 15 | key: [123], 16 | hue: :GREEN, 17 | hat: :FEZ, 18 | deadline: 123.0, 19 | name_mapping: %{321 => "name"}, 20 | msg_mapping: %{1234 => reply} 21 | } 22 | 23 | output = My.Test.Request.encode(input) 24 | assert My.Test.Request.__message_props__().field_props[14].map? 25 | assert My.Test.Request.__message_props__().field_props[15].map? 26 | assert My.Test.Request.NameMappingEntry.__message_props__().map? 27 | assert My.Test.Request.MsgMappingEntry.__message_props__().map? 
28 | assert My.Test.Request.decode(output) == input 29 | end 30 | 31 | test "encode and decode My.Test.Communique(oneof)" do 32 | unions = [ 33 | number: 42, 34 | name: "abc", 35 | temp_c: 1.2, 36 | height: 2.5, 37 | today: :MONDAY, 38 | maybe: true, 39 | delta: 123, 40 | msg: %My.Test.Reply{} 41 | ] 42 | 43 | Enum.each(unions, fn union -> 44 | input = %My.Test.Communique{union: union} 45 | output = My.Test.Communique.encode(input) 46 | assert My.Test.Communique.decode(output) == input 47 | end) 48 | end 49 | 50 | test "options" do 51 | assert %{deprecated?: true} = My.Test.Options.__message_props__().field_props[1] 52 | end 53 | 54 | test "extensions" do 55 | assert "hello" == %Protobuf.Protoc.ExtTest.Foo{a: "hello"}.a 56 | end 57 | 58 | describe "custom options" do 59 | # These fail the first time, when extensions are not loaded. Then, they start to pass. 60 | @describetag :skip 61 | 62 | test "with enums" do 63 | descriptor = Test.EnumWithCustomOptions.descriptor() 64 | 65 | assert %Google.Protobuf.EnumValueDescriptorProto{} = 66 | value = Enum.find(descriptor.value, &(&1.number == 1)) 67 | 68 | assert %Google.Protobuf.EnumValueOptions{__pb_extensions__: extensions} = value.options 69 | assert Map.fetch(extensions, {Test.PbExtension, :my_custom_option}) == {:ok, "hello"} 70 | end 71 | 72 | test "with messages" do 73 | descriptor = Test.MessageWithCustomOptions.descriptor() 74 | 75 | assert %Google.Protobuf.MessageOptions{__pb_extensions__: extensions} = descriptor.options 76 | 77 | assert Map.fetch(extensions, {Test.PbExtension, :lowercase_name}) == 78 | {:ok, "message_with_custom_options"} 79 | end 80 | end 81 | 82 | test "maps without packages" do 83 | input = %NoPackageMessage{number_mapping: %{321 => 123, 1337 => 1}} 84 | 85 | output = NoPackageMessage.encode(input) 86 | assert NoPackageMessage.__message_props__().field_props[1].map? 87 | assert NoPackageMessage.NumberMappingEntry.__message_props__().map? 
88 | assert NoPackageMessage.decode(output) == input 89 | end 90 | end 91 | -------------------------------------------------------------------------------- /test/protobuf/protoc/generator_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.Protoc.GeneratorTest do 2 | use ExUnit.Case, async: true 3 | 4 | import Protobuf.TestHelpers 5 | 6 | alias Protobuf.Protoc.{Generator, Context} 7 | alias Google.Protobuf.Compiler.CodeGeneratorResponse 8 | 9 | describe "generate/2" do 10 | test "returns a list of Google.Protobuf.Compiler.CodeGeneratorResponse.File structs" do 11 | ctx = %Context{global_type_mapping: %{"name.proto" => %{}}} 12 | desc = %Google.Protobuf.FileDescriptorProto{name: "name.proto"} 13 | 14 | assert Generator.generate(ctx, desc) == 15 | {nil, [%CodeGeneratorResponse.File{name: "name.pb.ex", content: "\n"}]} 16 | end 17 | 18 | test "uses the package prefix" do 19 | ctx = %Context{ 20 | package_prefix: "myapp", 21 | global_type_mapping: %{ 22 | "name.proto" => %{".myapp.Foo" => %{type_name: "Myapp.Foo"}} 23 | } 24 | } 25 | 26 | desc = %Google.Protobuf.FileDescriptorProto{ 27 | name: "name.proto", 28 | message_type: [%Google.Protobuf.DescriptorProto{name: "Foo"}] 29 | } 30 | 31 | assert {nil, [%CodeGeneratorResponse.File{} = file]} = Generator.generate(ctx, desc) 32 | 33 | assert [{mod, _bytecode}] = Code.compile_string(file.content) 34 | assert mod == Myapp.Foo 35 | 36 | purge_modules([mod]) 37 | end 38 | 39 | test "uses the package prefix when descriptor has package" do 40 | ctx = %Context{ 41 | package_prefix: "myapp.proto", 42 | global_type_mapping: %{ 43 | "name.proto" => %{".myapp.proto.lib.Foo" => %{type_name: "Myapp.Proto.Lib.Foo"}} 44 | } 45 | } 46 | 47 | desc = %Google.Protobuf.FileDescriptorProto{ 48 | name: "name.proto", 49 | package: "lib", 50 | message_type: [%Google.Protobuf.DescriptorProto{name: "Foo"}] 51 | } 52 | 53 | assert {nil, [%CodeGeneratorResponse.File{} = file]} = Generator.generate(ctx, desc) 54 | 55 | assert [{mod, _bytecode}] = Code.compile_string(file.content) 56 | assert mod == Myapp.Proto.Lib.Foo 57 | 58 | purge_modules([mod]) 59 | end 60 | 61 | test "returns a module for each enum and message" do 62 | ctx = %Context{ 63 | package: "foo", 64 | global_type_mapping: %{"name.proto" => %{}} 65 | } 66 | 67 | desc = %Google.Protobuf.FileDescriptorProto{ 68 | name: "name.proto", 69 | message_type: [%Google.Protobuf.DescriptorProto{name: "MyMessage"}], 70 | enum_type: [ 71 | %Google.Protobuf.EnumDescriptorProto{ 72 | name: "MyEnum", 73 | value: [ 74 | %Google.Protobuf.EnumValueDescriptorProto{name: :MY_ENUM_NOT_SET, number: 0} 75 | ] 76 | } 77 | ] 78 | } 79 | 80 | assert {nil, [%CodeGeneratorResponse.File{} = file]} = Generator.generate(ctx, desc) 81 | 82 | assert [{enum_mod, _bytecode1}, {message_mod, _bytecode2}] = 83 | Code.compile_string(file.content) 84 | 85 | assert enum_mod == MyEnum 86 | assert message_mod == MyMessage 87 | 88 | purge_modules([enum_mod, message_mod]) 89 | end 90 | 91 | test "returns a module for each enum and message as separate files with one_file_per_module=true" do 92 | ctx = %Context{ 93 | global_type_mapping: %{"name.proto" => %{}}, 94 | one_file_per_module?: true 95 | } 96 | 97 | desc = %Google.Protobuf.FileDescriptorProto{ 98 | name: "name.proto", 99 | package: "foo", 100 | message_type: [%Google.Protobuf.DescriptorProto{name: "MyMessage.Nested"}], 101 | enum_type: [ 102 | %Google.Protobuf.EnumDescriptorProto{ 103 | name: "MyEnum", 104 | value: [ 105 | 
%Google.Protobuf.EnumValueDescriptorProto{name: :MY_ENUM_NOT_SET, number: 0} 106 | ] 107 | } 108 | ] 109 | } 110 | 111 | assert {nil = _extensions, 112 | [ 113 | %CodeGeneratorResponse.File{} = enum_file, 114 | %CodeGeneratorResponse.File{} = message_file 115 | ]} = Generator.generate(ctx, desc) 116 | 117 | assert message_file.name == "foo/my_message/nested.pb.ex" 118 | assert enum_file.name == "foo/my_enum.pb.ex" 119 | end 120 | 121 | test "with one_file_per_module=true and package_prefix" do 122 | ctx = %Context{ 123 | global_type_mapping: %{"name.proto" => %{}}, 124 | one_file_per_module?: true, 125 | package_prefix: "prfx" 126 | } 127 | 128 | desc = %Google.Protobuf.FileDescriptorProto{ 129 | name: "name.proto", 130 | package: "foo", 131 | message_type: [%Google.Protobuf.DescriptorProto{name: "MyMessage.Nested"}] 132 | } 133 | 134 | assert {nil, [%CodeGeneratorResponse.File{} = file]} = Generator.generate(ctx, desc) 135 | 136 | assert file.name == "prfx/foo/my_message/nested.pb.ex" 137 | end 138 | 139 | test "with one_file_per_module=true and module_prefix" do 140 | ctx = %Context{ 141 | global_type_mapping: %{"name.proto" => %{}}, 142 | one_file_per_module?: true, 143 | module_prefix: "My.Prefix" 144 | } 145 | 146 | desc = %Google.Protobuf.FileDescriptorProto{ 147 | name: "name.proto", 148 | package: "foo", 149 | message_type: [%Google.Protobuf.DescriptorProto{name: "MyMessage.Nested"}] 150 | } 151 | 152 | assert {nil, [%CodeGeneratorResponse.File{} = file]} = Generator.generate(ctx, desc) 153 | 154 | assert file.name == "my/prefix/my_message/nested.pb.ex" 155 | end 156 | 157 | test "can generate a GRPC service" do 158 | ctx = %Context{ 159 | package: "foo", 160 | plugins: ["grpc"], 161 | global_type_mapping: %{"name.proto" => %{}, "my_dep" => %{}} 162 | } 163 | 164 | desc = %Google.Protobuf.FileDescriptorProto{ 165 | name: "name.proto", 166 | dependency: ["my_dep"], 167 | service: [%Google.Protobuf.ServiceDescriptorProto{name: "my_service"}] 168 | } 169 | 170 | # We can't compile the generated service module because we haven't loaded GRPC.Service here. 
171 | assert {nil, [%CodeGeneratorResponse.File{} = file]} = Generator.generate(ctx, desc) 172 | assert file.content =~ "defmodule MyService.Service do" 173 | end 174 | end 175 | end 176 | -------------------------------------------------------------------------------- /test/protobuf/protoc/proto/custom_options.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package test; 4 | 5 | import "google/protobuf/descriptor.proto"; 6 | 7 | extend google.protobuf.EnumValueOptions { 8 | optional string my_custom_option = 50005; 9 | } 10 | 11 | extend google.protobuf.MessageOptions { 12 | string lowercase_name = 51300; 13 | } 14 | 15 | enum EnumWithCustomOptions { 16 | MY_ENUM_NOT_SET = 0; 17 | MY_ENUM_FOO = 1 [(my_custom_option) = "hello"]; 18 | MY_ENUM_BAR = 2; 19 | } 20 | 21 | message MessageWithCustomOptions { 22 | option (lowercase_name) = "message_with_custom_options"; 23 | } 24 | -------------------------------------------------------------------------------- /test/protobuf/protoc/proto/extension.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto2"; 2 | 3 | package ext; 4 | 5 | // -I src is needed, see Makefile 6 | import "elixirpb.proto"; 7 | 8 | option (elixirpb.file).module_prefix = "Protobuf.Protoc.ExtTest"; 9 | 10 | message Foo { 11 | optional string a = 1; 12 | } 13 | -------------------------------------------------------------------------------- /test/protobuf/protoc/proto/no_package.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | message NoPackageMessage { 4 | map<int32, int32> number_mapping = 1; 5 | } 6 | -------------------------------------------------------------------------------- /test/protobuf/protoc/proto/service.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto2"; 2 | 3 | // This package holds interesting messages. 4 | package my.test; // dotted package name 5 | 6 | import "test.proto"; 7 | 8 | // An example test service that has 9 | // a test method. It expects a Request 10 | // and returns a Reply. 11 | service TestService { 12 | rpc test (Request) returns (Reply); 13 | } 14 | -------------------------------------------------------------------------------- /test/protobuf/protoc/proto/test.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto2"; 2 | 3 | // This package holds interesting messages. 4 | package my.test; // dotted package name 5 | 6 | // This enum represents different kinds of hats. 7 | enum HatType { 8 | // deliberately skipping 0 9 | FEDORA = 1; 10 | FEZ = 2; 11 | } 12 | 13 | // This enum represents days of the week. 14 | enum Days { 15 | option allow_alias = true; 16 | 17 | MONDAY = 1; 18 | TUESDAY = 2; 19 | LUNDI = 1; // same value as MONDAY 20 | } 21 | 22 | // This is a message that might be sent somewhere. 23 | // 24 | // Here is another line for a documentation example. 25 | message Request { 26 | // This enum represents three different colors.
27 | enum Color { 28 | RED = 0; 29 | GREEN = 1; 30 | BLUE = 2; 31 | } 32 | repeated int64 key = 1; 33 | // optional imp.ImportedMessage imported_message = 2; 34 | optional Color hue = 3; // no default 35 | optional HatType hat = 4 [default = FEDORA]; 36 | // optional imp.ImportedMessage.Owner owner = 6; 37 | optional float deadline = 7 [default = inf]; 38 | optional group SomeGroup = 8 { 39 | optional int32 group_field = 9; 40 | } 41 | 42 | // These foreign types are in imp2.proto, 43 | // which is publicly imported by imp.proto. 44 | // optional imp.PubliclyImportedMessage pub = 10; 45 | // optional imp.PubliclyImportedEnum pub_enum = 13 [default=HAIR]; 46 | 47 | // This is a map field. It will generate map[int32]string. 48 | map<int32, string> name_mapping = 14; 49 | // This is a map field whose value type is a message. 50 | map<sint64, Reply> msg_mapping = 15; 51 | 52 | optional int32 reset = 12; 53 | // This field should not conflict with any getters. 54 | optional string get_key = 16; 55 | } 56 | 57 | message Reply { 58 | message Entry { 59 | required int64 key_that_needs_1234camel_CasIng = 1; 60 | optional int64 value = 2 [default = 7]; 61 | optional int64 _my_field_name_2 = 3; 62 | enum Game { 63 | FOOTBALL = 1; 64 | TENNIS = 2; 65 | } 66 | } 67 | repeated Entry found = 1; 68 | repeated int32 compact_keys = 2 [packed = true]; 69 | extensions 100 to max; 70 | } 71 | 72 | message OtherBase { 73 | optional string name = 1; 74 | extensions 100 to 110, 199; 75 | } 76 | 77 | message ReplyExtensions { 78 | // Extends Reply 79 | extend Reply { 80 | optional double time = 101; 81 | optional ReplyExtensions carrot = 105; 82 | } 83 | // Yet another base message 84 | extend OtherBase { 85 | optional ReplyExtensions donut = 101; 86 | } 87 | } 88 | 89 | message OtherReplyExtensions { 90 | optional int32 key = 1; 91 | } 92 | 93 | // top-level extension 94 | extend Reply { 95 | optional string tag = 103; 96 | optional OtherReplyExtensions donut = 106; 97 | // optional imp.ImportedMessage elephant = 107; // extend with message from another file. 98 | } 99 | 100 | message OldReply { 101 | // Extensions will be encoded in MessageSet wire format. 102 | option message_set_wire_format = true; 103 | extensions 100 to max; 104 | } 105 | 106 | message Communique { 107 | optional bool make_me_cry = 1; 108 | 109 | // This is a oneof, called "union".
110 | oneof union { 111 | int32 number = 5; 112 | string name = 6; 113 | bytes data = 7; 114 | double temp_c = 8; 115 | float height = 9; 116 | Days today = 10; 117 | bool maybe = 11; 118 | sint32 delta = 12; // name will conflict with Delta below 119 | Reply msg = 13; 120 | group SomeGroup = 14 { 121 | optional string member = 15; 122 | } 123 | } 124 | 125 | message Delta { 126 | } 127 | } 128 | 129 | message Options { 130 | optional string opt1 = 1 [deprecated = true]; 131 | } 132 | 133 | message MapInput { 134 | map int32_map = 1; 135 | map sint32_map = 2; 136 | map sfixed32_map = 3; 137 | map fixed32_map = 4; 138 | map uint32_map = 5; 139 | map int64_map = 6; 140 | map sint64_map = 7; 141 | map sfixed64_map = 8; 142 | map fixed64_map = 9; 143 | map uint64_map = 10; 144 | map float_map = 11; 145 | map double_map = 12; 146 | map string_map = 13; 147 | map bool_map = 14; 148 | map bytes_map = 15; 149 | map enum_map = 16; 150 | } 151 | 152 | enum MapEnum { 153 | HELLO = 0; 154 | WORLD = 2; 155 | } 156 | -------------------------------------------------------------------------------- /test/protobuf/wire/varint_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Protobuf.Wire.VarintTest do 2 | use ExUnit.Case, async: true 3 | 4 | doctest Protobuf.Wire.Varint 5 | 6 | describe "encode/1" do 7 | alias Protobuf.Wire.Varint 8 | 9 | test "300" do 10 | assert encode(300) == <<0b10101100, 0b00000010>> 11 | end 12 | 13 | test "150" do 14 | assert encode(150) == <<150, 1>> 15 | end 16 | 17 | test "0" do 18 | assert encode(0) == <<0>> 19 | end 20 | 21 | test "1" do 22 | assert encode(1) == <<1>> 23 | end 24 | 25 | test "min int32" do 26 | assert encode(-2_147_483_648) == <<128, 128, 128, 128, 248, 255, 255, 255, 255, 1>> 27 | end 28 | 29 | test "max int32" do 30 | assert encode(2_147_483_647) == <<255, 255, 255, 255, 7>> 31 | end 32 | 33 | test "min int64" do 34 | assert encode(-9_223_372_036_854_775_808) == 35 | <<128, 128, 128, 128, 128, 128, 128, 128, 128, 1>> 36 | end 37 | 38 | test "max int64" do 39 | assert encode(9_223_372_036_854_775_807) == 40 | <<255, 255, 255, 255, 255, 255, 255, 255, 127>> 41 | end 42 | 43 | test "max uint32" do 44 | assert encode(4_294_967_295) == <<255, 255, 255, 255, 15>> 45 | end 46 | 47 | test "max uint64" do 48 | assert encode(18_446_744_073_709_551_615) == 49 | <<255, 255, 255, 255, 255, 255, 255, 255, 255, 1>> 50 | end 51 | 52 | defp encode(n) do 53 | n 54 | |> Varint.encode() 55 | |> IO.iodata_to_binary() 56 | end 57 | end 58 | 59 | describe "defdecoderp/2" do 60 | import Protobuf.Wire.Varint 61 | 62 | defdecoderp(decode(), do: {value, rest}) 63 | defdecoderp(decode_with_args(arg, _, :fixed_arg), do: {value, rest, arg}) 64 | 65 | test "some numbers" do 66 | cases = [ 67 | {300, <<0b1010110000000010::16>>}, 68 | {150, <<150, 01>>}, 69 | {0, <<0>>}, 70 | {1, <<1>>} 71 | ] 72 | 73 | for {number, bits} <- cases do 74 | assert decode(bits) == {number, ""} 75 | end 76 | end 77 | 78 | test "min int32" do 79 | assert {val, ""} = decode(<<128, 128, 128, 128, 248, 255, 255, 255, 255, 1>>) 80 | assert <<-2_147_483_648::signed-32>> == <<val::32>> 81 | end 82 | 83 | test "max int32" do 84 | assert decode(<<255, 255, 255, 255, 7>>) == {2_147_483_647, ""} 85 | end 86 | 87 | test "min int64" do 88 | assert {val, ""} = decode(<<128, 128, 128, 128, 128, 128, 128, 128, 128, 1>>) 89 | assert <<-9_223_372_036_854_775_808::signed-64>> == <<val::64>> 90 | end 91 | 92 | test "max int64" do 93 | assert decode(<<255, 255, 255, 255, 255, 255, 255, 255,
127>>) == 94 | {9_223_372_036_854_775_807, ""} 95 | end 96 | 97 | test "max uint32" do 98 | assert decode(<<255, 255, 255, 255, 15>>) == {4_294_967_295, ""} 99 | end 100 | 101 | test "max uint64" do 102 | assert decode(<<255, 255, 255, 255, 255, 255, 255, 255, 255, 1>>) == 103 | {18_446_744_073_709_551_615, ""} 104 | end 105 | 106 | test "raises an error if the varint is not decodable" do 107 | assert_raise Protobuf.DecodeError, "cannot decode binary data", fn -> 108 | decode(<<>>) 109 | end 110 | end 111 | 112 | test "can define a decoder that takes any kinds of arguments" do 113 | assert decode_with_args(<<150, 01>>, :some_arg, :ignored, :fixed_arg) == 114 | {150, _rest = "", :some_arg} 115 | 116 | assert_raise Protobuf.DecodeError, "cannot decode binary data", fn -> 117 | decode_with_args(<<>>, :some_arg, :ignored, :fixed_arg) 118 | end 119 | end 120 | end 121 | end 122 | -------------------------------------------------------------------------------- /test/support/doctest.ex: -------------------------------------------------------------------------------- 1 | # Modules mentioned in doctests 2 | 3 | defmodule Color do 4 | @moduledoc false 5 | use Protobuf, syntax: :proto3, enum: true 6 | 7 | field :GREEN, 0 8 | field :RED, 1 9 | end 10 | 11 | defmodule Car do 12 | @moduledoc false 13 | use Protobuf, syntax: :proto3 14 | 15 | field :color, 1, type: Color, enum: true 16 | field :top_speed, 2, type: :float, json_name: "topSpeed" 17 | end 18 | 19 | defmodule VarintDecoders do 20 | import Protobuf.Wire.Varint 21 | 22 | def decode_and_sum(bin, plus) do 23 | decoder_decode_and_sum(bin, plus) 24 | end 25 | 26 | defdecoderp decoder_decode_and_sum(plus) do 27 | {:ok, value + plus, rest} 28 | end 29 | 30 | def decode_all(<<bin::bits>>), do: decode_all(bin, []) 31 | 32 | defp decode_all(<<>>, acc), do: acc 33 | 34 | defdecoderp decode_all(acc) do 35 | decode_all(rest, [value | acc]) 36 | end 37 | end 38 | -------------------------------------------------------------------------------- /test/test_helper.exs: -------------------------------------------------------------------------------- 1 | ExUnit.configure(exclude: [integration: true]) 2 | ExUnit.start() 3 | 4 | Protobuf.load_extensions() 5 | 6 | defmodule Protobuf.TestHelpers do 7 | import ExUnit.Assertions 8 | 9 | def purge_modules(modules) when is_list(modules) do 10 | Enum.each(modules, fn mod -> 11 | :code.purge(mod) 12 | :code.delete(mod) 13 | end) 14 | end 15 | 16 | # TODO: Remove when we depend on Elixir 1.11+. 17 | def tmp_dir(context) do 18 | dir_name = 19 | "#{inspect(context[:case])}#{context[:describe]}#{context[:test]}" 20 | |> String.downcase() 21 | |> String.replace(["-", " ", ".", "_"], "_") 22 | 23 | tmp_dir_name = Path.join(System.tmp_dir!(), dir_name) 24 | 25 | File.rm_rf!(tmp_dir_name) 26 | File.mkdir_p!(tmp_dir_name) 27 | 28 | Map.put(context, :tmp_dir, tmp_dir_name) 29 | end 30 | 31 | def read_generated_file(relative_path) do 32 | [__DIR__, "../generated", relative_path] 33 | |> Path.join() 34 | |> File.read!() 35 | end 36 | 37 | def get_type_spec_as_string(module, bytecode, type) 38 | when is_atom(module) and is_binary(bytecode) and is_atom(type) do 39 | # This code is taken from Code.Typespec in Elixir (v1.13 in particular).
40 | assert {:ok, {_, [debug_info: {:debug_info_v1, _backend, {:elixir_v1, %{}, specs}}]}} = 41 | :beam_lib.chunks(bytecode, [:debug_info]) 42 | 43 | spec = 44 | Enum.find_value(specs, fn 45 | {:attribute, _, :type, {^type, _, _} = spec} -> spec 46 | _other -> nil 47 | end) 48 | 49 | assert not is_nil(spec), "Spec for type #{inspect(module)}.#{type} not found" 50 | 51 | # Code.Typespec.type_to_quoted/1 is not public API in Elixir, but we're still using 52 | # it here for tests. 53 | spec 54 | |> Code.Typespec.type_to_quoted() 55 | |> Macro.to_string() 56 | end 57 | 58 | # This code is taken from Code.fetch_docs/1 in Elixir (v1.13 in particular). 59 | def fetch_docs_from_bytecode(bytecode) when is_binary(bytecode) do 60 | docs_chunk = ~c"Docs" 61 | assert {:ok, {_module, [{^docs_chunk, bin}]}} = :beam_lib.chunks(bytecode, [docs_chunk]) 62 | :erlang.binary_to_term(bin) 63 | end 64 | end 65 | --------------------------------------------------------------------------------