├── .eslintrc ├── .gitignore ├── .gitlab-ci.yml ├── .prettierignore ├── .prettierrc.json ├── .protolint.yaml ├── .rubocop.yml ├── .style.yapf ├── .vscode └── launch.json ├── CHANGELOG.md ├── Dockerfile ├── LICENSE ├── README.md ├── output ├── go │ ├── go.mod │ ├── go.sum │ └── nsa │ │ ├── enums │ │ └── enums.go │ │ ├── profile │ │ └── profile │ │ │ └── profile_v1.go │ │ └── tweet │ │ ├── my_tweets │ │ └── my_tweets_v1.go │ │ └── stats │ │ └── stats_v1.go ├── json-schema │ ├── profile │ │ └── profile_v1.json │ └── tweet │ │ ├── my_tweets_v1.json │ │ └── stats_v1.json ├── protobuf │ └── nsa │ │ ├── enums │ │ └── enums.proto │ │ ├── profile │ │ └── profile │ │ │ └── profile_v1.proto │ │ └── tweet │ │ ├── my_tweets │ │ └── my_tweets_v1.proto │ │ └── stats │ │ └── stats_v1.proto ├── python │ ├── nsa │ │ ├── __init__.py │ │ ├── profile │ │ │ ├── __init__.py │ │ │ └── profile │ │ │ │ ├── __init__.py │ │ │ │ └── profile_v1.py │ │ ├── py.typed │ │ └── tweet │ │ │ ├── __init__.py │ │ │ ├── my_tweets │ │ │ ├── __init__.py │ │ │ └── my_tweets_v1.py │ │ │ └── stats │ │ │ ├── __init__.py │ │ │ └── stats_v1.py │ └── setup.py └── ruby │ ├── nav-schema-architecture.gemspec │ └── nsa │ ├── enums.rb │ ├── nav_schema_architecture.rb │ ├── profile │ └── profile │ │ └── profile_v1.rb │ ├── tweet │ ├── my_tweets │ │ └── my_tweets_v1.rb │ └── stats │ │ └── stats_v1.rb │ └── utils.rb ├── package-lock.json ├── package.json ├── samples ├── profile_v1.html ├── schema_doc.css └── schema_doc.min.js ├── schema ├── integration-schema.gql ├── message-schema-definitions │ ├── profile │ │ └── profile-v1.graphql │ └── tweet │ │ ├── my-tweets-v1.graphql │ │ └── stats-v1.graphql └── scalars │ ├── go-scalars.js │ ├── json-schema.js │ ├── protobuf-scalars.js │ ├── python-scalars.js │ └── ruby-scalars.js ├── scripts ├── bump │ ├── python.js │ └── ruby.js ├── changelog │ └── index.js ├── clean-output │ └── index.js ├── format │ ├── go-mod.js │ ├── go.js │ ├── protobuf.js │ ├── python.js │ └── ruby.js ├── 
generate-output │ ├── commit.js │ └── generate.js ├── git │ ├── add-working-files.js │ ├── commit.js │ ├── push-files-tags.js │ ├── push-files.js │ └── tag-go-module.js ├── postversion │ └── index.js ├── preversion │ └── index.js └── python │ └── packages.js ├── src ├── generators │ ├── go-struct.js │ ├── index.js │ ├── json-schema.js │ ├── protobuf.js │ ├── python.js │ └── ruby.js ├── index.js └── lib │ ├── generate.js │ ├── index.js │ ├── parse.js │ └── util.js ├── static └── ruby │ └── nsa │ ├── enums.rb │ └── utils.rb └── test ├── samples ├── message-definitions │ └── new-address.graphql ├── parsed-schema.json └── test-schema.gql ├── test.generators.js └── test.lib.util.js /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "extends": ["prettier"], 3 | "parserOptions": { 4 | "ecmaVersion": 2020 5 | }, 6 | "plugins": ["prettier"], 7 | "rules": { 8 | "prettier/prettier": ["error", { "singleQuote": true, "parser": "flow" }] 9 | }, 10 | "env": { 11 | "es6": true, 12 | "node": true 13 | }, 14 | "ignorePatterns": ["output/**.*", "schema/**/*.graphql", "schema/**/*.gql"] 15 | } 16 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | 3 | output/ 4 | 5 | # Logs 6 | logs 7 | *.log 8 | npm-debug.log* 9 | yarn-debug.log* 10 | yarn-error.log* 11 | lerna-debug.log* 12 | 13 | # Diagnostic reports (https://nodejs.org/api/report.html) 14 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 15 | 16 | # Runtime data 17 | pids 18 | *.pid 19 | *.seed 20 | *.pid.lock 21 | 22 | # Directory for instrumented libs generated by jscoverage/JSCover 23 | lib-cov 24 | 25 | # Coverage directory used by tools like istanbul 26 | coverage 27 | *.lcov 28 | 29 | # nyc test coverage 30 | .nyc_output 31 | 32 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 33 | .grunt 34 | 35 | # 
Bower dependency directory (https://bower.io/) 36 | bower_components 37 | 38 | # node-waf configuration 39 | .lock-wscript 40 | 41 | # Compiled binary addons (https://nodejs.org/api/addons.html) 42 | build/Release 43 | 44 | # Dependency directories 45 | node_modules/ 46 | jspm_packages/ 47 | 48 | # TypeScript v1 declaration files 49 | typings/ 50 | 51 | # TypeScript cache 52 | *.tsbuildinfo 53 | 54 | # Optional npm cache directory 55 | .npm 56 | 57 | # Optional eslint cache 58 | .eslintcache 59 | 60 | # Optional REPL history 61 | .node_repl_history 62 | 63 | # Output of 'npm pack' 64 | *.tgz 65 | 66 | # Yarn Integrity file 67 | .yarn-integrity 68 | 69 | # dotenv environment variables file 70 | .env 71 | .env.test 72 | 73 | # parcel-bundler cache (https://parceljs.org/) 74 | .cache 75 | 76 | # next.js build output 77 | .next 78 | 79 | # nuxt.js build output 80 | .nuxt 81 | 82 | # vuepress build output 83 | .vuepress/dist 84 | 85 | # Serverless directories 86 | .serverless/ 87 | app__* 88 | 89 | # FuseBox cache 90 | .fusebox/ 91 | 92 | # DynamoDB Local files 93 | .dynamodb/ 94 | 95 | # VS Code 96 | .vscode/ 97 | 98 | setup.sh 99 | -------------------------------------------------------------------------------- /.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | stages: 2 | - Build 3 | - 'Node Modules' 4 | - Test 5 | - Generate 6 | - Publish 7 | 8 | variables: 9 | IMG: "${CI_REGISTRY_IMAGE}" 10 | GOINSTALLPATH: "/go" 11 | WORKDIR: "${CI_PROJECT_DIR}" 12 | 13 | build: 14 | stage: Build 15 | services: 16 | - docker:dind 17 | image: registry.nav.engineering/goldmaster/newbuilder:main 18 | before_script: 19 | - docker login -u gitlab-ci-token -p "${CI_JOB_TOKEN}" "${CI_REGISTRY}" 20 | tags: [ newbuilder ] 21 | script: 22 | - docker build --rm --pull -f Dockerfile -t $IMG --build-arg GIT_USER="${GIT_USER}" --build-arg GIT_ACCESS_TOKEN="${GIT_ACCESS_TOKEN}" --build-arg WORKDIR="${WORKDIR}" --build-arg 
GOINSTALLPATH="${GOINSTALLPATH}" . 23 | - docker push $IMG 24 | rules: 25 | - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && $GITLAB_USER_NAME != $GIT_USER' 26 | changes: 27 | - Dockerfile 28 | 29 | # Install node modules through here instead of in the docker container build because we're using the project dir as WORKDIR 30 | # And Gitlab checks out the repo to the project dir, removing anything added to it during container build 31 | 'Node Modules': 32 | image: $IMG 33 | stage: 'Node Modules' 34 | artifacts: 35 | paths: 36 | - node_modules/ 37 | rules: 38 | - if: '$CI_PIPELINE_SOURCE == "merge_request_event"' 39 | - if: '$CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH' 40 | script: 41 | - npm ci --production=false --also=dev --include=peer 42 | 43 | test: 44 | image: $IMG 45 | stage: Test 46 | dependencies: 47 | - 'Node Modules' 48 | rules: 49 | - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && $GITLAB_USER_NAME == $GIT_USER' 50 | changes: 51 | - src/**/* 52 | - schema/**/* 53 | - output/**/* 54 | - if: '$CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH' 55 | script: 56 | - npm test 57 | 58 | compile_go: 59 | image: $IMG 60 | stage: Test 61 | dependencies: 62 | - 'Node Modules' 63 | rules: 64 | - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && $GITLAB_USER_NAME == $GIT_USER' 65 | changes: 66 | - src/**/* 67 | - schema/**/* 68 | - output/**/* 69 | - if: '$CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH' 70 | script: 71 | - cd $WORKDIR/output/go/nsa 72 | # verify go builds cleanly, then delete binary output 73 | - go build ./... 
74 | - go clean 75 | 76 | generate: 77 | image: $IMG 78 | stage: Generate 79 | dependencies: 80 | - 'Node Modules' 81 | script: 82 | - echo "Generating code and schemas ${CI_PROJECT_PATH}" 83 | - git config push.default simple 84 | - git config --global url."https://".insteadOf git:/ 85 | - git config --global user.email "${GIT_USER}@nav.com" 86 | - git config --global user.name "${GIT_USER}" 87 | - git remote set-url origin https://${GIT_USER}:${GIT_ACCESS_TOKEN}@git.nav.com/${CI_PROJECT_PATH}.git 88 | - git fetch --all --tags 89 | - git checkout ${CI_MERGE_REQUEST_SOURCE_BRANCH_NAME} 90 | - git reset --hard origin/${CI_MERGE_REQUEST_SOURCE_BRANCH_NAME} 91 | - npm run generate-output:commit 92 | rules: 93 | - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && $GITLAB_USER_NAME != $GIT_USER' 94 | changes: 95 | - src/**/* 96 | - schema/**/* 97 | 98 | publish: 99 | image: $IMG 100 | stage: Publish 101 | dependencies: 102 | - 'Node Modules' 103 | script: 104 | - echo "Publishing generated packages" 105 | - git config push.default simple 106 | - git config --global url."https://".insteadOf git:/ 107 | - git config --global user.email "${GIT_USER}@nav.com" 108 | - git config --global user.name "${GIT_USER}" 109 | - git remote set-url origin https://${GIT_USER}:${GIT_ACCESS_TOKEN}@git.nav.com/${CI_PROJECT_PATH}.git 110 | - git fetch --all --tags 111 | - git checkout master 112 | - git reset --hard origin/master 113 | - 'npm version patch -m "ci(publish): post-merge publish new version of nav schema" --unsafe-perm' 114 | rules: 115 | - if: '$CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH && $GITLAB_USER_NAME != $GIT_USER' 116 | changes: 117 | - output/**/* 118 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | # Ignore artifacts: 2 | output 3 | schema 4 | .gitlab-ci.yml 5 | CHANGELOG.md 6 | package-lock.json 7 | 
-------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { "semi": false, "singleQuote": true, "printWidth": 130 } 2 | -------------------------------------------------------------------------------- /.protolint.yaml: -------------------------------------------------------------------------------- 1 | lint: 2 | rules: 3 | all_default: true 4 | 5 | remove: 6 | - MAX_LINE_LENGTH 7 | - FIELD_NAMES_EXCLUDE_PREPOSITIONS 8 | - MESSAGE_NAMES_EXCLUDE_PREPOSITIONS 9 | - MESSAGES_HAVE_COMMENT 10 | - SERVICES_HAVE_COMMENT 11 | - RPCS_HAVE_COMMENT 12 | - FIELDS_HAVE_COMMENT 13 | - ENUMS_HAVE_COMMENT 14 | - ENUM_FIELDS_HAVE_COMMENT 15 | - FILE_HAS_COMMENT 16 | -------------------------------------------------------------------------------- /.rubocop.yml: -------------------------------------------------------------------------------- 1 | --- 2 | AllCops: 3 | TargetRubyVersion: 2.7 4 | NewCops: enable 5 | Gemspec/DateAssignment: 6 | Enabled: true 7 | Layout/EmptyLinesAroundAttributeAccessor: 8 | Enabled: true 9 | Layout/FirstHashElementIndentation: 10 | EnforcedStyle: consistent 11 | Layout/FirstParameterIndentation: 12 | EnforcedStyle: consistent 13 | # Autocorrect line length where possible, but allow for arbitrarily long lines 14 | Layout/LineLength: 15 | AutoCorrect: true 16 | Max: 1000 17 | Layout/MultilineArrayLineBreaks: 18 | Enabled: true 19 | Layout/SpaceAroundMethodCallOperator: 20 | Enabled: true 21 | Layout/SpaceBeforeBrackets: 22 | Enabled: true 23 | Lint/AmbiguousAssignment: 24 | Enabled: true 25 | Lint/DeprecatedConstants: 26 | Enabled: true 27 | Lint/DeprecatedOpenSSLConstant: 28 | Enabled: false 29 | Lint/DuplicateBranch: 30 | Enabled: true 31 | Lint/DuplicateRegexpCharacterClassElement: 32 | Enabled: true 33 | Lint/EmptyBlock: 34 | Enabled: true 35 | Lint/EmptyClass: 36 | Enabled: true 37 | Lint/LambdaWithoutLiteralBlock: 38 | Enabled: 
true 39 | Lint/MixedRegexpCaptureTypes: 40 | Enabled: true 41 | Lint/NoReturnInBeginEndBlocks: 42 | Enabled: true 43 | Lint/NumberedParameterAssignment: 44 | Enabled: true 45 | Lint/OrAssignmentToConstant: 46 | Enabled: true 47 | Lint/RaiseException: 48 | Enabled: true 49 | Lint/RedundantDirGlobSort: 50 | Enabled: true 51 | Lint/StructNewOverride: 52 | Enabled: true 53 | Lint/SymbolConversion: 54 | Enabled: true 55 | Lint/ToEnumArguments: 56 | Enabled: true 57 | Lint/TripleQuotes: 58 | Enabled: true 59 | Lint/UnexpectedBlockArity: 60 | Enabled: true 61 | Lint/UnmodifiedReduceAccumulator: 62 | Enabled: true 63 | Metrics/AbcSize: 64 | Enabled: false 65 | Metrics/CyclomaticComplexity: 66 | Enabled: false 67 | Metrics/MethodLength: 68 | Enabled: false 69 | Metrics/ModuleLength: 70 | Enabled: false 71 | Metrics/ParameterLists: 72 | Enabled: false 73 | Metrics/PerceivedComplexity: 74 | Enabled: false 75 | Naming/FileName: 76 | Enabled: false 77 | Naming/MethodName: 78 | Enabled: false 79 | Naming/MethodParameterName: 80 | Enabled: false 81 | Naming/RescuedExceptionsVariableName: 82 | PreferredName: exception 83 | Naming/VariableName: 84 | Enabled: false 85 | Style/ArgumentsForwarding: 86 | Enabled: true 87 | Style/ClassAndModuleChildren: 88 | Enabled: false 89 | Style/CollectionCompact: 90 | Enabled: true 91 | Style/DocumentDynamicEvalDefinition: 92 | Enabled: true 93 | Style/Documentation: 94 | Enabled: false 95 | Style/EndlessMethod: 96 | Enabled: true 97 | Style/ExponentialNotation: 98 | Enabled: true 99 | Style/FrozenStringLiteralComment: 100 | Enabled: false 101 | Style/HashConversion: 102 | Enabled: true 103 | Style/HashEachMethods: 104 | Enabled: true 105 | Style/HashExcept: 106 | Enabled: true 107 | Style/HashTransformKeys: 108 | Enabled: true 109 | Style/HashTransformValues: 110 | Enabled: true 111 | Style/IfWithBooleanLiteralBranches: 112 | Enabled: true 113 | Style/Lambda: 114 | EnforcedStyle: literal 115 | Style/MethodCallWithArgsParentheses: 116 | Enabled: 
true 117 | Style/NegatedIfElseCondition: 118 | Enabled: true 119 | Style/NilLambda: 120 | Enabled: true 121 | Style/RedundantArgument: 122 | Enabled: true 123 | Style/RedundantFetchBlock: 124 | Enabled: true 125 | Style/RedundantRegexpCharacterClass: 126 | Enabled: true 127 | Style/RedundantRegexpEscape: 128 | Enabled: true 129 | Style/SlicingWithRange: 130 | Enabled: true 131 | Style/StringChars: 132 | Enabled: true 133 | Style/StringLiterals: 134 | ConsistentQuotesInMultiline: true 135 | EnforcedStyle: double_quotes 136 | Style/SwapValues: 137 | Enabled: true 138 | Style/SymbolArray: 139 | Enabled: false 140 | -------------------------------------------------------------------------------- /.style.yapf: -------------------------------------------------------------------------------- 1 | [style] 2 | based_on_style = facebook 3 | ALIGN_CLOSING_BRACKET_WITH_VISUAL_INDENT=True 4 | BLANK_LINE_BEFORE_MODULE_DOCSTRING=True 5 | BLANK_LINE_BEFORE_CLASS_DOCSTRING=True 6 | BLANK_LINES_AROUND_TOP_LEVEL_DEFINITION=2 7 | COLUMN_LIMIT=120 8 | CONTINUATION_ALIGN_STYLE=SPACE 9 | CONTINUATION_INDENT_WIDTH=4 10 | DEDENT_CLOSING_BRACKETS=True 11 | INDENT_WIDTH=4 12 | JOIN_MULTIPLE_LINES=True 13 | SPACES_AROUND_POWER_OPERATOR=True 14 | USE_TABS=False 15 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "type": "pwa-node", 9 | "request": "launch", 10 | "name": "Launch Program", 11 | "skipFiles": [ 12 | "/**" 13 | ], 14 | "program": "${workspaceFolder}\\src\\index.js" 15 | } 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nav-inc/nav-schema-architecture/e218874e35f9d2de30b37d3a41d58725b1c1156d/CHANGELOG.md -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM registry.nav.engineering/goldmaster/node:ubuntu-20.04 2 | # Tooling is mostly JS, starting from the node goldmaster 3 | 4 | ARG GIT_USER 5 | ARG GIT_ACCESS_TOKEN 6 | ARG WORKDIR 7 | ARG GOINSTALLPATH 8 | 9 | ENV DEBIAN_FRONTEND noninteractive 10 | 11 | RUN git config --global url."https://$GIT_USER:$GIT_ACCESS_TOKEN@git.nav.com/".insteadOf https://git.nav.com/ 12 | 13 | RUN apt-get update \ 14 | && apt-get install -y \ 15 | apt-transport-https \ 16 | bc \ 17 | build-essential \ 18 | ca-certificates \ 19 | curl \ 20 | gcc \ 21 | git \ 22 | git-core \ 23 | gnupg-agent \ 24 | libcurl4-openssl-dev \ 25 | libffi-dev \ 26 | libreadline-dev \ 27 | librsvg2-dev \ 28 | libssl-dev \ 29 | libxml2-dev \ 30 | libxslt1-dev \ 31 | libyaml-dev \ 32 | make \ 33 | nodejs \ 34 | python3 \ 35 | python3-pip \ 36 | software-properties-common \ 37 | yarn \ 38 | zlib1g-dev \ 39 | && rm -rf /var/lib/apt/lists/* 40 | 41 | # Get Go and its dependencies 42 | RUN curl -sSfL https://golang.org/dl/go1.16.8.linux-amd64.tar.gz | tar xz -C /usr/local 43 | 44 | ENV GOPATH "${GOINSTALLPATH}" 45 | RUN mkdir -p "$GOPATH/src" "$GOPATH/bin" && chmod -R 777 "$GOPATH" 46 | ENV PATH 
$GOPATH/bin:/usr/local/go/bin:$PATH 47 | 48 | WORKDIR $WORKDIR 49 | 50 | COPY . . 51 | 52 | RUN curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s v1.27.0 \ 53 | && mv ./bin/* /usr/local/bin/ 54 | RUN GO111MODULE=on go get -v -u github.com/ory/go-acc golang.org/x/tools/cmd/goimports github.com/client9/misspell/cmd/misspell cloud.google.com/go github.com/yoheimuta/protolint/cmd/protolint 55 | 56 | RUN apt-add-repository ppa:brightbox/ruby-ng 57 | RUN apt-get update \ 58 | && apt-get install -y \ 59 | ruby2.7 \ 60 | ruby2.7-dev \ 61 | && rm -rf /var/lib/apt/lists/* 62 | 63 | RUN gem update --system 64 | RUN gem install bundler rubocop 65 | 66 | # Get python and its dependencies 67 | RUN pip3 install --upgrade pip distlib setuptools 68 | RUN pip3 install yapf unify bumpversion autoflake 69 | 70 | # CHANGE THIS COMMENT TO FORCE A DOCKER IMAGE REBUILD 4 71 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Nav 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Nav Schema Architecture (NSA) 2 | 3 | NSA is using GraphQL to define a common data model (CDM) and event/message formats. The event or message formats are defined as GraphQL queries from the CDM. 4 | 5 | Using GraphQL allows a contract developer to describe both the data model and message format at the same time, rather than needing two sets of semantics. This is useful when an attribute may be optional on the underlying data model, but required when that model is used in a specific message. 6 | 7 | The primary purpose of NSA is to generate code and schemas in multiple languages, all based on the root definition using GraphQL. The outputs can be other schema languages, such as protobuf or JSON Schema, or code, with Go, Ruby, and Python currently supported. 8 | 9 | The benefit of a common data model comes from the ability to easily disseminate its implementation across multiple teams and services. A build pipeline will watch for schema changes on a feature branch, then launch a secondary pipeline to generate the output for all target languages. That output is then committed back to the feature branch, where a developer can review the changes before merging to the main branch. All relevant, language-specific output packages are rebuilt, versioned and tagged. 10 | 11 | [GraphQL Syntax Used for a Novel Approach to Schema Validation and Code Generation](https://www.infoq.com/news/2022/05/graphql-schema-validation) 12 | 13 | ## Why GraphQL? 
14 | 15 | Schema languages are not new, and come by the dozens: SQL, XML Schema, json-schema, Protobuf, GraphQL... 16 | 17 | What's key to understand is that a schema language is (in general) perfectly capable of expressing a data model or a message format but not both at the same time, and that's the catch. 18 | 19 | A message format, in relation to a data model, must be expressed as a projection of the data model. As such you need some additional semantics to express that projection. 20 | 21 | For instance a given data entity may have 5 required fields to be considered valid. However, when a message carries that entity, it can convey an arbitrary number of attributes. Conversely, an attribute may be optional in a data schema, but in a particular context, a message schema may require that attribute. There is just no way around it, you need two distinct sets of semantics to express a data model schema and the message formats that carry entities of that data model. 22 | 23 | In general, message schemas (e.g. json-schema) should be self-standing (without imports) as they specify type projections which vary from message to message. It may also cumbersome and brittle to include a library of files to validate a single payload. 24 | 25 | GraphQL provides a unique set of semantics and tools that make it easy to create a Schema Architecture: 26 | 27 | - a Schema Language 28 | - a Query language which has "projection semantics" 29 | - custom directives to annotate the Schema or Query language 30 | - parsers for both syntaxes 31 | - modern editors with syntax coloring, validation... 32 | - Documentation generators 33 | 34 | In essence, all that's left to do is to implement schema generators that will take message format definitions (as GraphQL Queries) and generate other schema languages such as Protobuf, json-schema, XML Schema, ... and language specific client libraries. Just to be clear, there is absolutely no graphql engine running in NSA. 
We only use graphql as a syntax to capture the data model and message formats. One of the big benefits that we get with that approach is that most developers (backend and frontend) are familiar with graphql. 35 | 36 | ## Architecture 37 | 38 | ``` 39 | Metadata 40 | +----------------------------------------------+ 41 | | +----------+ +----------+ +----------+ | +-------------+ +----------+ +----------+ +----------+ +----------+ 42 | | | | | | | | | | | | Schema 1 | | Schema 1 | | msg_1.go | | msg_1.rb | 43 | | | Query 1 | | Query 2 | ... | Query N | | | | | | | | | | | | 44 | | | | | | | | | | | +----------+ +----------+ +----------+ +----------+ 45 | | +-----+----+ +----+-----+ +-----+----+ | | | +----------+ +----------+ +----------+ +----------+ 46 | | | | | | | Generator | | Schema 2 | | Schema 2 | | msg_2.go | | msg_2.rb | 47 | | +--------/--+------------------+ | ----\ | | ----\ | | | | | | | | 48 | | / | ----/ | | ----/ +----------+ +----------+ +----------+ +----------+ 49 | | +------------/------------+ | | | ... ... ... ... 
50 | | | | | | | +----------+ +----------+ +----------+ +----------+ 51 | | | Integration Schema | | | | | Schema N | | Schema N | | msg_3.go | | msg_3.rb | 52 | | | | | | | | | | | | | | | 53 | | +-------------------------+ | +-------------+ +----------+ +----------+ +----------+ +----------+ 54 | | | | json-schema protobuf *.go *.ruby 55 | +-------------|--------------------------------+ 56 | | 57 | | 58 | | 59 | | +--------------------+ +--------------------+ 60 | | | | | | 61 | | | Voyager or | | Playground | 62 | | | GraphQLDoc | | (query editor) | 63 | | | | | | 64 | | +--------------------+ +--------------------+ 65 | | | | 66 | | | | 67 | | +--------------------+ | 68 | | | S3 Web Server | | 69 | +-------------------->| or | | 70 | | Apollo-Server |<-------------------------- + 71 | +--------------------+ 72 | ``` 73 | 74 | ## Instructions 75 | 76 | The code generator is meant to be run in the gitlab built pipeline and in any case, you should never commit code to the `output/` directory. However if you want to run the code generator locally, [install nodejs and npm](https://nodejs.org/en/). 77 | 78 | The exception is the package specifiction files: `go.mod`, `nav-schema-architecture.gemspec`, and `setup.py`. The version numbers in these files are managed by the pipelines and should not be changed, but other entries such as the project description may be changed manually. 79 | 80 | ``` 81 | npm install 82 | npm run generate-output:generate 83 | ``` 84 | 85 | The generate script uses rubocop (Ruby), unify, autoflake (Python) and goimports (Golang) 86 | 87 | ``` 88 | gem install rubocop 89 | pip install unify 90 | pip install --upgrade autoflake 91 | go get golang.org/x/tools/cmd/goimports 92 | ``` 93 | 94 | The project is modular. 
The list of current generators (json-schema, golang, protobuf, ruby and python) can be extended easily while [some of the generators may be turned off](https://github.com/nav-inc/nav-schema-architecture/blob/main/src/generators/index.js) 95 | 96 | It assumes the schema is in `./schema/` and the message definitions are in `./schema/message-schema-definitions` 97 | 98 | You can also specify a set of environment variables to point to a different set of directories 99 | 100 | | Env variable | Example | Description | 101 | | ---------------------- | --------------------------------- | -------------------------------------------------------------- | 102 | | SCHEMA | schema/integration-schema.gql | the location of the GraphQL schema (common data model) | 103 | | MESSAGE_DEFINITION_DIR | schema/message-schema-definitions | the parent directory where the message definitions are located | 104 | | OUTPUT_DIR_JSON_SCHEMA | output/json-schema | json-schema output directory | 105 | | OUTPUT_DIR_GO_STRUCT | output/go | Go struct output directory | 106 | | OUTPUT_DIR_RUBY | output/ruby | Ruby output directory | 107 | | GIT_ROOT | git.nav.com/engineering | The root of your git repo | 108 | 109 | ## The Common Data Model 110 | 111 | The schema architecture is using the GraphQL schema notation to model the Common Data Model (CDM) data types. 112 | 113 | ### Scalars 114 | 115 | Scalars represent `leaves data types`. Currently the generator supports simple type translations and regex validations but this could be extended in the future to more sophisticated schemes. Scalars can also be used to map to native data types, while retaining the option to add more validation later. 
Scalar definitions are specified in the `schema/scalars` directory (one file per code generator instance) 116 | 117 | Currently a scalar definion contains a type and optionally a regex pattern or an import statement: 118 | 119 | ```javascript 120 | const ZIPCode = { 121 | type: 'string', 122 | pattern: '^[0-9]{5}(?:-[0-9]{4})?$', 123 | } 124 | 125 | const DateTime = { 126 | type: 'google.protobuf.Timestamp', 127 | import: 'google/protobuf/timestamp.proto', 128 | } 129 | 130 | module.exports = { 131 | DateTime, 132 | CurrencyCent, 133 | UUID, 134 | Phone, 135 | ZIPCode, // <-- all scalar type definition needs to be exported 136 | Any, // <-- reserved scalar type 137 | } 138 | ``` 139 | 140 | The `Any` type is a reserved scalar type that is implemented without validation and should accept any (JSON) type. In Golang the `Any` type is implemented as a `json.RawMessage`. 141 | 142 | ### GraphQL Types 143 | 144 | The CDM types are defined as expected using GraphQL Types and type extensions. The CDM will be broken down in the future to enable teams for focus on their domain. That's for instance when type extensions will come handy as it will enable a team to add properties on a type without interfering the the work of others. 145 | 146 | ```graphql 147 | type Person { 148 | address: [Address] # Array 149 | firstName: String! # required 150 | lastName: String! # required 151 | phone: Phone # optional 152 | } 153 | 154 | extends type Person { 155 | height: Float 156 | } 157 | ``` 158 | 159 | Enums are supported as expected and the corresponding property values are validated accordingly 160 | 161 | ```graphql 162 | enum ProfileSource { 163 | USERREPORTED 164 | CREDITREPORT 165 | CASHFLOW 166 | CLOVER 167 | } 168 | ``` 169 | 170 | ### Message payload definitions 171 | 172 | Currently Message definitions associated to a single types (for instance `accountCreated` event -> `Account` type). 
It is preferable to use entity types when possible, but sometimes complex events contain a series of entity information. In that case, it is perfectly ok to create a composite type (for instance `AccountAndSubscription`). 173 | 174 | The first step to specify a message format is to declare it in the Query type of the schema (since we are using the GraphQL Query DSL to specify it): 175 | 176 | ```graphql 177 | # message definitions 178 | type Query { 179 | myTweets: User 180 | stats: Tweet 181 | profile: User 182 | # .... 183 | } 184 | ``` 185 | 186 | The next step is to create the message specification in the corresponding directory: 187 | 188 | ``` 189 | schema/ 190 | | 191 | +-- message-schema-definitions/ 192 | | 193 | +-- business-profile/ 194 | | 195 | | business-revenue-changed-v1.graphql 196 | ``` 197 | 198 | The corresponding message definition is a valid graphql query on the type declared about (though no validation is currently done on the query): 199 | 200 | ```graphql 201 | { 202 | profile 203 | @namespace(value: "api.tweet.profile") 204 | @title(value: "User profile payload") 205 | @description(value: "Sample user profile") 206 | @version(value: 1) { 207 | id @field(order: 1, required: true) 208 | username @field(order: 2, required: true) 209 | firstName @field(order: 3) 210 | lastName @field(order: 4) 211 | bio @field(order: 5) 212 | birthdate @field(order: 6) 213 | email @field(order: 7, required: true) 214 | accountType @field(order: 8) 215 | verified @field(order: 9) 216 | } 217 | } 218 | ``` 219 | 220 | The directives that currently used are: 221 | 222 | `@namespace` that map to the json-schema namespace, the golang package and the ruby module associated to generated code/schema. 223 | `@title` and `@description` are just comments added to the generated code. 224 | `@version` should be used to refer to the major version of the schema (SemVer). 
Message definitions should be designed with [compatibility](https://www.xml.com/pub/a/2004/07/21/design.html) in mind for the minor versions. 225 | 226 | The following values of the `@field` directive have been implemented: 227 | 228 | - `order` maps to the field order value in the protobuf code generator 229 | - `required` overrides the CDM schema field required attribute. When a field is required in the schema, it can be made optional in the payload definition (`required: false`). Conversely, when a field is required in the schema is can be made optional in the payload definition (`required: true`). When no changes are desired, then the field required attribute is not needed. 230 | 231 | Message payload definitions may also be implemented with a node module that must return a [parsed gql query](https://github.com/apollographql/graphql-tag). 232 | 233 | ### Limitations 234 | 235 | 1. If you are reusing the same type in a payload, you have to use the same attributes. For instance if Address is reused twice (business address, person address), it should use the same attributes (i.e. the same projection). 236 | 237 | ## NSA Walk through 238 | 239 | NSA's implementation is relatively simple. The [index.js](./src/index.js) shows three simple steps: 240 | 241 | - parse (schema and message payload definitions) 242 | - generate code 243 | - save output files 244 | 245 | The code generators are modular, all using a [`core generator`](./src/lib/generate.js) 246 | 247 | The core generator is in charge of computing the projections between the graphql queries and the graphql schema while orchestrating the code generation (enums, structs, root,...). 248 | 249 | The graphql parser produces a convoluted AST for both the schema and the message payload definition. The [parse.js](./src/lib/parse.js) library is a set of helper functions that transformed the graphql AST into code gen ready AST. 
The last library, [util.js](./src/lib/util.js) is made up of a series of general helper functions. 250 | 251 | The target code generators are all structured the same way with a series of call backs for the `core generator`: 252 | 253 | - `enumDef` 254 | - `structDef` 255 | - `generate` (generates the root element of the message payload definition) 256 | 257 | The code generators are all using their corresponding [scalar file](./schema/scalars/go-scalars.js). 258 | 259 | Finally, the `./src/generators` directory contains all the target code generators (go, python, ruby, json-schema and protobuf). 260 | 261 | The target code generators are activated via the `./src/generators/index.js` file which exports an array of configurations: 262 | 263 | ```javascript 264 | { 265 | generate: rubyGenerate, 266 | extension: 'rb', 267 | toDir: env('OUTPUT_DIR_RUBY', 'output/ruby/nsa'), 268 | staticDir: env('STATIC_DIR_RUBY', 'static/ruby/nsa'), 269 | type: 'ruby', 270 | outputFormatter: R.identity, 271 | postProcessStep: rubyPostProcessStep, 272 | } 273 | ``` 274 | 275 | The output formatter is a simple function which can be used to make final touches to the generated code (for instance for go-lang): 276 | 277 | ```javascript 278 | const stringFormatter = (p, context) => { 279 | switch (p) { 280 | case 'iD': 281 | return 'ID' 282 | } 283 | return p 284 | } 285 | ``` 286 | 287 | The post processing step can be used to generate ancillary files such as the require file in Ruby. 288 | 289 | The staticDir contains static files that need to be added to the generated directory structure. 
290 | -------------------------------------------------------------------------------- /output/go/go.mod: -------------------------------------------------------------------------------- 1 | module git.nav.com/engineering/nav-schema-architecture/output/go 2 | 3 | go 1.18 4 | 5 | require cloud.google.com/go v0.107.0 6 | -------------------------------------------------------------------------------- /output/go/go.sum: -------------------------------------------------------------------------------- 1 | cloud.google.com/go v0.107.0 h1:qkj22L7bgkl6vIeZDlOY2po43Mx/TIa2Wsa7VR+PEww= 2 | cloud.google.com/go v0.107.0/go.mod h1:wpc2eNrD7hXUTy8EKS10jkxpZBjASrORK7goS+3YX2I= 3 | github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= 4 | -------------------------------------------------------------------------------- /output/go/nsa/enums/enums.go: -------------------------------------------------------------------------------- 1 | package enums 2 | 3 | import "fmt" 4 | 5 | type AccountType string 6 | 7 | const ( 8 | BUSINESS AccountType = "BUSINESS" 9 | CREATOR AccountType = "CREATOR" 10 | ) 11 | 12 | func (v AccountType) Validate() error { 13 | switch v { 14 | case BUSINESS, CREATOR: 15 | return nil 16 | default: 17 | return fmt.Errorf("%s is not a valid AccountType value", v) 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /output/go/nsa/profile/profile/profile_v1.go: -------------------------------------------------------------------------------- 1 | package api_tweet_profile_profile 2 | 3 | import ( 4 | "fmt" 5 | "time" 6 | 7 | "git.nav.com/engineering/nav-schema-architecture/output/go/nsa/enums" 8 | ) 9 | 10 | // User profile payload 11 | 12 | type Profile struct { 13 | Id *string `json:"id,omitempty"` 14 | Username *string `json:"username,omitempty"` 15 | FirstName *string `json:"firstName,omitempty"` 16 | LastName *string `json:"lastName,omitempty"` 17 | Bio *string `json:"bio,omitempty"` 18 | 
Birthdate *time.Time `json:"birthdate,omitempty"` 19 | Email *string `json:"email,omitempty"` 20 | AccountType *enums.AccountType `json:"accountType,omitempty"` 21 | Verified *bool `json:"verified,omitempty"` 22 | } 23 | 24 | func (o Profile) Validate() error { 25 | 26 | if o.Id == nil { 27 | return fmt.Errorf("Id is a required property of Profile") 28 | } 29 | 30 | if o.Username == nil { 31 | return fmt.Errorf("Username is a required property of Profile") 32 | } 33 | 34 | if o.Email == nil { 35 | return fmt.Errorf("Email is a required property of Profile") 36 | } 37 | 38 | if o.AccountType != nil { 39 | 40 | AccountTypeError := o.AccountType.Validate() 41 | if AccountTypeError != nil { 42 | return fmt.Errorf("AccountType is invalid %s", AccountTypeError) 43 | } 44 | 45 | } 46 | 47 | return nil 48 | } 49 | -------------------------------------------------------------------------------- /output/go/nsa/tweet/my_tweets/my_tweets_v1.go: -------------------------------------------------------------------------------- 1 | package api_tweet_myTweets_myTweets 2 | 3 | import ( 4 | "fmt" 5 | "time" 6 | ) 7 | 8 | // User Tweets payload 9 | 10 | type Tweet struct { 11 | Id *string `json:"id,omitempty"` 12 | Date *time.Time `json:"date,omitempty"` 13 | Body *string `json:"body,omitempty"` 14 | } 15 | 16 | func (o Tweet) Validate() error { 17 | 18 | if o.Id == nil { 19 | return fmt.Errorf("Id is a required property of Tweet") 20 | } 21 | 22 | if o.Date == nil { 23 | return fmt.Errorf("Date is a required property of Tweet") 24 | } 25 | 26 | if o.Body == nil { 27 | return fmt.Errorf("Body is a required property of Tweet") 28 | } 29 | 30 | return nil 31 | } 32 | 33 | type MyTweets struct { 34 | Id *string `json:"id,omitempty"` 35 | Username *string `json:"username,omitempty"` 36 | Tweets []Tweet `json:"tweets,omitempty"` 37 | PinnedTweet *Tweet `json:"pinnedTweet,omitempty"` 38 | } 39 | 40 | func (o MyTweets) Validate() error { 41 | 42 | if o.Id == nil { 43 | return fmt.Errorf("Id 
is a required property of MyTweets") 44 | } 45 | 46 | if o.Username == nil { 47 | return fmt.Errorf("Username is a required property of MyTweets") 48 | } 49 | 50 | if o.Tweets == nil { 51 | return fmt.Errorf("Tweets is a required property of MyTweets") 52 | } 53 | 54 | for _, tweetsElement := range o.Tweets { 55 | TweetsError := tweetsElement.Validate() 56 | if TweetsError != nil { 57 | return fmt.Errorf("An element of Tweets is invalid %s", TweetsError) 58 | } 59 | } 60 | 61 | if o.PinnedTweet != nil { 62 | 63 | PinnedTweetError := o.PinnedTweet.Validate() 64 | if PinnedTweetError != nil { 65 | return fmt.Errorf("PinnedTweet is invalid %s", PinnedTweetError) 66 | } 67 | 68 | } 69 | 70 | return nil 71 | } 72 | -------------------------------------------------------------------------------- /output/go/nsa/tweet/stats/stats_v1.go: -------------------------------------------------------------------------------- 1 | git add -Apackage api_tweet_stats_stats 2 | 3 | import ( 4 | "fmt" 5 | "time" 6 | ) 7 | 8 | // Tweet stats 9 | 10 | type Stat struct { 11 | Views *int64 `json:"views,omitempty"` 12 | Likes *int64 `json:"likes,omitempty"` 13 | Retweets *int64 `json:"retweets,omitempty"` 14 | Responses *int64 `json:"responses,omitempty"` 15 | } 16 | 17 | func (o Stat) Validate() error { 18 | 19 | if o.Views == nil { 20 | return fmt.Errorf("Views is a required property of Stat") 21 | } 22 | 23 | if o.Likes == nil { 24 | return fmt.Errorf("Likes is a required property of Stat") 25 | } 26 | 27 | if o.Retweets == nil { 28 | return fmt.Errorf("Retweets is a required property of Stat") 29 | } 30 | 31 | if o.Responses == nil { 32 | return fmt.Errorf("Responses is a required property of Stat") 33 | } 34 | 35 | return nil 36 | } 37 | 38 | type Stats struct { 39 | Id *string `json:"id,omitempty"` 40 | Date *time.Time `json:"date,omitempty"` 41 | Stats *Stat `json:"stats,omitempty"` 42 | } 43 | 44 | func (o Stats) Validate() error { 45 | 46 | if o.Id == nil { 47 | return fmt.Errorf("Id 
is a required property of Stats") 48 | } 49 | 50 | if o.Date == nil { 51 | return fmt.Errorf("Date is a required property of Stats") 52 | } 53 | 54 | if o.Stats == nil { 55 | return fmt.Errorf("Stats is a required property of Stats") 56 | } 57 | 58 | StatsError := o.Stats.Validate() 59 | if StatsError != nil { 60 | return fmt.Errorf("Stats is invalid %s", StatsError) 61 | } 62 | 63 | return nil 64 | } 65 | -------------------------------------------------------------------------------- /output/json-schema/profile/profile_v1.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-07/schema#", 3 | "$id": "http://nav.com/api.tweet.profile.schema.json/v1", 4 | "title": "User profile payload", 5 | "description": "Sample user profile", 6 | "required": [], 7 | "type": "object", 8 | "properties": { 9 | "id": { 10 | "type": "string", 11 | "pattern": "^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$" 12 | }, 13 | "username": { 14 | "type": "string" 15 | }, 16 | "firstName": { 17 | "type": "string" 18 | }, 19 | "lastName": { 20 | "type": "string" 21 | }, 22 | "bio": { 23 | "type": "string" 24 | }, 25 | "birthdate": { 26 | "type": "String", 27 | "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" 28 | }, 29 | "email": { 30 | "type": "string" 31 | }, 32 | "accountType": { 33 | "enum": [ 34 | "BUSINESS", 35 | "CREATOR" 36 | ] 37 | }, 38 | "verified": { 39 | "type": "boolean" 40 | } 41 | }, 42 | "definitions": {} 43 | } -------------------------------------------------------------------------------- /output/json-schema/tweet/my_tweets_v1.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-07/schema#", 3 | "$id": "http://nav.com/api.tweet.myTweets.schema.json/v1", 4 | "title": "User Tweets payload", 5 | "description": "An array of tweets for a given user", 6 | "required": [], 7 | "type": "object", 8 | 
"properties": { 9 | "id": { 10 | "type": "string", 11 | "pattern": "^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$" 12 | }, 13 | "username": { 14 | "type": "string" 15 | }, 16 | "tweets": { 17 | "$ref": "#Tweet" 18 | }, 19 | "pinnedTweet": { 20 | "$ref": "#Tweet" 21 | } 22 | }, 23 | "definitions": { 24 | "Tweet": { 25 | "$id": "#Tweet", 26 | "type": "object", 27 | "properties": { 28 | "id": { 29 | "type": [ 30 | "number", 31 | "string" 32 | ] 33 | } 34 | }, 35 | "required": [ 36 | "id" 37 | ] 38 | } 39 | } 40 | } -------------------------------------------------------------------------------- /output/json-schema/tweet/stats_v1.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-07/schema#", 3 | "$id": "http://nav.com/api.tweet.stats.schema.json/v1", 4 | "title": "Tweet stats", 5 | "description": "The stats of a given tweet", 6 | "required": [], 7 | "type": "object", 8 | "properties": { 9 | "id": { 10 | "type": [ 11 | "number", 12 | "string" 13 | ] 14 | }, 15 | "date": { 16 | "type": "String", 17 | "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" 18 | }, 19 | "stats": { 20 | "$ref": "#Stat" 21 | } 22 | }, 23 | "definitions": { 24 | "Stat": { 25 | "$id": "#Stat", 26 | "type": "object", 27 | "properties": { 28 | "views": { 29 | "type": "integer" 30 | }, 31 | "likes": { 32 | "type": "integer" 33 | }, 34 | "retweets": { 35 | "type": "integer" 36 | }, 37 | "responses": { 38 | "type": "integer" 39 | } 40 | } 41 | } 42 | } 43 | } -------------------------------------------------------------------------------- /output/protobuf/nsa/enums/enums.proto: -------------------------------------------------------------------------------- 1 | 2 | syntax = "proto3"; 3 | 4 | package enums; 5 | 6 | option go_package = "git.nav.com/engineering/nsa-go-proto/enums"; 7 | 8 | enum AccountType { 9 | ACCOUNT_TYPE_UNSPECIFIED = 0; 10 | BUSINESS = 1; 11 | CREATOR = 2; 12 | } 13 | 14 | 
-------------------------------------------------------------------------------- /output/protobuf/nsa/profile/profile/profile_v1.proto: -------------------------------------------------------------------------------- 1 | 2 | syntax = "proto3"; 3 | 4 | package api_tweet_profile_profile; 5 | 6 | // User profile payload 7 | 8 | import "nav/date.proto"; 9 | 10 | import "enums/enums.proto"; 11 | 12 | option go_package = "git.nav.com/engineering/nsa-go-proto/profile/profile"; 13 | 14 | 15 | 16 | 17 | 18 | 19 | message Profile { 20 | string id = 1; 21 | string username = 2; 22 | string first_name = 3; 23 | string last_name = 4; 24 | string bio = 5; 25 | navtypes.ISODate birthdate = 6; 26 | string email = 7; 27 | nsa.AccountType account_type = 8; 28 | bool verified = 9; 29 | } 30 | 31 | -------------------------------------------------------------------------------- /output/protobuf/nsa/tweet/my_tweets/my_tweets_v1.proto: -------------------------------------------------------------------------------- 1 | 2 | syntax = "proto3"; 3 | 4 | package api_tweet_myTweets_myTweets; 5 | 6 | // User Tweets payload 7 | 8 | 9 | 10 | 11 | option go_package = "git.nav.com/engineering/nsa-go-proto/my_tweets/my_tweets"; 12 | 13 | 14 | 15 | message Tweet { 16 | string id = 1; 17 | navtypes.ISODate date = 2; 18 | string body = 3; 19 | } 20 | 21 | 22 | message MyTweets { 23 | string id = 1; 24 | string username = 2; 25 | repeated Tweet tweets = 3; 26 | Tweet pinned_tweet = 4; 27 | } 28 | 29 | -------------------------------------------------------------------------------- /output/protobuf/nsa/tweet/stats/stats_v1.proto: -------------------------------------------------------------------------------- 1 | 2 | syntax = "proto3"; 3 | 4 | package api_tweet_stats_stats; 5 | 6 | // Tweet stats 7 | 8 | import "nav/date.proto"; 9 | 10 | 11 | 12 | option go_package = "git.nav.com/engineering/nsa-go-proto/stats/stats"; 13 | 14 | 15 | 16 | message Stat { 17 | int64 views = 1; 18 | int64 likes = 2; 19 | 
int64 retweets = 3; 20 | int64 responses = 4; 21 | } 22 | 23 | 24 | message Stats { 25 | string id = 1; 26 | navtypes.ISODate date = 2; 27 | Stat stats = 3; 28 | } 29 | 30 | -------------------------------------------------------------------------------- /output/python/nsa/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nav-inc/nav-schema-architecture/e218874e35f9d2de30b37d3a41d58725b1c1156d/output/python/nsa/__init__.py -------------------------------------------------------------------------------- /output/python/nsa/profile/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nav-inc/nav-schema-architecture/e218874e35f9d2de30b37d3a41d58725b1c1156d/output/python/nsa/profile/__init__.py -------------------------------------------------------------------------------- /output/python/nsa/profile/profile/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nav-inc/nav-schema-architecture/e218874e35f9d2de30b37d3a41d58725b1c1156d/output/python/nsa/profile/profile/__init__.py -------------------------------------------------------------------------------- /output/python/nsa/profile/profile/profile_v1.py: -------------------------------------------------------------------------------- 1 | import re 2 | import datetime 3 | from dateutil import parser as dateutil_parser 4 | from typing import Any, Callable, List, Literal, Optional, TypeVar, Union 5 | 6 | # api.tweet.profile 7 | 8 | # User profile payload 9 | # 10 | # generator version 1 11 | 12 | T = TypeVar('T') 13 | 14 | 15 | def from_str(x: Any) -> str: 16 | if x is None: return None 17 | assert isinstance(x, str) 18 | return x 19 | 20 | 21 | def from_int(x: Any) -> int: 22 | if x is None: return None 23 | assert isinstance(x, int) and not isinstance(x, bool) 24 | return x 25 | 26 | 27 | def 
from_list(f: Callable[[Any], T], x: Any) -> List[T]: 28 | if x is None: return None 29 | assert isinstance(x, list) 30 | return [f(y) for y in x] 31 | 32 | 33 | def from_bool(x: Any) -> bool: 34 | if x is None: return None 35 | assert isinstance(x, bool) 36 | return x 37 | 38 | 39 | def from_Any(x: Any) -> Any: 40 | return x 41 | 42 | 43 | def is_required(x: Any) -> Any: 44 | assert x is not None 45 | return x 46 | 47 | 48 | def from_Date(x: Union[str, datetime.date]) -> datetime.date: 49 | if x is None: return None 50 | if isinstance(x, str): x = datetime.date.fromisoformat(x) 51 | assert isinstance(x, datetime.date) 52 | return x 53 | 54 | 55 | def from_DateTime(x: Union[str, datetime.datetime]) -> datetime.datetime: 56 | if x is None: return None 57 | if isinstance(x, str): x = dateutil_parser.isoparse(x) 58 | assert isinstance(x, datetime.datetime) 59 | return x 60 | 61 | 62 | UUID_pattern = re.compile('^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$') 63 | 64 | 65 | def from_UUID(x: Any) -> str: 66 | if x is None: return None 67 | 68 | assert isinstance(x, str) 69 | assert UUID_pattern.match(x) 70 | return x 71 | 72 | 73 | Phone_pattern = re.compile('^\\+?[1-9]\\d{1,14}$') 74 | 75 | 76 | def from_Phone(x: Any) -> str: 77 | if x is None: return None 78 | 79 | assert isinstance(x, str) 80 | assert Phone_pattern.match(x) 81 | return x 82 | 83 | 84 | ZIPCode_pattern = re.compile('^[0-9]{5}(?:-[0-9]{4})?$') 85 | 86 | 87 | def from_ZIPCode(x: Any) -> str: 88 | if x is None: return None 89 | 90 | assert isinstance(x, str) 91 | assert ZIPCode_pattern.match(x) 92 | return x 93 | 94 | 95 | def from_Email(x: Any) -> str: 96 | if x is None: return None 97 | 98 | assert isinstance(x, str) 99 | return x 100 | 101 | 102 | def from_Any(x: Any) -> dict: 103 | if x is None: return None 104 | 105 | assert isinstance(x, dict) 106 | return x 107 | 108 | 109 | AccountType = Literal['BUSINESS', 'CREATOR'] 110 | 111 | 112 | def from_AccountType(x: AccountType): 113 | 
return x 114 | 115 | 116 | class Profile: 117 | def __init__( 118 | self, 119 | *, 120 | id: str, 121 | username: str, 122 | firstName: Optional[str] = None, 123 | lastName: Optional[str] = None, 124 | bio: Optional[str] = None, 125 | birthdate: Optional[datetime.date] = None, 126 | email: str, 127 | accountType: Optional[AccountType] = None, 128 | verified: Optional[bool] = None 129 | ) -> None: 130 | 131 | is_required(id) 132 | from_UUID(id) 133 | is_required(username) 134 | is_required(email) 135 | from_AccountType(accountType) 136 | 137 | self.id = id 138 | self.username = username 139 | self.firstName = firstName 140 | self.lastName = lastName 141 | self.bio = bio 142 | self.birthdate = birthdate 143 | self.email = email 144 | self.accountType = accountType 145 | self.verified = verified 146 | 147 | @staticmethod 148 | def from_dict(obj: Any) -> 'Profile': 149 | 150 | assert isinstance(obj, dict) 151 | 152 | id = from_UUID(obj.get('id')) 153 | username = from_str(obj.get('username')) 154 | firstName = from_str(obj.get('firstName')) 155 | lastName = from_str(obj.get('lastName')) 156 | bio = from_str(obj.get('bio')) 157 | birthdate = from_Date(obj.get('birthdate')) 158 | email = from_Email(obj.get('email')) 159 | accountType = AccountType.from_dict(obj.get('accountType')) 160 | verified = from_bool(obj.get('verified')) 161 | return Profile( 162 | id=id, 163 | username=username, 164 | firstName=firstName, 165 | lastName=lastName, 166 | bio=bio, 167 | birthdate=birthdate, 168 | email=email, 169 | accountType=accountType, 170 | verified=verified 171 | ) 172 | 173 | def to_dict(self) -> dict: 174 | hash: dict = {} 175 | hash['id'] = from_UUID(self.id) 176 | hash['username'] = from_str(self.username) 177 | hash['firstName'] = from_str(self.firstName) 178 | hash['lastName'] = from_str(self.lastName) 179 | hash['bio'] = from_str(self.bio) 180 | hash['birthdate'] = from_Date(self.birthdate) 181 | hash['email'] = from_Email(self.email) 182 | hash['accountType'] = 
from_AccountType(self.accountType) 183 | hash['verified'] = from_bool(self.verified) 184 | return hash 185 | 186 | 187 | def from_Profile(x: Any) -> Profile: 188 | assert isinstance(x, Profile) 189 | return x 190 | -------------------------------------------------------------------------------- /output/python/nsa/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nav-inc/nav-schema-architecture/e218874e35f9d2de30b37d3a41d58725b1c1156d/output/python/nsa/py.typed -------------------------------------------------------------------------------- /output/python/nsa/tweet/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nav-inc/nav-schema-architecture/e218874e35f9d2de30b37d3a41d58725b1c1156d/output/python/nsa/tweet/__init__.py -------------------------------------------------------------------------------- /output/python/nsa/tweet/my_tweets/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nav-inc/nav-schema-architecture/e218874e35f9d2de30b37d3a41d58725b1c1156d/output/python/nsa/tweet/my_tweets/__init__.py -------------------------------------------------------------------------------- /output/python/nsa/tweet/my_tweets/my_tweets_v1.py: -------------------------------------------------------------------------------- 1 | import re 2 | import datetime 3 | from dateutil import parser as dateutil_parser 4 | from typing import Any, Callable, List, Optional, TypeVar, Union 5 | 6 | # api.tweet.myTweets 7 | 8 | # User Tweets payload 9 | # 10 | # generator version 1 11 | 12 | T = TypeVar('T') 13 | 14 | 15 | def from_str(x: Any) -> str: 16 | if x is None: return None 17 | assert isinstance(x, str) 18 | return x 19 | 20 | 21 | def from_int(x: Any) -> int: 22 | if x is None: return None 23 | assert isinstance(x, int) and not isinstance(x, bool) 24 | return 
x 25 | 26 | 27 | def from_list(f: Callable[[Any], T], x: Any) -> List[T]: 28 | if x is None: return None 29 | assert isinstance(x, list) 30 | return [f(y) for y in x] 31 | 32 | 33 | def from_bool(x: Any) -> bool: 34 | if x is None: return None 35 | assert isinstance(x, bool) 36 | return x 37 | 38 | 39 | def from_Any(x: Any) -> Any: 40 | return x 41 | 42 | 43 | def is_required(x: Any) -> Any: 44 | assert x is not None 45 | return x 46 | 47 | 48 | def from_Date(x: Union[str, datetime.date]) -> datetime.date: 49 | if x is None: return None 50 | if isinstance(x, str): x = datetime.date.fromisoformat(x) 51 | assert isinstance(x, datetime.date) 52 | return x 53 | 54 | 55 | def from_DateTime(x: Union[str, datetime.datetime]) -> datetime.datetime: 56 | if x is None: return None 57 | if isinstance(x, str): x = dateutil_parser.isoparse(x) 58 | assert isinstance(x, datetime.datetime) 59 | return x 60 | 61 | 62 | UUID_pattern = re.compile('^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$') 63 | 64 | 65 | def from_UUID(x: Any) -> str: 66 | if x is None: return None 67 | 68 | assert isinstance(x, str) 69 | assert UUID_pattern.match(x) 70 | return x 71 | 72 | 73 | Phone_pattern = re.compile('^\\+?[1-9]\\d{1,14}$') 74 | 75 | 76 | def from_Phone(x: Any) -> str: 77 | if x is None: return None 78 | 79 | assert isinstance(x, str) 80 | assert Phone_pattern.match(x) 81 | return x 82 | 83 | 84 | ZIPCode_pattern = re.compile('^[0-9]{5}(?:-[0-9]{4})?$') 85 | 86 | 87 | def from_ZIPCode(x: Any) -> str: 88 | if x is None: return None 89 | 90 | assert isinstance(x, str) 91 | assert ZIPCode_pattern.match(x) 92 | return x 93 | 94 | 95 | def from_Email(x: Any) -> str: 96 | if x is None: return None 97 | 98 | assert isinstance(x, str) 99 | return x 100 | 101 | 102 | def from_Any(x: Any) -> dict: 103 | if x is None: return None 104 | 105 | assert isinstance(x, dict) 106 | return x 107 | 108 | 109 | class Tweet: 110 | def __init__(self, *, id: str, date: datetime.date, body: str) -> 
None: 111 | 112 | is_required(id) 113 | is_required(date) 114 | is_required(body) 115 | 116 | self.id = id 117 | self.date = date 118 | self.body = body 119 | 120 | @staticmethod 121 | def from_dict(obj: Any) -> 'Tweet': 122 | if obj is None: return None 123 | assert isinstance(obj, dict) 124 | 125 | is_required(obj.get('id')) 126 | is_required(obj.get('date')) 127 | is_required(obj.get('body')) 128 | 129 | id = from_str(obj.get('id')) 130 | date = from_Date(obj.get('date')) 131 | body = from_str(obj.get('body')) 132 | return Tweet(id=id, date=date, body=body) 133 | 134 | def to_dict(self) -> dict: 135 | hash: dict = {} 136 | 137 | is_required(self.id) 138 | is_required(self.date) 139 | is_required(self.body) 140 | 141 | hash['id'] = from_str(self.id) 142 | hash['date'] = from_Date(self.date) 143 | hash['body'] = from_str(self.body) 144 | return hash 145 | 146 | 147 | def from_Tweet(x: Any) -> Tweet: 148 | if x is None: return None 149 | assert isinstance(x, Tweet) 150 | return x 151 | 152 | 153 | class MyTweets: 154 | def __init__(self, *, id: str, username: str, tweets: List[Tweet], pinnedTweet: Optional[Tweet] = None) -> None: 155 | 156 | is_required(id) 157 | from_UUID(id) 158 | is_required(username) 159 | is_required(tweets) 160 | from_list(from_Tweet, tweets) 161 | 162 | self.id = id 163 | self.username = username 164 | self.tweets = tweets 165 | self.pinnedTweet = pinnedTweet 166 | 167 | @staticmethod 168 | def from_dict(obj: Any) -> 'MyTweets': 169 | 170 | assert isinstance(obj, dict) 171 | 172 | id = from_UUID(obj.get('id')) 173 | username = from_str(obj.get('username')) 174 | tweets = Tweet.from_dict(obj.get('tweets')) 175 | pinnedTweet = Tweet.from_dict(obj.get('pinnedTweet')) 176 | return MyTweets(id=id, username=username, tweets=tweets, pinnedTweet=pinnedTweet) 177 | 178 | def to_dict(self) -> dict: 179 | hash: dict = {} 180 | hash['id'] = from_UUID(self.id) 181 | hash['username'] = from_str(self.username) 182 | hash['tweets'] = self.tweets.to_dict() 
183 | hash['pinnedTweet'] = None if self.pinnedTweet is None else self.pinnedTweet.to_dict() 184 | return hash 185 | 186 | 187 | def from_MyTweets(x: Any) -> MyTweets: 188 | assert isinstance(x, MyTweets) 189 | return x 190 | -------------------------------------------------------------------------------- /output/python/nsa/tweet/stats/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nav-inc/nav-schema-architecture/e218874e35f9d2de30b37d3a41d58725b1c1156d/output/python/nsa/tweet/stats/__init__.py -------------------------------------------------------------------------------- /output/python/nsa/tweet/stats/stats_v1.py: -------------------------------------------------------------------------------- 1 | import re 2 | import datetime 3 | from dateutil import parser as dateutil_parser 4 | from typing import Any, Callable, List, TypeVar, Union 5 | 6 | # api.tweet.stats 7 | 8 | # Tweet stats 9 | # 10 | # generator version 1 11 | 12 | T = TypeVar('T') 13 | 14 | 15 | def from_str(x: Any) -> str: 16 | if x is None: return None 17 | assert isinstance(x, str) 18 | return x 19 | 20 | 21 | def from_int(x: Any) -> int: 22 | if x is None: return None 23 | assert isinstance(x, int) and not isinstance(x, bool) 24 | return x 25 | 26 | 27 | def from_list(f: Callable[[Any], T], x: Any) -> List[T]: 28 | if x is None: return None 29 | assert isinstance(x, list) 30 | return [f(y) for y in x] 31 | 32 | 33 | def from_bool(x: Any) -> bool: 34 | if x is None: return None 35 | assert isinstance(x, bool) 36 | return x 37 | 38 | 39 | def from_Any(x: Any) -> Any: 40 | return x 41 | 42 | 43 | def is_required(x: Any) -> Any: 44 | assert x is not None 45 | return x 46 | 47 | 48 | def from_Date(x: Union[str, datetime.date]) -> datetime.date: 49 | if x is None: return None 50 | if isinstance(x, str): x = datetime.date.fromisoformat(x) 51 | assert isinstance(x, datetime.date) 52 | return x 53 | 54 | 55 | def 
from_DateTime(x: Union[str, datetime.datetime]) -> datetime.datetime: 56 | if x is None: return None 57 | if isinstance(x, str): x = dateutil_parser.isoparse(x) 58 | assert isinstance(x, datetime.datetime) 59 | return x 60 | 61 | 62 | UUID_pattern = re.compile('^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$') 63 | 64 | 65 | def from_UUID(x: Any) -> str: 66 | if x is None: return None 67 | 68 | assert isinstance(x, str) 69 | assert UUID_pattern.match(x) 70 | return x 71 | 72 | 73 | Phone_pattern = re.compile('^\\+?[1-9]\\d{1,14}$') 74 | 75 | 76 | def from_Phone(x: Any) -> str: 77 | if x is None: return None 78 | 79 | assert isinstance(x, str) 80 | assert Phone_pattern.match(x) 81 | return x 82 | 83 | 84 | ZIPCode_pattern = re.compile('^[0-9]{5}(?:-[0-9]{4})?$') 85 | 86 | 87 | def from_ZIPCode(x: Any) -> str: 88 | if x is None: return None 89 | 90 | assert isinstance(x, str) 91 | assert ZIPCode_pattern.match(x) 92 | return x 93 | 94 | 95 | def from_Email(x: Any) -> str: 96 | if x is None: return None 97 | 98 | assert isinstance(x, str) 99 | return x 100 | 101 | 102 | def from_Any(x: Any) -> dict: 103 | if x is None: return None 104 | 105 | assert isinstance(x, dict) 106 | return x 107 | 108 | 109 | class Stat: 110 | def __init__(self, *, views: int, likes: int, retweets: int, responses: int) -> None: 111 | 112 | is_required(views) 113 | is_required(likes) 114 | is_required(retweets) 115 | is_required(responses) 116 | 117 | self.views = views 118 | self.likes = likes 119 | self.retweets = retweets 120 | self.responses = responses 121 | 122 | @staticmethod 123 | def from_dict(obj: Any) -> 'Stat': 124 | if obj is None: return None 125 | assert isinstance(obj, dict) 126 | 127 | is_required(obj.get('views')) 128 | is_required(obj.get('likes')) 129 | is_required(obj.get('retweets')) 130 | is_required(obj.get('responses')) 131 | 132 | views = from_int(obj.get('views')) 133 | likes = from_int(obj.get('likes')) 134 | retweets = from_int(obj.get('retweets')) 
135 | responses = from_int(obj.get('responses')) 136 | return Stat(views=views, likes=likes, retweets=retweets, responses=responses) 137 | 138 | def to_dict(self) -> dict: 139 | hash: dict = {} 140 | 141 | is_required(self.views) 142 | is_required(self.likes) 143 | is_required(self.retweets) 144 | is_required(self.responses) 145 | 146 | hash['views'] = from_int(self.views) 147 | hash['likes'] = from_int(self.likes) 148 | hash['retweets'] = from_int(self.retweets) 149 | hash['responses'] = from_int(self.responses) 150 | return hash 151 | 152 | 153 | def from_Stat(x: Any) -> Stat: 154 | if x is None: return None 155 | assert isinstance(x, Stat) 156 | return x 157 | 158 | 159 | class Stats: 160 | def __init__(self, *, id: str, date: datetime.date, stats: Stat) -> None: 161 | 162 | is_required(id) 163 | is_required(date) 164 | is_required(stats) 165 | 166 | self.id = id 167 | self.date = date 168 | self.stats = stats 169 | 170 | @staticmethod 171 | def from_dict(obj: Any) -> 'Stats': 172 | 173 | assert isinstance(obj, dict) 174 | 175 | id = from_str(obj.get('id')) 176 | date = from_Date(obj.get('date')) 177 | stats = Stat.from_dict(obj.get('stats')) 178 | return Stats(id=id, date=date, stats=stats) 179 | 180 | def to_dict(self) -> dict: 181 | hash: dict = {} 182 | hash['id'] = from_str(self.id) 183 | hash['date'] = from_Date(self.date) 184 | hash['stats'] = self.stats.to_dict() 185 | return hash 186 | 187 | 188 | def from_Stats(x: Any) -> Stats: 189 | assert isinstance(x, Stats) 190 | return x 191 | -------------------------------------------------------------------------------- /output/python/setup.py: -------------------------------------------------------------------------------- 1 | import pathlib 2 | from setuptools import setup, find_packages 3 | 4 | # The directory containing this file 5 | HERE = pathlib.Path(__file__) 6 | 7 | # The text of the README file 8 | README = (HERE.parent.parent.parent / 'README.md').read_text() 9 | 10 | setup( 11 | author='JJ Dubray', 
12 | classifiers=['Programming Language :: Python :: 3.8'], 13 | description='Nav common data model and event formats', 14 | install_requires=['python-dateutil'], 15 | keywords='schema', 16 | long_description_content_type='text/markdown', 17 | long_description=README, 18 | name='nav-schema-architecture', 19 | packages=find_packages(include=['nsa', 'nsa.*']), 20 | package_data={'nsa': ['py.typed']}, 21 | python_requires='>= 3.8', 22 | setup_requires='pip >= 18', 23 | url='https://git.nav.com/engineering/nav-schema-architecture/output/python', 24 | version='1.1.0', 25 | ) 26 | -------------------------------------------------------------------------------- /output/ruby/nav-schema-architecture.gemspec: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | $LOAD_PATH.push(File.dirname(__FILE__)) 4 | 5 | # Describe your gem and declare its dependencies: 6 | Gem::Specification.new do |s| 7 | s.required_ruby_version = ">= 2.7" 8 | s.name = "nav-schema-architecture" 9 | s.version = "1.1.0" 10 | s.authors = ["JJ Dubray"] 11 | s.email = ["jdubray@gmail.com"] 12 | s.homepage = "https://git.nav.com/engineering/nav-schema-architecture/output/ruby" 13 | s.summary = "Nav Common Data Model" 14 | s.description = "Nav common data model and event formats" 15 | s.license = "Nonstandard" 16 | 17 | s.add_dependency("activesupport", ">= 5") 18 | 19 | s.files = Dir["nsa/**/*"] 20 | s.require_path = ["nsa"] 21 | s.metadata = { 22 | "rubygems_mfa_required" => "true" 23 | } 24 | end 25 | -------------------------------------------------------------------------------- /output/ruby/nsa/enums.rb: -------------------------------------------------------------------------------- 1 | require "active_support/core_ext/string/inflections" # for constantize() 2 | 3 | module NavSchemaArchitecture::Event 4 | module Enums 5 | # Every collection of enums here is dynamically generated based the arrays 6 | # in this gem's top-level file. 
7 | # 8 | # Unfortunately, we can't refer to it in a readable way in our code. This 9 | # part allows us to do that, and to validate it. 10 | # 11 | # Example: 12 | # pry> MYENUM = %i[ONE TWO THREE FOUR] 13 | # pry> my_enum = NSA::Event::Enums::MyEnum::TWO 14 | # pry> my_enum 15 | # => :TWO 16 | # pry> NSA::Event::Enums.my_enum_validate_value(my_enum) 17 | # => true 18 | begin 19 | errors, consts = constants.partition { |e| e.to_s.end_with?("Error") } 20 | 21 | # Get friendly names of the modules from the errors 22 | names = {} 23 | errors.map do |sym| 24 | name = sym.to_s.delete_prefix("Invalid").delete_suffix("Error") 25 | const = name.upcase.to_sym 26 | names[const] = name if consts.include?(const) 27 | end 28 | 29 | consts.each do |sym| 30 | mod = const_set(names[sym], Module.new) 31 | items = "NavSchemaArchitecture::Event::Enums::#{sym}".constantize 32 | items.each { |item| mod.const_set(item, item) } 33 | end 34 | end 35 | end 36 | end 37 | -------------------------------------------------------------------------------- /output/ruby/nsa/nav_schema_architecture.rb: -------------------------------------------------------------------------------- 1 | # FIXME: This resolves classes belonging to non-existent modules. 2 | # This is a bad thing we're doing. 
3 | module NavSchemaArchitecture 4 | module Event; end 5 | end 6 | 7 | # A shorthand alias for convenience 8 | NSA = NavSchemaArchitecture 9 | 10 | module NavSchemaArchitecture::Event::Enums 11 | ACCOUNTTYPE = %i[BUSINESS CREATOR].freeze 12 | 13 | def self.account_type_value_valid?(v) 14 | ACCOUNTTYPE.include?(v) 15 | end 16 | 17 | class InvalidAccountTypeError < StandardError 18 | end 19 | end 20 | 21 | require "active_support/core_ext/object/blank" 22 | require "date" 23 | 24 | require_relative "utils" 25 | require_relative "enums" 26 | require_relative "profile/profile/profile_v1" 27 | require_relative "tweet/my_tweets/my_tweets_v1" 28 | require_relative "tweet/stats/stats_v1" 29 | -------------------------------------------------------------------------------- /output/ruby/nsa/profile/profile/profile_v1.rb: -------------------------------------------------------------------------------- 1 | module NavSchemaArchitecture::Api::Tweet::Profile 2 | module Profile 3 | # User profile payload 4 | # 5 | # generator version 1 6 | 7 | def self.build(id, username, firstName, lastName, bio, birthdate, email, accountType, verified) 8 | raise(InvalidProfileError, "Required value id is undefined") unless defined? id 9 | raise(InvalidProfileError, "id must be a String") unless id.is_a?(String) 10 | raise(InvalidProfileError, "Invalid id type") if id.set? && !id.match?(/^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/) 11 | raise(InvalidProfileError, "Required value username is undefined") unless defined? username 12 | raise(InvalidProfileError, "username must be a String") unless username.is_a?(String) 13 | 14 | raise(InvalidProfileError, "firstName must be a String") if firstName.set? && !firstName.is_a?(String) 15 | 16 | raise(InvalidProfileError, "lastName must be a String") if lastName.set? && !lastName.is_a?(String) 17 | 18 | raise(InvalidProfileError, "bio must be a String") if bio.set? 
&& !bio.is_a?(String) 19 | 20 | raise(InvalidProfileError, "birthdate must be a Date") if birthdate.set? && !birthdate.is_a?(Date) 21 | 22 | raise(InvalidProfileError, "Required value email is undefined") unless defined? email 23 | raise(InvalidProfileError, "email must be a String") unless email.is_a?(String) 24 | raise(InvalidProfileError, "Enum value #{accountType} is invalid") if accountType.set? && !NavSchemaArchitecture::Event::Enums.account_type_value_valid?(accountType) 25 | 26 | raise(InvalidProfileError, "verified must be a boolean") if verified.set? && ![true, false].include?(verified) 27 | 28 | { 29 | "id" => id, 30 | "username" => username, 31 | "firstName" => firstName, 32 | "lastName" => lastName, 33 | "bio" => bio, 34 | "birthdate" => birthdate, 35 | "email" => email, 36 | "accountType" => accountType, 37 | "verified" => verified 38 | } 39 | end 40 | 41 | class InvalidProfileError < StandardError 42 | end 43 | end 44 | end 45 | -------------------------------------------------------------------------------- /output/ruby/nsa/tweet/my_tweets/my_tweets_v1.rb: -------------------------------------------------------------------------------- 1 | module NavSchemaArchitecture::Api::Tweet::MyTweets 2 | module MyTweets 3 | # User Tweets payload 4 | # 5 | # generator version 1 6 | 7 | def self.Tweet(id, date, body) 8 | raise(InvalidTweetError, "Required value id is undefined") unless defined? id 9 | raise(InvalidTweetError, "id must be a String") unless id.is_a?(String) 10 | raise(InvalidTweetError, "Required value date is undefined") unless defined? date 11 | raise(InvalidTweetError, "date must be a Date") unless date.is_a?(Date) 12 | raise(InvalidTweetError, "Required value body is undefined") unless defined? 
body 13 | raise(InvalidTweetError, "body must be a String") unless body.is_a?(String) 14 | 15 | { 16 | "id" => id, 17 | "date" => date, 18 | "body" => body 19 | } 20 | end 21 | 22 | class InvalidTweetError < StandardError 23 | end 24 | 25 | def self.build(id, username, tweets, pinnedTweet) 26 | raise(InvalidMyTweetsError, "Required value id is undefined") unless defined? id 27 | raise(InvalidMyTweetsError, "id must be a String") unless id.is_a?(String) 28 | raise(InvalidMyTweetsError, "Invalid id type") if id.set? && !id.match?(/^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/) 29 | raise(InvalidMyTweetsError, "Required value username is undefined") unless defined? username 30 | raise(InvalidMyTweetsError, "username must be a String") unless username.is_a?(String) 31 | raise(InvalidMyTweetsError, "Required value tweets is undefined") unless defined? tweets 32 | raise(InvalidMyTweetsError, "tweets must be an array") unless tweets.is_a?(Array) 33 | 34 | raise(InvalidMyTweetsError, "pinnedTweet must be a Hash") if pinnedTweet.set? && !pinnedTweet.is_a?(Hash) 35 | 36 | { 37 | "id" => id, 38 | "username" => username, 39 | "tweets" => tweets, 40 | "pinnedTweet" => pinnedTweet 41 | } 42 | end 43 | 44 | class InvalidMyTweetsError < StandardError 45 | end 46 | end 47 | end 48 | -------------------------------------------------------------------------------- /output/ruby/nsa/tweet/stats/stats_v1.rb: -------------------------------------------------------------------------------- 1 | module NavSchemaArchitecture::Api::Tweet::Stats 2 | module Stats 3 | # Tweet stats 4 | # 5 | # generator version 1 6 | 7 | def self.Stat(views, likes, retweets, responses) 8 | raise(InvalidStatError, "Required value views is undefined") unless defined? views 9 | raise(InvalidStatError, "views must be a Numeric") unless views.is_a?(Numeric) 10 | raise(InvalidStatError, "Required value likes is undefined") unless defined? 
likes 11 | raise(InvalidStatError, "likes must be a Numeric") unless likes.is_a?(Numeric) 12 | raise(InvalidStatError, "Required value retweets is undefined") unless defined? retweets 13 | raise(InvalidStatError, "retweets must be a Numeric") unless retweets.is_a?(Numeric) 14 | raise(InvalidStatError, "Required value responses is undefined") unless defined? responses 15 | raise(InvalidStatError, "responses must be a Numeric") unless responses.is_a?(Numeric) 16 | 17 | { 18 | "views" => views, 19 | "likes" => likes, 20 | "retweets" => retweets, 21 | "responses" => responses 22 | } 23 | end 24 | 25 | class InvalidStatError < StandardError 26 | end 27 | 28 | def self.build(id, date, stats) 29 | raise(InvalidStatsError, "Required value id is undefined") unless defined? id 30 | raise(InvalidStatsError, "id must be a String") unless id.is_a?(String) 31 | raise(InvalidStatsError, "Required value date is undefined") unless defined? date 32 | raise(InvalidStatsError, "date must be a Date") unless date.is_a?(Date) 33 | raise(InvalidStatsError, "Required value stats is undefined") unless defined? stats 34 | raise(InvalidStatsError, "stats must be a Hash") unless stats.is_a?(Hash) 35 | 36 | { 37 | "id" => id, 38 | "date" => date, 39 | "stats" => stats 40 | } 41 | end 42 | 43 | class InvalidStatsError < StandardError 44 | end 45 | end 46 | end 47 | -------------------------------------------------------------------------------- /output/ruby/nsa/utils.rb: -------------------------------------------------------------------------------- 1 | class Object 2 | alias set? present? 3 | end 4 | 5 | class FalseClass 6 | def set? 7 | # special case where .present? and .blank? 
fails us 8 | true 9 | end 10 | end 11 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "nav-schema-architecture", 3 | "version": "1.1.0", 4 | "homepage": "https://github.com/nav-inc/nav-schema-architecture", 5 | "repository": { 6 | "type": "git", 7 | "url": "https://github.com/nav-inc/nav-schema-architecture.git" 8 | }, 9 | "description": "Nav common data model and event formats", 10 | "scripts": { 11 | "generate-output:commit": "scripty", 12 | "generate-output:generate": "scripty", 13 | "git:add-working-files": "scripty", 14 | "git:push-files-tags": "scripty", 15 | "preversion": "scripty", 16 | "postversion": "scripty", 17 | "test": "mocha", 18 | "nyc": "nyc --reporter=text mocha", 19 | "version": "npm run git:add-working-files" 20 | }, 21 | "author": "", 22 | "license": "ISC", 23 | "dependencies": { 24 | "graphql": "15.6.0", 25 | "graphql-tag": "2.12.6", 26 | "graphql-tools": "8.2.0", 27 | "ramda": "0.27.1" 28 | }, 29 | "scripty": { 30 | "silent": true 31 | }, 32 | "devDependencies": { 33 | "chalk": "^4.1.2", 34 | "eslint-config-prettier": "^8.5.0", 35 | "eslint-plugin-prettier": "^4.2.1", 36 | "generate-changelog": "^1.8.0", 37 | "mocha": "^10.1.0", 38 | "nyc": "^15.1.0", 39 | "prepend-file": "^2.0.1", 40 | "prettier": "2.4.1", 41 | "promisify-child-process": "^4.1.1", 42 | "scripty": "^2.1.0" 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /samples/profile_v1.html: -------------------------------------------------------------------------------- 1 | User profile payload

User profile payload

Type: object

Sample user profile

Type: string
Must match regular expression: ^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$

Type: string

Type: string

Type: string

Type: string

Type: string
Must match regular expression: ^[0-9]{4}-[0-9]{2}-[0-9]{2}$

Type: string

Type: enum (of string)

Must be one of:

  • "BUSINESS"
  • "CREATOR"

Type: boolean
-------------------------------------------------------------------------------- /samples/schema_doc.css: -------------------------------------------------------------------------------- 1 | body { 2 | font: 16px/1.5em "Overpass", "Open Sans", Helvetica, sans-serif; 3 | color: #333; 4 | font-weight: 300; 5 | padding: 40px; 6 | } 7 | 8 | .btn.btn-link { 9 | font-size: 18px; 10 | } 11 | 12 | .jsfh-animated-property { 13 | animation: eclair; 14 | animation-iteration-count: 1; 15 | animation-fill-mode: forwards; 16 | animation-duration: .75s; 17 | 18 | } 19 | 20 | @keyframes eclair { 21 | 0%,100% { 22 | transform: scale(1); 23 | } 24 | 50% { 25 | transform: scale(1.03); 26 | } 27 | } 28 | 29 | .btn.btn-primary { 30 | margin: 10px; 31 | } 32 | 33 | .btn.example-show.collapsed:before { 34 | content: "show" 35 | } 36 | 37 | .btn.example-show:before { 38 | content: "hide" 39 | } 40 | 41 | .description.collapse:not(.show) { 42 | max-height: 100px !important; 43 | overflow: hidden; 44 | 45 | display: -webkit-box; 46 | -webkit-line-clamp: 2; 47 | -webkit-box-orient: vertical; 48 | } 49 | 50 | .description.collapsing { 51 | min-height: 100px !important; 52 | } 53 | 54 | .collapse-description-link.collapsed:after { 55 | content: '+ Read More'; 56 | } 57 | 58 | .collapse-description-link:not(.collapsed):after { 59 | content: '- Read Less'; 60 | } 61 | 62 | .badge { 63 | font-size: 100%; 64 | margin-bottom: 0.5rem; 65 | margin-top: 0.5rem; 66 | } 67 | 68 | .badge.value-type { 69 | font-size: 120%; 70 | margin-right: 5px; 71 | margin-bottom: 10px; 72 | } 73 | 74 | 75 | .badge.default-value { 76 | font-size: 120%; 77 | margin-left: 5px; 78 | margin-bottom: 10px; 79 | } 80 | 81 | .badge.restriction { 82 | display: inline-block; 83 | } 84 | 85 | .badge.required-property,.badge.deprecated-property,.badge.pattern-property,.badge.no-additional { 86 | font-size: 100%; 87 | margin-left: 10px; 88 | } 89 | 90 | .accordion div.card:only-child { 91 | border-bottom: 1px solid rgba(0, 0, 0, 
0.125); 92 | } 93 | 94 | .examples { 95 | padding: 1rem !important; 96 | } 97 | 98 | .examples pre { 99 | margin-bottom: 0; 100 | } 101 | 102 | .highlight.jumbotron { 103 | padding: 1rem !important; 104 | } 105 | 106 | .generated-by-footer { 107 | margin-top: 1em; 108 | text-align: right; 109 | } 110 | 111 | /* From https://github.com/richleland/pygments-css/blob/master/friendly.css, see https://github.com/trentm/python-markdown2/wiki/fenced-code-blocks */ 112 | .highlight { background: #e9ecef; } /* Changed from #f0f0f0 in the original style to be the same as bootstrap's jumbotron */ 113 | .highlight .hll { background-color: #ffffcc } 114 | .highlight .c { color: #60a0b0; font-style: italic } /* Comment */ 115 | .highlight .err { border: 1px solid #FF0000 } /* Error */ 116 | .highlight .k { color: #007020; font-weight: bold } /* Keyword */ 117 | .highlight .o { color: #666666 } /* Operator */ 118 | .highlight .ch { color: #60a0b0; font-style: italic } /* Comment.Hashbang */ 119 | .highlight .cm { color: #60a0b0; font-style: italic } /* Comment.Multiline */ 120 | .highlight .cp { color: #007020 } /* Comment.Preproc */ 121 | .highlight .cpf { color: #60a0b0; font-style: italic } /* Comment.PreprocFile */ 122 | .highlight .c1 { color: #60a0b0; font-style: italic } /* Comment.Single */ 123 | .highlight .cs { color: #60a0b0; background-color: #fff0f0 } /* Comment.Special */ 124 | .highlight .gd { color: #A00000 } /* Generic.Deleted */ 125 | .highlight .ge { font-style: italic } /* Generic.Emph */ 126 | .highlight .gr { color: #FF0000 } /* Generic.Error */ 127 | .highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ 128 | .highlight .gi { color: #00A000 } /* Generic.Inserted */ 129 | .highlight .go { color: #888888 } /* Generic.Output */ 130 | .highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */ 131 | .highlight .gs { font-weight: bold } /* Generic.Strong */ 132 | .highlight .gu { color: #800080; font-weight: bold } /* 
Generic.Subheading */ 133 | .highlight .gt { color: #0044DD } /* Generic.Traceback */ 134 | .highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */ 135 | .highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */ 136 | .highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */ 137 | .highlight .kp { color: #007020 } /* Keyword.Pseudo */ 138 | .highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */ 139 | .highlight .kt { color: #902000 } /* Keyword.Type */ 140 | .highlight .m { color: #40a070 } /* Literal.Number */ 141 | .highlight .s { color: #4070a0 } /* Literal.String */ 142 | .highlight .na { color: #4070a0 } /* Name.Attribute */ 143 | .highlight .nb { color: #007020 } /* Name.Builtin */ 144 | .highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */ 145 | .highlight .no { color: #60add5 } /* Name.Constant */ 146 | .highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */ 147 | .highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */ 148 | .highlight .ne { color: #007020 } /* Name.Exception */ 149 | .highlight .nf { color: #06287e } /* Name.Function */ 150 | .highlight .nl { color: #002070; font-weight: bold } /* Name.Label */ 151 | .highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */ 152 | .highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */ 153 | .highlight .nv { color: #bb60d5 } /* Name.Variable */ 154 | .highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */ 155 | .highlight .w { color: #bbbbbb } /* Text.Whitespace */ 156 | .highlight .mb { color: #40a070 } /* Literal.Number.Bin */ 157 | .highlight .mf { color: #40a070 } /* Literal.Number.Float */ 158 | .highlight .mh { color: #40a070 } /* Literal.Number.Hex */ 159 | .highlight .mi { color: #40a070 } /* Literal.Number.Integer */ 160 | .highlight .mo { color: #40a070 } /* Literal.Number.Oct */ 161 | .highlight .sa { color: #4070a0 } /* 
Literal.String.Affix */ 162 | .highlight .sb { color: #4070a0 } /* Literal.String.Backtick */ 163 | .highlight .sc { color: #4070a0 } /* Literal.String.Char */ 164 | .highlight .dl { color: #4070a0 } /* Literal.String.Delimiter */ 165 | .highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */ 166 | .highlight .s2 { color: #4070a0 } /* Literal.String.Double */ 167 | .highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */ 168 | .highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */ 169 | .highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */ 170 | .highlight .sx { color: #c65d09 } /* Literal.String.Other */ 171 | .highlight .sr { color: #235388 } /* Literal.String.Regex */ 172 | .highlight .s1 { color: #4070a0 } /* Literal.String.Single */ 173 | .highlight .ss { color: #517918 } /* Literal.String.Symbol */ 174 | .highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */ 175 | .highlight .fm { color: #06287e } /* Name.Function.Magic */ 176 | .highlight .vc { color: #bb60d5 } /* Name.Variable.Class */ 177 | .highlight .vg { color: #bb60d5 } /* Name.Variable.Global */ 178 | .highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */ 179 | .highlight .vm { color: #bb60d5 } /* Name.Variable.Magic */ 180 | .highlight .il { color: #40a070 } /* Literal.Number.Integer.Long */ -------------------------------------------------------------------------------- /samples/schema_doc.min.js: -------------------------------------------------------------------------------- 1 | function flashElement(t){myElement=document.getElementById(t),myElement.classList.add("jsfh-animated-property"),setTimeout(function(){myElement.classList.remove("jsfh-animated-property")},1e3)}function setAnchor(t){history.pushState({},"",t)}function anchorOnLoad(){let t=window.location.hash.split("?")[0].split("&")[0];"#"===t[0]&&(t=t.substr(1)),t.length>0&&anchorLink(t)}function 
anchorLink(t){$("#"+t).parents().addBack().filter(".collapse:not(.show), .tab-pane, [role='tab']").each(function(t){if($(this).hasClass("collapse"))$(this).collapse("show");else if($(this).hasClass("tab-pane")){const t=$("a[href='#"+$(this).attr("id")+"']");t&&t.tab("show")}else"tab"===$(this).attr("role")&&$(this).tab("show")}),setTimeout(function(){let e=document.getElementById(t);e&&(e.scrollIntoView({block:"center",behavior:"smooth"}),setTimeout(function(){flashElement(t)},500))},1e3)}$(document).on("click",'a[href^="#"]',function(t){t.preventDefault(),history.pushState({},"",this.href)}); -------------------------------------------------------------------------------- /schema/integration-schema.gql: -------------------------------------------------------------------------------- 1 | # Adapted from Gildas Garcia 2 | # https://github.com/marmelab/GraphQL-example/blob/master/schema.graphql 3 | 4 | schema { 5 | query: Query 6 | } 7 | 8 | scalar ISODate 9 | scalar DateTime 10 | scalar Any 11 | scalar UUID 12 | scalar Email 13 | scalar ZIPCode 14 | scalar CurrencyCent 15 | scalar Phone 16 | 17 | enum Categories { 18 | AUTOMOTIVE 19 | AVIATION 20 | MARINE 21 | BEAUTY 22 | EDUCATION 23 | FINANCE 24 | RESTAURANT 25 | ADVERTISING 26 | RETAIL 27 | GOVERNMENT 28 | MEDICAL 29 | LAW 30 | MEDIA 31 | SCIENCE 32 | TECHNOLOGY 33 | ENTREPRENEUR 34 | } 35 | 36 | enum AccountType { 37 | BUSINESS 38 | CREATOR 39 | } 40 | 41 | type Tweet { 42 | id: ID! 43 | body: String! 44 | date: Date! 45 | author: User! 46 | stats: Stat 47 | conversations: [Tweet] 48 | } 49 | 50 | type User { 51 | id: UUID! 52 | username: String! 
53 | firstName: String 54 | lastName: String 55 | fullName: String 56 | avatarUrl: String 57 | bio: String 58 | location: String 59 | website: String 60 | birthdate: Date 61 | mobileNumber: Phone 62 | email: Email 63 | tweets: [Tweet] 64 | retweets: [Tweet] 65 | replies: [Tweet] 66 | likes: [Tweet] 67 | pinnedTweet: Tweet 68 | signedUpOn: Date 69 | } 70 | 71 | # Professional User 72 | extend type User { 73 | category: Categories 74 | accountType: AccountType 75 | verified: Boolean 76 | } 77 | 78 | type Stat { 79 | views: Int 80 | likes: Int 81 | retweets: Int 82 | responses: Int 83 | } 84 | 85 | type Notification { 86 | id: ID 87 | date: Date 88 | type: String 89 | } 90 | 91 | type Meta { 92 | count: Int 93 | } 94 | 95 | # message definitions 96 | type Query { 97 | myTweets: User 98 | stats: Tweet 99 | profile: User 100 | } 101 | -------------------------------------------------------------------------------- /schema/message-schema-definitions/profile/profile-v1.graphql: -------------------------------------------------------------------------------- 1 | { 2 | profile @namespace(value: "api.tweet.profile") 3 | @title(value: "User profile payload") 4 | @description(value: "Sample user profile") 5 | @version(value: 1) 6 | { 7 | id @field(order: 1, required: true) 8 | username @field(order: 2, required: true) 9 | firstName @field(order: 3) 10 | lastName @field(order: 4) 11 | bio @field(order: 5) 12 | birthdate @field(order: 6) 13 | email @field(order: 7, required: true) 14 | accountType @field(order: 8) 15 | verified @field(order: 9) 16 | } 17 | } 18 | 19 | -------------------------------------------------------------------------------- /schema/message-schema-definitions/tweet/my-tweets-v1.graphql: -------------------------------------------------------------------------------- 1 | { 2 | myTweets @namespace(value: "api.tweet.myTweets") 3 | @title(value: "User Tweets payload") 4 | @description(value: "An array of tweets for a given user") 5 | @version(value: 1) 6 | { 7 
| id @field(order: 1, required: true) 8 | username @field(order: 2) 9 | 10 | tweets @field(order: 3, required: true) { 11 | id @field(order: 1, required: true) 12 | date @field(order: 2, required: true) 13 | body @field(order: 3, required: true) 14 | } 15 | 16 | pinnedTweet @field(order: 4) { 17 | id @field(order: 1) 18 | } 19 | } 20 | } 21 | 22 | -------------------------------------------------------------------------------- /schema/message-schema-definitions/tweet/stats-v1.graphql: -------------------------------------------------------------------------------- 1 | { 2 | stats @namespace(value: "api.tweet.stats") 3 | @title(value: "Tweet stats") 4 | @description(value: "The stats of a given tweet") 5 | @version(value: 1) 6 | { 7 | id @field(order: 1, required: true) 8 | date @field(order: 2, required: true) 9 | stats @field(order: 3, required: true) { 10 | views @field(order: 1, required: true) 11 | likes @field(order: 2, required: true) 12 | retweets @field(order: 3, required: true) 13 | responses @field(order: 4, required: true) 14 | } 15 | } 16 | } 17 | 18 | -------------------------------------------------------------------------------- /schema/scalars/go-scalars.js: -------------------------------------------------------------------------------- 1 | const ISODate = { 2 | type: "civil.Date" 3 | } 4 | 5 | const DateTime = { 6 | type: 'time.Time', 7 | } 8 | 9 | const CurrencyCent = { 10 | type: 'int64', 11 | } 12 | 13 | const UUID = { 14 | type: 'string', 15 | pattern: '^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$', 16 | } 17 | 18 | const Phone = { 19 | type: 'string', 20 | pattern: '^\\\\+?[1-9]\\\\d{1,14}$', 21 | } 22 | 23 | const ZIPCode = { 24 | type: 'string', 25 | pattern: '^[0-9]{5}(?:-[0-9]{4})?$', 26 | } 27 | 28 | const Email = { 29 | type: 'string', 30 | } 31 | 32 | const Any = { 33 | type: 'json.RawMessage', 34 | } 35 | 36 | const converter = { 37 | String: 'string', 38 | ID: 'string', 39 | Int: 'int64', 40 | Float: 'float64', 41 | 
Boolean: 'bool', 42 | Any: 'json.RawMessage', 43 | } 44 | 45 | // This is an example of a generalized switch statement in case 46 | // more sophisticated conditions/transformations are needed 47 | // for instance context dependent behavior. 48 | // See unit tests for more examples, 49 | // the match function is duplicated here to avoid dependencies. 50 | const matched = (x) => ({ 51 | on: () => matched(x), 52 | otherwise: () => x, 53 | }) 54 | 55 | const match = (x) => ({ 56 | on: (pred, fn) => { 57 | const _pred = typeof pred !== 'function' ? (x) => x === pred : pred 58 | const _fn = typeof fn !== 'function' ? () => fn : fn 59 | return _pred(x) ? matched(_fn(x)) : match(x) 60 | }, 61 | otherwise: (fn) => (fn === undefined ? x : typeof fn !== 'function' ? fn : fn(x)), 62 | }) 63 | 64 | const stringFormatter = (p, context) => match(p).on('iD', 'ID').otherwise() 65 | 66 | module.exports = { 67 | ISODate, 68 | DateTime, 69 | CurrencyCent, 70 | UUID, 71 | Phone, 72 | Email, 73 | ZIPCode, 74 | Any, 75 | converter, 76 | stringFormatter, 77 | } 78 | -------------------------------------------------------------------------------- /schema/scalars/json-schema.js: -------------------------------------------------------------------------------- 1 | const ISODate = { 2 | type: "String", 3 | pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" 4 | } 5 | 6 | const DateTime = { 7 | type: 'string', 8 | pattern: '^[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}$', 9 | } 10 | 11 | const CurrencyCent = { 12 | type: 'integer', 13 | } 14 | 15 | const UUID = { 16 | type: 'string', 17 | pattern: '^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$', 18 | } 19 | 20 | const Phone = { 21 | type: 'string', 22 | pattern: '^\\+?[1-9]\\d{1,14}$', 23 | } 24 | 25 | const ZIPCode = { 26 | type: 'string', 27 | pattern: '^[0-9]{5}(?:-[0-9]{4})?$', 28 | } 29 | 30 | const Email = { 31 | type: 'string', 32 | } 33 | 34 | const Any = { 35 | type: ['object', 'array', 'string', 'number', 'boolean', 
'null'], 36 | } 37 | 38 | const converter = { 39 | String: 'string', 40 | ID: ['number', 'string'], 41 | Int: 'integer', 42 | Float: 'number', 43 | Boolean: 'boolean', 44 | } 45 | 46 | module.exports = { 47 | ISODate, 48 | DateTime, 49 | CurrencyCent, 50 | Phone, 51 | Email, 52 | ZIPCode, 53 | UUID, 54 | Any, 55 | converter, 56 | } 57 | -------------------------------------------------------------------------------- /schema/scalars/protobuf-scalars.js: -------------------------------------------------------------------------------- 1 | const ISODate = { 2 | type: 'nav.Date', 3 | import: 'nav/date.proto' 4 | } 5 | 6 | const DateTime = { 7 | type: 'google.protobuf.Timestamp', 8 | import: 'google/protobuf/timestamp.proto', 9 | } 10 | 11 | const CurrencyCent = { 12 | type: 'int64', 13 | } 14 | 15 | const UUID = { 16 | type: 'string', 17 | } 18 | 19 | const Phone = { 20 | type: 'string', 21 | } 22 | 23 | const ZIPCode = { 24 | type: 'string', 25 | } 26 | 27 | const Email = { 28 | type: 'string', 29 | } 30 | 31 | const Any = { 32 | type: 'google.protobuf.Any', 33 | import: 'google/protobuf/any.proto' 34 | } 35 | 36 | const converter = { 37 | String: 'string', 38 | ID: 'string', 39 | Int: 'int64', 40 | Float: 'double', 41 | Boolean: 'bool', 42 | Any: 'google.protobuf.Any', 43 | } 44 | 45 | const stringFormatter = (p, context) => { 46 | switch (p) { 47 | case 'iD': 48 | return 'ID' 49 | } 50 | return p 51 | } 52 | 53 | module.exports = { 54 | ISODate, 55 | DateTime, 56 | CurrencyCent, 57 | UUID, 58 | Phone, 59 | Email, 60 | ZIPCode, 61 | Any, 62 | converter, 63 | stringFormatter, 64 | } 65 | -------------------------------------------------------------------------------- /schema/scalars/python-scalars.js: -------------------------------------------------------------------------------- 1 | const ISODate = { 2 | type: 'datetime.date', 3 | inputType: 'str', 4 | instantiate: 'datetime.date.fromisoformat' 5 | } 6 | 7 | const DateTime = { 8 | type: 'datetime.datetime', 9 | 
inputType: 'str', 10 | instantiate: 'dateutil_parser.isoparse' 11 | } 12 | 13 | const CurrencyCent = { 14 | type: 'int', 15 | } 16 | 17 | const Any = { 18 | type: 'dict', 19 | } 20 | 21 | const UUID = { 22 | type: 'str', 23 | pattern: '^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$', 24 | snippet: '', 25 | } 26 | 27 | const Phone = { 28 | type: 'str', 29 | pattern: '^\\\\+?[1-9]\\\\d{1,14}$', 30 | } 31 | 32 | const ZIPCode = { 33 | type: 'str', 34 | pattern: '^[0-9]{5}(?:-[0-9]{4})?$', 35 | } 36 | 37 | const Email = { 38 | type: 'str', 39 | } 40 | 41 | const converter = { 42 | String: 'str', 43 | ID: 'str', 44 | Int: 'int', 45 | Float: 'float', 46 | ListType: 'list', 47 | Boolean: 'bool', 48 | ObjectTypeDefinition: 'dict', 49 | EnumTypeDefinition: 'enumerate', 50 | } 51 | 52 | module.exports = { 53 | ISODate, 54 | DateTime, 55 | CurrencyCent, 56 | UUID, 57 | Phone, 58 | ZIPCode, 59 | Email, 60 | Any, 61 | converter, 62 | } 63 | -------------------------------------------------------------------------------- /schema/scalars/ruby-scalars.js: -------------------------------------------------------------------------------- 1 | const ISODate = { 2 | type: 'Date' 3 | } 4 | 5 | const DateTime = { 6 | type: 'DateTime' 7 | } 8 | 9 | const CurrencyCent = { 10 | type: 'Numeric', 11 | } 12 | 13 | const Any = { 14 | type: 'Hash', 15 | } 16 | 17 | const UUID = { 18 | type: 'String', 19 | pattern: '^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$', 20 | } 21 | 22 | const Phone = { 23 | type: 'String', 24 | pattern: '^\\+?[1-9]\\d{1,14}$', 25 | } 26 | 27 | const ZIPCode = { 28 | type: 'String', 29 | pattern: '^[0-9]{5}(?:-[0-9]{4})?$', 30 | } 31 | 32 | const Email = { 33 | type: 'String', 34 | } 35 | 36 | const stringFormatter = (p, context) => { 37 | switch (p) { 38 | case 'ID': 39 | return 'id' 40 | } 41 | return p 42 | } 43 | 44 | const converter = { 45 | String: 'String', 46 | ID: 'String', 47 | Int: 'Integer', 48 | Float: 'Float', 49 | ListType: 
'Array', 50 | Boolean: 'Boolean', // FIXME: This actually has no type match in Ruby, oddly. 51 | ObjectTypeDefinition: 'Hash', 52 | EnumTypeDefinition: 'enum', 53 | } 54 | 55 | module.exports = { 56 | ISODate, 57 | DateTime, 58 | CurrencyCent, 59 | UUID, 60 | Phone, 61 | ZIPCode, 62 | Email, 63 | Any, 64 | converter, 65 | stringFormatter, 66 | } 67 | -------------------------------------------------------------------------------- /scripts/bump/python.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const chalk = require('chalk') 4 | const { spawn } = require('promisify-child-process') 5 | const { version } = require('../../package.json') 6 | 7 | async function main() { 8 | const pythonBumpCmd = `bumpversion --current-version ${version} patch ./output/python/setup.py --allow-dirty` 9 | console.log('\n', chalk.blue('Bumping python package version'), chalk.yellow(pythonBumpCmd)) 10 | try { 11 | const { stdout, stderr } = await spawn(pythonBumpCmd.split(' ')[0], pythonBumpCmd.split(' ').slice(1), { encoding: 'utf8' }) 12 | console.log(stdout, stderr) 13 | } catch (e) { 14 | console.error(chalk.red(e), e.stdout, e.stderr) 15 | throw e 16 | } 17 | } 18 | 19 | if (!module.parent) main() 20 | 21 | module.exports = main 22 | -------------------------------------------------------------------------------- /scripts/bump/ruby.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const chalk = require('chalk') 4 | const { spawn } = require('promisify-child-process') 5 | const { version } = require('../../package.json') 6 | 7 | async function main() { 8 | const rubyBumpCmd = `bumpversion --current-version ${version} patch ./output/ruby/nav-schema-architecture.gemspec --allow-dirty` 9 | console.log('\n', chalk.blue('Bumping ruby gem version'), chalk.yellow(rubyBumpCmd)) 10 | try { 11 | const { stdout, stderr } = await spawn(rubyBumpCmd.split(' ')[0], 
rubyBumpCmd.split(' ').slice(1), { encoding: 'utf8' }) 12 | console.log(stdout, stderr) 13 | } catch (e) { 14 | console.error(chalk.red(e), e.stdout, e.stderr) 15 | throw e 16 | } 17 | } 18 | 19 | if (!module.parent) main() 20 | 21 | module.exports = main 22 | -------------------------------------------------------------------------------- /scripts/changelog/index.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const chalk = require('chalk') 4 | const changelog = require('generate-changelog') 5 | const prepend = require('prepend-file') 6 | 7 | async function main() { 8 | console.log('\n', chalk.blue('Generating changelog')) 9 | 10 | const changes = await changelog.generate({ 11 | patch: true, 12 | allowUnknown: true, 13 | }) 14 | 15 | try { 16 | await prepend('CHANGELOG.md', changes) 17 | } catch (e) { 18 | console.error(chalk.red(e)) 19 | throw e 20 | } 21 | 22 | console.log('\n', chalk.green('Changelog saved to CHANGELOG.md')) 23 | } 24 | 25 | if (!module.parent) main() 26 | 27 | module.exports = main 28 | -------------------------------------------------------------------------------- /scripts/clean-output/index.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const chalk = require('chalk') 4 | const { exec } = require('promisify-child-process') 5 | 6 | async function main() { 7 | ALLOWED_ERROR_MESSAGES = ['find: -delete: ./output: relative path potentially not safe', 'Directory not empty'] 8 | const cleanCmd = 9 | 'find ./output ! -name go.mod ! -name go.sum ! -name nav-schema-architecture.gemspec ! -name setup.py ! 
-name py.typed -delete' 10 | console.log('\n', chalk.blue('Cleaning output'), chalk.yellow(cleanCmd)) 11 | 12 | try { 13 | const { stdout, stderr } = await exec(cleanCmd) 14 | console.log(stdout, stderr) 15 | } catch (e) { 16 | if ( 17 | !ALLOWED_ERROR_MESSAGES.reduce((errorContainsAllowedError, msg) => { 18 | return errorContainsAllowedError || e.message.includes(msg) 19 | }, false) 20 | ) { 21 | console.error(chalk.red(e), e.stdout, e.stderr) 22 | throw e 23 | } 24 | return 25 | } 26 | } 27 | 28 | if (!module.parent) main() 29 | 30 | module.exports = main 31 | -------------------------------------------------------------------------------- /scripts/format/go-mod.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const chalk = require('chalk') 4 | const { spawn } = require('promisify-child-process') 5 | 6 | async function main() { 7 | const goModTidyCmd = 'go mod tidy' 8 | console.log('\n', chalk.blue('Resolving go dependencies'), chalk.yellow(goModTidyCmd)) 9 | try { 10 | const { stdout, stderr } = await spawn(goModTidyCmd.split(' ')[0], goModTidyCmd.split(' ').slice(1), { 11 | cwd: 'output/go', 12 | encoding: 'utf8', 13 | }) 14 | console.log(stdout, stderr) 15 | } catch (e) { 16 | console.error(chalk.red(e), e.stdout, e.stderr) 17 | throw e 18 | } 19 | } 20 | 21 | if (!module.parent) main() 22 | 23 | module.exports = main 24 | -------------------------------------------------------------------------------- /scripts/format/go.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const chalk = require('chalk') 4 | const { spawn } = require('promisify-child-process') 5 | 6 | async function main() { 7 | const goimportsCmd = 'goimports -w output/go' 8 | console.log('\n', chalk.blue('Formatting go output'), chalk.yellow(goimportsCmd)) 9 | try { 10 | const { stdout, stderr } = await spawn(goimportsCmd.split(' ')[0], goimportsCmd.split(' 
').slice(1), { encoding: 'utf8' }) 11 | console.log(stdout, stderr) 12 | } catch (e) { 13 | console.error(chalk.red(e), e.stdout, e.stderr) 14 | throw e 15 | } 16 | } 17 | 18 | if (!module.parent) main() 19 | 20 | module.exports = main 21 | -------------------------------------------------------------------------------- /scripts/format/protobuf.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const chalk = require('chalk') 4 | const { spawn } = require('promisify-child-process') 5 | 6 | async function main() { 7 | const protolintFixCmd = 'protolint lint -fix ./output/protobuf/' 8 | console.log('\n', chalk.blue('Formatting protobuf output'), chalk.yellow(protolintFixCmd)) 9 | try { 10 | const { stdout, stderr } = await spawn(protolintFixCmd.split(' ')[0], protolintFixCmd.split(' ').slice(1), { encoding: 'utf8' }) 11 | console.log(stdout, stderr) 12 | } catch (fixErr) { 13 | // protolint outputs lint failures that were fixed to stderr, causing spawn to initially fail 14 | console.log(chalk.yellow(fixErr), fixErr.stdout, fixErr.stderr) 15 | 16 | // run protolint a second time to verify there are no additional unfixed linting errors 17 | const protolintCmd = 'protolint lint ./output/protobuf/' 18 | console.log('\n', chalk.blue('Verifying protolint results'), chalk.yellow(protolintCmd)) 19 | 20 | try { 21 | const { stdout, stderr } = await spawn(protolintCmd.split(' ')[0], protolintCmd.split(' ').slice(1), { encoding: 'utf8' }) 22 | console.log(stdout, stderr) 23 | } catch (e) { 24 | console.error(chalk.red(e), e.stdout, e.stderr) 25 | 26 | throw e 27 | } 28 | } 29 | } 30 | 31 | if (!module.parent) main() 32 | 33 | module.exports = main 34 | -------------------------------------------------------------------------------- /scripts/format/python.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const chalk = require('chalk') 4 | const { 
spawn } = require('promisify-child-process') 5 | 6 | async function main() { 7 | const yapfCmd = 'yapf -irp --verbose *.py output/python' 8 | const unifyCmd = 'unify -ir output/python' 9 | const autoflakeCmd = 'autoflake -ir --remove-all-unused-imports --ignore-init-module-imports output/python' 10 | console.log('\n', chalk.blue('Formatting python output'), chalk.yellow(yapfCmd)) 11 | console.log('\n', chalk.blue('Formatting python output'), chalk.yellow(unifyCmd)) 12 | console.log('\n', chalk.blue('Formatting python output'), chalk.yellow(autoflakeCmd)) 13 | try { 14 | let stdout, stderr 15 | ;({ stdout, stderr } = await spawn(yapfCmd.split(' ')[0], yapfCmd.split(' ').slice(1), { encoding: 'utf8' })) 16 | console.log(stdout, stderr) 17 | ;({ stdout, stderr } = await spawn(unifyCmd.split(' ')[0], unifyCmd.split(' ').slice(1), { encoding: 'utf8' })) 18 | console.log(stdout, stderr) 19 | ;({ stdout, stderr } = await spawn(autoflakeCmd.split(' ')[0], autoflakeCmd.split(' ').slice(1), { encoding: 'utf8' })) 20 | console.log(stdout, stderr) 21 | } catch (e) { 22 | console.error(chalk.red(e), e.stdout, e.stderr) 23 | throw e 24 | } 25 | } 26 | 27 | if (!module.parent) main() 28 | 29 | module.exports = main 30 | -------------------------------------------------------------------------------- /scripts/format/ruby.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const chalk = require('chalk') 4 | const { spawn } = require('promisify-child-process') 5 | 6 | async function main() { 7 | const rubocopCmd = 'rubocop -AD --format simple ./output/ruby/' 8 | console.log('\n', chalk.blue('Formatting ruby output'), chalk.yellow(rubocopCmd)) 9 | try { 10 | const { stdout, stderr } = await spawn(rubocopCmd.split(' ')[0], rubocopCmd.split(' ').slice(1), { 11 | encoding: 'utf8', 12 | maxBuffer: 1024 * 1000, 13 | }) 14 | console.log(stdout) 15 | console.log(stderr) 16 | } catch (e) { 17 | console.error(chalk.red(e), 
e.stdout, e.stderr) 18 | throw e 19 | } 20 | } 21 | 22 | if (!module.parent) main() 23 | 24 | module.exports = main 25 | -------------------------------------------------------------------------------- /scripts/generate-output/commit.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const chalk = require('chalk') 4 | const addWorkingFiles = require('../git/add-working-files') 5 | const commit = require('../git/commit') 6 | const pushFiles = require('../git/push-files') 7 | const generate = require('./generate') 8 | 9 | async function main() { 10 | console.log('\n', chalk.blue('Generating and formatting output schemas')) 11 | try { 12 | await generate() 13 | await addWorkingFiles() 14 | await commit() 15 | await pushFiles() 16 | } catch (e) { 17 | console.error(chalk.red(e)) 18 | process.exit(1) 19 | } 20 | } 21 | 22 | if (!module.parent) main() 23 | 24 | module.exports = main 25 | -------------------------------------------------------------------------------- /scripts/generate-output/generate.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const chalk = require('chalk') 4 | const cleanOutput = require('../clean-output') 5 | const parse = require('../../src/index') 6 | const pythonPackages = require('../python/packages') 7 | const goimports = require('../format/go') 8 | const gomod = require('../format/go-mod') 9 | const rubocop = require('../format/ruby') 10 | const protolint = require('../format/protobuf') 11 | const yapf = require('../format/python') 12 | 13 | async function main() { 14 | console.log('\n', chalk.blue('Generating and formatting output schemas')) 15 | try { 16 | await cleanOutput() 17 | await Promise.resolve(parse()) 18 | await pythonPackages() 19 | await goimports() 20 | await gomod() 21 | await rubocop() 22 | await protolint() 23 | await yapf() 24 | } catch (e) { 25 | console.error(chalk.red(e)) 26 | 
process.exit(1) 27 | } 28 | } 29 | 30 | if (!module.parent) main() 31 | 32 | module.exports = main 33 | -------------------------------------------------------------------------------- /scripts/git/add-working-files.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const chalk = require('chalk') 4 | const { spawn } = require('promisify-child-process') 5 | 6 | async function main() { 7 | const gitCmd = 'git add output/ CHANGELOG.md --force' 8 | console.log('\n', chalk.blue('Staging files'), '\n', chalk.yellow(gitCmd)) 9 | 10 | try { 11 | const { stdout, stderr } = await spawn(gitCmd.split(' ')[0], gitCmd.split(' ').slice(1), { encoding: 'utf8' }) 12 | console.log(stdout, stderr) 13 | } catch (e) { 14 | console.error(chalk.red(e), e.stdout, e.stderr) 15 | throw e 16 | } 17 | } 18 | 19 | if (!module.parent) main() 20 | 21 | module.exports = main 22 | -------------------------------------------------------------------------------- /scripts/git/commit.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const chalk = require('chalk') 4 | const { spawn } = require('promisify-child-process') 5 | 6 | async function main() { 7 | const gitCmd = 'git commit --allow-empty -m "ci(build): generated new nav schema output"' 8 | console.log('\n', chalk.blue('Committing schema output'), chalk.yellow(gitCmd)) 9 | 10 | try { 11 | const { stdout, stderr } = await spawn( 12 | 'git', 13 | ['commit', '--allow-empty', '-m', 'ci(build): generated new nav schema output'], 14 | { encoding: 'utf8' } 15 | ) 16 | console.log(stdout, stderr) 17 | } catch (e) { 18 | console.error(chalk.red(e), e.stdout, e.stderr) 19 | throw e 20 | } 21 | } 22 | 23 | if (!module.parent) main() 24 | 25 | module.exports = main 26 | -------------------------------------------------------------------------------- /scripts/git/push-files-tags.js: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const chalk = require('chalk') 4 | const { spawn } = require('promisify-child-process') 5 | 6 | async function main() { 7 | const gitCmd = 'git push' 8 | const gitTagCmd = 'git push --tags' 9 | console.log('\n', 'Pushing commits and tags', chalk.yellow(gitCmd), chalk.yellow(gitTagCmd)) 10 | 11 | try { 12 | let stdout, stderr 13 | ;({ stdout, stderr } = await spawn(gitCmd.split(' ')[0], gitCmd.split(' ').slice(1), { encoding: 'utf8' })) 14 | console.log(stdout, stderr) 15 | ;({ stdout, stderr } = await spawn(gitTagCmd.split(' ')[0], gitTagCmd.split(' ').slice(1), { encoding: 'utf8' })) 16 | console.log(stdout, stderr) 17 | } catch (e) { 18 | console.error(chalk.red(e), e.stdout, e.stderr) 19 | throw e 20 | } 21 | } 22 | 23 | if (!module.parent) main() 24 | 25 | module.exports = main 26 | -------------------------------------------------------------------------------- /scripts/git/push-files.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const chalk = require('chalk') 4 | const { spawn } = require('promisify-child-process') 5 | 6 | async function main() { 7 | const gitCmd = 'git push' 8 | console.log('\n', 'Pushing commits', chalk.yellow(gitCmd)) 9 | 10 | try { 11 | const { stdout, stderr } = await spawn(gitCmd.split(' ')[0], gitCmd.split(' ').slice(1), { encoding: 'utf8' }) 12 | console.log(stdout, stderr) 13 | } catch (e) { 14 | console.error(chalk.red(e), e.stdout, e.stderr) 15 | throw e 16 | } 17 | } 18 | 19 | if (!module.parent) main() 20 | 21 | module.exports = main 22 | -------------------------------------------------------------------------------- /scripts/git/tag-go-module.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const chalk = require('chalk') 4 | const { spawn } = require('promisify-child-process') 5 | 
const { version } = require('../../package.json') 6 | 7 | async function main() { 8 | const gitCmd = `git tag -a "output/go/v${version}" -m "go module tag version v${version}"` 9 | console.log('\n', chalk.blue('Tagging for go module definition'), chalk.yellow(gitCmd)) 10 | 11 | try { 12 | const { stdout, stderr } = await spawn( 13 | 'git', 14 | ['tag', '-a', `output/go/v${version}`, '-m', `go module tag version v${version}`], 15 | { encoding: 'utf8' } 16 | ) 17 | console.log(stdout, stderr) 18 | } catch (e) { 19 | console.error(chalk.red(e), e.stdout, e.stderr) 20 | throw e 21 | } 22 | } 23 | 24 | if (!module.parent) main() 25 | 26 | module.exports = main 27 | -------------------------------------------------------------------------------- /scripts/postversion/index.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const chalk = require('chalk') 4 | const tagGoModule = require('../git/tag-go-module') 5 | const pushFilesTags = require('../git/push-files-tags') 6 | 7 | async function main() { 8 | try { 9 | await tagGoModule() 10 | await pushFilesTags() 11 | } catch (e) { 12 | console.error(chalk.red(e)) 13 | process.exit(1) 14 | } 15 | } 16 | 17 | if (!module.parent) main() 18 | 19 | module.exports = main 20 | -------------------------------------------------------------------------------- /scripts/preversion/index.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const chalk = require('chalk') 4 | const bumpRuby = require('../bump/ruby') 5 | const bumpPython = require('../bump/python') 6 | const generateChangelog = require('../changelog') 7 | const { IS_VERSION_ALLOWED } = process.env 8 | 9 | async function main() { 10 | if (!IS_VERSION_ALLOWED) { 11 | console.error(chalk.red('Error: environment settings indicate that versioning is not allowed')) 12 | process.exit(1) 13 | } 14 | try { 15 | await bumpRuby() 16 | await bumpPython() 17 | 
await generateChangelog() 18 | } catch (e) { 19 | console.error(chalk.red(e)) 20 | process.exit(1) 21 | } 22 | } 23 | 24 | if (!module.parent) main() 25 | 26 | module.exports = main 27 | -------------------------------------------------------------------------------- /scripts/python/packages.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const chalk = require('chalk') 4 | const { exec } = require('promisify-child-process') 5 | 6 | async function main() { 7 | const generatePkgCommand = 'find ./output/python/nsa -type d -exec touch {}/__init__.py \\;' 8 | console.log('\n', chalk.blue('Creating __init__.py files for python packages'), chalk.yellow(generatePkgCommand)) 9 | try { 10 | let { stdout, stderr } = await exec(generatePkgCommand) 11 | console.log(stdout, stderr) 12 | } catch (e) { 13 | console.error(chalk.red(e), e.stdout, e.stderr) 14 | throw e 15 | } 16 | } 17 | 18 | if (!module.parent) main() 19 | 20 | module.exports = main 21 | -------------------------------------------------------------------------------- /src/generators/go-struct.js: -------------------------------------------------------------------------------- 1 | const R = require('ramda') 2 | const { 3 | name: __name, 4 | __getType: fieldType, 5 | __getFields, 6 | metadata, 7 | schemaTypes, 8 | processFieldModifiers, 9 | clone, 10 | rootType, 11 | capitalize, 12 | firstLowerCase, 13 | mapIndexed, 14 | key, 15 | valueForKey, 16 | crlf, 17 | projectionTypes, 18 | fieldPattern, 19 | set, 20 | extractName, 21 | extractValue, 22 | extractTypeFromProp, 23 | exists, 24 | log, 25 | fileDescriptor, 26 | extractDir, 27 | isString, 28 | tab, 29 | additionalCodeFrom, 30 | isFieldTypeAnEnum, 31 | typeConverterFrom, 32 | setOfFilesFrom, 33 | foldObject, 34 | modifierSelector, 35 | } = require('../lib') 36 | 37 | const { EOL } = require('os') 38 | 39 | const scalars = require('../../schema/scalars/go-scalars.js') 40 | const convertType = 
typeConverterFrom(scalars) 41 | const canBeZeroTypes = ['int64','double','bool'] 42 | 43 | const enumDef = (e, values) => ` 44 | type ${e} string 45 | 46 | const ( 47 | ${mapIndexed( 48 | (v, idx) => `${tab(idx)}${extractValue(v)} ${e} = "${extractValue(v)}"${crlf(idx, values.length - 1)}`, 49 | values 50 | ).join('')} 51 | ) 52 | 53 | func (v ${e}) Validate() error { 54 | switch v { 55 | case ${mapIndexed((v, idx) => extractValue(v), values).join(', ')}: 56 | return nil 57 | default: 58 | return fmt.Errorf("%s is not a valid ${e} value", v) 59 | } 60 | } 61 | 62 | ` 63 | 64 | const structDef = ({ name, fields }, isRequiredProps, isArrayProps, isEnumProps, stringFormatter) => { 65 | const tpe = `type ${name} struct { 66 | ${mapIndexed((v, idx) => { 67 | const ftv = fieldType(valueForKey(v)) || valueForKey(v) 68 | const isArray = isArrayProps.includes(key(v)) 69 | const isEnum = exists(R.find((e) => e.pName === ftv, isEnumProps)) 70 | const output = `${tab(idx)}${capitalize(key(v))} ${isArray ? '[]' : '*'}${isEnum ? 'enums.' 
: ''}${convertType( 71 | ftv 72 | )} \`json:"${stringFormatter(firstLowerCase(key(v)), { 73 | element: 'json', 74 | })},omitempty"\`${crlf(idx, fields.length - 1)}` 75 | return output 76 | }, fields).join('')} 77 | } 78 | 79 | func (o ${name}) Validate() error { 80 | ${mapIndexed((v, idx) => { 81 | const ftv = fieldType(valueForKey(v)) || valueForKey(v) 82 | const rawFieldName = key(v) 83 | const isRequired = isRequiredProps.includes(rawFieldName) 84 | const isArray = isArrayProps.includes(rawFieldName) 85 | const field = capitalize(rawFieldName) 86 | const pattern = fieldPattern(valueForKey(v)) 87 | let requiredError = '' 88 | let patternError = '' 89 | let validationError = '' 90 | 91 | if (isRequired) { 92 | requiredError = ` 93 | if o.${field} == nil { 94 | return fmt.Errorf("${field} is a required property of ${name}") 95 | }` 96 | if (isArray) { 97 | requiredError += ` else if len(o.${field}) == 0 { 98 | return fmt.Errorf("${field} is a required property of ${name}") 99 | }` 100 | } else if (!canBeZeroTypes.includes(convertType(ftv))) { 101 | requiredError += ` else if reflect.ValueOf(*o.${field}).IsZero() { 102 | return fmt.Errorf("${field} is a required property of ${name}") 103 | }` 104 | } 105 | } 106 | 107 | if (pattern) { 108 | patternError = ` 109 | matched${field}, err := regexp.MatchString(\"${pattern}\", *o.${field}) 110 | if !matched${field} || err != nil { 111 | return fmt.Errorf("invalid ${field} value of (%s) for ${name}: (%w)", *o.${field}, err) 112 | }` 113 | if (!isRequired) { 114 | patternError = ` 115 | if o.${field} != nil { 116 | ${isArray ? 
patternError : `if !reflect.ValueOf(*o.${field}).IsZero() {${patternError}}`} 117 | }` 118 | } 119 | } 120 | 121 | if ((isString(valueForKey(v)) && !pattern) || isFieldTypeAnEnum(ftv, isEnumProps)) { 122 | if (isArray) { 123 | validationError = ` 124 | for _, ${rawFieldName}Element := range o.${field} { 125 | ${field}Error := ${rawFieldName}Element.Validate() 126 | if ${field}Error != nil { 127 | return fmt.Errorf("An element of ${field} is invalid %s", ${field}Error) 128 | } 129 | } 130 | ` 131 | } else { 132 | validationError = ` 133 | ${field}Error := o.${field}.Validate() 134 | if ${field}Error != nil { 135 | return fmt.Errorf("${field} is invalid %s", ${field}Error) 136 | } 137 | ` 138 | } 139 | if (!isRequired) { 140 | validationError = ` 141 | if o.${field} != nil {${validationError}}` 142 | } 143 | } 144 | return requiredError + patternError + validationError 145 | }, fields).join('')} 146 | return nil 147 | } 148 | ` 149 | return tpe 150 | } 151 | 152 | const isEntity = (propName, typeStruct) => typeof fieldType(R.last(R.filter(R.propEq('name', propName), typeStruct))) !== 'object' 153 | 154 | const generate = (message, schema, stringFormatter = scalars.stringFormatter) => 155 | generateGolang({ message, schema: clone(schema), stringFormatter }, metadata(message)) 156 | 157 | const generateGolang = ( 158 | { message, schema, stringFormatter }, 159 | { name, title, namespace, version, node }, 160 | root = rootType(schema, name) 161 | ) => { 162 | try { 163 | const st = processFieldModifiers(schemaTypes(schema)) 164 | const goTypes = projectionTypes({ 165 | message, 166 | schemaTypes: st, 167 | root, 168 | scalars, 169 | convertType, 170 | structDef, 171 | enumDef, 172 | stringFormatter, 173 | }) 174 | const rootStruct = R.last(goTypes) 175 | const rootDirectives = foldObject('name', 'directives', rootStruct) 176 | const rootPropNames = R.map(extractName, rootStruct) 177 | const rootTypeFields = __getFields(R.head(R.filter((s) => __name(s) === root, st))) 
178 | const rootStructTypes = R.map((p) => R.head(R.filter((rtf) => __name(rtf) === p.name, rootTypeFields)), rootStruct) 179 | const [isRequired, isArray] = R.map((p) => modifierSelector(p, rootStructTypes, rootDirectives), ['isRequired', 'isArray']) 180 | 181 | const generatedEnums = [] 182 | 183 | const goStructs = ` 184 | package ${R.replace(/\./g, '_', namespace)}${version > 1 ? `_v${version}`: ''}_${name} 185 | 186 | import ( 187 | "fmt" 188 | "git.nav.com/engineering/nav-schema-architecture/output/go/nsa/enums" 189 | "encoding/json" 190 | "errors" 191 | "regexp" 192 | "reflect" 193 | ) 194 | 195 | // ${title} 196 | 197 | 198 | ${R.map(({ addOn, isEnum, type }) => { 199 | if (!isEnum) { 200 | return addOn 201 | } 202 | generatedEnums.push({ type, addOn }) 203 | }, set(goTypes, 'type')) 204 | .join(EOL) 205 | .slice(0, -1)} 206 | 207 | type ${capitalize(name)} struct { 208 | ${mapIndexed((prop, idx) => { 209 | const propType = extractTypeFromProp(prop) 210 | const isArrayType = isArray.includes(rootPropNames[idx]) 211 | const isEnum = R.reduce((isAnEnum, e) => isAnEnum || R.equals(e.type, propType), false, generatedEnums) 212 | return `${tab(idx)}${capitalize(extractName(prop))} ${isArrayType ? '[]' : '*'}${ 213 | isEnum ? 'enums.' 
: '' 214 | }${propType} \`json:"${stringFormatter(firstLowerCase(extractName(prop)), { element: 'json' })},omitempty"\`` 215 | }, rootStruct).join(EOL)} 216 | } 217 | 218 | func (o ${capitalize(name)}) Validate() error { 219 | ${mapIndexed((v, idx) => { 220 | const rpn = rootPropNames[idx] 221 | const isReq = isRequired.includes(rpn) 222 | const isArr = isArray.includes(rpn) 223 | const field = capitalize(rpn) 224 | const pattern = fieldPattern(v?.type) 225 | const fieldType = extractTypeFromProp(v) 226 | let patternError = '' 227 | let requiredError = '' 228 | let validationError = '' 229 | 230 | if (isReq) { 231 | requiredError = ` 232 | if o.${field} == nil { 233 | return fmt.Errorf("${field} is a required property of ${name}") 234 | }` 235 | if (isArr) { 236 | requiredError += ` else if len(o.${field}) == 0 { 237 | return fmt.Errorf("${field} is a required property of ${name}") 238 | }` 239 | } else if (!canBeZeroTypes.includes(convertType(fieldType))) { 240 | requiredError += ` else if reflect.ValueOf(*o.${field}).IsZero() { 241 | return fmt.Errorf("${field} is a required property of ${name}") 242 | }` 243 | } 244 | } 245 | 246 | if (pattern) { 247 | patternError = ` 248 | matched${field}, err := regexp.MatchString(\"${pattern}\", *o.${field}) 249 | if !matched${field} || err != nil { 250 | return fmt.Errorf("invalid ${field} value of (%s) for ${name}: (%w)", *o.${field}, err) 251 | }` 252 | if (!isReq) { 253 | patternError = ` 254 | if o.${field} != nil { 255 | ${isArr ? 
patternError : `if !reflect.ValueOf(*o.${field}).IsZero() {${patternError}}`} 256 | }` 257 | } 258 | } 259 | 260 | if (isEntity(rpn, rootStruct)) { 261 | if (isArr) { 262 | validationError = ` 263 | for _, ${rpn}Element := range o.${field} { 264 | ${field}Error := ${rpn}Element.Validate() 265 | if ${field}Error != nil { 266 | return fmt.Errorf("An element of ${field} is invalid %s", ${field}Error) 267 | } 268 | } 269 | ` 270 | } else { 271 | validationError = ` 272 | ${field}Error := o.${field}.Validate() 273 | if ${field}Error != nil { 274 | return fmt.Errorf("${field} is invalid %s", ${field}Error) 275 | } 276 | ` 277 | if (!isReq) { 278 | validationError = ` 279 | if o.${field} != nil { 280 | ${validationError} 281 | } 282 | ` 283 | } 284 | } 285 | } 286 | return requiredError + patternError + validationError 287 | }, rootStruct).join('')} 288 | return nil 289 | } 290 | 291 | ` 292 | return { code: goStructs, path: name, additionalCode: generatedEnums, imports: [] } 293 | } catch (e) { 294 | const err = `[ERROR] Message ${message?.name?.value} specification is incorrect: ${e}` 295 | log(err) 296 | return err 297 | } 298 | } 299 | 300 | const postProcessStep = (files, messageDefinitionDir) => { 301 | const enumsFile = additionalCodeFrom('go', files) 302 | const enumsSet = setOfFilesFrom('type', enumsFile) 303 | const code = ` 304 | package enums 305 | 306 | import "fmt" 307 | 308 | ${R.map(R.path(['addOn']), enumsSet).join(EOL)} 309 | 310 | ` 311 | if (exists(enumsSet) && !R.isEmpty(enumsSet)) { 312 | files.push(fileDescriptor(extractDir(enumsFile), 'enums/enums.go', code)) 313 | } 314 | } 315 | 316 | const schemaType = 'go-struct' 317 | 318 | module.exports = { 319 | generate, 320 | schemaType, 321 | postProcessStep, 322 | } 323 | -------------------------------------------------------------------------------- /src/generators/index.js: -------------------------------------------------------------------------------- 1 | const R = require('ramda') 2 | const { 
generate: goStructGenerate, postProcessStep: goPostProcessStep } = require('./go-struct.js') 3 | const { generate: protobufGenerate, postProcessStep: protoPostProcessStep } = require('./protobuf.js') 4 | const { generate: rubyGenerate, postProcessStep: rubyPostProcessStep } = require('./ruby.js') 5 | const { generate: pythonGenerate } = require('./python.js') 6 | const { generate: jsonSchemaGenerate } = require('./json-schema.js') 7 | const { env } = require('../lib') 8 | const config = [ 9 | { 10 | generate: jsonSchemaGenerate, 11 | extension: 'json', 12 | toDir: env('OUTPUT_DIR_JSON_SCHEMA', 'output/json-schema'), 13 | type: 'json-schema', 14 | outputFormatter: (code) => JSON.stringify(code, null, 4), 15 | }, 16 | { 17 | generate: rubyGenerate, 18 | extension: 'rb', 19 | toDir: env('OUTPUT_DIR_RUBY', 'output/ruby/nsa'), 20 | staticDir: env('STATIC_DIR_RUBY', 'static/ruby/nsa'), 21 | type: 'ruby', 22 | outputFormatter: R.identity, 23 | postProcessStep: rubyPostProcessStep, 24 | }, 25 | { 26 | generate: pythonGenerate, 27 | extension: 'py', 28 | toDir: env('OUTPUT_DIR_PYTHON', 'output/python/nsa'), 29 | type: 'python', 30 | outputFormatter: R.identity, 31 | }, 32 | { 33 | generate: goStructGenerate, 34 | extension: 'go', 35 | toDir: env('OUTPUT_DIR_GO_STRUCT', 'output/go/nsa'), 36 | type: 'golang', 37 | outputFormatter: R.identity, 38 | postProcessStep: goPostProcessStep, 39 | }, 40 | { 41 | generate: protobufGenerate, 42 | extension: 'proto', 43 | toDir: env('OUTPUT_DIR_GO_STRUCT', 'output/protobuf/nsa'), 44 | type: 'protobuf', 45 | outputFormatter: R.identity, 46 | postProcessStep: protoPostProcessStep, 47 | }, 48 | ] 49 | 50 | module.exports = config 51 | -------------------------------------------------------------------------------- /src/generators/json-schema.js: -------------------------------------------------------------------------------- 1 | const R = require('ramda') 2 | const { 3 | env, 4 | name, 5 | hasName, 6 | enumValues, 7 | kind, 8 | metadata, 9 | 
schemaTypes, 10 | fieldTypeFromTypeDef, 11 | processFieldModifiers, 12 | extractType, 13 | extractName, 14 | __extractName, 15 | extractValue, 16 | findElement, 17 | typeIsValid, 18 | exists, 19 | rootType, 20 | fields, 21 | selections, 22 | clone, 23 | typeConverterFrom, 24 | propObject, 25 | } = require('../lib') 26 | 27 | const scalars = require('../../schema/scalars/json-schema.js') 28 | 29 | const jsonSchemaVersion = env('JSON_SCHEMA_VERSION', 'http://json-schema.org/draft-07/schema#') 30 | 31 | const { converter, stringFormatter = R.identity } = scalars 32 | converter.ObjectTypeDefinition = 'object' 33 | converter.ListType = 'array' 34 | 35 | const reference = (t) => ({ $ref: `#${t}` }) 36 | const enumDef = (values) => ({ 37 | enum: R.map(extractValue, values), 38 | }) 39 | 40 | const convertType = typeConverterFrom(scalars) 41 | /** 42 | * Generates the projected type, recursively 43 | * @param {string} prop - the property definition to be processed 44 | * @param {string} schemaTypes - the array of schema types, for easy lookup 45 | * @param {string} refTypes - The accumulator of message referenced types that need to be added to the schema at the end 46 | * @param {string} messageFields - The message selections associated to the prop 47 | */ 48 | const typeFor = (prop, schemaTypes, refTypes, messageFields) => { 49 | const propType = extractType(prop) 50 | const propName = extractName(prop) 51 | if (typeIsValid(propType) && exists(propName)) { 52 | const typeDef = findElement(propType, schemaTypes) 53 | switch (kind(typeDef)) { 54 | case 'ScalarTypeDefinition': 55 | const stype = scalars[name(typeDef)] 56 | if (R.isNil(stype)) { 57 | throw `Scalar type not defined ${stype}` 58 | } 59 | return stype 60 | case 'EnumTypeDefinition': 61 | return enumDef(enumValues(typeDef)) 62 | case 'ObjectTypeDefinition': 63 | const objectField = hasName(propName)(messageFields) 64 | const childMessageFields = selections(objectField) 65 | // in theory this type should be named 
specifically for this element since 66 | // we could have another element using the same base type but a different set of 67 | // properties --> to be investigated: do we want to support that case? / hard to enforce. 68 | const isRequiredProps = [] 69 | 70 | const objectType = propObject(propType, { 71 | $id: `#${propType}`, 72 | type: 'object', 73 | properties: R.mergeAll( 74 | R.map((p) => { 75 | const pName = name(p) 76 | const pType = fieldTypeFromTypeDef(pName, typeDef) 77 | const pTypeName = name(pType) 78 | if (pType?.isRequired) { 79 | isRequiredProps.push(pName) 80 | } 81 | const pTypeDef = typeFor( 82 | { 83 | name: pName, 84 | type: convertType(pTypeName), 85 | }, 86 | schemaTypes, 87 | refTypes, 88 | childMessageFields 89 | ) 90 | 91 | return pType?.isArray 92 | ? propObject(pName, { 93 | type: 'array', 94 | items: pTypeDef, 95 | }) 96 | : propObject(pName, pTypeDef) 97 | }, childMessageFields) 98 | ), 99 | }) 100 | if (isRequiredProps.length > 0) { 101 | objectType[propType].required = isRequiredProps 102 | } 103 | refTypes.push(objectType) 104 | return reference(propType) 105 | case 'UnionTypeDefinition': 106 | const unionField = hasName(propName)(messageFields) 107 | const childTypes = selections(unionField) 108 | const unionType = propObject(propType, { 109 | $id: `#${propType}`, 110 | type: ['number', 'string', 'boolean', 'object', 'array', 'null'], 111 | }) 112 | refTypes.push(unionType) 113 | return reference(propType) 114 | } 115 | } 116 | return { type: convertType(propType) } 117 | } 118 | 119 | /** 120 | * Calculates the intersection between the message definition and the schema types 121 | * The projection types are unique to that message schema and must not be reused 122 | * Payload schemas must be self-standing, because a type can have any number of 123 | * projections and projections can vary arbitrarily 124 | * @param message - The parsed graphql message definition 125 | * @param schemaTypes - The parsed graphql schema types 126 | * 
@param root - The name of the root element of the message schema/class 127 | */ 128 | const projectionTypes = ({ message, schemaTypes, root }) => { 129 | const referencedTypes = [] 130 | const rootSchemaType = R.last(R.filter((t) => R.equals(name(t), root))(schemaTypes)) 131 | const messageProps = R.map((p) => name(p), selections(message)) 132 | const typeFields = R.filter((f) => R.includes(__extractName(f), messageProps), fields(rootSchemaType)) 133 | 134 | // filter props from message definition 135 | const typeDef = propObject(root, { 136 | $id: `#${root}`, 137 | type: 'object', 138 | properties: R.mergeAll( 139 | R.map((prop) => propObject(extractName(prop), typeFor(prop, schemaTypes, referencedTypes, selections(message))), typeFields) 140 | ), 141 | }) 142 | referencedTypes.push(typeDef) 143 | return referencedTypes 144 | } 145 | 146 | const generate = (message, schema, stringFormatter = scalars.stringFormatter) => 147 | generateJsonSchema({ message, schema: clone(schema), stringFormatter }, metadata(message)) 148 | 149 | const generateJsonSchema = ( 150 | { message, schema, stringFormatter }, 151 | { name, title, description, namespace, version, node }, 152 | root = rootType(schema, name) 153 | ) => ({ 154 | code: rootless({ 155 | $schema: jsonSchemaVersion, 156 | $id: `${env('URI_SCHEME', 'http://nav.com')}/${namespace}.schema.json/v${version}`, 157 | title: title, 158 | description: description, 159 | required: [name], 160 | type: 'object', 161 | properties: propObject(name, reference(root)), 162 | definitions: R.mergeAll( 163 | projectionTypes({ 164 | message, 165 | schemaTypes: processFieldModifiers(schemaTypes(schema)), 166 | root, 167 | stringFormatter, 168 | }) 169 | ), 170 | required: [], 171 | }), 172 | }) 173 | 174 | const rootless = (schema, isRootless = true) => { 175 | if (isRootless) { 176 | const s = R.clone(schema) 177 | const props = R.pathOr({}, ['properties'], s) 178 | const root = R.last(R.keys(props)) 179 | const type = 
props[root]['$ref'].substring(1, props[root]['$ref'].length, R.pathOr('', [root, '$ref'], props)) 180 | 181 | const rootTypeDef = R.pathOr(null, ['definitions', type], s) 182 | 183 | if (rootTypeDef) { 184 | s.properties = rootTypeDef.properties 185 | delete s.definitions[type] 186 | } 187 | 188 | return s 189 | } 190 | return schema 191 | } 192 | 193 | const schemaType = 'json-schema' 194 | 195 | module.exports = { 196 | generate, 197 | schemaType, 198 | rootless, 199 | convertType, 200 | } 201 | -------------------------------------------------------------------------------- /src/generators/protobuf.js: -------------------------------------------------------------------------------- 1 | const R = require('ramda') 2 | const { 3 | name: __name, 4 | metadata, 5 | schemaTypes, 6 | rootType, 7 | __getFields, 8 | __getType: getTypeOfField, 9 | clone, 10 | exists, 11 | set, 12 | extractName, 13 | directives: __directives, 14 | mapIndexed, 15 | extractTypeFromProp, 16 | extractValue, 17 | capitalize, 18 | processFieldModifiers, 19 | projectionTypes, 20 | camelToSnakeCase, 21 | noNullElementArray, 22 | additionalCodeFrom, 23 | setOfFilesFrom, 24 | fileDescriptor, 25 | extractDir, 26 | isString, 27 | tab, 28 | key, 29 | order, 30 | valueForKey, 31 | } = require('../lib') 32 | 33 | const { EOL } = require('os') 34 | 35 | const scalars = require('../../schema/scalars/protobuf-scalars.js') 36 | 37 | const { converter } = scalars 38 | converter.ObjectTypeDefinition = 'message' 39 | converter.EnumTypeDefinition = 'enum' 40 | 41 | const typeConverter = (t) => converter[t] 42 | 43 | const value = (v) => v[key(v)] 44 | 45 | const enumDef = (e, values) => `enum ${e} { 46 | \t${camelToSnakeCase(e).toUpperCase()}_UNSPECIFIED = 0; 47 | ${mapIndexed( 48 | (v, idx) => `\t${camelToSnakeCase(extractValue(v)).toUpperCase()} = ${idx + 1};`, 49 | R.filter((v) => extractValue(v).indexOf('UNSPECIFIED') < 1, values) 50 | ).join(EOL)} 51 | }` 52 | 53 | const messageDef = ({ name, fields, 
directives }, _isRequiredProps, isArrayProps, isEnumProps, _stringFormatter) => { 54 | return `message ${name} { 55 | ${mapIndexed( 56 | (v, idx) => { 57 | const fieldName = key(v) 58 | const fieldType = getTypeOfField(valueForKey(v)) || valueForKey(v) 59 | const isArray = isArrayProps.includes(fieldName) 60 | const isEnum = exists(R.find((e) => e.pName === fieldType, isEnumProps)) 61 | return `${tab(idx)}${isArray ? 'repeated ' : ''}${isEnum ? 'enums.' : ''}${convertType( 62 | isString(value(v)) ? value(v) : fieldTypeConverter(value(v)) 63 | )} ${key(v)} = ${order(directives[key(v)])};` 64 | }, 65 | fields.sort((a, b) => { 66 | if (parseInt(order(directives[key(a)])) > parseInt(order(directives[key(b)]))) { 67 | return 1 68 | } 69 | 70 | if (parseInt(order(directives[key(a)])) < parseInt(order(directives[key(b)]))) { 71 | return -1 72 | } 73 | 74 | return 0 75 | }) 76 | ).join(EOL)} 77 | }` 78 | } 79 | 80 | const fieldTypeConverter = (prop) => R.pathOr('undefined_type_error', ['type'], prop) 81 | 82 | const convertType = (t) => R.or(typeConverter(t), t) 83 | 84 | const generate = (message, schema, stringFormatter = scalars.stringFormatter) => { 85 | return generateProtobuf({ message, schema: clone(schema), stringFormatter }, metadata(message)) 86 | } 87 | 88 | const generateProtobuf = ({ message, schema, stringFormatter }, { name, title, namespace, version }, root = rootType(schema, name)) => { 89 | const st = processFieldModifiers(schemaTypes(schema)) 90 | const protoTypes = set( 91 | projectionTypes({ 92 | message, 93 | schemaTypes: st, 94 | root, 95 | scalars, 96 | convertType, 97 | structDef: messageDef, 98 | enumDef, 99 | stringFormatter, 100 | }), 101 | (i) => (typeof i.type === 'object' ? i.type?.type : i.type) 102 | ) 103 | 104 | const generatedEnums = [] 105 | const generatedScalars = [] 106 | const entity = `${camelToSnakeCase(R.last(namespace.split('.')))}${version > 1 ? 
`_v${version}` : ''}` 107 | const rootStruct = R.last(protoTypes).sort((a, b) => { 108 | const aDirectives = __directives(a) 109 | const bDirectives = __directives(b) 110 | if (parseInt(order(aDirectives[extractName(a)])) > parseInt(order(bDirectives[extractName(b)]))) { 111 | return 1 112 | } 113 | 114 | if (parseInt(order(aDirectives[extractName(a)])) < parseInt(order(bDirectives[extractName(b)]))) { 115 | return -1 116 | } 117 | 118 | return 0 119 | }) 120 | const rootPropNames = R.map(extractName, rootStruct) 121 | const rootTypeFields = __getFields(R.head(R.filter((s) => __name(s) === root, st))) 122 | const rootStructTypes = R.map((p) => R.head(R.filter((rtf) => __name(rtf) === p.name, rootTypeFields)), rootStruct) 123 | 124 | // TODO: enable for non-optional fields in proto3 when the rest of the nav ecosystem supports proto3 optional fields 125 | /* const isRequired = R.map( 126 | (p) => p.value, 127 | noNullElementArray(R.map((p) => (p?.type?.isRequired === true ? p.name : undefined), rootStructTypes)) 128 | ) */ 129 | 130 | const isArray = R.map( 131 | (p) => p.value, 132 | noNullElementArray(R.map((p) => (p?.type?.isArray === true ? p.name : undefined), rootStructTypes)) 133 | ) 134 | 135 | const protoTypesGen = R.map(({ addOn, isEnum, isScalar, type }) => { 136 | if (isScalar || R.path('scalar', type)) { 137 | generatedScalars.push(type) 138 | } 139 | if (!isEnum) { 140 | return addOn 141 | } 142 | generatedEnums.push({ type, addOn }) 143 | }, protoTypes) 144 | 145 | return { 146 | code: `syntax = "proto3"; 147 | 148 | package nsa.${camelToSnakeCase(namespace)}${version > 1 ? `.v${version}` : ''}.${camelToSnakeCase(name)}; 149 | 150 | // ${title} 151 | 152 | ${[ 153 | ...new Set( 154 | R.concat( 155 | R.map((t) => (exists(R.path(['type', 'import'], t)) ? `import "${R.path(['type', 'import'], t)}";${EOL}` : ''), rootStruct), 156 | R.map((i) => (exists(i) ? 
`import "${i}";${EOL}` : ''), R.map(R.path(['import']), generatedScalars)) 157 | ) 158 | ), 159 | ].join('')} 160 | ${generatedEnums.length > 0 ? 'import "nsa/enums/enums.proto";' : ''} 161 | 162 | option go_package = "git.nav.com/backend/go-proto/nsa/${exists(entity) ? `${entity}/` : ''}${camelToSnakeCase(name)}"; 163 | 164 | ${protoTypesGen.join(EOL + EOL).slice(0, -1)} 165 | 166 | message ${capitalize(name)} { 167 | ${mapIndexed((prop, idx) => { 168 | const propType = extractTypeFromProp(prop) 169 | const directives = __directives(prop) 170 | const isArrayType = isArray.includes(rootPropNames[idx]) 171 | const isEnum = R.reduce((isAnEnum, e) => isAnEnum || R.equals(e.type, propType), false, generatedEnums) 172 | return `${tab(idx)}${isArrayType ? 'repeated ' : ''}${isEnum ? 'enums.' : ''}${propType} ${camelToSnakeCase( 173 | extractName(prop) 174 | )} = ${order(directives[extractName(prop)])};` 175 | }, rootStruct).join(EOL)} 176 | } 177 | 178 | `, 179 | path: name, 180 | additionalCode: generatedEnums, 181 | imports: [], 182 | } 183 | } 184 | 185 | const postProcessStep = (files, messageDefinitionDir) => { 186 | const enumsFile = additionalCodeFrom('proto', files) 187 | const enumsSet = setOfFilesFrom('type', enumsFile) 188 | const code = ` 189 | syntax = "proto3"; 190 | 191 | package nsa.enums; 192 | 193 | option go_package = "git.nav.com/backend/go-proto/nsa/enums"; 194 | 195 | ${R.map(R.path(['addOn']), enumsSet).join(EOL + EOL)} 196 | 197 | ` 198 | if (exists(enumsSet) && !R.isEmpty(enumsSet)) { 199 | files.push(fileDescriptor(extractDir(enumsFile), 'enums/enums.proto', code)) 200 | } 201 | } 202 | 203 | const schemaType = 'protobuf' 204 | 205 | module.exports = { generate, schemaType, postProcessStep } 206 | -------------------------------------------------------------------------------- /src/generators/python.js: -------------------------------------------------------------------------------- 1 | const R = require('ramda') 2 | const { 3 | metadata, 4 | 
schemaTypes, 5 | processFieldModifiers, 6 | projectionTypes, 7 | clone, 8 | rootType, 9 | extractType, 10 | foldObject, 11 | extractName, 12 | extractValue, 13 | set, 14 | modifierSelector, 15 | firstUpperCase, 16 | startsWithCapitalLetter, 17 | mapIndexed, 18 | valueForKey, 19 | _name, 20 | key, 21 | findElement, 22 | log, 23 | getFields, 24 | isString, 25 | typeConverterFrom, 26 | isFieldTypeAnEnum, 27 | propObject, 28 | match, 29 | isNotEmpty, 30 | } = require('../lib') 31 | 32 | const scalars = require('../../schema/scalars/python-scalars.js') 33 | 34 | const typeConverter = typeConverterFrom(scalars) 35 | 36 | const typeValidationConverter = (t, isArray = false) => 37 | match(t) 38 | .on(() => isArray, `list(from_${t}`) 39 | .otherwise() 40 | 41 | const enumDef = (e, values) => ` 42 | ${e} = Literal[${mapIndexed((v) => `'${extractValue(v)}'`, values).join(', ')}] 43 | 44 | def from_${e}(x: ${e}): 45 | \treturn x 46 | ` 47 | const { EOL } = require('os') 48 | 49 | const formatter = (propName, format) => (format ? R.replace('%s', propName, format) : propName) 50 | const validationRules = (fieldName, fieldType, fieldClass, pattern, isRequired, isArray, isEnum, format, isRequiredOnly) => { 51 | const enumCheck = isEnum ? `\t\tfrom_${extractType3(fieldType)}(${formatter(fieldName, format)})` + EOL : '' 52 | 53 | let requiredCheck = isRequired ? `\t\tis_required(${formatter(fieldName, format)})` + EOL : '' 54 | let arrayCheck = isArray ? `\t\tfrom_list(from_${extractType3(fieldType)}, ${formatter(fieldName, format)})` + EOL : '' 55 | 56 | let scalar = pattern ? `\t\tfrom_${fieldType.scalar}(${formatter(fieldName, format)})` + EOL : '' 57 | 58 | return isRequiredOnly ? 
requiredCheck : requiredCheck + arrayCheck + enumCheck + scalar 59 | } 60 | 61 | const scalarValidation = (s) => { 62 | const scalar = R.path([s], scalars) 63 | const scalarType = extractType(scalar) 64 | const pattern = R.path(['pattern'], scalar) 65 | const inputType = R.path(['inputType'], scalar) 66 | const instantiate = R.path(['instantiate'], scalar) 67 | return `${pattern ? `${s}_pattern = re.compile("${pattern}")` : ''} 68 | def from_${s}(x: ${inputType ? `Union[${inputType}, ${scalarType}]` : 'Any'}) -> ${scalarType}: 69 | \tif x is None: return None 70 | ${(() => { 71 | if (instantiate && inputType) { 72 | return `\tif isinstance(x, ${inputType}): x = ${instantiate}(x)` 73 | } else if (instantiate) { 74 | return `x = ${instantiate}(x)` 75 | } 76 | return '' 77 | })()} 78 | \tassert isinstance(x, ${scalarType})${pattern ? EOL + `\tassert ${s}_pattern.match(x)` : ''} 79 | \treturn x 80 | ` 81 | } 82 | 83 | const generateValidationRules = (name, fields, isRequiredProps, isArrayProps, isEnumProps, template, isRequiredOnly) => 84 | EOL + 85 | mapIndexed((v, idx) => { 86 | const fieldName = key(v) 87 | const fieldType = valueForKey(v) 88 | const isRequired = isRequiredProps.includes(fieldName) 89 | const isArray = isArrayProps.includes(fieldName) 90 | const isEnum = isFieldTypeAnEnum(fieldType, isEnumProps) 91 | const scalar = fieldType?.scalar ? scalars[fieldType.scalar] : undefined 92 | const pattern = scalar?.pattern 93 | return validationRules(fieldName, fieldType, name, pattern, isRequired, isArray, isEnum, template, isRequiredOnly) 94 | }, fields).join('') 95 | 96 | const classVariables = (fields, isArray, isRequiredProps) => 97 | R.map((f) => { 98 | const fieldName = key(f) 99 | const fieldType = typeConverter(extractType2(f[fieldName])) 100 | const list = isArray && isArray.includes(fieldName) 101 | const isRequired = isRequiredProps && isRequiredProps.includes(fieldName) 102 | 103 | let returnVariable = `${key(f)}: ` 104 | 105 | let typeHint = list ? 
`List[${fieldType}]` : fieldType 106 | 107 | return ( 108 | returnVariable + 109 | match(isRequired) 110 | .on(false, () => { 111 | typeHint = `Optional[${typeHint}]` 112 | return `${typeHint} = None` 113 | }) 114 | .otherwise(() => typeHint) 115 | ) 116 | }, fields) 117 | 118 | const structDef = ({ name, fields }, isRequiredProps, isArrayProps, isEnumProps) => { 119 | const fieldNames = R.map((f) => key(f), fields) 120 | const classVars = classVariables(fields, isArrayProps, isRequiredProps) 121 | const tpe = ` 122 | class ${name}: 123 | 124 | \tdef __init__(self, *, ${classVars.join(', ')}) -> None: 125 | ${generateValidationRules(name, fields, isRequiredProps, isArrayProps, isEnumProps)} 126 | \t\t${R.map((n) => `self.${n} = ${n}`, fieldNames).join(EOL + '\t\t')} 127 | 128 | \t@staticmethod 129 | \tdef from_dict(obj: Any) -> '${name}': 130 | \t\tif obj is None: return None 131 | \t\tassert isinstance(obj, dict) 132 | ${generateValidationRules(name, fields, isRequiredProps, isArrayProps, isEnumProps, 'obj.get("%s")', true)} 133 | \t\t${R.map((f) => { 134 | const fieldName = key(f) 135 | const fieldType = valueForKey(f) 136 | const isClass = isString(fieldType) 137 | const enums = R.map((e) => e.pName, isEnumProps) 138 | const isEnum = enums.includes(fieldType) 139 | return match(fieldType) 140 | .on('Any', `${fieldName} = obj.get("${fieldName}")`) 141 | .otherwise((ft) => 142 | isClass && !isEnum 143 | ? `${fieldName} = ${ft}.from_dict(obj.get("${fieldName}"))` 144 | : `${fieldName} = from_${typeValidationConverter( 145 | extractType3(ft), 146 | isArrayProps.includes(fieldName) 147 | )}(obj.get("${fieldName}"))${isArrayProps.includes(fieldName) ? 
')' : ''}` 148 | ) 149 | }, fields).join(EOL + '\t\t')} 150 | \t\treturn ${name}(${fieldNames.map((fieldName) => `${fieldName}=${fieldName}`)}) 151 | 152 | \tdef to_dict(self) -> dict: 153 | \t\thash: dict = {} 154 | ${generateValidationRules(name, fields, isRequiredProps, isArrayProps, isEnumProps, 'self.%s', true)} 155 | \t\t${R.map((f) => { 156 | const fieldName = key(f) 157 | const fieldType = valueForKey(f) 158 | const isClass = isString(fieldType) 159 | const enums = R.map((e) => e.pName, isEnumProps) 160 | const isEnum = enums.includes(fieldType) 161 | const isFieldRequired = isRequiredProps && isRequiredProps.includes(fieldName) 162 | return match(fieldType) 163 | .on('Any', `hash["${fieldName}"] = self.${fieldName}`) 164 | .otherwise((ft) => { 165 | const isHashable = isClass && !isEnum 166 | 167 | let returnFieldDef = isHashable 168 | ? `self.${fieldName}.to_dict()` 169 | : `from_${typeValidationConverter(extractType3(ft), isArrayProps.includes(fieldName))}(self.${fieldName})${ 170 | isArrayProps.includes(fieldName) ? ')' : '' 171 | }` 172 | 173 | if (isHashable && !isFieldRequired) { 174 | returnFieldDef = `None if self.${fieldName} is None else ${returnFieldDef}` 175 | } 176 | 177 | return `hash["${fieldName}"] = ${returnFieldDef}` 178 | }) 179 | }, fields).join(EOL + '\t\t')} 180 | \t\treturn hash 181 | 182 | def from_${name}(x: Any) -> ${name}: 183 | if x is None: return None 184 | assert isinstance(x, ${name}) 185 | return x 186 | ` 187 | return tpe 188 | } 189 | 190 | const extractType2 = (t) => { 191 | return isString(t) ? t : t.isArray ? 'list' : extractType(t) 192 | } 193 | 194 | const extractType3 = (t) => { 195 | return isString(t) ? t : t.isArray ? 'list' : t.scalar ? 
t.scalar : extractType(t) 196 | } 197 | 198 | const generate = (message, schema, stringFormatter = scalars.stringFormatter) => 199 | generatePython({ message, schema: clone(schema), stringFormatter }, metadata(message)) 200 | 201 | const generatePython = ( 202 | { message, description, schema, stringFormatter = R.identity }, 203 | { name, title, namespace, version, node }, 204 | root = rootType(schema, name) 205 | ) => { 206 | try { 207 | const st = processFieldModifiers(schemaTypes(schema)) 208 | const pythonTypes = projectionTypes({ 209 | message, 210 | schemaTypes: st, 211 | root, 212 | scalars, 213 | convertType: typeConverter, 214 | structDef, 215 | enumDef, 216 | stringFormatter, 217 | }) 218 | const generatedTypeCode = R.map(({ addOn }) => addOn, set(pythonTypes, 'type')) 219 | .join(EOL) 220 | .slice(0, -1) 221 | const generatedScalarCode = R.map(scalarValidation, R.filter(startsWithCapitalLetter, R.keys(scalars))).join(EOL) 222 | 223 | const rootName = firstUpperCase(name) 224 | const rootStruct = R.last(pythonTypes) 225 | const rootFieldNames = R.map(extractName, rootStruct) 226 | const rootTypes = foldObject('name', 'type', rootStruct) 227 | const rootDirectives = foldObject('name', 'directives', rootStruct) 228 | const rootTypeFields = getFields(findElement(root, st)) 229 | const rootStructTypes = R.map((p) => findElement(p.name, rootTypeFields), rootStruct) 230 | const [isRequired, isArray, isEnum] = R.map( 231 | (p) => modifierSelector(p, rootStructTypes, rootDirectives), 232 | ['isRequired', 'isArray', 'isEnum'] 233 | ) 234 | const entryArray = R.map((e) => propObject(e, rootTypes[e]), R.keys(rootTypes)) 235 | const classVars = classVariables(entryArray, isArray, isRequired) 236 | 237 | const code = ` 238 | import re 239 | import datetime 240 | from dateutil import parser as dateutil_parser 241 | from typing import Any, Callable, List, Literal, Optional, TypeVar, Union 242 | 243 | # ${namespace} 244 | 245 | # ${title} 246 | ${description ? 
`# ${description}` : '#'} 247 | # generator version ${version} 248 | 249 | T = TypeVar("T") 250 | 251 | def from_str(x: Any) -> str: 252 | if x is None: return None 253 | assert isinstance(x, str) 254 | return x 255 | 256 | 257 | def from_int(x: Any) -> int: 258 | if x is None: return None 259 | assert isinstance(x, int) and not isinstance(x, bool) 260 | return x 261 | 262 | def from_list(f: Callable[[Any], T], x: Any) -> List[T]: 263 | if x is None: return None 264 | assert isinstance(x, list) 265 | return [f(y) for y in x] 266 | 267 | 268 | def from_bool(x: Any) -> bool: 269 | if x is None: return None 270 | assert isinstance(x, bool) 271 | return x 272 | 273 | def from_Any(x: Any) -> Any: 274 | return x 275 | 276 | 277 | def is_required(x: Any) -> Any: 278 | assert x is not None 279 | return x 280 | 281 | ${generatedScalarCode} 282 | 283 | ${generatedTypeCode} 284 | 285 | class ${rootName}${version > 1 ? `_V${version}`: ''}: 286 | 287 | \tdef __init__(self, *, ${classVars.join(', ')}) -> None: 288 | 289 | ${R.filter( 290 | isNotEmpty, 291 | mapIndexed( 292 | (fieldName, idx) => 293 | validationRules( 294 | fieldName, 295 | rootTypes[fieldName], 296 | rootName, 297 | rootTypes[fieldName]?.pattern, 298 | isRequired.includes(fieldName), 299 | isArray.includes(fieldName), 300 | isEnum.includes(fieldName) 301 | ), 302 | rootFieldNames 303 | ) 304 | ).join('')} 305 | \t\t${R.map((n) => `self.${n} = ${n}`, rootFieldNames).join(EOL + '\t\t')} 306 | 307 | 308 | \t@staticmethod 309 | \tdef from_dict(obj: Any) -> '${rootName}': 310 | 311 | \t\tassert isinstance(obj, dict) 312 | ${generateValidationRules(rootName, Object.entries(rootTypes), isRequired, isArray, isEnum, 'obj.get("%s")', true)} 313 | \t\t${R.map((f) => { 314 | const [fieldName, fieldType] = f 315 | const isClass = isString(fieldType) 316 | const enums = R.map((e) => e.pName, isEnum) 317 | const isAnArray = isArray.includes(fieldName) 318 | const isAnEnum = enums.includes(fieldType) 319 | 320 | return isClass 
&& !isAnEnum 321 | ? `${fieldName} = ${fieldType}.from_dict(obj.get("${fieldName}"))` 322 | : `${fieldName} = from_${typeValidationConverter(extractType3(fieldType), isAnArray)}(obj.get("${fieldName}"))${ 323 | isAnArray ? ')' : '' 324 | }` 325 | }, Object.entries(rootTypes)).join(EOL + '\t\t')} 326 | \t\treturn ${rootName}(${rootFieldNames.map((rootFieldName) => `${rootFieldName}=${rootFieldName}`)}) 327 | 328 | \tdef to_dict(self) -> dict: 329 | \t\thash: dict = {} 330 | \t\t${R.map((f) => { 331 | const fieldName = _name(f) 332 | const fieldType = _name(extractType(f)) 333 | const isFieldRequired = isRequired.includes(fieldName) 334 | const isClass = isString(rootTypes[fieldName]) 335 | const isAnEnum = R.pathOr(false, ['type', 'isEnum'], f) 336 | 337 | const isHashable = isClass && !isAnEnum 338 | 339 | let returnFieldDef = isHashable 340 | ? `self.${fieldName}.to_dict()` 341 | : `from_${typeValidationConverter(typeConverter(fieldType))}(self.${fieldName})` 342 | 343 | if (isHashable && !isFieldRequired) { 344 | returnFieldDef = `None if self.${fieldName} is None else ${returnFieldDef}` 345 | } 346 | 347 | return `hash["${fieldName}"] = ${returnFieldDef}` 348 | }, rootStructTypes).join(EOL + '\t\t')} 349 | \t\treturn hash 350 | 351 | def from_${rootName}(x: Any) -> ${rootName}: 352 | assert isinstance(x, ${rootName}) 353 | return x 354 | 355 | ` 356 | return { 357 | code, 358 | path: rootName, 359 | } 360 | } catch (e) { 361 | const err = `[ERROR] Message ${message?.name?.value} specification is incorrect: ${e.message}` 362 | log(err) 363 | return err 364 | } 365 | } 366 | 367 | const schemaType = 'python' 368 | const version = '1.1.0' 369 | 370 | module.exports = { 371 | generate, 372 | schemaType, 373 | version, 374 | } 375 | -------------------------------------------------------------------------------- /src/generators/ruby.js: -------------------------------------------------------------------------------- 1 | const R = require('ramda') 2 | const { 3 | 
metadata, 4 | schemaTypes, 5 | rootType, 6 | extractType, 7 | projectionTypes, 8 | processFieldModifiers, 9 | findElement, 10 | getFields, 11 | extractName, 12 | extractValue, 13 | fieldPattern, 14 | clone, 15 | foldObject, 16 | set, 17 | modifierSelector, 18 | firstUpperCase, 19 | mapIndexed, 20 | valueForKey, 21 | key, 22 | camelToSnakeCase, 23 | log, 24 | exists, 25 | setOfFilesFrom, 26 | additionalCodeFrom, 27 | fileDescriptor, 28 | filesWithExtension, 29 | extractDir, 30 | isString, 31 | tab, 32 | typeConverterFrom, 33 | isFieldTypeAnEnum, 34 | isNotEmpty, 35 | } = require('../lib') 36 | 37 | const scalars = require('../../schema/scalars/ruby-scalars.js') 38 | 39 | const convertType = typeConverterFrom(scalars) 40 | 41 | const objectFormatter = { element: 'ObjectTypeDefinition' } 42 | 43 | const enumDef = (e, values) => ` 44 | ${R.toUpper(e)} = %i[${R.map((v) => extractValue(v), values).join(' ')}].freeze 45 | 46 | def self.${camelToSnakeCase(e)}_value_valid?(v) 47 | ${R.toUpper(e)}.include?(v) 48 | end 49 | 50 | class Invalid${firstUpperCase(e)}Error < StandardError 51 | end 52 | 53 | ` 54 | const { EOL } = require('os') 55 | 56 | const isNotNil = (field) => `${field}.set?` 57 | 58 | const validationRules = (fieldName, fieldType, fieldClass, pattern, isRequired, isArray, isEnum) => { 59 | const raise = `\t\traise(Invalid${fieldClass}Error, ` 60 | const enumCheck = isEnum 61 | ? `${raise}"Enum value #{${fieldName}} is invalid") if ${isNotNil( 62 | fieldName 63 | )} && !NavSchemaArchitecture::Event::Enums::${camelToSnakeCase(fieldType)}_value_valid?(${fieldName})${EOL}` 64 | : '' 65 | let requiredCheck = isRequired ? `${raise}"Required value ${fieldName} is undefined") unless defined? ${fieldName}${EOL}` : '' 66 | let arrayCheck = isArray ? `${raise}"${fieldName} must be an array") unless ${fieldName}.is_a?(Array)${EOL}` : '' 67 | 68 | if (!isArray) { 69 | const nativeType = extractType2(fieldType) 70 | arrayCheck = isEnum 71 | ? 
'' 72 | : nativeType === 'Hash' 73 | ? `${raise}"${fieldName} must be a Hash") unless ${fieldName}.is_a?(Hash)${EOL}` 74 | : nativeType === 'Boolean' 75 | ? `${raise}"${fieldName} must be a boolean") unless [true, false].include?(${fieldName})${EOL}` 76 | : `${raise}"${fieldName} must be a ${nativeType}") unless ${fieldName}.is_a?(${nativeType})${EOL}` 77 | } 78 | 79 | if (!isRequired && exists(arrayCheck) && arrayCheck.length > 0) { 80 | arrayCheck = ` 81 | if ${isNotNil(fieldName)} then 82 | ${arrayCheck} 83 | end 84 | 85 | ` 86 | } 87 | 88 | if (pattern) { 89 | arrayCheck += `${raise}"Invalid ${fieldName} type") if ${isNotNil(fieldName)} && !${fieldName}.match?(/${pattern}/)${EOL}` 90 | } 91 | return requiredCheck + enumCheck + arrayCheck 92 | } 93 | 94 | const structDef = ({ name, fields }, isRequiredProps, isArrayProps, isEnumProps, stringFormatter) => { 95 | const fieldNames = R.map((f) => stringFormatter(key(f, objectFormatter)), fields) 96 | const tpe = ` 97 | def self.${name}(${fieldNames.join(', ')}) 98 | ${ 99 | EOL + 100 | mapIndexed((v, idx) => { 101 | const fieldName = key(v) 102 | const fieldType = valueForKey(v) 103 | const pattern = fieldPattern(fieldType) 104 | const isRequired = isRequiredProps.includes(fieldName) 105 | const isArray = isArrayProps.includes(fieldName) 106 | const isEnum = isFieldTypeAnEnum(fieldType, isEnumProps) 107 | 108 | return validationRules( 109 | stringFormatter(fieldName, objectFormatter), 110 | fieldType, 111 | name, 112 | pattern, 113 | isRequired, 114 | isArray, 115 | isEnum 116 | ) 117 | }, fields).join('') 118 | } 119 | { 120 | ${mapIndexed((v, idx) => { 121 | const fieldName = key(v) 122 | return `${tab(idx)}${tab(idx)}${tab(idx)}"${fieldName}" => ${stringFormatter(fieldName, objectFormatter)}` 123 | }, fields).join(',' + EOL)} 124 | } 125 | 126 | end 127 | 128 | class Invalid${name}Error < StandardError 129 | end 130 | ` 131 | return tpe 132 | } 133 | 134 | const extractType2 = (t) => (isString(t) ? 
'Hash' : extractType(t)) 135 | 136 | const generate = (message, schema, stringFormatter = scalars.stringFormatter) => 137 | generateRuby({ message, schema: clone(schema), stringFormatter }, metadata(message)) 138 | 139 | const generateRuby = ( 140 | { message, description, schema, stringFormatter = R.identity }, 141 | { name, title, namespace, version, node }, 142 | root = rootType(schema, name) 143 | ) => { 144 | try { 145 | const st = processFieldModifiers(schemaTypes(schema)) 146 | const rubyTypes = projectionTypes({ 147 | message, 148 | schemaTypes: st, 149 | root, 150 | scalars, 151 | convertType, 152 | structDef, 153 | enumDef, 154 | stringFormatter, 155 | }) 156 | const generatedEnums = [] 157 | const generatedTypeCode = R.map(({ addOn, isEnum, type }) => { 158 | if (!isEnum) { 159 | return addOn 160 | } 161 | generatedEnums.push({ type, addOn }) 162 | }, set(rubyTypes, 'type')) 163 | .join(EOL) 164 | .slice(0, -1) 165 | 166 | const rootName = firstUpperCase(name) 167 | const rootStruct = R.last(rubyTypes) 168 | const rootDirectives = foldObject('name', 'directives', rootStruct) 169 | const rootTypes = foldObject('name', 'type', rootStruct) 170 | const rootFieldNames = R.map(extractName, rootStruct) 171 | const rootTypeFields = getFields(findElement(root, st)) 172 | const rootStructTypes = R.map((p) => findElement(p.name, rootTypeFields), rootStruct) 173 | const [isRequired, isArray, isEnum] = R.map( 174 | (p) => modifierSelector(p, rootStructTypes, rootDirectives), 175 | ['isRequired', 'isArray', 'isEnum'] 176 | ) 177 | 178 | const rubyCode = ` 179 | module NavSchemaArchitecture::${R.map((n) => firstUpperCase(n), namespace.split('.')).join('::')}${version > 1 ? `::V${version}`: ''} 180 | module ${rootName} 181 | 182 | # ${title} 183 | ${description ? 
`# ${description}` : '#'} 184 | # generator version ${version} 185 | ${generatedTypeCode} 186 | 187 | def self.build(${R.map((n) => stringFormatter(n, objectFormatter), rootFieldNames).join(', ')}) 188 | ${ 189 | EOL + 190 | R.filter( 191 | isNotEmpty, 192 | mapIndexed( 193 | (fieldName, idx) => 194 | validationRules( 195 | stringFormatter(fieldName, objectFormatter), 196 | rootTypes[fieldName], 197 | rootName, 198 | rootTypes[fieldName]?.pattern, 199 | isRequired.includes(fieldName), 200 | isArray.includes(fieldName), 201 | isEnum.includes(fieldName) 202 | ), 203 | rootFieldNames 204 | ) 205 | ).join('') 206 | } 207 | return { 208 | ${mapIndexed( 209 | (fieldName, idx) => 210 | `${tab(idx)}${tab(idx)}${tab(idx)}"${fieldName}" => ${stringFormatter(fieldName, objectFormatter)},`, 211 | rootFieldNames 212 | ).join(EOL)} 213 | } 214 | end 215 | 216 | class Invalid${rootName}Error < StandardError 217 | end 218 | end 219 | end 220 | ` 221 | return { code: rubyCode, path: rootName, additionalCode: generatedEnums, imports: [] } 222 | } catch (e) { 223 | log(e.stack) 224 | const err = `[ERROR] Message ${message?.name?.value} specification is incorrect: ${e.message}` 225 | log(err) 226 | return err 227 | } 228 | } 229 | 230 | const processFileName = (fname, dir, path) => { 231 | let pfn = R.replace(dir, '', fname) 232 | pfn = R.head(pfn) === '/' ? 
pfn.substring(1, pfn.length) : pfn 233 | let elements = pfn.split('/') 234 | const fileName = R.head(elements[elements.length - 1].split('.')) 235 | elements.fill(path, -1) 236 | elements.push(fileName) 237 | elements = R.map((e) => e.split('-').join('_'), elements) 238 | return elements.join('/') 239 | } 240 | 241 | const postProcessStep = (files, messageDefinitionDir) => { 242 | const rubyFiles = filesWithExtension('rb', files) 243 | const enumsFile = additionalCodeFrom('rb', files) 244 | const enumsSet = setOfFilesFrom('type', enumsFile) 245 | 246 | if (exists(rubyFiles) && !R.isEmpty(rubyFiles)) { 247 | const code = ` 248 | # FIXME: This resolves classes belonging to non-existent modules. 249 | # This is a bad thing we're doing. 250 | module NavSchemaArchitecture 251 | module Event; end 252 | end 253 | 254 | # A shorthand alias for convenience 255 | NSA = NavSchemaArchitecture 256 | 257 | module NavSchemaArchitecture::Event::Enums 258 | ${exists(enumsSet) && !R.isEmpty(enumsSet) ? R.map((e) => e.addOn, enumsSet).join(EOL) : ''} 259 | end 260 | 261 | require "active_support/core_ext/object/blank" 262 | require "date" 263 | 264 | require_relative "utils" 265 | require_relative "enums" 266 | ${R.map( 267 | (f) => `require_relative "${processFileName(f.outputFileName, messageDefinitionDir, camelToSnakeCase(f.path))}"`, 268 | rubyFiles 269 | ).join(EOL)} 270 | 271 | ` 272 | files.push(fileDescriptor(extractDir(rubyFiles), 'nav-schema-architecture.rb', code)) 273 | } 274 | } 275 | 276 | const schemaType = 'ruby' 277 | const version = '1.1.0' 278 | 279 | module.exports = { 280 | generate, 281 | schemaType, 282 | version, 283 | postProcessStep, 284 | processFileName, 285 | } 286 | -------------------------------------------------------------------------------- /src/index.js: -------------------------------------------------------------------------------- 1 | const R = require('ramda') 2 | const { EOL } = require('os') 3 | const { 4 | name, 5 | parseSchema, 6 | 
parseMessageDefinition, 7 | schemaMessageDefinitions, 8 | writeFileSafe, 9 | filesFromDir, 10 | log, 11 | noNullElementArray, 12 | generate, 13 | env, 14 | prepareFile, 15 | cp, 16 | match, 17 | } = require('./lib') 18 | const generators = require('./generators') 19 | 20 | function parse( 21 | messageDefinitionDir = env('MESSAGE_DEFINITION_DIR', 'schema/message-schema-definitions'), 22 | schema = env('SCHEMA', '/schema/integration-schema.gql') 23 | ) { 24 | schema = parseSchema(process.cwd() + (process.env.SCHEMA || '/schema/integration-schema.gql')) 25 | 26 | const messageFiles = filesFromDir(messageDefinitionDir, ['graphql', 'gql']) 27 | const schemaMessageDefs = schemaMessageDefinitions(schema) 28 | log(EOL + EOL + `[INFO] Processing message Definitions:`) 29 | 30 | const messages = noNullElementArray( 31 | R.map((messageFileName) => { 32 | const message = parseMessageDefinition(`${process.cwd()}/${messageFileName}`) 33 | return match(name(message)) 34 | .on( 35 | (n) => schemaMessageDefs.includes(n), 36 | (n) => { 37 | log(` - ${n} (${messageFileName})`) 38 | message.fileName = messageFileName 39 | return message 40 | } 41 | ) 42 | .otherwise((n) => { 43 | log(EOL + `[ERROR] ${n} is not defined in the schema` + EOL) 44 | return null 45 | }) 46 | }, messageFiles) 47 | ) 48 | 49 | const code = generate(messages, schema, generators, messageDefinitionDir) 50 | 51 | R.forEach( 52 | (file) => 53 | writeFileSafe(...prepareFile(file.outputFileName, file.toDir, file.path, file.extension, file.code, messageDefinitionDir)), 54 | code.filter(Boolean) 55 | ) 56 | 57 | R.forEach(({ staticDir, toDir }) => { 58 | cp(staticDir, toDir) 59 | }, generators) 60 | } 61 | 62 | // Make runnable through npm 63 | if (!module.parent) parse() 64 | 65 | // Or runnable in other scripts 66 | module.exports = parse 67 | -------------------------------------------------------------------------------- /src/lib/generate.js: 
-------------------------------------------------------------------------------- 1 | const R = require('ramda') 2 | const { EOL } = require('os') 3 | const { 4 | name, 5 | extractName, 6 | value, 7 | kind, 8 | extractType, 9 | typeIsValid, 10 | fieldTypeFromTypeDef, 11 | fields, 12 | modifiers, 13 | selections, 14 | enumValues, 15 | findElement, 16 | directives, 17 | additionalCode, 18 | isAScalar, 19 | isAnEnum, 20 | isAUnion, 21 | isObjectTypeDefinition, 22 | directivesFrom, 23 | pDirectivesFrom, 24 | } = require('./parse.js') 25 | const { set, propObject, log, exists, splitByLast, filesWithExtension, match } = require('./util.js') 26 | 27 | const inProcessedTypes = 'processedTypes' 28 | const inProcessedEnums = 'processedEnums' 29 | 30 | const fromContext = (context, pType, ctx = inProcessedTypes) => { 31 | if (!exists(context[ctx])) { 32 | context[ctx] = {} 33 | } 34 | return context[ctx][pType] 35 | } 36 | 37 | const addToContext = (context, pType, value, ctx = inProcessedTypes) => { 38 | if (Array.isArray(ctx)) { 39 | ctx.forEach((c) => addToContext(context, pType, value, c)) 40 | } else { 41 | if (!exists(context[ctx])) { 42 | context[ctx] = {} 43 | } 44 | context[ctx][pType] = value 45 | } 46 | return value 47 | } 48 | 49 | const additionalCodeFrom = (extension, files) => 50 | R.filter( 51 | exists, 52 | R.map((f) => (exists(additionalCode(f)) && !R.isEmpty(additionalCode(f)) ? 
f : null), filesWithExtension(extension, files)) 53 | ) 54 | 55 | const setOfFilesFrom = (type = 'type', files, selector = additionalCode) => set(R.flatten(R.map(selector, files)), type) 56 | 57 | const fileDescriptor = (toDir, fileName, code, staticDir) => { 58 | const [qualifiedFileName, extension] = fileName.split('.') 59 | const [path, outputFileName] = splitByLast('/', qualifiedFileName) 60 | return { 61 | outputFileName, 62 | toDir, 63 | staticDir, 64 | path, 65 | extension, 66 | code, 67 | } 68 | } 69 | 70 | const extractDir = (files) => `${R.path(['toDir'], R.head(files))}` 71 | 72 | const updateModifiers = ( 73 | pType, 74 | pName, 75 | pTypeName, 76 | requiredDirective, 77 | isRequiredProps, 78 | isArrayProps, 79 | isEnumProps, 80 | isScalarProps 81 | ) => { 82 | if ((pType?.isRequired && R.isNil(requiredDirective)) || requiredDirective) { 83 | isRequiredProps.push(pName) 84 | } 85 | if (pType?.isArray) { 86 | isArrayProps.push(pName) 87 | } 88 | if (pType?.isEnum) { 89 | isEnumProps.push({ pName: pTypeName }) 90 | } 91 | if (pType?.isScalar) { 92 | isScalarProps.push({ pName: pTypeName }) 93 | } 94 | } 95 | 96 | const computeTypeModifiers = (typeFields, scalars) => 97 | R.reduce( 98 | (acc, t) => { 99 | acc[t.name] = { 100 | isRequired: t.field.type.isRequired, 101 | isArray: t.field.type.isArray, 102 | isEnum: t.field.type.isEnum, 103 | isScalar: exists(scalars[name(t.field.type)]) ? scalars[name(t.field.type)] : false, 104 | } 105 | return acc 106 | }, 107 | {}, 108 | typeFields 109 | ) 110 | 111 | const assign = (typeModifiers, typeDef) => { 112 | R.forEach((d) => { 113 | const isRequiredDirective = R.path(['directives', d.name, 'required'], d) 114 | if (typeof d.type === 'string') { 115 | d.isRequired = exists(isRequiredDirective) ? 
isRequiredDirective : typeModifiers[d.name].isRequired 116 | d.isArray = typeModifiers[d.name].isArray 117 | d.isEnum = typeModifiers[d.name].isEnum 118 | d.isScalar = typeModifiers[d.name].isScalar 119 | } else { 120 | d.type.isRequired = exists(isRequiredDirective) ? isRequiredDirective : typeModifiers[d.name].isRequired 121 | d.type.isArray = typeModifiers[d.name].isArray 122 | d.type.isEnum = typeModifiers[d.name].isEnum 123 | d.type.isScalar = typeModifiers[d.name].isScalar 124 | } 125 | }, typeDef) 126 | } 127 | 128 | const generatedCode = (message, additionalFile, toDir, staticDir, path, extension, code, additionalCode, formatter) => { 129 | const pkg = [ 130 | { 131 | outputFileName: message.fileName, 132 | toDir, 133 | staticDir, 134 | path, 135 | extension, 136 | code: formatter(code), 137 | additionalCode, 138 | }, 139 | ] 140 | if (exists(additionalFile)) { 141 | pkg.push({ 142 | outputFileName: message.fileName.split('/').fill(additionalFile.name, -1).join('/'), 143 | toDir, 144 | staticDir, 145 | path, 146 | extension: extension || additionalFile.extension, 147 | code: additionalFile.code, 148 | additionalCode, 149 | }) 150 | } 151 | return pkg 152 | } 153 | 154 | /** 155 | * typeFor recursively calls the code generator functions (structDef, enumDef, convertType, scalar types) 156 | * @param prop - the property specification to generate from 157 | * 158 | * @param schemaTypes - The schema type definitions 159 | * @param refTypes - The list of types referenced in the message definition 160 | * @param messageFields - The message definition (to extract directives for instance) 161 | */ 162 | const typeFor = ( 163 | prop, 164 | schemaTypes, 165 | refTypes, 166 | messageFields, 167 | context, 168 | scalars, 169 | convertType, 170 | structDef, 171 | enumDef, 172 | stringFormatter = R.identity 173 | ) => { 174 | const propType = extractType(prop) 175 | const propName = extractName(prop) 176 | const { isArray, isEnum, isRequired, isScalar } = modifiers(prop) 
177 | 178 | if (R.not(typeIsValid(propType) && exists(propName))) { 179 | throw `invalid type ${propType} and/or ${propName}\n ${JSON.stringify(prop)}` 180 | } 181 | return match(findElement(propType, schemaTypes)) 182 | .on(isAScalar, (t, scalarName = name(t)) => { 183 | const scalarType = scalars[scalarName] 184 | if (R.isNil(scalarType)) { 185 | throw `Scalar type not defined: ${scalarName}` 186 | } 187 | scalarType.scalar = stringFormatter(scalarName, { 188 | element: 'ScalarTypeDefinition', 189 | }) 190 | refTypes.push({ 191 | type: scalarType, 192 | isScalar: true, 193 | }) 194 | return scalarType 195 | }) 196 | .on(isAnEnum, (t) => 197 | match(fromContext(context, propType, inProcessedEnums)) 198 | .on(exists, { type: convertType(extractType(prop)) }) 199 | .otherwise((ctx) => { 200 | refTypes.push({ 201 | type: convertType(propType), 202 | addOn: addToContext(context, propType, enumDef(propType, enumValues(t), stringFormatter), [ 203 | inProcessedTypes, 204 | inProcessedEnums, 205 | ]), 206 | isEnum: true, 207 | }) 208 | return propType 209 | }) 210 | ) 211 | .on(isAUnion, () => { 212 | const scalarType = scalars['Any'] 213 | if (R.isNil(scalarType)) { 214 | throw `Scalar type not defined: Any` 215 | } 216 | scalarType.scalar = stringFormatter('Any', { 217 | element: 'ScalarTypeDefinition', 218 | }) 219 | refTypes.push({ 220 | type: scalarType, 221 | isScalar: true, 222 | }) 223 | return scalarType 224 | }) 225 | .on(isObjectTypeDefinition, (t) => { 226 | const objectField = findElement(propName, messageFields) 227 | const childMessageFields = selections(objectField) 228 | const isRequiredProps = [] 229 | const isArrayProps = [] 230 | const isEnumProps = [] 231 | const isScalarProps = [] 232 | const fields = R.map((p) => { 233 | const pName = name(p) 234 | const pType = fieldTypeFromTypeDef(pName, t) 235 | const pTypeName = name(pType) 236 | const pDirectives = pDirectivesFrom(R.head(directives(p)).arguments) 237 | const requiredDirective = 
R.path(['required'], pDirectives) 238 | updateModifiers(pType, pName, pTypeName, requiredDirective, isRequiredProps, isArrayProps, isEnumProps, isScalarProps) 239 | return propObject( 240 | pName, 241 | typeFor( 242 | { 243 | name: pName, 244 | type: convertType(pTypeName), 245 | directives: pDirectives, 246 | }, 247 | schemaTypes, 248 | refTypes, 249 | childMessageFields, 250 | context, 251 | scalars, 252 | convertType, 253 | structDef, 254 | enumDef, 255 | stringFormatter 256 | ) 257 | ) 258 | }, childMessageFields) 259 | const genStructDef = structDef( 260 | { 261 | name: propType, 262 | fields, 263 | directives: directivesFrom(childMessageFields), 264 | }, 265 | isRequiredProps, 266 | isArrayProps, 267 | isEnumProps, 268 | stringFormatter 269 | ) 270 | refTypes.push({ 271 | type: propType, 272 | addOn: addToContext(context, propType, genStructDef), 273 | }) 274 | return propType 275 | }) 276 | .otherwise({ 277 | type: convertType(extractType(prop)), 278 | isRequired, 279 | isArray, 280 | isEnum, 281 | isScalar, 282 | }) 283 | } 284 | 285 | /** 286 | * Calculates the intersection between the message definition and the schema types 287 | * The projection types are unique to that message schema and must not be reused 288 | * Payload schemas must be self-standing, because a type can have any number of 289 | * projections and projections can vary arbitrarily 290 | * @param typeFor - the function that recursively computes the types to generate code from 291 | * 292 | * @param message - The parsed graphql message definition 293 | * @param schemaTypes - The parsed graphql schema types 294 | * @param root - The name of the root element of the message schema/class 295 | */ 296 | const projection = 297 | (typeFor) => 298 | ({ message, schemaTypes, root, scalars, convertType, structDef, enumDef, stringFormatter }) => { 299 | const context = { inProcessedTypes: {}, inProcessedEnums: {} } 300 | const referencedTypes = [] 301 | const rootSchemaType = R.last(R.filter((t) => 
R.equals(name(t), root) && kind(t) !== 'ObjectTypeExtension')(schemaTypes)) 302 | const messageProps = R.map(name, selections(message)) 303 | const typeFields = R.filter((f) => R.includes(extractName(f), messageProps), fields(rootSchemaType)) 304 | const messageSelections = selections(message) 305 | 306 | const typeDef = R.map( 307 | (prop) => ({ 308 | name: extractName(prop), 309 | type: typeFor( 310 | prop, 311 | schemaTypes, 312 | referencedTypes, 313 | messageSelections, 314 | context, 315 | scalars, 316 | convertType, 317 | structDef, 318 | enumDef, 319 | stringFormatter 320 | ), 321 | directives: directivesFrom(messageSelections, prop), 322 | }), 323 | typeFields 324 | ) 325 | const typeModifiers = computeTypeModifiers(typeFields, scalars) 326 | assign(typeModifiers, typeDef) 327 | referencedTypes.push(typeDef) 328 | return referencedTypes 329 | } 330 | 331 | const generate = (messages, schema, generators, messageDir) => { 332 | log(EOL) 333 | const files = R.flatten( 334 | R.map((message) => { 335 | log(EOL + `[INFO] processing ${name(message)}`) 336 | return R.map(({ generate, extension, toDir, staticDir, outputFormatter = R.identity }) => { 337 | try { 338 | const { code, path, additionalFile, additionalCode } = generate(message, schema) 339 | log(` - ${extension}`) 340 | return generatedCode(message, additionalFile, toDir, staticDir, path, extension, code, additionalCode, outputFormatter) 341 | } catch (err) { 342 | log(`[ERROR] ${err.message}${EOL}`) 343 | } 344 | }, generators) 345 | }, messages) 346 | ) 347 | R.forEach(({ postProcessStep = R.identity }) => postProcessStep(files, messageDir), generators) 348 | 349 | return files 350 | } 351 | 352 | const projectionTypes = projection(typeFor) 353 | 354 | module.exports = { 355 | projection, 356 | typeFor, 357 | projectionTypes, 358 | generate, 359 | additionalCodeFrom, 360 | setOfFilesFrom, 361 | fileDescriptor, 362 | extractDir, 363 | addToContext, 364 | fromContext, 365 | updateModifiers, 366 | 
generatedCode, 367 | computeTypeModifiers, 368 | assign, 369 | } 370 | -------------------------------------------------------------------------------- /src/lib/index.js: -------------------------------------------------------------------------------- 1 | const utils = require('./util.js') 2 | const parse = require('./parse.js') 3 | const generate = require('./generate.js') 4 | module.exports = { 5 | ...utils, 6 | ...parse, 7 | ...generate, 8 | } 9 | -------------------------------------------------------------------------------- /src/lib/parse.js: -------------------------------------------------------------------------------- 1 | const R = require('ramda') 2 | const { parse } = require('graphql') 3 | const { gql } = require('graphql-tag') 4 | 5 | const { returnArray, fromFile, noNullElementArray, hasOneElement, exists, cdw, match } = require('./util') 6 | 7 | // Helpers 8 | const kind = R.path(['kind']) 9 | const isA = 10 | (value, comparator = name) => 11 | (e) => 12 | R.equals(comparator(e), value) 13 | const isObjectTypeDefinition = isA('ObjectTypeDefinition', kind) 14 | const isObjectTypeExtension = isA('ObjectTypeExtension', kind) 15 | const isNotObjectTypeExtension = R.compose(R.not, isA('ObjectTypeExtension', kind)) 16 | 17 | const isAnArray = isA('ListType', kind) 18 | const isARequiredProp = isA('NonNullType', kind) 19 | const isAnEnum = isA('EnumTypeDefinition', kind) 20 | const isAUnion = isA('UnionTypeDefinition', kind) 21 | const isAScalar = isA('ScalarTypeDefinition', kind) 22 | 23 | const isName = (prop) => isA(prop, R.path(['name'])) 24 | const _name = R.path(['name', 'value']) 25 | const name = (t) => _name(t) || _name(t?.type) || _name(t?.type?.type) || _name(t?.type?.type?.type) 26 | 27 | const hasName = (prop) => R.compose(R.last, R.filter(isA(prop))) 28 | const sameName = (a, b) => R.equals(name(a), name(b)) 29 | 30 | const findElement = (propType, schemaTypes) => 31 | R.last(R.filter((t) => isA(propType)(t) && 
isNotObjectTypeExtension(t))(schemaTypes)) 32 | 33 | // Query 34 | const query = (document) => R.find(R.propEq('operation', 'query'))(definitions(document)) 35 | const definitions = returnArray('definitions') 36 | const selections = R.pathOr([], ['selectionSet', 'selections']) 37 | const message = R.compose(R.head, selections, query) 38 | 39 | const metadata = (message) => ({ 40 | name: name(message), 41 | title: directiveValue(message, 'title'), 42 | description: directiveValue(message, 'description'), 43 | namespace: directiveValue(message, 'namespace'), 44 | version: directiveValue(message, 'version'), 45 | node: message, 46 | }) 47 | 48 | /** 49 | * 50 | * @param document - the parsed graphql query (json document - see parsed-query-with-sender.json file) 51 | * @returns the selections of the graphql query 52 | */ 53 | const getMessage = (document) => ({ 54 | message: message(document), 55 | description: document.description, 56 | }) 57 | 58 | const queryForFile = (f) => 59 | gql` 60 | ${fromFile(f)} 61 | ` 62 | 63 | //Field 64 | const typeOf = (t) => _name(t?.type) || _name(t?.type?.type?.type) 65 | const value = R.path(['value', 'value']) 66 | const valueType = (a) => kind(R.path(['value'], a)) 67 | const args = returnArray('arguments') 68 | const directives = returnArray('directives') 69 | const values = returnArray('values') 70 | const argument = (argName) => R.find((a) => isA(argName)(a)) 71 | const directiveValue = (message, prop, valueParameter = 'value') => 72 | value(argument(valueParameter)(args(argument(prop)(directives(message))))) 73 | const enumValues = (enumType) => R.map((v) => ({ value: name(v), directives: directives(v) }), values(enumType)) 74 | const order = R.path(['order']) 75 | 76 | const getFields = R.pathOr([], ['fields']) 77 | const __getFields = R.path(['fields']) 78 | 79 | const fields = (schemaType) => 80 | R.map( 81 | (f) => ({ 82 | name: name(f), 83 | // no support for arguments? 
84 | type: getFieldType(f), 85 | field: f, 86 | }), 87 | getFields(schemaType) 88 | ) 89 | 90 | const typeOfField = (f) => R.path(['field', 'type'], f) 91 | const field = (p) => R.path(['field'], p) 92 | const fieldPattern = (prop) => R.path(['pattern'], prop) 93 | 94 | const fieldTypeFromTypeDef = (prop, typeDef = []) => typeOfField(R.find(isName(prop), fields(typeDef))) 95 | const getFieldType = (f) => { 96 | switch (kind(__getType(f))) { 97 | case 'NamedType': 98 | return name(__getType(f)) 99 | } 100 | return undefined 101 | } 102 | 103 | const directivesFrom = (childMessageFields, prop) => 104 | R.reduce( 105 | (c, p) => { 106 | if (R.isNil(prop) || extractName(prop) === name(p)) 107 | c[name(p)] = R.reduce( 108 | (d, a) => { 109 | d[name(a)] = value(a) 110 | return d 111 | }, 112 | {}, 113 | directives(p)[0].arguments 114 | ) 115 | return c 116 | }, 117 | {}, 118 | childMessageFields 119 | ) 120 | 121 | const pDirectivesFrom = (pD) => 122 | R.reduce( 123 | (d, a) => { 124 | d[name(a)] = value(a) 125 | return d 126 | }, 127 | {}, 128 | pD 129 | ) 130 | 131 | // Schema 132 | const parseSchema = (filename) => parse(fromFile(filename)) 133 | const schemaMessageDefinitions = (schema) => R.map(name, getFields(R.last(R.filter(isQuery, schema.definitions)))) 134 | const parseMessageDefinition = (f) => { 135 | const messageFormat = f.indexOf('graphql') > 0 ? queryForFile(f) : require(cdw(f)) 136 | const { message } = getMessage(messageFormat) 137 | return message 138 | } 139 | 140 | const isQuery = isA('Query') 141 | const isMutation = isA('Mutation') 142 | const isAType = (e) => R.not(isQuery(e) || isMutation(e)) 143 | const getType = R.pathOr({}, ['type']) 144 | const __getType = R.path(['type']) 145 | const ___getType = (arr) => (Array.isArray(arr) && arr.length == 2 ? 
extractType(arr[1]) : undefined) 146 | 147 | const extractType = (prop) => __getType(prop) || name(__getType(field(prop))) || 'undefined_type_error' 148 | 149 | const extractTypeFromProp = (prop) => R.pathOr(R.pathOr('undefined_type_error', ['type'], prop), ['type', 'type'], prop) 150 | const extractName = R.pathOr('undefined_name_error', ['name']) 151 | const __extractName = R.path(['name']) 152 | const extractValue = R.path(['value']) 153 | const typeIsValid = (t) => !R.equals('undefined_type_error', t) 154 | 155 | const modifiers = (type) => { 156 | let isArray = false 157 | let isRequired = false 158 | if (isAnArray(type)) { 159 | type = __getType(type) 160 | isArray = true 161 | if (isARequiredProp(type)) { 162 | type = __getType(type) 163 | isRequired = true 164 | } 165 | } else { 166 | if (isARequiredProp(type)) { 167 | type = __getType(type) 168 | isRequired = true 169 | if (isAnArray(type)) { 170 | type = __getType(type) 171 | isArray = true 172 | } 173 | } 174 | } 175 | 176 | let isEnum = isAnEnum(type) 177 | 178 | return { isArray, isRequired, isEnum, type } 179 | } 180 | 181 | const processFieldModifiers = (schemaTypes) => { 182 | R.map((t) => { 183 | R.map((f) => { 184 | const { isArray, isRequired, isEnum, type } = modifiers(__getType(f)) 185 | type.isArray = isArray 186 | type.isRequired = isRequired 187 | type.isEnum = hasOneElement(R.filter((t) => isAnEnum(t) && sameName(__getType(f), t), schemaTypes)) 188 | type.directives = directives(f) 189 | if (exists(type.loc)) delete type.loc 190 | f.type = type 191 | }, getFields(t)) 192 | return t 193 | }, schemaTypes) 194 | const extensions = R.filter(isObjectTypeExtension, schemaTypes) 195 | R.map((e) => { 196 | const parentType = R.last(R.filter((t) => isObjectTypeDefinition(t) && sameName(e, t), schemaTypes)) 197 | const mergedFields = R.concat(getFields(parentType), getFields(e)) 198 | parentType.fields = mergedFields 199 | }, extensions) 200 | return R.filter(isNotObjectTypeExtension, schemaTypes) 
201 | } 202 | 203 | const modifierSelector = (modifier, typeSelection, directives) => 204 | match(modifier) 205 | .on( 206 | 'isRequired', 207 | R.map( 208 | (p) => p.value, 209 | noNullElementArray( 210 | R.map((p) => { 211 | const directive = directives[name(p)][name(p)].required 212 | return (exists(directive) ? directive : getType(p).isRequired) ? p.name : undefined 213 | }, typeSelection) 214 | ) 215 | ) 216 | ) 217 | .otherwise( 218 | R.map((t) => t.value, noNullElementArray(R.map((p) => (getType(p)[modifier] ? p.name : undefined), typeSelection))) 219 | ) 220 | 221 | const messageEntryPoints = (document) => 222 | R.map( 223 | (f) => ({ 224 | name: name(f), 225 | type: typeOf(f), 226 | arguments: args(f), 227 | directives: directives(f), 228 | kind: kind(f), 229 | node: f, 230 | }), 231 | getFields(R.find((e) => isQuery(e) && isObjectTypeDefinition(e))(definitions(document))) 232 | ) 233 | 234 | const schemaTypes = (document) => R.filter(isAType)(definitions(document)) 235 | const schemaTypeDefinitionForTypeName = (typeName, schemaTypeDefs) => R.find((t) => isA(typeName)(t), schemaTypeDefs) 236 | const rootType = (schema, root) => 237 | R.compose( 238 | getType, 239 | R.last, 240 | R.filter((n) => R.equals(n.name, root)), 241 | messageEntryPoints 242 | )(schema) 243 | 244 | const additionalCode = R.path(['additionalCode']) 245 | 246 | const typeConverterFrom = (scalars) => (t) => R.pathOr(t, ['converter', t], scalars) 247 | 248 | const pNames = (enumProps) => R.map((e) => R.path(['pName'], e), enumProps) 249 | 250 | const isFieldTypeAnEnum = (fieldType, isEnumProps) => R.includes(fieldType, pNames(isEnumProps)) 251 | 252 | module.exports = { 253 | parseSchema, 254 | schemaMessageDefinitions, 255 | parseMessageDefinition, 256 | kind, 257 | isARequiredProp, 258 | query, 259 | message, 260 | getMessage, 261 | metadata, 262 | selections, 263 | args, 264 | directives, 265 | directiveValue, 266 | name, 267 | _name, 268 | pNames, 269 | hasName, 270 | sameName, 271 | 
typeOf, 272 | argument, 273 | value, 274 | valueType, 275 | order, 276 | messageEntryPoints, 277 | schemaTypes, 278 | fieldTypeFromTypeDef, 279 | __getType, 280 | ___getType, 281 | typeConverterFrom, 282 | modifiers, 283 | processFieldModifiers, 284 | modifierSelector, 285 | schemaTypeDefinitionForTypeName, 286 | extractType, 287 | extractTypeFromProp, 288 | extractName, 289 | __extractName, 290 | extractValue, 291 | rootType, 292 | fields, 293 | getFields, 294 | __getFields, 295 | enumValues, 296 | getFieldType, 297 | typeOfField, 298 | typeIsValid, 299 | findElement, 300 | fieldPattern, 301 | definitions, 302 | isA, 303 | additionalCode, 304 | isFieldTypeAnEnum, 305 | isAScalar, 306 | isAnEnum, 307 | isAUnion, 308 | isObjectTypeDefinition, 309 | directivesFrom, 310 | pDirectivesFrom, 311 | } 312 | -------------------------------------------------------------------------------- /src/lib/util.js: -------------------------------------------------------------------------------- 1 | const R = require('ramda') 2 | const { readFileSync } = require('fs') 3 | const console = require('console') 4 | 5 | const fs = require('fs') 6 | const path = require('path') 7 | const { EOL } = require('os') 8 | 9 | const env = (e, def) => R.pathOr(def, ['env', e], process) 10 | 11 | const log = (ref, off = env('LOG_OFF', false)) => { 12 | !off && console.log(typeof ref === 'object' ? 
JSON.stringify(ref) : ref) 13 | return ref 14 | } 15 | 16 | const capitalize = (s) => 17 | match(typeof s) 18 | .on('string', (t) => s.charAt(0).toUpperCase() + s.slice(1)) 19 | .otherwise('') 20 | 21 | const firstLowerCase = (s) => 22 | match(typeof s) 23 | .on('string', (t) => s.charAt(0).toLowerCase() + s.slice(1)) 24 | .otherwise('') 25 | 26 | const firstUpperCase = (s) => 27 | match(typeof s) 28 | .on('string', (t) => s.charAt(0).toUpperCase() + s.slice(1)) 29 | .otherwise('') 30 | 31 | const startsWithCapitalLetter = (word) => exists(word) && exists(word[0]) && word[0] === word[0].toUpperCase() 32 | 33 | const startsWith = (str, c) => exists(str) && str.length > 0 && str[0] === c 34 | 35 | const crlf = (idx, n) => (idx === (Array.isArray(n) ? n.length - 1 : n) ? '' : EOL) 36 | const tab = (idx) => (idx < 1 ? '' : '\t') 37 | 38 | const substringFrom = (start, str) => (isString(str) ? str.substring(start, str.length) : undefined) 39 | 40 | const __ = (s) => (exists(s) && isString(s) ? 
s.split('-').join('_') : '') 41 | 42 | const handleCapitalizedWords = (str) => { 43 | let out = '' 44 | if (str.length <= 3) return str 45 | let i = 0 46 | let padded = false 47 | if (str[str.length - 2] === '_') { 48 | str = `${str}_` 49 | padded = true 50 | } 51 | while (i < str.length - 3) { 52 | if (str[i] === '_' && str[i + 2] === '_') { 53 | const start = i 54 | let end = start + 2 55 | while (str[end] === '_' && str[end + 2] === '_') { 56 | end = end + 2 57 | } 58 | if (end > start + 2) { 59 | const fragment = str.substring(start, end + 1) 60 | out = out + '_' + fragment.split('_').join('') + '_' 61 | i = end 62 | } else { 63 | out = out + str[i] 64 | } 65 | } else { 66 | out = out + str[i] 67 | } 68 | i++ 69 | } 70 | out = out + str.substring(i, str.length + 1) 71 | return match(padded) 72 | .on(true, out.substring(0, out.length - 1)) 73 | .otherwise(out) 74 | } 75 | 76 | const processCameToSnake = (str) => { 77 | let i = 0 78 | let out = '' 79 | let lastUpperCaseStart = 0 80 | while (i < str.length) { 81 | if (`${str[i + 1]}`.match(/[A-Z]/) && !`${str[i]}`.match(/[A-Z]/g) && str[i] !== '_') { 82 | out = out + str[i] + '_' 83 | lastUpperCaseStart = i + 1 84 | } else { 85 | if (`${str[i]}`.match(/[A-Z]/) && `${str[i + 1]}`.match(/[a-z]/g) && i < str.length - 1 && i - lastUpperCaseStart > 1) { 86 | out = out + '_' 87 | } 88 | out = out + str[i] 89 | } 90 | i++ 91 | } 92 | return out 93 | } 94 | 95 | const camelToSnakeCase = (str) => (str.length < 3 ? 
str : handleCapitalizedWords(processCameToSnake(str).toLowerCase())) 96 | 97 | const isString = (str) => str && typeof str === 'string' 98 | 99 | const isNotEmpty = (str) => R.not(R.isEmpty(str)) 100 | 101 | const foldObject = (prop1, prop2, obj) => 102 | R.reduce( 103 | (acc, t) => { 104 | acc[t[prop1]] = t[prop2] 105 | return acc 106 | }, 107 | {}, 108 | obj 109 | ) 110 | 111 | const clone = (obj) => JSON.parse(JSON.stringify(obj)) 112 | 113 | const key = (r) => R.last(R.keys(r)) 114 | 115 | const valueForKey = (v) => v[key(v)] 116 | 117 | const returnArray = (prop) => R.pathOr([], [prop]) 118 | 119 | const mapIndexed = R.addIndex(R.map) 120 | 121 | const set = (arr, prop) => 122 | match(prop) 123 | .on(exists, (prop) => { 124 | const uniqueSet = [] 125 | const pivot = {} 126 | arr.forEach((i) => { 127 | const typeName = typeof prop === 'string' ? i[prop] : prop(i) 128 | if (R.isNil(pivot[typeName])) uniqueSet.push(i) 129 | pivot[typeName] = i 130 | }) 131 | return uniqueSet 132 | }) 133 | .otherwise([...new Set(arr)]) 134 | 135 | const exists = (t) => !R.isNil(t) 136 | 137 | const propObject = (prop, obj) => ({ [prop]: obj }) 138 | 139 | const noNullElementArray = (a) => R.filter(exists, a) 140 | const filter = (f = (exists) => R.filter(f)) 141 | const hasOneElement = (arr = []) => arr.length === 1 142 | 143 | const splitByLast = (separator = '/', qualifiedName = '') => 144 | match(qualifiedName.lastIndexOf(separator)) 145 | .on((lastIndex) => lastIndex < 0, ['', qualifiedName]) 146 | .otherwise((lastIndex) => [qualifiedName.substr(0, lastIndex), qualifiedName.substr(lastIndex + 1)]) 147 | 148 | const readJSONFileSync = (filename) => JSON.parse(readFileSync(process.cwd() + filename).toString()) 149 | 150 | const prepareFile = (f, outputDir, path, extension, code, messageDefinitionDir) => { 151 | f = match(messageDefinitionDir) 152 | .on( 153 | (d) => exists(d) && f.lastIndexOf(d) !== 0, 154 | (d) => d + '/' + f 155 | ) 156 | .otherwise(f) 157 | 158 | const 
fileName = `${process.cwd()}/${R.replace(__(messageDefinitionDir), outputDir, R.head(R.split('.', __(f))))}` 159 | const p = fileName.lastIndexOf('/') 160 | return [ 161 | fileName.substring(0, p) + 162 | (path ? '/' + camelToSnakeCase(__(path)) : '') + 163 | fileName.substring(p, fileName.length) + 164 | '.' + 165 | extension, 166 | code, 167 | ] 168 | } 169 | 170 | const writeFileSafe = (f, payload) => { 171 | ensureDirectoryExistence(f) 172 | fs.writeFileSync(f, payload) 173 | } 174 | 175 | function ensureDirectoryExistence(filePath) { 176 | match(path.dirname(filePath)) 177 | .on(fs.existsSync, true) 178 | .otherwise((dirname) => { 179 | ensureDirectoryExistence(dirname) 180 | fs.mkdirSync(dirname) 181 | }) 182 | } 183 | 184 | const cdw = (d) => `${process.cwd()}${startsWith(d, '/') ? '' : '/'}${d}` 185 | 186 | const fromFile = (f) => fs.readFileSync(f).toString() 187 | 188 | const filesFromDir = (directory, ext = []) => { 189 | const files = [] 190 | walkDirSync(directory, (f) => { 191 | const hasExt = ext.length === 0 || ext.includes(R.last(f.split('.'))) 192 | if (hasExt) files.push(f) 193 | }) 194 | return files 195 | } 196 | 197 | const walkDirSync = (dir, callback) => { 198 | const files = fs.readdirSync(dir) 199 | R.forEach((file) => { 200 | let filepath = path.join(dir, file) 201 | match(fs.statSync(filepath)) 202 | .on( 203 | (stats) => stats.isDirectory(), 204 | () => { 205 | walkDirSync(filepath, callback) 206 | } 207 | ) 208 | .on( 209 | (stats) => stats.isFile(), 210 | (stats) => { 211 | callback(filepath, stats) 212 | } 213 | ) 214 | }, files) 215 | } 216 | 217 | const filesWithExtension = (extension, files) => R.filter((f) => R.path(['extension'], f) === extension, files) 218 | 219 | const cp = (fromDir, toDir) => { 220 | if (exists(fromDir) && exists(toDir)) { 221 | const files = filesFromDir(cdw(fromDir)) 222 | files.forEach((f) => { 223 | const content = fromFile(f) 224 | const targetName = R.replace(fromDir, toDir, f) 225 | 
fs.writeFileSync(targetName, content) 226 | }) 227 | } 228 | } 229 | 230 | // Adapted from 231 | // and credits to https://codeburst.io/alternative-to-javascripts-switch-statement-with-a-functional-twist-3f572787ba1c 232 | 233 | const matched = (x) => ({ 234 | on: () => matched(x), 235 | otherwise: () => x, 236 | }) 237 | 238 | const match = (...x) => ({ 239 | on: (pred, fn) => { 240 | const _pred = typeof pred !== 'function' ? (z) => z === pred : pred 241 | const _fn = typeof fn !== 'function' ? () => fn : fn 242 | return _pred(...x) ? matched(_fn(...x)) : match(...x) 243 | }, 244 | otherwise: (fn) => (fn === undefined ? x[0] : typeof fn !== 'function' ? fn : fn(...x)), 245 | }) 246 | 247 | const matchReduce = (...x) => ({ 248 | on: (pred, fn) => { 249 | const _pred = typeof pred !== 'function' ? (z) => z === pred : pred 250 | const _fn = typeof fn !== 'function' ? () => fn : fn 251 | return _pred(...x) ? matchReduce(_fn(...x)) : matchReduce(...x) 252 | }, 253 | end: () => (x.length > 1 ? 
x : R.head(x)), 254 | }) 255 | 256 | module.exports = { 257 | env, 258 | log, 259 | exists, 260 | noNullElementArray, 261 | hasOneElement, 262 | capitalize, 263 | startsWithCapitalLetter, 264 | firstLowerCase, 265 | firstUpperCase, 266 | crlf, 267 | tab, 268 | substringFrom, 269 | splitByLast, 270 | isString, 271 | __, 272 | handleCapitalizedWords, 273 | processCameToSnake, 274 | camelToSnakeCase, 275 | readJSONFileSync, 276 | filesFromDir, 277 | cdw, 278 | walkDirSync, 279 | ensureDirectoryExistence, 280 | writeFileSafe, 281 | fromFile, 282 | filesWithExtension, 283 | cp, 284 | clone, 285 | foldObject, 286 | returnArray, 287 | set, 288 | propObject, 289 | filter, 290 | mapIndexed, 291 | key, 292 | valueForKey, 293 | prepareFile, 294 | match, 295 | isNotEmpty, 296 | matchReduce, 297 | } 298 | -------------------------------------------------------------------------------- /static/ruby/nsa/enums.rb: -------------------------------------------------------------------------------- 1 | require "active_support/core_ext/string/inflections" # for constantize() 2 | 3 | module NavSchemaArchitecture::Event 4 | module Enums 5 | # Every collection of enums here is dynamically generated based the arrays 6 | # in this gem's top-level file. 7 | # 8 | # Unfortunately, we can't refer to it in a readable way in our code. This 9 | # part allows us to do that, and to validate it. 
10 | # 11 | # Example: 12 | # pry> MYENUM = %i[ONE TWO THREE FOUR] 13 | # pry> my_enum = NSA::Event::Enums::MyEnum::TWO 14 | # pry> my_enum 15 | # => :TWO 16 | # pry> NSA::Event::Enums.my_enum_validate_value(my_enum) 17 | # => true 18 | begin 19 | errors, consts = constants.partition { |e| e.to_s.end_with?("Error") } 20 | 21 | # Get friendly names of the modules from the errors 22 | names = {} 23 | errors.map do |sym| 24 | name = sym.to_s.delete_prefix("Invalid").delete_suffix("Error") 25 | const = name.upcase.to_sym 26 | names[const] = name if consts.include?(const) 27 | end 28 | 29 | consts.each do |sym| 30 | mod = const_set(names[sym], Module.new) 31 | items = "NavSchemaArchitecture::Event::Enums::#{sym}".constantize 32 | items.each { |item| mod.const_set(item, item) } 33 | end 34 | end 35 | end 36 | end 37 | -------------------------------------------------------------------------------- /static/ruby/nsa/utils.rb: -------------------------------------------------------------------------------- 1 | class Object 2 | alias set? present? 3 | end 4 | 5 | class FalseClass 6 | def set? 7 | # special case where .present? and .blank? 
fails us 8 | true 9 | end 10 | end 11 | -------------------------------------------------------------------------------- /test/samples/message-definitions/new-address.graphql: -------------------------------------------------------------------------------- 1 | { 2 | newAddress 3 | @namespace(value: "customer") 4 | @title(value: "New Customer Address") 5 | @description( 6 | value: "This is the fields you need to provide when creating a new address" 7 | ) 8 | @version(value: 1) { 9 | street @field(order: 1) 10 | city @field(order: 2) 11 | state @field(order: 3) 12 | zipCode @field(order: 4) 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /test/samples/test-schema.gql: -------------------------------------------------------------------------------- 1 | scalar ZIPCode 2 | 3 | enum State { 4 | CA # CALIFORNIA 5 | CO # COLORADO 6 | FL # FLORIDA 7 | MA # MASSACHUSETTS 8 | NC # NORTH-CAROLINA 9 | NV # NEVADA 10 | OR # OREGON 11 | PA # Pennsylvania 12 | TX # TEXAS 13 | UT # UTAH 14 | VA # VIRGINIA 15 | WA # WASHINGTON 16 | } 17 | 18 | type Address { 19 | street: [String]! 20 | city: String! 21 | state: State! 
22 | zipCode: ZIPCode 23 | } 24 | 25 | schema { 26 | query: Query 27 | } 28 | 29 | type Query { 30 | newAddress: Address 31 | } 32 | -------------------------------------------------------------------------------- /test/test.generators.js: -------------------------------------------------------------------------------- 1 | const { strictEqual } = require('assert') 2 | const R = require('ramda') 3 | 4 | const { 5 | parseSchema, 6 | parseMessageDefinition, 7 | filesFromDir, 8 | generate, 9 | env, 10 | additionalCodeFrom, 11 | extractDir, 12 | setOfFilesFrom, 13 | fileDescriptor, 14 | splitByLast, 15 | addToContext, 16 | fromContext, 17 | updateModifiers, 18 | generatedCode, 19 | computeTypeModifiers, 20 | assign, 21 | } = require('../src/lib') 22 | 23 | const { processFileName } = require('../src/generators/ruby') 24 | const { generate: goStructGenerate } = require('../src/generators/go-struct') 25 | const { generate: rubyGenerate } = require('../src/generators/ruby') 26 | const { generate: pythonGenerate } = require('../src/generators/python') 27 | const { generate: jsonSchemaGenerate } = require('../src/generators/json-schema') 28 | 29 | const generators = [ 30 | { 31 | generate: jsonSchemaGenerate, 32 | extension: 'json', 33 | toDir: env('OUTPUT_DIR_JSON_SCHEMA', 'test/samples/output/json-schema'), 34 | type: 'json-schema', 35 | outputFormatter: (code) => JSON.stringify(code, null, 4), 36 | }, 37 | { 38 | generate: rubyGenerate, 39 | extension: 'rb', 40 | toDir: env('OUTPUT_DIR_RUBY', 'test/samples/output/ruby/nsa'), 41 | type: 'ruby', 42 | outputFormatter: R.identity, 43 | }, 44 | { 45 | generate: pythonGenerate, 46 | extension: 'py', 47 | toDir: env('OUTPUT_DIR_PYTHON', 'test/samples/output/python/nsa'), 48 | type: 'python', 49 | outputFormatter: R.identity, 50 | }, 51 | { 52 | generate: goStructGenerate, 53 | extension: 'go', 54 | toDir: env('OUTPUT_DIR_GO_STRUCT', 'test/samples/output/go/nsa'), 55 | type: 'golang', 56 | outputFormatter: R.identity, 57 | }, 58 
| ] 59 | 60 | describe('Generators', function () { 61 | this.beforeAll(() => { 62 | process.env['LOG_OFF'] = true 63 | }) 64 | 65 | describe('#ruby', function () { 66 | it('should translate a file name into a require statement', () => { 67 | const rootDir = 'schema/foo' 68 | const fileName = rootDir + '/sub-directory/file-name.rb' 69 | const entity = 'Entity' 70 | strictEqual(processFileName(fileName, rootDir, entity), 'sub_directory/Entity/file_name') 71 | }) 72 | }) 73 | 74 | describe('#Code generation', function () { 75 | it('should generate code', () => { 76 | const schema = parseSchema(process.cwd() + (process.env.SCHEMA || '/test/samples/test-schema.gql')) 77 | 78 | const messageFileName = `${process.cwd()}/test/samples/message-definitions/new-address.graphql` 79 | const messageDefinitionAsGQLQuery = parseMessageDefinition(messageFileName) 80 | 81 | messageDefinitionAsGQLQuery.fileName = messageFileName 82 | const messages = [messageDefinitionAsGQLQuery] 83 | 84 | const code = generate(messages, schema, generators, 'schema/message-schema-definitions') 85 | 86 | strictEqual(code.length, 4) 87 | strictEqual(code[0].extension, 'json') 88 | strictEqual(code[0].code.indexOf('"pattern": "^[0-9]{5}(?:-[0-9]{4})?$"') > 0, true) 89 | strictEqual(code[1].extension, 'rb') 90 | strictEqual( 91 | code[1].code.indexOf('raise(InvalidNewAddressError, "Required value city is undefined") unless defined? 
city') > 0, 92 | true 93 | ) 94 | strictEqual(code[2].extension, 'py') 95 | strictEqual(code[2].code.indexOf('ZIPCode_pattern = re.compile("^[0-9]{5}(?:-[0-9]{4})?$")') > 0, true) 96 | strictEqual(code[2].code.indexOf('street = from_list(from_list(obj.get("street")))') > 0, true) 97 | strictEqual(code[3].extension, 'go') 98 | strictEqual(code[3].code.indexOf('func (o NewAddress) Validate() error {') > 0, true) 99 | strictEqual(code[3].code.indexOf('State *enums.State `json:"state,omitempty"') > 0, true) 100 | strictEqual(R.path(['type'], R.head(code[3].additionalCode)), 'State') 101 | }) 102 | 103 | it('should update type modifiers', () => { 104 | const pType = { isRequired: true, isArray: true, isScalar: true, isEnum: true } 105 | const [isRequired, isArray, isEnum, isScalar] = [[], [], [], []] 106 | updateModifiers(pType, 'test', 'Test', false, isRequired, isArray, isEnum, isScalar) 107 | strictEqual(R.head(isArray), 'test') 108 | strictEqual(isRequired.length, 0) 109 | strictEqual(R.head(isEnum).pName, 'Test') 110 | strictEqual(R.head(isScalar).pName, 'Test') 111 | updateModifiers(pType, 'test', 'Test', true, isRequired, isArray, isEnum, isScalar) 112 | strictEqual(R.head(isRequired), 'test') 113 | }) 114 | }) 115 | describe('#Code generation helpers', function () { 116 | it('should extract the output directory of the first file', () => { 117 | const files = [{ toDir: 'a/' }, { toDir: 'b/' }] 118 | 119 | strictEqual(extractDir(files), files[0].toDir) 120 | }) 121 | 122 | it('should return a proper file descriptor', () => { 123 | const fd = fileDescriptor('outputDir/', 'this/path/filename.ext', ['some-generated-code']) 124 | 125 | strictEqual('this/path', fd.path) 126 | strictEqual('ext', fd.extension) 127 | strictEqual('filename', fd.outputFileName) 128 | strictEqual('outputDir/', fd.toDir) 129 | }) 130 | 131 | it('should split by last character', () => { 132 | const [a, b] = splitByLast('/', 'filename.ext') 133 | 134 | strictEqual(a, '') 135 | strictEqual(b, 
'filename.ext') 136 | 137 | const [c, d] = splitByLast('/', 'output/dir/filename.ext') 138 | 139 | strictEqual(c, 'output/dir') 140 | strictEqual(d, 'filename.ext') 141 | }) 142 | 143 | it('should return the files from the current dir', () => { 144 | const files = filesFromDir(process.cwd() + '/test', ['js']) 145 | 146 | strictEqual(files.length, 2) 147 | }) 148 | 149 | it('should return the additional code for the set of types of a given extension', () => { 150 | const files = [ 151 | { extension: 'go', additionalCode: [{ type: 'XYZ', addOn: 'xyz' }] }, 152 | { extension: 'go', additionalCode: [{ type: 'TUV', addOn: 'tuv' }] }, 153 | { extension: 'go', additionalCode: [{ type: 'TUV', addOn: 'tuv' }] }, 154 | { extension: 'rb', additionalCode: [{ type: 'XYZ', addOn: 'xyz' }] }, 155 | ] 156 | 157 | const a = additionalCodeFrom('go', files) 158 | const s = setOfFilesFrom('type', a) 159 | strictEqual(s[0].addOn, 'xyz') 160 | strictEqual(s[1].addOn, 'tuv') 161 | strictEqual(s.length, 2) 162 | }) 163 | 164 | it('should return the additional code of all files from a given extension', () => { 165 | const files = [ 166 | { extension: 'go', additionalCode: [{ type: 'XYZ', addOn: 'xyz' }] }, 167 | { extension: 'go', additionalCode: [{ type: 'TUV', addOn: 'tuv' }] }, 168 | { extension: 'go', additionalCode: [{ type: 'TUV', addOn: 'tuv' }] }, 169 | { extension: 'rb', additionalCode: [{ type: 'XYZ', addOn: 'xyz' }] }, 170 | ] 171 | 172 | const s = additionalCodeFrom('go', files) 173 | strictEqual(s.length, 3) 174 | strictEqual(s[2].additionalCode[0].addOn, 'tuv') 175 | }) 176 | 177 | it('should add to and retrieve from the generator context', () => { 178 | let context = {} 179 | addToContext(context, 'ABC', 'abc', 'enums') 180 | const value = fromContext(context, 'ABC', 'enums') 181 | strictEqual(value, 'abc') 182 | }) 183 | 184 | it('should prepare a code package', () => { 185 | const message = { fileName: '/path/to/fileName.go', code: 'tux' } 186 | const additionalFile = { 
name: 'additional.go', code: 'xyz' } 187 | const formatter = (str) => str.toUpperCase() 188 | 189 | const [code, additionalCode] = generatedCode( 190 | message, 191 | additionalFile, 192 | 'toDir', 193 | 'staticDir', 194 | 'path/', 195 | 'go', 196 | message.code, 197 | 'vwx', 198 | formatter 199 | ) 200 | 201 | strictEqual(code.outputFileName, '/path/to/fileName.go') 202 | strictEqual(code.code, 'TUX') 203 | strictEqual(additionalCode.outputFileName, '/path/to/additional.go') 204 | strictEqual(additionalCode.code, 'xyz') 205 | strictEqual(additionalCode.extension, 'go') 206 | }) 207 | 208 | it('should compute type modifiers from an array of type fields', () => { 209 | const typeFields = [ 210 | { name: 'field1', field: { type: { name: { value: 'String' }, isRequired: true, isArray: true, isEnum: false } } }, 211 | { name: 'field2', field: { type: { name: { value: 'State' }, isRequired: false, isArray: false, isEnum: true } } }, 212 | ] 213 | scalars = { 214 | String: 'string', 215 | } 216 | const { field1, field2 } = computeTypeModifiers(typeFields, scalars) 217 | strictEqual(field1.isRequired, true) 218 | strictEqual(field1.isArray, true) 219 | strictEqual(field1.isScalar, 'string') 220 | strictEqual(field2.isEnum, true) 221 | 222 | const typeDef = [ 223 | { name: 'field1', type: {} }, 224 | { name: 'field2', type: {} }, 225 | ] 226 | assign({ field1, field2 }, typeDef) 227 | strictEqual(typeDef[0].type.isRequired, true) 228 | strictEqual(typeDef[0].type.isScalar, 'string') 229 | strictEqual(typeDef[1].type.isEnum, true) 230 | }) 231 | }) 232 | }) 233 | --------------------------------------------------------------------------------