├── .gitignore
├── .travis.yml
├── LICENSE
├── README.md
├── cmd
├── diff.go
├── infer.go
├── init.go
├── root.go
└── translate.go
├── decode
└── serialize.go
├── diff
├── config.go
├── diff.go
└── types.go
├── e2e_tests
├── diff.sh
├── diff_test_files
│ ├── array_of_objects1.json
│ ├── array_of_objects1.schema
│ ├── array_of_objects1.toml
│ ├── array_of_objects1.yaml
│ ├── array_of_objects2.json
│ ├── array_of_objects2.schema
│ ├── array_of_objects2.toml
│ ├── array_of_objects2.yaml
│ ├── minimal_differing_types1.json
│ ├── minimal_differing_types1.schema
│ ├── minimal_differing_types1.toml
│ ├── minimal_differing_types1.yaml
│ ├── minimal_differing_types2.json
│ ├── minimal_differing_types2.schema
│ ├── minimal_differing_types2.toml
│ ├── minimal_differing_types2.yaml
│ ├── minimal_missing_field1.json
│ ├── minimal_missing_field1.schema
│ ├── minimal_missing_field1.toml
│ ├── minimal_missing_field1.yaml
│ ├── minimal_missing_field2.json
│ ├── minimal_missing_field2.schema
│ ├── minimal_missing_field2.toml
│ └── minimal_missing_field2.yaml
├── infer.sh
├── init.sh
├── lib.sh
└── translate.sh
├── example
└── example.go
├── go.mod
├── go.sum
├── graphqlsch
├── graphqlsch.go
└── serialize.go
├── images
└── schema_logo_circle_transparent.png
├── infer
├── config.go
└── infer.go
├── initcmd
├── config.go
└── initcmd.go
├── jsonsch
├── from.go
├── init.go
├── initrandom.go
├── jsonsch.go
├── primitives.go
├── replacerefs.go
├── schemainclreq.go
├── schemaomitreq.go
├── serialize.go
└── util.go
├── main.go
├── man_pages
└── schema.1
├── test_all
└── translate
├── config.go
└── translate.go
/.gitignore:
--------------------------------------------------------------------------------
1 | vendor/
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: go
2 |
3 | go:
4 | - "1.x"
5 | - "1.8"
6 | - "1.10.x"
7 | - master
8 |
9 | script: env GO111MODULE=on ./test_all
10 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright © 2018 Thomas Fischer
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in
13 | all copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21 | THE SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | [](https://travis-ci.org/Confbase/schema) [](https://goreportcard.com/report/github.com/Confbase/schema)
6 |
7 | # Table of Contents
8 |
9 | * [Introduction](#introduction)
10 | * [Installation](#installation)
11 | * [FAQ](#faq)
12 | * [Testing](#testing)
13 | * [Contributing](#contributing)
14 |
15 | # Introduction
16 |
17 | **schema** is a schema generator, instantiator, and validator tool.
18 |
19 | Common use cases:
20 |
21 | * Infer [GraphQL](https://graphql.org) schemas from arbitrary JSON, YAML,
22 | TOML, and XML:
23 |
24 | ```
25 | $ curl http://piedpiper.tld/some_endpoint | schema infer --graphql
26 | type People {
27 | age: Float!
28 | name: String!
29 | }
30 |
31 | type Object {
32 | people: [People!]!
33 | }
34 | ```
35 |
36 | * Omit `--graphql` to get [JSON Schema](https://json-schema.org):
37 |
38 | ```
39 | $ curl http://piedpiper.tld/some_endpoint | schema infer
40 | {
41 | "title": "",
42 | "type": "object",
43 | "properties": {
44 | "people": {
45 | "type": "array",
46 | "items": {
47 | "title": "",
48 | "type": "object",
49 | "properties": {
50 | "name": {
51 | "type": "string"
52 | },
53 | "age": {
54 | "type": "number"
55 | }
56 | }
57 | }
58 | }
59 | }
60 | }
61 | ```
62 |
63 | * `schema infer` automatically detects the format of the incoming data, so
64 | there's no need to specify whether it is JSON, YAML, TOML, etc.:
65 |
66 | ```
67 | $ cat config.yaml | schema infer
68 | {
69 | "title": "",
70 | "type": "object",
71 | "properties": {
72 | "addr": {
73 | "type": "string"
74 | },
75 | "port": {
76 | "type": "number"
77 | }
78 | }
79 | }
80 | ```
81 |
82 | * Instantiate JSON, GraphQL queries, protocol buffers, YAML, TOML, and XML
83 | from inferred schemas:
84 |
85 | ```
86 | $ cat my_schema | schema init
87 | {
88 | "age": 0,
89 | "name": ""
90 | }
91 | ```
92 |
93 | * Instantiate in a specific format:
94 |
95 | ```
96 | $ cat my_schema | schema init --yaml
97 | age: 0
98 | name: ""
99 | ```
100 |
101 | * Another Example:
102 |
103 | ```
104 | $ cat my_schema | schema init --toml
105 | age = 0
106 | name = ""
107 | ```
108 |
109 | * Instantiate with random values:
110 |
111 | ```
112 | $ cat my_schema | schema init --random
113 | {
114 | "age": -2921.198,
115 | "name": "lOIslkjf"
116 | }
117 | ```
118 |
119 | # Installation
120 |
121 | See the Releases page for static binaries.
122 |
123 | Run `go get -u github.com/Confbase/schema` to build from source.
124 |
125 | # FAQ
126 |
127 | * [How do I infer GraphQL schemas from data with null values?](#how-do-i-infer-graphql-schemas-from-data-with-null-values)
128 | * [How do I make fields required in inferred schemas?](#how-do-i-make-fields-required-in-inferred-schemas)
129 | * [How do I generate compact schemas?](#how-do-i-generate-compact-schemas)
130 | * [Why am I getting the error 'toml: cannot marshal nil interface {}'?](#why-am-i-getting-the-error-toml-cannot-marshal-nil-interface)
131 | * [What is the behavior of inferring and translating XML?](#what-is-the-behavior-of-inferring-and-translating-xml)
132 | * [How do I initialize empty lists?](#how-do-i-initialize-empty-lists)
133 | * [Where is the `$schema` field in inferred schemas?](#where-is-the-schema-field-in-inferred-schemas)
134 |
135 | ### How do I infer GraphQL schemas from data with null values?
136 |
137 | There are a few different approaches which solve this problem.
138 |
139 | The most laborious---but also the safest---approach is to manually replace
140 | null values with non-null values before inferring the schema.
141 |
142 | There are two approaches which are quicker, but more prone to error.
143 |
144 | If your data has one field per line, you could remove all lines with the string
145 | "null", then manually add the fields which were omitted. *Warning*: This is
146 | prone to errors. Specifically, in addition to all fields with null values being
147 | omitted, all fields whose names contain the string "null" will be omitted as
148 | well.
149 |
150 | ```
151 | $ cat my_data.json | grep -v 'null' | schema infer --graphql
152 | ```
153 |
154 | Another approach is to replace the string "null" with the empty string "". This
155 | means the fields with null values will now have the type `String` in the
156 | inferred schema. *Warning*: Fields whose names contain the string "null" will
157 | be clobbered.
158 |
159 | ```
160 | $ cat my_data.yaml | sed 's/null/""/g' | schema infer --graphql
161 | ```
162 |
163 | ### How do I make fields required in inferred schemas?
164 |
165 | Use `--make-required`. If specified with no arguments, all fields will be
166 | required. Example:
167 |
168 | ```
169 | $ printf '{"name":"Thomas","color":"blue"}' | schema infer --make-required
170 | {
171 | "title": "",
172 | "type": "object",
173 | "properties": {
174 | "color": {
175 | "type": "string"
176 | },
177 | "name": {
178 | "type": "string"
179 | }
180 | },
181 | "required": [
182 | "name",
183 | "color"
184 | ]
185 | }
186 | ```
187 |
188 | Use `--omit-required=false` to always include the 'required' field in the
189 | inferred schema, even if it is an empty array:
190 |
191 | ```
192 | $ printf '{"name":"Thomas","color":"blue"}' | schema infer --omit-required=false
193 | {
194 | "title": "",
195 | "type": "object",
196 | "properties": {
197 | "color": {
198 | "type": "string"
199 | },
200 | "name": {
201 | "type": "string"
202 | }
203 | },
204 | "required": []
205 | }
206 | ```
207 |
208 | ### How do I generate compact schemas?
209 |
210 | Disable pretty-printing with `--pretty=false`. Example:
211 |
212 | ```
213 | $ printf '{"name":"Thomas","color":"blue"}' | schema infer --pretty=false
214 | {"title":"","type":"object","properties":{"color":{"type":"string"},"name":{"type":"string"}}}
215 | ```
216 |
217 | ### Why am I getting the error 'toml: cannot marshal nil interface {}'?
218 |
219 | Currently, toml does not support nil/null values. See
220 | [this issue on the toml GitHub page](https://github.com/toml-lang/toml/issues/30).
221 |
222 | ### What is the behavior of inferring and translating XML?
223 |
224 | There is no well-defined mapping between XML and key-value stores. Despite this,
225 | schema still provides some support for inferring the schema of XML. schema uses
226 | the library github.com/clbanning/mxj. Users can expect the behavior of schema's
227 | infer command to match the behavior of github.com/clbanning/mxj's
228 | NewMapXmlReader function when parsing XML.
229 |
230 | To give an idea of this behavior, consider this example:
231 |
232 | ```
233 | $ cat example.xml
234 |
235 | Tove
236 | Jani
237 | Reminder
238 | Don't forget me this weekend!
239 |
240 | $ cat example.xml | schema translate --yaml
241 | note:
242 | body: Don't forget me this weekend!
243 | from: Jani
244 | heading: Reminder
245 | to: Tove
246 | ```
247 |
248 | **WARNING**: Here is an example of where the mapping fails:
249 |
250 | ```
251 | $ schema translate --xml
252 | {}
253 | ^D
254 |
255 | $ schema translate --xml | schema translate --json
256 | {}
257 | ^D
258 | {
259 | "doc": ""
260 | }
261 | ```
262 |
263 | As demonstrated by the example above, there are inputs **X** such that
264 | translating **X** from format **F** to XML and back to format **F** gives an
265 | output not equal to **X**. In the example, an empty JSON object (`{}`) was
266 | translated to XML and then translated back to JSON. The resulting JSON
267 | (`{"doc":""}`) is clearly not an empty object.
268 |
269 | **WARNING**: All type information is lost in XML.
270 |
271 | For example:
272 |
273 | ```
274 | $ schema translate --xml | schema translate --json
275 | {"height": 6.0, "isAcadian": true}
276 | {
277 | "doc": {
278 | "height": "6",
279 | "isAcadian": "true"
280 | }
281 | }
282 | ```
283 |
284 | All values are interpreted as strings.
285 |
286 | ### How do I initialize empty lists?
287 |
288 | By default, `schema init` will initialize one element of each list. To
289 | initialize empty lists instead, use the flag `--populate-lists=false`.
290 | Example:
291 |
292 |
293 | ```
294 | $ cat schema.json | schema init --populate-lists=false
295 | {
296 | "truthinesses": []
297 | }
298 | ```
299 |
300 | Compared to the default behavior:
301 |
302 | ```
303 | $ cat schema.json | schema init
304 | {
305 | "truthinesses": [
306 | false
307 | ]
308 | }
309 | ```
310 |
311 | ### Where is the `$schema` field in inferred schemas?
312 |
313 | The `$schema` field can be specified with the `--schema-field` (short form `-s`)
314 | flag.
315 |
316 | Example:
317 |
318 | ```
319 | $ cat my_data.json | schema infer -s 'http://json-schema.org/draft-06/schema'
320 | {
321 | "$schema": "http://json-schema.org/draft-06/schema",
322 | "title": "",
323 | "type": "object",
324 | "properties": {
325 | "name": {
326 | "type": "string"
327 | }
328 | }
329 | }
330 | ```
331 |
332 | # Testing
333 |
334 | This project has unit tests, formatting tests, and end-to-end tests.
335 |
336 | To run unit tests, run `go test -v ./...`.
337 |
338 | There is only one formatting test. It ensures all .go source files are gofmt'd.
339 |
340 | The end-to-end tests require bash and an internet connection. To skip tests
341 | which require an internet connection, run with the `--offline` flag:
342 | `./test_all --offline`.
343 |
344 | To run all tests (unit, formatting, and end-to-end), execute `./test_all`.
345 |
346 | # Contributing
347 |
348 | Issues and pull requests are welcome. If you are making a significant
349 | contribution (more than fixing a typo), add your name to the "Contributors"
350 | section in your PR.
351 |
352 | ### Contributors
353 |
354 | * [Thomas Fischer](https://github.com/thomasdfischer)
355 | * [Ish Shah](https://github.com/theishshah)
356 | * [Victor Niu](https://github.com/lathie)
357 | * [Noah Mullen](https://github.com/HoobMD)
358 |
--------------------------------------------------------------------------------
/cmd/diff.go:
--------------------------------------------------------------------------------
1 | // Copyright © 2018 Thomas Fischer
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package cmd
16 |
17 | import (
18 | "github.com/spf13/cobra"
19 |
20 | "github.com/Confbase/schema/diff"
21 | )
22 |
23 | var diffCfg diff.Config
24 | var diffCmd = &cobra.Command{
25 | Use: "diff",
26 | Short: "Output the structural differences between two files",
27 | Long: `Outputs the structural differences between two schemas or files.
28 |
29 | If the files are both JSON schemas, they are interpreted as such and type
30 | differences between the schemas are output.
31 |
32 | Otherwise, their schemas are inferred and the differences between the inferred
33 | schemas are output.
34 |
35 | There are two types of differences:
36 |
37 | 1. A field is included in one schema but missing from the other
38 | 2. A field is in both schemas, but the type in each schema is not the same
39 |
40 | EXIT STATUS
41 |
42 | If there are no differences, the program exits with status code 0.
43 |
44 | If there are differences, the program exits with status code 2.
45 |
46 | If there are any fatal errors, the program exits with status 1 and output is
47 | undefined.`,
48 | Args: cobra.ExactArgs(2),
49 | Run: func(cmd *cobra.Command, args []string) {
50 | diffCfg.Schema1, diffCfg.Schema2 = args[0], args[1]
51 | diff.Entry(&diffCfg)
52 | },
53 | }
54 |
55 | func init() {
56 | rootCmd.AddCommand(diffCmd)
57 | diffCmd.Flags().StringVarP(&diffCfg.Title1, "title-1", "1", "", "title of first schema")
58 | diffCmd.Flags().StringVarP(&diffCfg.Title2, "title-2", "2", "", "title of second schema")
59 | diffCmd.Flags().StringVarP(&diffCfg.MissFrom1, "miss-from-1", "", "the first file", "title of first schema in 'missing from' messages")
60 | diffCmd.Flags().StringVarP(&diffCfg.MissFrom2, "miss-from-2", "", "the second file", "title of second schema in 'missing from' messages")
61 | diffCmd.Flags().StringVarP(&diffCfg.Differ1, "differ-1", "", "the first file", "title of first schema in 'differing types' messages")
62 | diffCmd.Flags().StringVarP(&diffCfg.Differ2, "differ-2", "", "the second file", "title of second schema in 'differing types' messages")
63 | diffCmd.Flags().BoolVarP(&diffCfg.DoSkipRefs, "skip-refs", "", false, "do not resolve $ref fields with a network request")
64 | }
65 |
--------------------------------------------------------------------------------
/cmd/infer.go:
--------------------------------------------------------------------------------
1 | // Copyright © 2018 Confbase
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package cmd
16 |
17 | import (
18 | "github.com/spf13/cobra"
19 |
20 | "github.com/Confbase/schema/infer"
21 | )
22 |
23 | var inferCfg infer.Config
24 |
25 | var inferCmd = &cobra.Command{
26 | Use: "infer [output path]",
27 | Short: "Infer and output schemas from example data",
28 | Long: `Infer and output schemas from example data.
29 |
30 | By default, JSON schema (see https://json-schema.org) is output.
31 |
32 | GraphQL schemas can be output with the --graphql flag. The --omit-required
33 | and --schema-field flags do nothing when used with the --graphql flag.
34 |
35 | If called with no arguments, 'schema infer' reads from stdin and writes the
36 | inferred schema to stdout.
37 |
38 | If called with arguments, each argument is interpreted as a file path. The
39 | schema for each path is inferred and written to a new file of the same path,
40 | but with its basename prefixed with the string 'schema.'. For example,
41 |
42 | $ schema infer config1.json config2.json
43 |
44 | will write the inferred schemas to schema.config1.json and schema.config2.json,
45 | respectively.
46 |
47 | See the man pages for idioms, examples, and more information.`,
48 | Run: func(cmd *cobra.Command, args []string) {
49 | infer.InferEntry(inferCfg, args)
50 | },
51 | }
52 |
53 | func init() {
54 | inferCmd.Flags().BoolVarP(&inferCfg.DoPretty, "pretty", "p", true, "pretty-print the output")
55 | inferCmd.Flags().BoolVarP(&inferCfg.DoMakeReq, "make-required", "r", false, "make all fields required")
56 | inferCmd.Flags().BoolVarP(&inferCfg.DoOmitReq, "omit-required", "", true, "omit 'required' field if it's empty")
57 | inferCmd.Flags().BoolVarP(&inferCfg.DoGraphQL, "graphql", "g", false, "output GraphQL schemas")
58 | inferCmd.Flags().StringVarP(&inferCfg.SchemaField, "schema-field", "s", "", "specifies the value of the $schema field")
59 | inferCmd.Flags().StringVarP(&inferCfg.EmptyArraysAs, "empty-arrays-as", "", "", "specifies the element type of empty arrays")
60 | inferCmd.Flags().StringVarP(&inferCfg.NullAs, "null-as", "", "", "specifies the type to infer for null values")
61 | rootCmd.AddCommand(inferCmd)
62 | }
63 |
--------------------------------------------------------------------------------
/cmd/init.go:
--------------------------------------------------------------------------------
1 | // Copyright © 2018 Confbase
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package cmd
16 |
17 | import (
18 | "github.com/spf13/cobra"
19 |
20 | "github.com/Confbase/schema/initcmd"
21 | )
22 |
23 | var initCfg initcmd.Config
24 |
25 | var initCmd = &cobra.Command{
26 | Use: "init [output instance name]",
27 | Short: "Initialize an instance of a schema",
28 | Long: `Initialize an instance of a schema.
29 |
30 | If no schema is specified, stdin is interpreted as the schema.
31 |
32 | Multiple instance names may be specified.
33 |
34 | If more than one of the (json|yaml|toml|xml|protobuf|graphql) flags are set,
35 | behavior is undefined.
36 |
37 | $ref fields are resolved via network requests by default. Network requests can
38 | be avoided with the --skip-refs flag, which resolves the ref to an empty object
39 | ({}).
40 |
41 | See the man pages for idioms, examples, and more information.`,
42 | Run: func(cmd *cobra.Command, args []string) {
43 | initcmd.Init(initCfg, args)
44 | },
45 | }
46 |
47 | func init() {
48 | initCmd.Flags().StringVarP(&initCfg.SchemaPath, "schema", "s", "", "specifies schema to initialize")
49 | initCmd.Flags().BoolVarP(&initCfg.DoJson, "json", "", false, "initialize as JSON")
50 | initCmd.Flags().BoolVarP(&initCfg.DoYaml, "yaml", "", false, "initialize as YAML")
51 | initCmd.Flags().BoolVarP(&initCfg.DoToml, "toml", "", false, "initialize as TOML")
52 | initCmd.Flags().BoolVarP(&initCfg.DoXml, "xml", "", false, "initialize as XML")
53 | initCmd.Flags().BoolVarP(&initCfg.DoProtobuf, "protobuf", "", false, "initialize as protocol buffer")
54 | initCmd.Flags().BoolVarP(&initCfg.DoGraphQL, "graphql", "", false, "initialize as GraphQL instance")
55 | initCmd.Flags().BoolVarP(&initCfg.DoRandom, "random", "", false, "initialize with random values")
56 | initCmd.Flags().BoolVarP(&initCfg.DoPretty, "pretty", "", true, "pretty-print the output")
57 | initCmd.Flags().BoolVarP(&initCfg.DoPopLists, "populate-lists", "", true, "populate lists with one element")
58 | initCmd.Flags().BoolVarP(&initCfg.DoSkipRefs, "skip-refs", "", false, "use {} in place of $ref fields")
59 | rootCmd.AddCommand(initCmd)
60 | }
61 |
--------------------------------------------------------------------------------
/cmd/root.go:
--------------------------------------------------------------------------------
1 | // Copyright © 2018 Confbase
2 | //
3 | // Permission is hereby granted, free of charge, to any person obtaining a copy
4 | // of this software and associated documentation files (the "Software"), to deal
5 | // in the Software without restriction, including without limitation the rights
6 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7 | // copies of the Software, and to permit persons to whom the Software is
8 | // furnished to do so, subject to the following conditions:
9 | //
10 | // The above copyright notice and this permission notice shall be included in
11 | // all copies or substantial portions of the Software.
12 | //
13 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
19 | // THE SOFTWARE.
20 |
21 | package cmd
22 |
23 | import (
24 | "fmt"
25 | "os"
26 |
27 | "github.com/spf13/cobra"
28 | )
29 |
30 | var rootCmd = &cobra.Command{
31 | Use: "schema",
32 | Short: "A tool for inferring and instantiating schemas",
33 | Long: `This tool provides four subcommands:
34 |
35 | 1. infer : Infer the schema of example data
36 | 2. init : Initialize an instance of a schema with default values
37 | 3. translate : Translate data from one format to another
38 | 4. diff : Print the structural differences between two files
39 |
40 | Supported formats: JSON, YAML, TOML, XML, GraphQL schema, JSON Schema
41 |
42 | See the man pages for idioms, examples, and more information.`,
43 | }
44 |
45 | func Execute() {
46 | if err := rootCmd.Execute(); err != nil {
47 | fmt.Fprintf(os.Stderr, "%v\n", err)
48 | os.Exit(1)
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/cmd/translate.go:
--------------------------------------------------------------------------------
1 | // Copyright © 2018 Confbase
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | package cmd
16 |
17 | import (
18 | "github.com/spf13/cobra"
19 |
20 | "github.com/Confbase/schema/translate"
21 | )
22 |
23 | var translateCfg translate.Config
24 |
25 | var translateCmd = &cobra.Command{
26 | Use: "translate",
27 | Short: "Translate input data into another format",
28 | Long: `Translate input data into another format.
29 |
30 | If no input file is specified, stdin is used as input.
31 |
32 | Multiple output paths may be specified. If none are specified, translated data
33 | is written to stdout.
34 |
35 | If more than one of the (json|yaml|toml|xml|protobuf|graphql) flags are set,
36 | behavior is undefined.
37 |
38 | See the man pages for idioms, examples, and more information.`,
39 | Run: func(cmd *cobra.Command, args []string) {
40 | translate.TranslateEntry(translateCfg, args)
41 | },
42 | }
43 |
44 | func init() {
45 | translateCmd.Flags().StringVarP(&translateCfg.InputPath, "input", "i", "", "path to input data to translate")
46 | translateCmd.Flags().BoolVarP(&translateCfg.DoJson, "json", "", false, "translate to JSON")
47 | translateCmd.Flags().BoolVarP(&translateCfg.DoYaml, "yaml", "", false, "translate to YAML")
48 | translateCmd.Flags().BoolVarP(&translateCfg.DoToml, "toml", "", false, "translate to TOML")
49 | translateCmd.Flags().BoolVarP(&translateCfg.DoXml, "xml", "", false, "translate to XML")
50 | translateCmd.Flags().BoolVarP(&translateCfg.DoProtobuf, "protobuf", "", false, "translate to protocol buffer")
51 | translateCmd.Flags().BoolVarP(&translateCfg.DoPretty, "pretty", "", true, "pretty-print the output")
52 | rootCmd.AddCommand(translateCmd)
53 | }
54 |
--------------------------------------------------------------------------------
/decode/serialize.go:
--------------------------------------------------------------------------------
1 | package decode
2 |
3 | import (
4 | "bytes"
5 | "encoding/json"
6 | "fmt"
7 | "io"
8 | "io/ioutil"
9 |
10 | "github.com/clbanning/mxj"
11 | "github.com/naoina/toml"
12 | "gopkg.in/yaml.v2"
13 | )
14 |
15 | func MuxDecode(r io.Reader) (map[string]interface{}, error) {
16 | // ReadAll is necessary, since the input stream could be only
17 | // traversable once; we must be sure to save the data
18 | // into a buffer on the first pass, so that we can read it
19 | // *multiple* times
20 | buf, err := ioutil.ReadAll(r)
21 | if err != nil {
22 | return nil, err
23 | }
24 |
25 | data := make(map[string]interface{})
26 | if err = json.Unmarshal(buf, &data); err == nil {
27 | return data, nil
28 | }
29 |
30 | data = make(map[string]interface{}) // be sure it's an empty map
31 | if err = yaml.Unmarshal(buf, &data); err == nil {
32 | return data, nil
33 | }
34 |
35 | data = make(map[string]interface{}) // be sure it's an empty map
36 | if err = toml.Unmarshal(buf, &data); err == nil {
37 | return data, nil
38 | }
39 |
40 | mv, err := mxj.NewMapXmlReader(bytes.NewReader(buf))
41 | if err == nil {
42 | return map[string]interface{}(mv), nil
43 | }
44 |
45 | return nil, fmt.Errorf("failed to recognize input data format")
46 | }
47 |
48 | func DemuxEncode(w io.Writer, data interface{}, outFmt string, doPretty bool) error {
49 | switch outFmt {
50 | case "json":
51 | enc := json.NewEncoder(w)
52 | if doPretty {
53 | enc.SetIndent("", " ")
54 | }
55 | if err := enc.Encode(&data); err != nil {
56 | return err
57 | }
58 | case "yaml":
59 | if err := yaml.NewEncoder(w).Encode(&data); err != nil {
60 | return err
61 | }
62 | case "toml":
63 | if err := toml.NewEncoder(w).Encode(&data); err != nil {
64 | return err
65 | }
66 | case "xml":
67 | strMap, ok := data.(map[string]interface{})
68 | if !ok {
69 | return fmt.Errorf("casting data to map[string]interface failed")
70 | }
71 | mv := mxj.Map(strMap)
72 | if doPretty {
73 | if err := mv.XmlIndentWriter(w, "", " "); err != nil {
74 | return err
75 | }
76 | } else {
77 | if err := mv.XmlWriter(w); err != nil {
78 | return err
79 | }
80 | }
81 | case "protobuf", "graphql":
82 | return fmt.Errorf("'%v' is not implemented yet", outFmt)
83 | default:
84 | return fmt.Errorf("unrecognized output format '%v'", outFmt)
85 | }
86 | return nil
87 | }
88 |
--------------------------------------------------------------------------------
/diff/config.go:
--------------------------------------------------------------------------------
1 | package diff
2 |
3 | type Config struct {
4 | Schema1 string
5 | Schema2 string
6 | DoSkipRefs bool
7 | titleStrings
8 | }
9 |
10 | type titleStrings struct {
11 | Title1, Title2 string
12 | MissFrom1, MissFrom2 string
13 | Differ1, Differ2 string
14 | }
15 |
--------------------------------------------------------------------------------
/diff/diff.go:
--------------------------------------------------------------------------------
1 | package diff
2 |
3 | import (
4 | "fmt"
5 | "os"
6 | "reflect"
7 |
8 | "github.com/Confbase/schema/decode"
9 | "github.com/Confbase/schema/example"
10 | "github.com/Confbase/schema/jsonsch"
11 | )
12 |
13 | func Diff(s1, s2 jsonsch.Schema, titles *titleStrings) ([]Difference, error) {
14 | if titles.Title1 != "" {
15 | titles.MissFrom1 = titles.Title1
16 | titles.Differ1 = titles.Title1
17 | }
18 | if titles.Title2 != "" {
19 | titles.MissFrom2 = titles.Title2
20 | titles.Differ2 = titles.Title2
21 | }
22 |
23 | return diff(s1, s2, "", titles)
24 | }
25 |
26 | func Entry(cfg *Config) {
27 | f1, err := os.Open(cfg.Schema1)
28 | nilOrFatal(err)
29 | f2, err := os.Open(cfg.Schema2)
30 | nilOrFatal(err)
31 |
32 | map1, err := decode.MuxDecode(f1)
33 | nilOrFatal(err)
34 | f1.Close()
35 | map2, err := decode.MuxDecode(f2)
36 | nilOrFatal(err)
37 | f2.Close()
38 |
39 | s1, err := jsonsch.FromSchema(map1, cfg.DoSkipRefs)
40 | if err != nil {
41 | params := jsonsch.FromExampleParams{
42 | DoOmitReq: false,
43 | DoMakeReq: true,
44 | EmptyArraysAs: "",
45 | NullAs: "",
46 | }
47 | s1, err = jsonsch.FromExample(example.New(map1), ¶ms)
48 | nilOrFatal(err)
49 | }
50 | s2, err := jsonsch.FromSchema(map2, cfg.DoSkipRefs)
51 | if err != nil {
52 | params := jsonsch.FromExampleParams{
53 | DoOmitReq: false,
54 | DoMakeReq: true,
55 | EmptyArraysAs: "",
56 | NullAs: "",
57 | }
58 | s2, err = jsonsch.FromExample(example.New(map2), ¶ms)
59 | nilOrFatal(err)
60 | }
61 |
62 | diffs, err := Diff(s1, s2, &cfg.titleStrings)
63 | nilOrFatal(err)
64 |
65 | for _, d := range diffs {
66 | fmt.Println(d)
67 | }
68 | if len(diffs) != 0 {
69 | os.Exit(2)
70 | }
71 | }
72 |
73 | func diff(s1, s2 jsonsch.Schema, parentKey string, titles *titleStrings) ([]Difference, error) {
74 | s1Props, s2Props := s1.GetProperties(), s2.GetProperties()
75 | s1Diffs, err := diffPropsFrom(s1Props, s2Props, titles)
76 | if err != nil {
77 | return nil, err
78 | }
79 | switchedTitles := titleStrings{
80 | Title1: titles.Title2,
81 | Title2: titles.Title1,
82 | MissFrom1: titles.MissFrom2,
83 | MissFrom2: titles.MissFrom1,
84 | Differ1: titles.Differ2,
85 | Differ2: titles.Differ1,
86 | }
87 | s2Diffs, err := diffPropsFrom(s2Props, s1Props, &switchedTitles)
88 | if err != nil {
89 | return nil, err
90 | }
91 |
92 | // differingTypes is the set of fields which have differing types.
93 | // Any DifferyingType found in s1 is guaranteed
94 | // to be in s2, but we ony want *one* of these instances
95 | // in the returned diffs.
96 | diffs, differingTypes := filterUniqueDiffs(s1Diffs, make(map[string]bool))
97 | diffs2, _ := filterUniqueDiffs(s2Diffs, differingTypes)
98 |
99 | return append(diffs, diffs2...), nil
100 | }
101 |
102 | func filterUniqueDiffs(newDiffs []Difference, differingTypes map[string]bool) ([]Difference, map[string]bool) {
103 | diffs := make([]Difference, 0)
104 | for _, d := range newDiffs {
105 | if _, ok := d.(*DifferingTypes); ok {
106 | field := d.getField()
107 | if _, ok := differingTypes[field]; !ok {
108 | diffs = append(diffs, d)
109 | differingTypes[field] = true
110 | }
111 | } else {
112 | diffs = append(diffs, d)
113 | }
114 | }
115 | return diffs, differingTypes
116 | }
117 |
118 | // diffPropsFrom assumes props1 is the base. It will return
119 | // 1. all DifferingTypes differences
120 | // 2. all fields which are in props1 but missing from props2
121 | //
122 | // Therefore, to do a complete diff of props1 and props2,
123 | // one must call
124 | // diffPropsFrom(props1, props2) AND diffPropsFrom(props2, props1)
125 | // and merge the results
126 | func diffPropsFrom(props1, props2 map[string]interface{}, titles *titleStrings) ([]Difference, error) {
127 | diffs := make([]Difference, 0)
128 | for k, v1 := range props1 {
129 | v2, ok := props2[k]
130 | if !ok {
131 | diffs = append(diffs, &MissingField{k, titles.MissFrom2})
132 | continue
133 | }
134 | subDiffs, err := diffSomething(v1, v2, k, titles)
135 | if err != nil {
136 | return nil, err
137 | }
138 | diffs = append(diffs, subDiffs...)
139 | }
140 | return diffs, nil
141 | }
142 |
// diffSomething compares two property values sharing the key k. Values
// may be primitives, array schemas, or nested object schemas. If their
// declared schema types differ, a single DifferingTypes diff is
// returned; otherwise the comparison recurses into arrays (via their
// item schemas) and objects (via their properties), prepending k to
// every nested field path.
func diffSomething(v1, v2 interface{}, k string, titles *titleStrings) ([]Difference, error) {
	diffs := make([]Difference, 0)

	type1, err := getType(v1, k)
	if err != nil {
		return nil, err
	}
	type2, err := getType(v2, k)
	if err != nil {
		return nil, err
	}
	// Differing declared types short-circuit the comparison: there is
	// no point descending into structurally unrelated values.
	if type1 != type2 {
		diffs = append(diffs, &DifferingTypes{
			field:  k,
			title1: titles.Differ1,
			title2: titles.Differ2,
		})
		return diffs, nil
	}

	switch v1.(type) {
	case jsonsch.Primitive:
		// Same primitive type on both sides: nothing more to compare.
		return diffs, nil
	case jsonsch.ArraySchema:
		a1, ok := v1.(jsonsch.ArraySchema)
		if !ok {
			return nil, fmt.Errorf("saw type 'array' but internal type is not array")
		}
		// type1 == type2 compares declared schema types, not Go types,
		// so v2's assertion can legitimately fail — keep the check.
		a2, ok := v2.(jsonsch.ArraySchema)
		if !ok {
			return nil, fmt.Errorf("saw type 'array' but internal type is not array")
		}
		// Recurse into the element schemas under the synthetic key
		// "items", then prefix results with the array's own key.
		subDiffs, err := diffSomething(a1.Items, a2.Items, "items", titles)
		if err != nil {
			return nil, err
		}
		for _, d := range subDiffs {
			prependKey(d, k)
			diffs = append(diffs, d)
		}
	case jsonsch.Schema:
		s1, ok := v1.(jsonsch.Schema)
		if !ok {
			return nil, fmt.Errorf("saw type 'object' but internal type is not object")
		}
		s2, ok := v2.(jsonsch.Schema)
		if !ok {
			return nil, fmt.Errorf("saw type 'object' but internal type is not object")
		}
		// Nested objects get a full recursive diff of their own.
		subDiffs, err := Diff(s1, s2, titles)
		if err != nil {
			return nil, err
		}
		for _, d := range subDiffs {
			prependKey(d, k)
			diffs = append(diffs, d)
		}
	default:
		return nil, fmt.Errorf("key '%v' has unrecognized type '%v'", k, reflect.TypeOf(v1))
	}
	return diffs, nil
}
205 |
206 | func getType(schema interface{}, k string) (jsonsch.Type, error) {
207 | switch v := schema.(type) {
208 | case jsonsch.Primitive:
209 | return v.Type, nil
210 | case jsonsch.ArraySchema:
211 | return v.Type, nil
212 | case jsonsch.Schema:
213 | return v.GetType(), nil
214 | default:
215 | return "", fmt.Errorf("key '%v' has unrecognized type '%v'", k, reflect.TypeOf(v))
216 | }
217 | }
218 |
// nilOrFatal is a no-op for a nil error; otherwise it prints the error
// to stderr and terminates the process with exit status 1.
func nilOrFatal(err error) {
	if err == nil {
		return
	}
	fmt.Fprintf(os.Stderr, "error: %v\n", err)
	os.Exit(1)
}
225 |
--------------------------------------------------------------------------------
/diff/types.go:
--------------------------------------------------------------------------------
1 | package diff
2 |
3 | import "fmt"
4 |
// Difference is a single structural discrepancy between two schemas.
// Implementations render a human-readable message via String and
// expose the dotted path of the affected field via getField/setField
// so callers can prefix nested paths.
type Difference interface {
	String() string
	setField(string)
	getField() string
}
10 |
11 | func prependKey(d Difference, k string) {
12 | if k == "" {
13 | return
14 | }
15 | d.setField(fmt.Sprintf("%v.%v", k, d.getField()))
16 | }
17 |
// MissingField records a field that exists in one schema but is absent
// from the other; from names the side it is missing from.
type MissingField struct {
	field string
	from  string
}

// String renders the difference as a human-readable sentence.
func (mf *MissingField) String() string {
	return fmt.Sprintf("the field '%v' is missing from %v", mf.field, mf.from)
}

// setField replaces the recorded field path.
func (mf *MissingField) setField(field string) {
	mf.field = field
}

// getField reports the recorded field path.
func (mf *MissingField) getField() string {
	return mf.field
}
34 |
// DifferingTypes records a field whose declared type disagrees between
// the two schemas identified by title1 and title2.
type DifferingTypes struct {
	field  string
	title1 string
	title2 string
}

// String renders the difference as a human-readable sentence.
func (dt *DifferingTypes) String() string {
	return fmt.Sprintf("the field '%v' has differing types between %v and %v", dt.field, dt.title1, dt.title2)
}

// setField replaces the recorded field path.
func (dt *DifferingTypes) setField(field string) {
	dt.field = field
}

// getField reports the recorded field path.
func (dt *DifferingTypes) getField() string {
	return dt.field
}
52 |
--------------------------------------------------------------------------------
/e2e_tests/diff.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
# Every case below records its result in four globals that the e2e
# harness (e2e_tests/lib.sh) inspects after calling the function:
#   output        - combined stdout/stderr of `schema diff`
#   status        - exit status of `schema diff`
#   expect_status - the exit status the harness should observe
#   expect        - the exact output the harness should observe
# run_diff_case centralizes the invocation so each case only lists its
# two input files and its expectations.
run_diff_case() {
    output=$(schema diff "$1" "$2" 2>&1)
    status="$?"
    expect_status="$3"
    expect="$4"
}

diff_array_of_objects_json1() {
    run_diff_case \
        e2e_tests/diff_test_files/array_of_objects1.json \
        e2e_tests/diff_test_files/array_of_objects2.json \
        '2' \
        "the field 'people.items.height' has differing types between the first file and the second file"
}

diff_array_of_objects_json2() {
    run_diff_case \
        e2e_tests/diff_test_files/array_of_objects2.json \
        e2e_tests/diff_test_files/array_of_objects1.json \
        '2' \
        "the field 'people.items.height' has differing types between the first file and the second file"
}

diff_array_of_objects_json_same1() {
    run_diff_case \
        e2e_tests/diff_test_files/array_of_objects1.json \
        e2e_tests/diff_test_files/array_of_objects1.json \
        '0' \
        ''
}

diff_array_of_objects_json_same2() {
    run_diff_case \
        e2e_tests/diff_test_files/array_of_objects2.json \
        e2e_tests/diff_test_files/array_of_objects2.json \
        '0' \
        ''
}

diff_minimal_differing_types_json1() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_differing_types1.json \
        e2e_tests/diff_test_files/minimal_differing_types2.json \
        '2' \
        "the field 'age' has differing types between the first file and the second file"
}

diff_minimal_differing_types_json2() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_differing_types2.json \
        e2e_tests/diff_test_files/minimal_differing_types1.json \
        '2' \
        "the field 'age' has differing types between the first file and the second file"
}

diff_minimal_differing_types_json_same1() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_differing_types1.json \
        e2e_tests/diff_test_files/minimal_differing_types1.json \
        '0' \
        ''
}

diff_minimal_differing_types_json_same2() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_differing_types2.json \
        e2e_tests/diff_test_files/minimal_differing_types2.json \
        '0' \
        ''
}

diff_minimal_missing_field_json1() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_missing_field1.json \
        e2e_tests/diff_test_files/minimal_missing_field2.json \
        '2' \
        "the field 'missingField' is missing from the second file"
}

diff_minimal_missing_field_json2() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_missing_field2.json \
        e2e_tests/diff_test_files/minimal_missing_field1.json \
        '2' \
        "the field 'missingField' is missing from the first file"
}

diff_minimal_missing_field_json_same1() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_missing_field1.json \
        e2e_tests/diff_test_files/minimal_missing_field1.json \
        '0' \
        ''
}

diff_minimal_missing_field_json_same2() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_missing_field2.json \
        e2e_tests/diff_test_files/minimal_missing_field2.json \
        '0' \
        ''
}
122 |
# Diff cases over .schema inputs. Each function records output/status
# plus expect_status/expect for the e2e harness; run_diff_case (defined
# here so this section is self-contained) performs the invocation.
run_diff_case() {
    output=$(schema diff "$1" "$2" 2>&1)
    status="$?"
    expect_status="$3"
    expect="$4"
}

diff_array_of_objects_schema1() {
    run_diff_case \
        e2e_tests/diff_test_files/array_of_objects1.schema \
        e2e_tests/diff_test_files/array_of_objects2.schema \
        '2' \
        "the field 'people.items.height' has differing types between the first file and the second file"
}

diff_array_of_objects_schema2() {
    run_diff_case \
        e2e_tests/diff_test_files/array_of_objects2.schema \
        e2e_tests/diff_test_files/array_of_objects1.schema \
        '2' \
        "the field 'people.items.height' has differing types between the first file and the second file"
}

diff_array_of_objects_schema_same1() {
    run_diff_case \
        e2e_tests/diff_test_files/array_of_objects1.schema \
        e2e_tests/diff_test_files/array_of_objects1.schema \
        '0' \
        ''
}

diff_array_of_objects_schema_same2() {
    run_diff_case \
        e2e_tests/diff_test_files/array_of_objects2.schema \
        e2e_tests/diff_test_files/array_of_objects2.schema \
        '0' \
        ''
}

diff_minimal_differing_types_schema1() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_differing_types1.schema \
        e2e_tests/diff_test_files/minimal_differing_types2.schema \
        '2' \
        "the field 'age' has differing types between the first file and the second file"
}

diff_minimal_differing_types_schema2() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_differing_types2.schema \
        e2e_tests/diff_test_files/minimal_differing_types1.schema \
        '2' \
        "the field 'age' has differing types between the first file and the second file"
}

diff_minimal_differing_types_schema_same1() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_differing_types1.schema \
        e2e_tests/diff_test_files/minimal_differing_types1.schema \
        '0' \
        ''
}

diff_minimal_differing_types_schema_same2() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_differing_types2.schema \
        e2e_tests/diff_test_files/minimal_differing_types2.schema \
        '0' \
        ''
}

diff_minimal_missing_field_schema1() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_missing_field1.schema \
        e2e_tests/diff_test_files/minimal_missing_field2.schema \
        '2' \
        "the field 'missingField' is missing from the second file"
}

diff_minimal_missing_field_schema2() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_missing_field2.schema \
        e2e_tests/diff_test_files/minimal_missing_field1.schema \
        '2' \
        "the field 'missingField' is missing from the first file"
}

diff_minimal_missing_field_schema_same1() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_missing_field1.schema \
        e2e_tests/diff_test_files/minimal_missing_field1.schema \
        '0' \
        ''
}

diff_minimal_missing_field_schema_same2() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_missing_field2.schema \
        e2e_tests/diff_test_files/minimal_missing_field2.schema \
        '0' \
        ''
}
242 |
# Diff cases over .yaml inputs. Each function records output/status
# plus expect_status/expect for the e2e harness; run_diff_case (defined
# here so this section is self-contained) performs the invocation.
run_diff_case() {
    output=$(schema diff "$1" "$2" 2>&1)
    status="$?"
    expect_status="$3"
    expect="$4"
}

diff_array_of_objects_yaml1() {
    run_diff_case \
        e2e_tests/diff_test_files/array_of_objects1.yaml \
        e2e_tests/diff_test_files/array_of_objects2.yaml \
        '2' \
        "the field 'people.items.height' has differing types between the first file and the second file"
}

diff_array_of_objects_yaml2() {
    run_diff_case \
        e2e_tests/diff_test_files/array_of_objects2.yaml \
        e2e_tests/diff_test_files/array_of_objects1.yaml \
        '2' \
        "the field 'people.items.height' has differing types between the first file and the second file"
}

diff_array_of_objects_yaml_same1() {
    run_diff_case \
        e2e_tests/diff_test_files/array_of_objects1.yaml \
        e2e_tests/diff_test_files/array_of_objects1.yaml \
        '0' \
        ''
}

diff_array_of_objects_yaml_same2() {
    run_diff_case \
        e2e_tests/diff_test_files/array_of_objects2.yaml \
        e2e_tests/diff_test_files/array_of_objects2.yaml \
        '0' \
        ''
}

diff_minimal_differing_types_yaml1() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_differing_types1.yaml \
        e2e_tests/diff_test_files/minimal_differing_types2.yaml \
        '2' \
        "the field 'age' has differing types between the first file and the second file"
}

diff_minimal_differing_types_yaml2() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_differing_types2.yaml \
        e2e_tests/diff_test_files/minimal_differing_types1.yaml \
        '2' \
        "the field 'age' has differing types between the first file and the second file"
}

diff_minimal_differing_types_yaml_same1() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_differing_types1.yaml \
        e2e_tests/diff_test_files/minimal_differing_types1.yaml \
        '0' \
        ''
}

diff_minimal_differing_types_yaml_same2() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_differing_types2.yaml \
        e2e_tests/diff_test_files/minimal_differing_types2.yaml \
        '0' \
        ''
}

diff_minimal_missing_field_yaml1() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_missing_field1.yaml \
        e2e_tests/diff_test_files/minimal_missing_field2.yaml \
        '2' \
        "the field 'missingField' is missing from the second file"
}

diff_minimal_missing_field_yaml2() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_missing_field2.yaml \
        e2e_tests/diff_test_files/minimal_missing_field1.yaml \
        '2' \
        "the field 'missingField' is missing from the first file"
}

diff_minimal_missing_field_yaml_same1() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_missing_field1.yaml \
        e2e_tests/diff_test_files/minimal_missing_field1.yaml \
        '0' \
        ''
}

diff_minimal_missing_field_yaml_same2() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_missing_field2.yaml \
        e2e_tests/diff_test_files/minimal_missing_field2.yaml \
        '0' \
        ''
}
362 |
# Diff cases over .toml inputs. Each function records output/status
# plus expect_status/expect for the e2e harness; run_diff_case (defined
# here so this section is self-contained) performs the invocation.
run_diff_case() {
    output=$(schema diff "$1" "$2" 2>&1)
    status="$?"
    expect_status="$3"
    expect="$4"
}

diff_array_of_objects_toml1() {
    run_diff_case \
        e2e_tests/diff_test_files/array_of_objects1.toml \
        e2e_tests/diff_test_files/array_of_objects2.toml \
        '2' \
        "the field 'people.items.height' has differing types between the first file and the second file"
}

diff_array_of_objects_toml2() {
    run_diff_case \
        e2e_tests/diff_test_files/array_of_objects2.toml \
        e2e_tests/diff_test_files/array_of_objects1.toml \
        '2' \
        "the field 'people.items.height' has differing types between the first file and the second file"
}

diff_array_of_objects_toml_same1() {
    run_diff_case \
        e2e_tests/diff_test_files/array_of_objects1.toml \
        e2e_tests/diff_test_files/array_of_objects1.toml \
        '0' \
        ''
}

diff_array_of_objects_toml_same2() {
    run_diff_case \
        e2e_tests/diff_test_files/array_of_objects2.toml \
        e2e_tests/diff_test_files/array_of_objects2.toml \
        '0' \
        ''
}

diff_minimal_differing_types_toml1() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_differing_types1.toml \
        e2e_tests/diff_test_files/minimal_differing_types2.toml \
        '2' \
        "the field 'age' has differing types between the first file and the second file"
}

diff_minimal_differing_types_toml2() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_differing_types2.toml \
        e2e_tests/diff_test_files/minimal_differing_types1.toml \
        '2' \
        "the field 'age' has differing types between the first file and the second file"
}

diff_minimal_differing_types_toml_same1() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_differing_types1.toml \
        e2e_tests/diff_test_files/minimal_differing_types1.toml \
        '0' \
        ''
}

diff_minimal_differing_types_toml_same2() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_differing_types2.toml \
        e2e_tests/diff_test_files/minimal_differing_types2.toml \
        '0' \
        ''
}

diff_minimal_missing_field_toml1() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_missing_field1.toml \
        e2e_tests/diff_test_files/minimal_missing_field2.toml \
        '2' \
        "the field 'missingField' is missing from the second file"
}

diff_minimal_missing_field_toml2() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_missing_field2.toml \
        e2e_tests/diff_test_files/minimal_missing_field1.toml \
        '2' \
        "the field 'missingField' is missing from the first file"
}

diff_minimal_missing_field_toml_same1() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_missing_field1.toml \
        e2e_tests/diff_test_files/minimal_missing_field1.toml \
        '0' \
        ''
}

diff_minimal_missing_field_toml_same2() {
    run_diff_case \
        e2e_tests/diff_test_files/minimal_missing_field2.toml \
        e2e_tests/diff_test_files/minimal_missing_field2.toml \
        '0' \
        ''
}
482 |
# Names of every test function defined above; e2e_tests/lib.sh invokes
# each one by name and compares output/status against expect/expect_status.
tests=(
    "diff_array_of_objects_json1"
    "diff_array_of_objects_json2"
    "diff_array_of_objects_json_same1"
    "diff_array_of_objects_json_same2"
    "diff_minimal_differing_types_json1"
    "diff_minimal_differing_types_json2"
    "diff_minimal_differing_types_json_same1"
    "diff_minimal_differing_types_json_same2"
    "diff_minimal_missing_field_json1"
    "diff_minimal_missing_field_json2"
    "diff_minimal_missing_field_json_same1"
    "diff_minimal_missing_field_json_same2"
    "diff_array_of_objects_schema1"
    "diff_array_of_objects_schema2"
    "diff_array_of_objects_schema_same1"
    "diff_array_of_objects_schema_same2"
    "diff_minimal_differing_types_schema1"
    "diff_minimal_differing_types_schema2"
    "diff_minimal_differing_types_schema_same1"
    "diff_minimal_differing_types_schema_same2"
    "diff_minimal_missing_field_schema1"
    "diff_minimal_missing_field_schema2"
    "diff_minimal_missing_field_schema_same1"
    "diff_minimal_missing_field_schema_same2"
    "diff_array_of_objects_yaml1"
    "diff_array_of_objects_yaml2"
    "diff_array_of_objects_yaml_same1"
    "diff_array_of_objects_yaml_same2"
    "diff_minimal_differing_types_yaml1"
    "diff_minimal_differing_types_yaml2"
    "diff_minimal_differing_types_yaml_same1"
    "diff_minimal_differing_types_yaml_same2"
    "diff_minimal_missing_field_yaml1"
    "diff_minimal_missing_field_yaml2"
    "diff_minimal_missing_field_yaml_same1"
    "diff_minimal_missing_field_yaml_same2"
    "diff_array_of_objects_toml1"
    "diff_array_of_objects_toml2"
    "diff_array_of_objects_toml_same1"
    "diff_array_of_objects_toml_same2"
    "diff_minimal_differing_types_toml1"
    "diff_minimal_differing_types_toml2"
    "diff_minimal_differing_types_toml_same1"
    "diff_minimal_differing_types_toml_same2"
    "diff_minimal_missing_field_toml1"
    "diff_minimal_missing_field_toml2"
    "diff_minimal_missing_field_toml_same1"
    "diff_minimal_missing_field_toml_same2"
)
533 |
--------------------------------------------------------------------------------
/e2e_tests/diff_test_files/array_of_objects1.json:
--------------------------------------------------------------------------------
1 | {
2 | "people": [
3 | {
4 | "age": 21,
5 | "height": 6,
6 | "name": "Tom"
7 | }
8 | ]
9 | }
10 |
--------------------------------------------------------------------------------
/e2e_tests/diff_test_files/array_of_objects1.schema:
--------------------------------------------------------------------------------
1 | {
2 | "title": "",
3 | "type": "object",
4 | "properties": {
5 | "people": {
6 | "type": "array",
7 | "items": {
8 | "title": "",
9 | "type": "object",
10 | "properties": {
11 | "age": {
12 | "type": "number"
13 | },
14 | "height": {
15 | "type": "number"
16 | },
17 | "name": {
18 | "type": "string"
19 | }
20 | }
21 | }
22 | }
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/e2e_tests/diff_test_files/array_of_objects1.toml:
--------------------------------------------------------------------------------
1 | [[people]]
2 | age = 2.1e+01
3 | height = 6e+00
4 | name = "Tom"
5 |
--------------------------------------------------------------------------------
/e2e_tests/diff_test_files/array_of_objects1.yaml:
--------------------------------------------------------------------------------
1 | people:
2 | - name: Tom
3 | age: 21
4 | height: 6
5 |
--------------------------------------------------------------------------------
/e2e_tests/diff_test_files/array_of_objects2.json:
--------------------------------------------------------------------------------
1 | {
2 | "people": [
3 | {
4 | "age": 21,
5 | "height": "six feet",
6 | "name": "Tom"
7 | }
8 | ]
9 | }
10 |
--------------------------------------------------------------------------------
/e2e_tests/diff_test_files/array_of_objects2.schema:
--------------------------------------------------------------------------------
1 | {
2 | "title": "",
3 | "type": "object",
4 | "properties": {
5 | "people": {
6 | "type": "array",
7 | "items": {
8 | "title": "",
9 | "type": "object",
10 | "properties": {
11 | "age": {
12 | "type": "number"
13 | },
14 | "height": {
15 | "type": "string"
16 | },
17 | "name": {
18 | "type": "string"
19 | }
20 | }
21 | }
22 | }
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/e2e_tests/diff_test_files/array_of_objects2.toml:
--------------------------------------------------------------------------------
1 | [[people]]
2 | age = 2.1e+01
3 | height = "six feet"
4 | name = "Tom"
5 |
--------------------------------------------------------------------------------
/e2e_tests/diff_test_files/array_of_objects2.yaml:
--------------------------------------------------------------------------------
1 | people:
2 | - name: Tom
3 | age: 21
4 | height: 'six feet'
5 |
--------------------------------------------------------------------------------
/e2e_tests/diff_test_files/minimal_differing_types1.json:
--------------------------------------------------------------------------------
1 | {
2 | "age": 21
3 | }
4 |
--------------------------------------------------------------------------------
/e2e_tests/diff_test_files/minimal_differing_types1.schema:
--------------------------------------------------------------------------------
1 | {
2 | "title": "",
3 | "type": "object",
4 | "properties": {
5 | "age": {
6 | "type": "number"
7 | }
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/e2e_tests/diff_test_files/minimal_differing_types1.toml:
--------------------------------------------------------------------------------
1 | age = 21
2 |
--------------------------------------------------------------------------------
/e2e_tests/diff_test_files/minimal_differing_types1.yaml:
--------------------------------------------------------------------------------
1 | age: 21
2 |
--------------------------------------------------------------------------------
/e2e_tests/diff_test_files/minimal_differing_types2.json:
--------------------------------------------------------------------------------
1 | {
2 | "age": "twenty one"
3 | }
4 |
--------------------------------------------------------------------------------
/e2e_tests/diff_test_files/minimal_differing_types2.schema:
--------------------------------------------------------------------------------
1 | {
2 | "title": "",
3 | "type": "object",
4 | "properties": {
5 | "age": {
6 | "type": "string"
7 | }
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/e2e_tests/diff_test_files/minimal_differing_types2.toml:
--------------------------------------------------------------------------------
1 | age = 'twenty one'
2 |
--------------------------------------------------------------------------------
/e2e_tests/diff_test_files/minimal_differing_types2.yaml:
--------------------------------------------------------------------------------
1 | age: twenty one
2 |
--------------------------------------------------------------------------------
/e2e_tests/diff_test_files/minimal_missing_field1.json:
--------------------------------------------------------------------------------
1 | {"hello":"string","missingField":21}
2 |
--------------------------------------------------------------------------------
/e2e_tests/diff_test_files/minimal_missing_field1.schema:
--------------------------------------------------------------------------------
1 | {
2 | "title": "",
3 | "type": "object",
4 | "properties": {
5 | "hello": {
6 | "type": "string"
7 | },
8 | "missingField": {
9 | "type": "number"
10 | }
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/e2e_tests/diff_test_files/minimal_missing_field1.toml:
--------------------------------------------------------------------------------
1 | hello = "string"
2 | missingField = 2.1e+01
3 |
--------------------------------------------------------------------------------
/e2e_tests/diff_test_files/minimal_missing_field1.yaml:
--------------------------------------------------------------------------------
1 | hello: string
2 | missingField: 21
3 |
--------------------------------------------------------------------------------
/e2e_tests/diff_test_files/minimal_missing_field2.json:
--------------------------------------------------------------------------------
1 | {"hello":"string"}
2 |
--------------------------------------------------------------------------------
/e2e_tests/diff_test_files/minimal_missing_field2.schema:
--------------------------------------------------------------------------------
1 | {
2 | "title": "",
3 | "type": "object",
4 | "properties": {
5 | "hello": {
6 | "type": "string"
7 | }
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/e2e_tests/diff_test_files/minimal_missing_field2.toml:
--------------------------------------------------------------------------------
1 | hello = "string"
2 |
--------------------------------------------------------------------------------
/e2e_tests/diff_test_files/minimal_missing_field2.yaml:
--------------------------------------------------------------------------------
1 | hello: string
2 |
--------------------------------------------------------------------------------
/e2e_tests/infer.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
# Each case pipes a literal document into `schema infer` and records
# output/status plus the expected status/output; the e2e harness
# (e2e_tests/lib.sh) runs each function by name and compares the pairs.
# The multi-line expect strings are exact expected stdout, including
# indentation.

infer_unrecognized_format() {
    output=`printf '{' | schema infer 2>&1`
    status="$?"

    expect_status='1'
    expect='error: failed to recognize input data format'
}

infer_json_minimal() {
    output=`printf '{}' | schema infer --omit-required=false 2>&1`
    status="$?"

    expect_status='0'
    expect='{
    "title": "",
    "type": "object",
    "properties": {},
    "required": []
}'
}

infer_json_string() {
    output=`printf '{"a":"b"}' | schema infer --omit-required=false 2>&1`
    status="$?"

    expect_status='0'
    expect='{
    "title": "",
    "type": "object",
    "properties": {
        "a": {
            "type": "string"
        }
    },
    "required": []
}'
}

infer_json_positive_integer() {
    output=`printf '{"myNumber":12}' | schema infer --omit-required=false 2>&1`
    status="$?"

    expect_status='0'
    expect='{
    "title": "",
    "type": "object",
    "properties": {
        "myNumber": {
            "type": "number"
        }
    },
    "required": []
}'
}

infer_json_negative_integer() {
    output=`printf '{"myNumber":-12310}' | schema infer --omit-required=false 2>&1`
    status="$?"

    expect_status='0'
    expect='{
    "title": "",
    "type": "object",
    "properties": {
        "myNumber": {
            "type": "number"
        }
    },
    "required": []
}'
}

infer_json_positive_float() {
    output=`printf '{"myNumber":420.6}' | schema infer --omit-required=false 2>&1`
    status="$?"

    expect_status='0'
    expect='{
    "title": "",
    "type": "object",
    "properties": {
        "myNumber": {
            "type": "number"
        }
    },
    "required": []
}'
}

infer_json_negative_float() {
    output=`printf '{"myNumber":-1902.32249}' | schema infer --omit-required=false 2>&1`
    status="$?"

    expect_status='0'
    expect='{
    "title": "",
    "type": "object",
    "properties": {
        "myNumber": {
            "type": "number"
        }
    },
    "required": []
}'
}

infer_json_zero() {
    output=`printf '{"myNumber":0}' | schema infer --omit-required=false 2>&1`
    status="$?"

    expect_status='0'
    expect='{
    "title": "",
    "type": "object",
    "properties": {
        "myNumber": {
            "type": "number"
        }
    },
    "required": []
}'
}

infer_json_null() {
    output=`printf '{"is2004":null}' | schema infer --omit-required=false 2>&1`
    status="$?"

    expect_status='0'
    expect='{
    "title": "",
    "type": "object",
    "properties": {
        "is2004": {
            "type": "null"
        }
    },
    "required": []
}'
}

infer_json_boolean() {
    output=`printf '{"is2004":true}' | schema infer --omit-required=false 2>&1`
    status="$?"

    expect_status='0'
    expect='{
    "title": "",
    "type": "object",
    "properties": {
        "is2004": {
            "type": "boolean"
        }
    },
    "required": []
}'
}
159 |
160 | infer_json_array_of_strings() {
161 | output=`printf '{"people":["bladee","thomas"]}' | schema infer --omit-required=false 2>&1`
162 | status="$?"
163 |
164 | expect_status='0'
165 | expect='{
166 | "title": "",
167 | "type": "object",
168 | "properties": {
169 | "people": {
170 | "type": "array",
171 | "items": {
172 | "type": "string"
173 | }
174 | }
175 | },
176 | "required": []
177 | }'
178 | }
179 |
180 | infer_json_array_of_numbers() {
181 | output=`printf '{"ages":[12.1,-1,43,-2.3,0,-0,0.0]}' | schema infer --omit-required=false 2>&1`
182 | status="$?"
183 |
184 | expect_status='0'
185 | expect='{
186 | "title": "",
187 | "type": "object",
188 | "properties": {
189 | "ages": {
190 | "type": "array",
191 | "items": {
192 | "type": "number"
193 | }
194 | }
195 | },
196 | "required": []
197 | }'
198 | }
199 |
200 | infer_json_array_of_booleans() {
201 | output=`printf '{"truthinesses":[true,false,false]}' | schema infer --omit-required=false 2>&1`
202 | status="$?"
203 |
204 | expect_status='0'
205 | expect='{
206 | "title": "",
207 | "type": "object",
208 | "properties": {
209 | "truthinesses": {
210 | "type": "array",
211 | "items": {
212 | "type": "boolean"
213 | }
214 | }
215 | },
216 | "required": []
217 | }'
218 | }
219 |
220 | infer_json_array_of_objects() {
221 | output=`printf '{"people":[{"name":"thomas"},{"name":"gordon"}]}' | schema infer --omit-required=false 2>&1`
222 | status="$?"
223 |
224 | expect_status='0'
225 | expect='{
226 | "title": "",
227 | "type": "object",
228 | "properties": {
229 | "people": {
230 | "type": "array",
231 | "items": {
232 | "title": "",
233 | "type": "object",
234 | "properties": {
235 | "name": {
236 | "type": "string"
237 | }
238 | },
239 | "required": []
240 | }
241 | }
242 | },
243 | "required": []
244 | }'
245 | }
246 |
247 | infer_json_array_of_array_objects() {
248 | output=`printf '{"people":[[{"name":"thomas"},{"name":"gordon"}]]}' | schema infer --omit-required=false 2>&1`
249 | status="$?"
250 |
251 | expect_status='0'
252 | expect='{
253 | "title": "",
254 | "type": "object",
255 | "properties": {
256 | "people": {
257 | "type": "array",
258 | "items": {
259 | "type": "array",
260 | "items": {
261 | "title": "",
262 | "type": "object",
263 | "properties": {
264 | "name": {
265 | "type": "string"
266 | }
267 | },
268 | "required": []
269 | }
270 | }
271 | }
272 | },
273 | "required": []
274 | }'
275 | }
276 |
277 | infer_json_array_of_objects_with_multiple_fields() {
278 | output=`printf '{"people":[{"name":"Thomas","age":20}]}' | schema infer --omit-required=false 2>&1`
279 | status="$?"
280 |
281 | expect_status='0'
282 | expect_either_or='true'
283 | expect_either='{
284 | "title": "",
285 | "type": "object",
286 | "properties": {
287 | "people": {
288 | "type": "array",
289 | "items": {
290 | "title": "",
291 | "type": "object",
292 | "properties": {
293 | "name": {
294 | "type": "string"
295 | },
296 | "age": {
297 | "type": "number"
298 | }
299 | },
300 | "required": []
301 | }
302 | }
303 | },
304 | "required": []
305 | }'
306 | expect_or='{
307 | "title": "",
308 | "type": "object",
309 | "properties": {
310 | "people": {
311 | "type": "array",
312 | "items": {
313 | "title": "",
314 | "type": "object",
315 | "properties": {
316 | "age": {
317 | "type": "number"
318 | },
319 | "name": {
320 | "type": "string"
321 | }
322 | },
323 | "required": []
324 | }
325 | }
326 | },
327 | "required": []
328 | }'
329 | }
330 |
331 | infer_json_complicated_and_use_schema_field() {
332 | output=`printf '{"people":[{"name":"Thomas","age":20}]}' \
333 | | schema infer -s 'http://json-schema.org/draft-06/schema' --omit-required=false 2>&1`
334 | status="$?"
335 |
336 | expect_status='0'
337 | expect_either_or='true'
338 | expect_either='{
339 | "$schema": "http://json-schema.org/draft-06/schema",
340 | "title": "",
341 | "type": "object",
342 | "properties": {
343 | "people": {
344 | "type": "array",
345 | "items": {
346 | "title": "",
347 | "type": "object",
348 | "properties": {
349 | "name": {
350 | "type": "string"
351 | },
352 | "age": {
353 | "type": "number"
354 | }
355 | },
356 | "required": []
357 | }
358 | }
359 | },
360 | "required": []
361 | }'
362 | expect_or='{
363 | "$schema": "http://json-schema.org/draft-06/schema",
364 | "title": "",
365 | "type": "object",
366 | "properties": {
367 | "people": {
368 | "type": "array",
369 | "items": {
370 | "title": "",
371 | "type": "object",
372 | "properties": {
373 | "age": {
374 | "type": "number"
375 | },
376 | "name": {
377 | "type": "string"
378 | }
379 | },
380 | "required": []
381 | }
382 | }
383 | },
384 | "required": []
385 | }'
386 | }
387 |
388 | infer_json_empty_array_as_null() {
389 | output=`printf '{"truthinesses":[]}' | schema infer --omit-required=false --empty-arrays-as=null 2>&1`
390 | status="$?"
391 |
392 | expect_status='0'
393 | expect='{
394 | "title": "",
395 | "type": "object",
396 | "properties": {
397 | "truthinesses": {
398 | "type": "array",
399 | "items": {
400 | "type": "null"
401 | }
402 | }
403 | },
404 | "required": []
405 | }'
406 | }
407 |
408 | infer_json_empty_array_as_nil() {
409 | output=`printf '{"truthinesses":[]}' | schema infer --omit-required=false --empty-arrays-as=nil 2>&1`
410 | status="$?"
411 |
412 | expect_status='0'
413 | expect='{
414 | "title": "",
415 | "type": "object",
416 | "properties": {
417 | "truthinesses": {
418 | "type": "array",
419 | "items": {
420 | "type": "null"
421 | }
422 | }
423 | },
424 | "required": []
425 | }'
426 | }
427 |
428 | infer_json_empty_array_as_string() {
429 | output=`printf '{"truthinesses":[]}' | schema infer --omit-required=false --empty-arrays-as=string 2>&1`
430 | status="$?"
431 |
432 | expect_status='0'
433 | expect='{
434 | "title": "",
435 | "type": "object",
436 | "properties": {
437 | "truthinesses": {
438 | "type": "array",
439 | "items": {
440 | "type": "string"
441 | }
442 | }
443 | },
444 | "required": []
445 | }'
446 | }
447 |
448 | infer_json_empty_array_as_str() {
449 | output=`printf '{"truthinesses":[]}' | schema infer --omit-required=false --empty-arrays-as=str 2>&1`
450 | status="$?"
451 |
452 | expect_status='0'
453 | expect='{
454 | "title": "",
455 | "type": "object",
456 | "properties": {
457 | "truthinesses": {
458 | "type": "array",
459 | "items": {
460 | "type": "string"
461 | }
462 | }
463 | },
464 | "required": []
465 | }'
466 | }
467 |
468 | infer_json_empty_array_as_boolean() {
469 | output=`printf '{"truthinesses":[]}' | schema infer --omit-required=false --empty-arrays-as=boolean 2>&1`
470 | status="$?"
471 |
472 | expect_status='0'
473 | expect='{
474 | "title": "",
475 | "type": "object",
476 | "properties": {
477 | "truthinesses": {
478 | "type": "array",
479 | "items": {
480 | "type": "boolean"
481 | }
482 | }
483 | },
484 | "required": []
485 | }'
486 | }
487 |
488 | infer_json_empty_array_as_bool() {
489 | output=`printf '{"truthinesses":[]}' | schema infer --omit-required=false --empty-arrays-as=bool 2>&1`
490 | status="$?"
491 |
492 | expect_status='0'
493 | expect='{
494 | "title": "",
495 | "type": "object",
496 | "properties": {
497 | "truthinesses": {
498 | "type": "array",
499 | "items": {
500 | "type": "boolean"
501 | }
502 | }
503 | },
504 | "required": []
505 | }'
506 | }
507 |
508 | infer_json_empty_array_as_number() {
509 | output=`printf '{"truthinesses":[]}' | schema infer --omit-required=false --empty-arrays-as=number 2>&1`
510 | status="$?"
511 |
512 | expect_status='0'
513 | expect='{
514 | "title": "",
515 | "type": "object",
516 | "properties": {
517 | "truthinesses": {
518 | "type": "array",
519 | "items": {
520 | "type": "number"
521 | }
522 | }
523 | },
524 | "required": []
525 | }'
526 | }
527 |
528 | infer_json_empty_array_as_float() {
529 | output=`printf '{"truthinesses":[]}' | schema infer --omit-required=false --empty-arrays-as=float 2>&1`
530 | status="$?"
531 |
532 | expect_status='0'
533 | expect='{
534 | "title": "",
535 | "type": "object",
536 | "properties": {
537 | "truthinesses": {
538 | "type": "array",
539 | "items": {
540 | "type": "number"
541 | }
542 | }
543 | },
544 | "required": []
545 | }'
546 | }
547 |
548 | infer_json_empty_array_as_object() {
549 | output=`printf '{"truthinesses":[]}' | schema infer --omit-required=false --empty-arrays-as=object 2>&1`
550 | status="$?"
551 |
552 | expect_status='0'
553 | expect='{
554 | "title": "",
555 | "type": "object",
556 | "properties": {
557 | "truthinesses": {
558 | "type": "array",
559 | "items": {
560 | "title": "",
561 | "type": "object",
562 | "properties": {},
563 | "required": []
564 | }
565 | }
566 | },
567 | "required": []
568 | }'
569 | }
570 |
571 | infer_json_empty_array_as_invalid() {
572 | output=`printf '{"truthinesses":[]}' | schema infer --omit-required=false --empty-arrays-as=sanguine 2>&1`
573 | status="$?"
574 |
575 | expect_status='1'
576 | expect='error: failed to infer schema
577 | invalid --empty-arrays-as value '"'"'sanguine'"'"
578 | }
579 |
580 |
581 | infer_json_null_as_string() {
582 | output=`printf '{"myString":null}' | schema infer --null-as=string 2>&1`
583 | status="$?"
584 |
585 | expect_status='0'
586 | expect='{
587 | "title": "",
588 | "type": "object",
589 | "properties": {
590 | "myString": {
591 | "type": "string"
592 | }
593 | }
594 | }'
595 | }
596 |
597 | infer_json_null_as_number() {
598 | output=`printf '{"myField":null}' | schema infer --null-as=number 2>&1`
599 | status="$?"
600 |
601 | expect_status='0'
602 | expect='{
603 | "title": "",
604 | "type": "object",
605 | "properties": {
606 | "myField": {
607 | "type": "number"
608 | }
609 | }
610 | }'
611 | }
612 |
613 | infer_json_null_as_boolean() {
614 | output=`printf '{"myBool":null}' | schema infer --null-as=bool 2>&1`
615 | status="$?"
616 |
617 | expect_status='0'
618 | expect='{
619 | "title": "",
620 | "type": "object",
621 | "properties": {
622 | "myBool": {
623 | "type": "boolean"
624 | }
625 | }
626 | }'
627 | }
628 |
629 | infer_json_nested_object_null_as_bool() {
630 | output=`printf '{"myObj":{"myInnerObj":{"myNum":10}}}' | schema infer --null-as=bool 2>&1`
631 | status="$?"
632 |
633 | expect_status='0'
634 | expect='{
635 | "title": "",
636 | "type": "object",
637 | "properties": {
638 | "myObj": {
639 | "title": "",
640 | "type": "object",
641 | "properties": {
642 | "myInnerObj": {
643 | "title": "",
644 | "type": "object",
645 | "properties": {
646 | "myNum": {
647 | "type": "number"
648 | }
649 | }
650 | }
651 | }
652 | }
653 | }
654 | }'
655 | }
656 |
657 | infer_json_nested_array_null_as_bool() {
658 | output=`printf '{"myObj":{"myInnerObj":{"myNums":[null]}}}' | schema infer --null-as=bool 2>&1`
659 | status="$?"
660 |
661 | expect_status='0'
662 | expect='{
663 | "title": "",
664 | "type": "object",
665 | "properties": {
666 | "myObj": {
667 | "title": "",
668 | "type": "object",
669 | "properties": {
670 | "myInnerObj": {
671 | "title": "",
672 | "type": "object",
673 | "properties": {
674 | "myNums": {
675 | "type": "array",
676 | "items": {
677 | "type": "boolean"
678 | }
679 | }
680 | }
681 | }
682 | }
683 | }
684 | }
685 | }'
686 | }
687 |
688 | infer_yaml_string() {
689 | output=`printf 'color: red' | schema infer --omit-required=false 2>&1`
690 | status="$?"
691 |
692 | expect_status='0'
693 | expect='{
694 | "title": "",
695 | "type": "object",
696 | "properties": {
697 | "color": {
698 | "type": "string"
699 | }
700 | },
701 | "required": []
702 | }'
703 | }
704 |
705 | infer_yaml_positive_integer() {
706 | output=`printf 'myNumber: 12' | schema infer --omit-required=false 2>&1`
707 | status="$?"
708 |
709 | expect_status='0'
710 | expect='{
711 | "title": "",
712 | "type": "object",
713 | "properties": {
714 | "myNumber": {
715 | "type": "number"
716 | }
717 | },
718 | "required": []
719 | }'
720 | }
721 |
722 | infer_yaml_negative_integer() {
723 | output=`printf 'myNumber: -12310' | schema infer --omit-required=false 2>&1`
724 | status="$?"
725 |
726 | expect_status='0'
727 | expect='{
728 | "title": "",
729 | "type": "object",
730 | "properties": {
731 | "myNumber": {
732 | "type": "number"
733 | }
734 | },
735 | "required": []
736 | }'
737 | }
738 |
739 | infer_yaml_positive_float() {
740 | output=`printf 'myNumber: 420.6' | schema infer --omit-required=false 2>&1`
741 | status="$?"
742 |
743 | expect_status='0'
744 | expect='{
745 | "title": "",
746 | "type": "object",
747 | "properties": {
748 | "myNumber": {
749 | "type": "number"
750 | }
751 | },
752 | "required": []
753 | }'
754 | }
755 |
756 | infer_yaml_negative_float() {
757 | output=`printf 'myNumber: -1902.32249' | schema infer --omit-required=false 2>&1`
758 | status="$?"
759 |
760 | expect_status='0'
761 | expect='{
762 | "title": "",
763 | "type": "object",
764 | "properties": {
765 | "myNumber": {
766 | "type": "number"
767 | }
768 | },
769 | "required": []
770 | }'
771 | }
772 |
773 | infer_yaml_zero() {
774 | output=`printf 'myNumber: 0' | schema infer --omit-required=false 2>&1`
775 | status="$?"
776 |
777 | expect_status='0'
778 | expect='{
779 | "title": "",
780 | "type": "object",
781 | "properties": {
782 | "myNumber": {
783 | "type": "number"
784 | }
785 | },
786 | "required": []
787 | }'
788 | }
789 |
790 | infer_yaml_null() {
791 | output=`printf 'is2004: ~' | schema infer --omit-required=false 2>&1`
792 | status="$?"
793 |
794 | expect_status='0'
795 | expect='{
796 | "title": "",
797 | "type": "object",
798 | "properties": {
799 | "is2004": {
800 | "type": "null"
801 | }
802 | },
803 | "required": []
804 | }'
805 | }
806 |
807 | infer_yaml_boolean() {
808 | output=`printf 'is2004: true' | schema infer --omit-required=false 2>&1`
809 | status="$?"
810 |
811 | expect_status='0'
812 | expect='{
813 | "title": "",
814 | "type": "object",
815 | "properties": {
816 | "is2004": {
817 | "type": "boolean"
818 | }
819 | },
820 | "required": []
821 | }'
822 | }
823 |
824 | infer_yaml_array_of_strings() {
825 | output=`printf 'people: ["bladee","thomas"]' | schema infer --omit-required=false 2>&1`
826 | status="$?"
827 |
828 | expect_status='0'
829 | expect='{
830 | "title": "",
831 | "type": "object",
832 | "properties": {
833 | "people": {
834 | "type": "array",
835 | "items": {
836 | "type": "string"
837 | }
838 | }
839 | },
840 | "required": []
841 | }'
842 | }
843 |
844 | infer_yaml_array_of_numbers() {
845 | output=`printf 'ages: [12.1,-1,43,-2.3,0,-0,0.0]' | schema infer --omit-required=false 2>&1`
846 | status="$?"
847 |
848 | expect_status='0'
849 | expect='{
850 | "title": "",
851 | "type": "object",
852 | "properties": {
853 | "ages": {
854 | "type": "array",
855 | "items": {
856 | "type": "number"
857 | }
858 | }
859 | },
860 | "required": []
861 | }'
862 | }
863 |
864 | infer_yaml_array_of_booleans() {
865 | output=`printf 'truthinesses: [true,false,false]' | schema infer --omit-required=false 2>&1`
866 | status="$?"
867 |
868 | expect_status='0'
869 | expect='{
870 | "title": "",
871 | "type": "object",
872 | "properties": {
873 | "truthinesses": {
874 | "type": "array",
875 | "items": {
876 | "type": "boolean"
877 | }
878 | }
879 | },
880 | "required": []
881 | }'
882 | }
883 |
884 | infer_yaml_array_of_objects() {
885 | output=`printf "people:\n - name: thomas\n - name: gordon" | schema infer --omit-required=false 2>&1`
886 | status="$?"
887 |
888 | expect_status='0'
889 | expect='{
890 | "title": "",
891 | "type": "object",
892 | "properties": {
893 | "people": {
894 | "type": "array",
895 | "items": {
896 | "title": "",
897 | "type": "object",
898 | "properties": {
899 | "name": {
900 | "type": "string"
901 | }
902 | },
903 | "required": []
904 | }
905 | }
906 | },
907 | "required": []
908 | }'
909 | }
910 |
911 | infer_yaml_array_of_array_objects() {
912 | output=`printf "people:\n -\n - name: thomas\n -\n - name: gordon" | schema infer --omit-required=false 2>&1`
913 | status="$?"
914 |
915 | expect_status='0'
916 | expect='{
917 | "title": "",
918 | "type": "object",
919 | "properties": {
920 | "people": {
921 | "type": "array",
922 | "items": {
923 | "type": "array",
924 | "items": {
925 | "title": "",
926 | "type": "object",
927 | "properties": {
928 | "name": {
929 | "type": "string"
930 | }
931 | },
932 | "required": []
933 | }
934 | }
935 | }
936 | },
937 | "required": []
938 | }'
939 | }
940 |
941 | infer_yaml_array_of_objects_with_multiple_fields() {
942 | output=`printf "people:\n - name: thomas\n age: 20\n - name: gordon\n age: 60" \
943 | | schema infer --omit-required=false 2>&1`
944 | status="$?"
945 |
946 | expect_status='0'
947 | expect_either_or='true'
948 | expect_either='{
949 | "title": "",
950 | "type": "object",
951 | "properties": {
952 | "people": {
953 | "type": "array",
954 | "items": {
955 | "title": "",
956 | "type": "object",
957 | "properties": {
958 | "name": {
959 | "type": "string"
960 | },
961 | "age": {
962 | "type": "number"
963 | }
964 | },
965 | "required": []
966 | }
967 | }
968 | },
969 | "required": []
970 | }'
971 | expect_or='{
972 | "title": "",
973 | "type": "object",
974 | "properties": {
975 | "people": {
976 | "type": "array",
977 | "items": {
978 | "title": "",
979 | "type": "object",
980 | "properties": {
981 | "age": {
982 | "type": "number"
983 | },
984 | "name": {
985 | "type": "string"
986 | }
987 | },
988 | "required": []
989 | }
990 | }
991 | },
992 | "required": []
993 | }'
994 | }
995 |
996 | infer_yaml_complicated_and_use_schema_field() {
997 | output=`printf "people:\n - name: thomas\n age: 20\n - name: gordon\n age: 60" \
998 | | schema infer -s 'http://json-schema.org/draft-06/schema' --omit-required=false 2>&1`
999 | status="$?"
1000 |
1001 | expect_status='0'
1002 | expect_either_or='true'
1003 | expect_either='{
1004 | "$schema": "http://json-schema.org/draft-06/schema",
1005 | "title": "",
1006 | "type": "object",
1007 | "properties": {
1008 | "people": {
1009 | "type": "array",
1010 | "items": {
1011 | "title": "",
1012 | "type": "object",
1013 | "properties": {
1014 | "name": {
1015 | "type": "string"
1016 | },
1017 | "age": {
1018 | "type": "number"
1019 | }
1020 | },
1021 | "required": []
1022 | }
1023 | }
1024 | },
1025 | "required": []
1026 | }'
1027 | expect_or='{
1028 | "$schema": "http://json-schema.org/draft-06/schema",
1029 | "title": "",
1030 | "type": "object",
1031 | "properties": {
1032 | "people": {
1033 | "type": "array",
1034 | "items": {
1035 | "title": "",
1036 | "type": "object",
1037 | "properties": {
1038 | "age": {
1039 | "type": "number"
1040 | },
1041 | "name": {
1042 | "type": "string"
1043 | }
1044 | },
1045 | "required": []
1046 | }
1047 | }
1048 | },
1049 | "required": []
1050 | }'
1051 | }
1052 |
1053 | infer_yaml_empty_array_as_null() {
1054 | output=`printf 'truthinesses: []' | schema infer --omit-required=false --empty-arrays-as=null 2>&1`
1055 | status="$?"
1056 |
1057 | expect_status='0'
1058 | expect='{
1059 | "title": "",
1060 | "type": "object",
1061 | "properties": {
1062 | "truthinesses": {
1063 | "type": "array",
1064 | "items": {
1065 | "type": "null"
1066 | }
1067 | }
1068 | },
1069 | "required": []
1070 | }'
1071 | }
1072 |
1073 | infer_yaml_empty_array_as_nil() {
1074 | output=`printf 'truthinesses: []' | schema infer --omit-required=false --empty-arrays-as=nil 2>&1`
1075 | status="$?"
1076 |
1077 | expect_status='0'
1078 | expect='{
1079 | "title": "",
1080 | "type": "object",
1081 | "properties": {
1082 | "truthinesses": {
1083 | "type": "array",
1084 | "items": {
1085 | "type": "null"
1086 | }
1087 | }
1088 | },
1089 | "required": []
1090 | }'
1091 | }
1092 |
1093 | infer_yaml_empty_array_as_string() {
1094 | output=`printf 'truthinesses: []' | schema infer --omit-required=false --empty-arrays-as=string 2>&1`
1095 | status="$?"
1096 |
1097 | expect_status='0'
1098 | expect='{
1099 | "title": "",
1100 | "type": "object",
1101 | "properties": {
1102 | "truthinesses": {
1103 | "type": "array",
1104 | "items": {
1105 | "type": "string"
1106 | }
1107 | }
1108 | },
1109 | "required": []
1110 | }'
1111 | }
1112 |
1113 | infer_yaml_empty_array_as_str() {
1114 | output=`printf 'truthinesses: []' | schema infer --omit-required=false --empty-arrays-as=str 2>&1`
1115 | status="$?"
1116 |
1117 | expect_status='0'
1118 | expect='{
1119 | "title": "",
1120 | "type": "object",
1121 | "properties": {
1122 | "truthinesses": {
1123 | "type": "array",
1124 | "items": {
1125 | "type": "string"
1126 | }
1127 | }
1128 | },
1129 | "required": []
1130 | }'
1131 | }
1132 |
1133 | infer_yaml_empty_array_as_boolean() {
1134 | output=`printf 'truthinesses: []' | schema infer --omit-required=false --empty-arrays-as=boolean 2>&1`
1135 | status="$?"
1136 |
1137 | expect_status='0'
1138 | expect='{
1139 | "title": "",
1140 | "type": "object",
1141 | "properties": {
1142 | "truthinesses": {
1143 | "type": "array",
1144 | "items": {
1145 | "type": "boolean"
1146 | }
1147 | }
1148 | },
1149 | "required": []
1150 | }'
1151 | }
1152 |
1153 | infer_yaml_empty_array_as_bool() {
1154 | output=`printf 'truthinesses: []' | schema infer --omit-required=false --empty-arrays-as=bool 2>&1`
1155 | status="$?"
1156 |
1157 | expect_status='0'
1158 | expect='{
1159 | "title": "",
1160 | "type": "object",
1161 | "properties": {
1162 | "truthinesses": {
1163 | "type": "array",
1164 | "items": {
1165 | "type": "boolean"
1166 | }
1167 | }
1168 | },
1169 | "required": []
1170 | }'
1171 | }
1172 |
1173 | infer_yaml_empty_array_as_number() {
1174 | output=`printf 'truthinesses: []' | schema infer --omit-required=false --empty-arrays-as=number 2>&1`
1175 | status="$?"
1176 |
1177 | expect_status='0'
1178 | expect='{
1179 | "title": "",
1180 | "type": "object",
1181 | "properties": {
1182 | "truthinesses": {
1183 | "type": "array",
1184 | "items": {
1185 | "type": "number"
1186 | }
1187 | }
1188 | },
1189 | "required": []
1190 | }'
1191 | }
1192 |
1193 | infer_yaml_empty_array_as_float() {
1194 | output=`printf 'truthinesses: []' | schema infer --omit-required=false --empty-arrays-as=float 2>&1`
1195 | status="$?"
1196 |
1197 | expect_status='0'
1198 | expect='{
1199 | "title": "",
1200 | "type": "object",
1201 | "properties": {
1202 | "truthinesses": {
1203 | "type": "array",
1204 | "items": {
1205 | "type": "number"
1206 | }
1207 | }
1208 | },
1209 | "required": []
1210 | }'
1211 | }
1212 |
1213 | infer_yaml_empty_array_as_object() {
1214 | output=`printf 'truthinesses: []' | schema infer --omit-required=false --empty-arrays-as=object 2>&1`
1215 | status="$?"
1216 |
1217 | expect_status='0'
1218 | expect='{
1219 | "title": "",
1220 | "type": "object",
1221 | "properties": {
1222 | "truthinesses": {
1223 | "type": "array",
1224 | "items": {
1225 | "title": "",
1226 | "type": "object",
1227 | "properties": {},
1228 | "required": []
1229 | }
1230 | }
1231 | },
1232 | "required": []
1233 | }'
1234 | }
1235 |
1236 | infer_yaml_empty_array_as_invalid() {
1237 | output=`printf 'truthinesses: []' | schema infer --omit-required=false --empty-arrays-as=sanguine 2>&1`
1238 | status="$?"
1239 |
1240 | expect_status='1'
1241 | expect='error: failed to infer schema
1242 | invalid --empty-arrays-as value '"'"'sanguine'"'"
1243 | }
1244 |
1245 | infer_toml_string() {
1246 | output=`printf 'color = "red"' | schema infer --omit-required=false 2>&1`
1247 | status="$?"
1248 |
1249 | expect_status='0'
1250 | expect='{
1251 | "title": "",
1252 | "type": "object",
1253 | "properties": {
1254 | "color": {
1255 | "type": "string"
1256 | }
1257 | },
1258 | "required": []
1259 | }'
1260 | }
1261 |
1262 | infer_toml_positive_integer() {
1263 | output=`printf 'myNumber = 12' | schema infer --omit-required=false 2>&1`
1264 | status="$?"
1265 |
1266 | expect_status='0'
1267 | expect='{
1268 | "title": "",
1269 | "type": "object",
1270 | "properties": {
1271 | "myNumber": {
1272 | "type": "number"
1273 | }
1274 | },
1275 | "required": []
1276 | }'
1277 | }
1278 |
1279 | infer_toml_negative_integer() {
1280 | output=`printf 'myNumber = -12310' | schema infer --omit-required=false 2>&1`
1281 | status="$?"
1282 |
1283 | expect_status='0'
1284 | expect='{
1285 | "title": "",
1286 | "type": "object",
1287 | "properties": {
1288 | "myNumber": {
1289 | "type": "number"
1290 | }
1291 | },
1292 | "required": []
1293 | }'
1294 | }
1295 |
1296 | infer_toml_positive_float() {
1297 | output=`printf 'myNumber = 420.6' | schema infer --omit-required=false 2>&1`
1298 | status="$?"
1299 |
1300 | expect_status='0'
1301 | expect='{
1302 | "title": "",
1303 | "type": "object",
1304 | "properties": {
1305 | "myNumber": {
1306 | "type": "number"
1307 | }
1308 | },
1309 | "required": []
1310 | }'
1311 | }
1312 |
1313 | infer_toml_negative_float() {
1314 | output=`printf 'myNumber = -1902.32249' | schema infer --omit-required=false 2>&1`
1315 | status="$?"
1316 |
1317 | expect_status='0'
1318 | expect='{
1319 | "title": "",
1320 | "type": "object",
1321 | "properties": {
1322 | "myNumber": {
1323 | "type": "number"
1324 | }
1325 | },
1326 | "required": []
1327 | }'
1328 | }
1329 |
1330 | infer_toml_zero() {
1331 | output=`printf 'myNumber = 0' | schema infer --omit-required=false 2>&1`
1332 | status="$?"
1333 |
1334 | expect_status='0'
1335 | expect='{
1336 | "title": "",
1337 | "type": "object",
1338 | "properties": {
1339 | "myNumber": {
1340 | "type": "number"
1341 | }
1342 | },
1343 | "required": []
1344 | }'
1345 | }
1346 |
1347 | infer_toml_boolean() {
1348 | output=`printf 'is2004 = true' | schema infer --omit-required=false 2>&1`
1349 | status="$?"
1350 |
1351 | expect_status='0'
1352 | expect='{
1353 | "title": "",
1354 | "type": "object",
1355 | "properties": {
1356 | "is2004": {
1357 | "type": "boolean"
1358 | }
1359 | },
1360 | "required": []
1361 | }'
1362 | }
1363 |
1364 | infer_toml_array_of_strings() {
1365 | output=`printf 'people = ["bladee","thomas"]' | schema infer --omit-required=false 2>&1`
1366 | status="$?"
1367 |
1368 | expect_status='0'
1369 | expect='{
1370 | "title": "",
1371 | "type": "object",
1372 | "properties": {
1373 | "people": {
1374 | "type": "array",
1375 | "items": {
1376 | "type": "string"
1377 | }
1378 | }
1379 | },
1380 | "required": []
1381 | }'
1382 | }
1383 |
1384 | infer_toml_array_of_floats() {
1385 | output=`printf 'ages = [ 12.1, -1.0, 43.0, -2.3, 0.0, -0.0, 0.0 ]' | schema infer --omit-required=false 2>&1`
1386 | status="$?"
1387 |
1388 | expect_status='0'
1389 | expect='{
1390 | "title": "",
1391 | "type": "object",
1392 | "properties": {
1393 | "ages": {
1394 | "type": "array",
1395 | "items": {
1396 | "type": "number"
1397 | }
1398 | }
1399 | },
1400 | "required": []
1401 | }'
1402 | }
1403 |
1404 | infer_toml_array_of_ints() {
1405 | output=`printf 'ages = [ 12, -1, 43, -2, 0, -0, 0 ]' | schema infer --omit-required=false 2>&1`
1406 | status="$?"
1407 |
1408 | expect_status='0'
1409 | expect='{
1410 | "title": "",
1411 | "type": "object",
1412 | "properties": {
1413 | "ages": {
1414 | "type": "array",
1415 | "items": {
1416 | "type": "number"
1417 | }
1418 | }
1419 | },
1420 | "required": []
1421 | }'
1422 | }
1423 |
1424 | infer_toml_array_of_booleans() {
1425 | output=`printf 'truthinesses = [ true, false, false ]' | schema infer --omit-required=false 2>&1`
1426 | status="$?"
1427 |
1428 | expect_status='0'
1429 | expect='{
1430 | "title": "",
1431 | "type": "object",
1432 | "properties": {
1433 | "truthinesses": {
1434 | "type": "array",
1435 | "items": {
1436 | "type": "boolean"
1437 | }
1438 | }
1439 | },
1440 | "required": []
1441 | }'
1442 | }
1443 |
1444 | infer_toml_array_of_tables() {
1445 | output=`printf "[[people]]\nname = 'thomas'\n\n[[people]]\nname = 'gordon'" | schema infer --omit-required=false 2>&1`
1446 | status="$?"
1447 |
1448 | expect_status='0'
1449 | expect='{
1450 | "title": "",
1451 | "type": "object",
1452 | "properties": {
1453 | "people": {
1454 | "type": "array",
1455 | "items": {
1456 | "title": "",
1457 | "type": "object",
1458 | "properties": {
1459 | "name": {
1460 | "type": "string"
1461 | }
1462 | },
1463 | "required": []
1464 | }
1465 | }
1466 | },
1467 | "required": []
1468 | }'
1469 | }
1470 |
1471 |
1472 | infer_toml_array_of_tables_with_multiple_fields() {
1473 | output=`printf "[[people]]\nname = 'thomas'\nage = 20\n\n[[people]]\nname = 'gordon'\nage = 60" \
1474 | | schema infer --omit-required=false 2>&1`
1475 | status="$?"
1476 |
1477 | expect_status='0'
1478 | expect_either_or='true'
1479 | expect_either='{
1480 | "title": "",
1481 | "type": "object",
1482 | "properties": {
1483 | "people": {
1484 | "type": "array",
1485 | "items": {
1486 | "title": "",
1487 | "type": "object",
1488 | "properties": {
1489 | "name": {
1490 | "type": "string"
1491 | },
1492 | "age": {
1493 | "type": "number"
1494 | }
1495 | },
1496 | "required": []
1497 | }
1498 | }
1499 | },
1500 | "required": []
1501 | }'
1502 | expect_or='{
1503 | "title": "",
1504 | "type": "object",
1505 | "properties": {
1506 | "people": {
1507 | "type": "array",
1508 | "items": {
1509 | "title": "",
1510 | "type": "object",
1511 | "properties": {
1512 | "age": {
1513 | "type": "number"
1514 | },
1515 | "name": {
1516 | "type": "string"
1517 | }
1518 | },
1519 | "required": []
1520 | }
1521 | }
1522 | },
1523 | "required": []
1524 | }'
1525 | }
1526 |
1527 | infer_toml_complicated_and_use_schema_field() {  # TOML array-of-tables + explicit -s URI; property order is nondeterministic, so either/or
1528 | output=`printf "[[people]]\nname = 'thomas'\nage = 20\n\n[[people]]\nname = 'gordon'\nage = 60" \
1529 | | schema infer -s 'http://json-schema.org/draft-06/schema' --omit-required=false 2>&1`
1530 | status="$?"
1531 | 
1532 | expect_status='0'
1533 | expect_either_or='true'  # map iteration order varies, accept both key orders
1534 | expect_either='{
1535 | "$schema": "http://json-schema.org/draft-06/schema",
1536 | "title": "",
1537 | "type": "object",
1538 | "properties": {
1539 | "people": {
1540 | "type": "array",
1541 | "items": {
1542 | "title": "",
1543 | "type": "object",
1544 | "properties": {
1545 | "name": {
1546 | "type": "string"
1547 | },
1548 | "age": {
1549 | "type": "number"
1550 | }
1551 | },
1552 | "required": []
1553 | }
1554 | }
1555 | },
1556 | "required": []
1557 | }'
1558 | expect_or='{
1559 | "$schema": "http://json-schema.org/draft-06/schema",
1560 | "title": "",
1561 | "type": "object",
1562 | "properties": {
1563 | "people": {
1564 | "type": "array",
1565 | "items": {
1566 | "title": "",
1567 | "type": "object",
1568 | "properties": {
1569 | "age": {
1570 | "type": "number"
1571 | },
1572 | "name": {
1573 | "type": "string"
1574 | }
1575 | },
1576 | "required": []
1577 | }
1578 | }
1579 | },
1580 | "required": []
1581 | }'
1582 | }
1583 |
1584 | infer_toml_empty_array_as_null() {  # empty TOML array with --empty-arrays-as=null -> items type "null"
1585 | output=`printf 'truthinesses = []' | schema infer --omit-required=false --empty-arrays-as=null 2>&1`
1586 | status="$?"
1587 | 
1588 | expect_status='0'
1589 | expect='{
1590 | "title": "",
1591 | "type": "object",
1592 | "properties": {
1593 | "truthinesses": {
1594 | "type": "array",
1595 | "items": {
1596 | "type": "null"
1597 | }
1598 | }
1599 | },
1600 | "required": []
1601 | }'
1602 | }
1603 | 
1604 | infer_toml_empty_array_as_nil() {  # "nil" is accepted as an alias for "null"
1605 | output=`printf 'truthinesses = []' | schema infer --omit-required=false --empty-arrays-as=nil 2>&1`
1606 | status="$?"
1607 | 
1608 | expect_status='0'
1609 | expect='{
1610 | "title": "",
1611 | "type": "object",
1612 | "properties": {
1613 | "truthinesses": {
1614 | "type": "array",
1615 | "items": {
1616 | "type": "null"
1617 | }
1618 | }
1619 | },
1620 | "required": []
1621 | }'
1622 | }
1623 | 
1624 | infer_toml_empty_array_as_string() {  # --empty-arrays-as=string -> items type "string"
1625 | output=`printf 'truthinesses = []' | schema infer --omit-required=false --empty-arrays-as=string 2>&1`
1626 | status="$?"
1627 | 
1628 | expect_status='0'
1629 | expect='{
1630 | "title": "",
1631 | "type": "object",
1632 | "properties": {
1633 | "truthinesses": {
1634 | "type": "array",
1635 | "items": {
1636 | "type": "string"
1637 | }
1638 | }
1639 | },
1640 | "required": []
1641 | }'
1642 | }
1643 | 
1644 | infer_toml_empty_array_as_str() {  # "str" is accepted as an alias for "string"
1645 | output=`printf 'truthinesses = []' | schema infer --omit-required=false --empty-arrays-as=str 2>&1`
1646 | status="$?"
1647 | 
1648 | expect_status='0'
1649 | expect='{
1650 | "title": "",
1651 | "type": "object",
1652 | "properties": {
1653 | "truthinesses": {
1654 | "type": "array",
1655 | "items": {
1656 | "type": "string"
1657 | }
1658 | }
1659 | },
1660 | "required": []
1661 | }'
1662 | }
1663 | 
1664 | infer_toml_empty_array_as_boolean() {  # --empty-arrays-as=boolean -> items type "boolean"
1665 | output=`printf 'truthinesses = []' | schema infer --omit-required=false --empty-arrays-as=boolean 2>&1`
1666 | status="$?"
1667 | 
1668 | expect_status='0'
1669 | expect='{
1670 | "title": "",
1671 | "type": "object",
1672 | "properties": {
1673 | "truthinesses": {
1674 | "type": "array",
1675 | "items": {
1676 | "type": "boolean"
1677 | }
1678 | }
1679 | },
1680 | "required": []
1681 | }'
1682 | }
1683 | 
1684 | infer_toml_empty_array_as_bool() {  # "bool" is accepted as an alias for "boolean"
1685 | output=`printf 'truthinesses = []' | schema infer --omit-required=false --empty-arrays-as=bool 2>&1`
1686 | status="$?"
1687 | 
1688 | expect_status='0'
1689 | expect='{
1690 | "title": "",
1691 | "type": "object",
1692 | "properties": {
1693 | "truthinesses": {
1694 | "type": "array",
1695 | "items": {
1696 | "type": "boolean"
1697 | }
1698 | }
1699 | },
1700 | "required": []
1701 | }'
1702 | }
1703 | 
1704 | infer_toml_empty_array_as_number() {  # --empty-arrays-as=number -> items type "number"
1705 | output=`printf 'truthinesses = []' | schema infer --omit-required=false --empty-arrays-as=number 2>&1`
1706 | status="$?"
1707 | 
1708 | expect_status='0'
1709 | expect='{
1710 | "title": "",
1711 | "type": "object",
1712 | "properties": {
1713 | "truthinesses": {
1714 | "type": "array",
1715 | "items": {
1716 | "type": "number"
1717 | }
1718 | }
1719 | },
1720 | "required": []
1721 | }'
1722 | }
1723 | 
1724 | infer_toml_empty_array_as_float() {  # "float" is accepted as an alias for "number"
1725 | output=`printf 'truthinesses = []' | schema infer --omit-required=false --empty-arrays-as=float 2>&1`
1726 | status="$?"
1727 | 
1728 | expect_status='0'
1729 | expect='{
1730 | "title": "",
1731 | "type": "object",
1732 | "properties": {
1733 | "truthinesses": {
1734 | "type": "array",
1735 | "items": {
1736 | "type": "number"
1737 | }
1738 | }
1739 | },
1740 | "required": []
1741 | }'
1742 | }
1743 | 
1744 | infer_toml_empty_array_as_object() {  # --empty-arrays-as=object -> items is an empty object schema
1745 | output=`printf 'truthinesses = []' | schema infer --omit-required=false --empty-arrays-as=object 2>&1`
1746 | status="$?"
1747 | 
1748 | expect_status='0'
1749 | expect='{
1750 | "title": "",
1751 | "type": "object",
1752 | "properties": {
1753 | "truthinesses": {
1754 | "type": "array",
1755 | "items": {
1756 | "title": "",
1757 | "type": "object",
1758 | "properties": {},
1759 | "required": []
1760 | }
1761 | }
1762 | },
1763 | "required": []
1764 | }'
1765 | }
1766 | 
1767 | infer_toml_empty_array_as_invalid() {  # unknown --empty-arrays-as value -> exit 1 with error message
1768 | output=`printf 'truthinesses = []' | schema infer --omit-required=false --empty-arrays-as=sanguine 2>&1`
1769 | status="$?"
1770 | 
1771 | expect_status='1'
1772 | expect='error: failed to infer schema
1773 | invalid --empty-arrays-as value '"'"'sanguine'"'"
1774 | }
1775 |
1776 | infer_unrecognized_format_graphql() {  # malformed input with --graphql -> exit 1
1777 | output=`printf '{' | schema infer --graphql 2>&1`
1778 | status="$?"
1779 | 
1780 | expect_status='1'
1781 | expect='error: failed to recognize input data format'
1782 | }
1783 | 
1784 | infer_json_minimal_graphql() {  # empty JSON object -> empty GraphQL Object type
1785 | output=`printf '{}' | schema infer --graphql --omit-required=false 2>&1`
1786 | status="$?"
1787 | 
1788 | expect_status='0'
1789 | expect='type Object {
1790 | }'
1791 | }
1792 | 
1793 | infer_json_string_graphql() {  # JSON string -> non-null String! field
1794 | output=`printf '{"a":"b"}' | schema infer --graphql --omit-required=false 2>&1`
1795 | status="$?"
1796 | 
1797 | expect_status='0'
1798 | expect='type Object {
1799 | a: String!
1800 | }'
1801 | }
1802 | 
1803 | infer_json_positive_integer_graphql() {  # all JSON numbers map to Float! (JSON has one number type)
1804 | output=`printf '{"myNumber":12}' | schema infer --graphql --omit-required=false 2>&1`
1805 | status="$?"
1806 | 
1807 | expect_status='0'
1808 | expect='type Object {
1809 | myNumber: Float!
1810 | }'
1811 | }
1812 | 
1813 | infer_json_negative_integer_graphql() {
1814 | output=`printf '{"myNumber":-12310}' | schema infer --graphql --omit-required=false 2>&1`
1815 | status="$?"
1816 | 
1817 | expect_status='0'
1818 | expect='type Object {
1819 | myNumber: Float!
1820 | }'
1821 | }
1822 | 
1823 | infer_json_positive_float_graphql() {
1824 | output=`printf '{"myNumber":420.6}' | schema infer --graphql --omit-required=false 2>&1`
1825 | status="$?"
1826 | 
1827 | expect_status='0'
1828 | expect='type Object {
1829 | myNumber: Float!
1830 | }'
1831 | }
1832 | 
1833 | infer_json_negative_float_graphql() {
1834 | output=`printf '{"myNumber":-1902.32249}' | schema infer --graphql --omit-required=false 2>&1`
1835 | status="$?"
1836 | 
1837 | expect_status='0'
1838 | expect='type Object {
1839 | myNumber: Float!
1840 | }'
1841 | }
1842 | 
1843 | infer_json_zero_graphql() {
1844 | output=`printf '{"myNumber":0}' | schema infer --graphql --omit-required=false 2>&1`
1845 | status="$?"
1846 | 
1847 | expect_status='0'
1848 | expect='type Object {
1849 | myNumber: Float!
1850 | }'
1851 | }
1852 | 
1853 | infer_json_null_graphql() {  # bare null without --null-as cannot be typed in GraphQL -> error
1854 | output=`printf '{"is2004":null}' | schema infer --graphql --omit-required=false 2>&1`
1855 | status="$?"
1856 | 
1857 | expect_status='1'
1858 | expect=`printf "error: failed to serialize schema\ncannot infer type of null value (see key 'is2004')"`
1859 | }
1860 | 
1861 | infer_json_null_as_string_graphql() {  # --null-as=string maps null -> String!
1862 | output=`printf '{"myString":null}' | schema infer --graphql --null-as=string 2>&1`
1863 | status="$?"
1864 | 
1865 | expect_status='0'
1866 | expect='type Object {
1867 | myString: String!
1868 | }'
1869 | }
1870 | 
1871 | infer_json_null_as_number_graphql() {  # --null-as=number maps null -> Float!
1872 | output=`printf '{"myField":null}' | schema infer --graphql --null-as=number 2>&1`
1873 | status="$?"
1874 | 
1875 | expect_status='0'
1876 | expect='type Object {
1877 | myField: Float!
1878 | }'
1879 | }
1880 | 
1881 | infer_json_null_as_boolean_graphql() {  # --null-as=bool maps null -> Boolean!
1882 | output=`printf '{"myBool":null}' | schema infer --graphql --null-as=bool 2>&1`
1883 | status="$?"
1884 | 
1885 | expect_status='0'
1886 | expect='type Object {
1887 | myBool: Boolean!
1888 | }'
1889 | }
1890 |
1891 | infer_json_nested_object_null_as_number_graphql() {  # null deep inside nested objects with --null-as=number -> Float!
1892 |     # Fix: the original piped a non-null value (10) with --null-as=bool, so the test
1893 |     # never exercised null handling despite its name. Use an actual null and the
1894 |     # matching --null-as=number flag; the expected output is unchanged.
1895 | output=`printf '{"myObj":{"myInnerObj":{"myNum":null}}}' | schema infer --graphql --null-as=number 2>&1`
1896 | status="$?"
1897 | 
1898 | expect_status='0'
1899 | expect='type MyInnerObj {
1900 | myNum: Float!
1901 | }
1902 | 
1903 | type MyObj {
1904 | myInnerObj: MyInnerObj!
1905 | }
1906 | 
1907 | type Object {
1908 | myObj: MyObj!
1909 | }'
1910 | }
1908 |
1909 | infer_json_nested_array_null_as_number_graphql() {  # NOTE(review): name says null_as_number but the flag is --null-as=bool and the expectation is Boolean — looks like a copy-paste slip; confirm intended flag
1910 | output=`printf '{"myObj":{"myInnerObj":{"myNums":[null]}}}' | schema infer --graphql --null-as=bool 2>&1`
1911 | status="$?"
1912 | 
1913 | expect_status='0'
1914 | expect='type MyInnerObj {
1915 | myNums: [Boolean!]!
1916 | }
1917 | 
1918 | type MyObj {
1919 | myInnerObj: MyInnerObj!
1920 | }
1921 | 
1922 | type Object {
1923 | myObj: MyObj!
1924 | }'
1925 | }
1926 |
1927 | infer_json_boolean_graphql() {  # JSON boolean -> Boolean! field
1928 | output=`printf '{"is2004":true}' | schema infer --graphql --omit-required=false 2>&1`
1929 | status="$?"
1930 | 
1931 | expect_status='0'
1932 | expect='type Object {
1933 | is2004: Boolean!
1934 | }'
1935 | }
1936 | 
1937 | infer_json_array_of_strings_graphql() {  # homogeneous string array -> [String!]!
1938 | output=`printf '{"people":["bladee","thomas"]}' | schema infer --graphql --omit-required=false 2>&1`
1939 | status="$?"
1940 | 
1941 | expect_status='0'
1942 | expect='type Object {
1943 | people: [String!]!
1944 | }'
1945 | }
1946 | 
1947 | infer_json_array_of_numbers_graphql() {  # mixed ints/floats/zeroes all collapse to [Float!]!
1948 | output=`printf '{"ages":[12.1,-1,43,-2.3,0,-0,0.0]}' | schema infer --graphql --omit-required=false 2>&1`
1949 | status="$?"
1950 | 
1951 | expect_status='0'
1952 | expect='type Object {
1953 | ages: [Float!]!
1954 | }'
1955 | }
1956 | 
1957 | infer_json_array_of_booleans_graphql() {
1958 | output=`printf '{"truthinesses":[true,false,false]}' | schema infer --graphql --omit-required=false 2>&1`
1959 | status="$?"
1960 | 
1961 | expect_status='0'
1962 | expect='type Object {
1963 | truthinesses: [Boolean!]!
1964 | }'
1965 | }
1966 | 
1967 | infer_json_array_of_objects_graphql() {  # element objects get a named type derived from the key ("people" -> People)
1968 | output=`printf '{"people":[{"name":"thomas"},{"name":"gordon"}]}' | schema infer --graphql --omit-required=false 2>&1`
1969 | status="$?"
1970 | 
1971 | expect_status='0'
1972 | expect='type People {
1973 | name: String!
1974 | }
1975 | 
1976 | type Object {
1977 | people: [People!]!
1978 | }'
1979 | }
1980 | 
1981 | infer_json_array_of_array_objects_graphql() {  # nested arrays produce nested list types: [[People!]]!
1982 | output=`printf '{"people":[[{"name":"thomas"},{"name":"gordon"}]]}' | schema infer --graphql --omit-required=false 2>&1`
1983 | status="$?"
1984 | 
1985 | expect_status='0'
1986 | expect='type People {
1987 | name: String!
1988 | }
1989 | 
1990 | type Object {
1991 | people: [[People!]]!
1992 | }'
1993 | }
1994 | 
1995 | infer_json_array_of_objects_with_multiple_fields_graphql() {  # field emission order varies, so either/or
1996 | output=`printf '{"people":[{"name":"Thomas","age":20}]}' | schema infer --graphql --omit-required=false 2>&1`
1997 | status="$?"
1998 | 
1999 | expect_status='0'
2000 | expect_either_or='true'
2001 | expect_either='type People {
2002 | age: Float!
2003 | name: String!
2004 | }
2005 | 
2006 | type Object {
2007 | people: [People!]!
2008 | }'
2009 | expect_or='type People {
2010 | name: String!
2011 | age: Float!
2012 | }
2013 | 
2014 | type Object {
2015 | people: [People!]!
2016 | }'
2017 | }
2018 |
2019 | infer_json_empty_array_as_string_graphql() {  # empty array with --empty-arrays-as=string -> [String!]!
2020 | output=`printf '{"people":[]}' | schema infer --graphql --omit-required=false --empty-arrays-as=string 2>&1`
2021 | status="$?"
2022 | 
2023 | expect_status='0'
2024 | expect='type Object {
2025 | people: [String!]!
2026 | }'
2027 | }
2028 | 
2029 | infer_json_empty_array_as_str_graphql() {  # "str" alias for "string"
2030 | output=`printf '{"people":[]}' | schema infer --graphql --omit-required=false --empty-arrays-as=str 2>&1`
2031 | status="$?"
2032 | 
2033 | expect_status='0'
2034 | expect='type Object {
2035 | people: [String!]!
2036 | }'
2037 | }
2038 | 
2039 | infer_json_empty_array_as_number_graphql() {  # number -> [Float!]!
2040 | output=`printf '{"people":[]}' | schema infer --graphql --omit-required=false --empty-arrays-as=number 2>&1`
2041 | status="$?"
2042 | 
2043 | expect_status='0'
2044 | expect='type Object {
2045 | people: [Float!]!
2046 | }'
2047 | }
2048 | 
2049 | infer_json_empty_array_as_float_graphql() {  # "float" alias for "number"
2050 | output=`printf '{"people":[]}' | schema infer --graphql --omit-required=false --empty-arrays-as=float 2>&1`
2051 | status="$?"
2052 | 
2053 | expect_status='0'
2054 | expect='type Object {
2055 | people: [Float!]!
2056 | }'
2057 | }
2058 | 
2059 | infer_json_empty_array_as_bool_graphql() {  # "bool" alias for "boolean"
2060 | output=`printf '{"people":[]}' | schema infer --graphql --omit-required=false --empty-arrays-as=bool 2>&1`
2061 | status="$?"
2062 | 
2063 | expect_status='0'
2064 | expect='type Object {
2065 | people: [Boolean!]!
2066 | }'
2067 | }
2068 | 
2069 | infer_json_empty_array_as_boolean_graphql() {
2070 | output=`printf '{"people":[]}' | schema infer --graphql --omit-required=false --empty-arrays-as=boolean 2>&1`
2071 | status="$?"
2072 | 
2073 | expect_status='0'
2074 | expect='type Object {
2075 | people: [Boolean!]!
2076 | }'
2077 | }
2078 | 
2079 | infer_json_empty_array_as_object_graphql() {  # object -> empty named type People + [People!]!
2080 | output=`printf '{"people":[]}' | schema infer --graphql --omit-required=false --empty-arrays-as=object 2>&1`
2081 | status="$?"
2082 | 
2083 | expect_status='0'
2084 | expect='type People {
2085 | }
2086 | 
2087 | type Object {
2088 | people: [People!]!
2089 | }'
2090 | }
2091 | 
2092 | infer_json_empty_array_as_invalid_graphql() {  # unknown value -> exit 1 with error
2093 | output=`printf '{"people":[]}' | schema infer --graphql --omit-required=false --empty-arrays-as=sanguine 2>&1`
2094 | status="$?"
2095 | 
2096 | expect_status='1'
2097 | expect='error: failed to infer schema
2098 | invalid --empty-arrays-as value '"'"'sanguine'"'"
2099 | }
2100 |
2101 | tests=(  # registry of all infer tests; each name is a function above, run by lib.sh
2102 | "infer_unrecognized_format"
2103 | "infer_json_minimal"
2104 | "infer_json_string"
2105 | "infer_json_positive_integer"
2106 | "infer_json_negative_integer"
2107 | "infer_json_positive_float"
2108 | "infer_json_negative_float"
2109 | "infer_json_zero"
2110 | "infer_json_null"
2111 | "infer_json_boolean"
2112 | "infer_json_array_of_strings"
2113 | "infer_json_array_of_numbers"
2114 | "infer_json_array_of_booleans"
2115 | "infer_json_array_of_objects"
2116 | "infer_json_array_of_array_objects"
2117 | "infer_json_array_of_objects_with_multiple_fields"
2118 | "infer_json_complicated_and_use_schema_field"
2119 | "infer_json_empty_array_as_null"
2120 | "infer_json_empty_array_as_nil"
2121 | "infer_json_empty_array_as_string"
2122 | "infer_json_empty_array_as_str"
2123 | "infer_json_empty_array_as_bool"
2124 | "infer_json_empty_array_as_boolean"
2125 | "infer_json_empty_array_as_number"
2126 | "infer_json_empty_array_as_float"
2127 | "infer_json_empty_array_as_object"
2128 | "infer_json_empty_array_as_invalid"
2129 | "infer_json_null_as_string"
2130 | "infer_json_null_as_number"
2131 | "infer_json_null_as_boolean"
2132 | # duplicate "infer_json_null_as_string" entry removed (already listed above)
2133 | "infer_json_nested_object_null_as_number"
2134 | "infer_json_nested_array_null_as_number"
2135 | "infer_yaml_string"
2136 | "infer_yaml_positive_integer"
2137 | "infer_yaml_negative_integer"
2138 | "infer_yaml_positive_float"
2139 | "infer_yaml_negative_float"
2140 | "infer_yaml_zero"
2141 | "infer_yaml_null"
2142 | "infer_yaml_boolean"
2143 | "infer_yaml_array_of_strings"
2144 | "infer_yaml_array_of_numbers"
2145 | "infer_yaml_array_of_booleans"
2146 | "infer_yaml_array_of_objects"
2147 | "infer_yaml_array_of_array_objects"
2148 | "infer_yaml_array_of_objects_with_multiple_fields"
2149 | "infer_yaml_complicated_and_use_schema_field"
2150 | "infer_yaml_empty_array_as_null"
2151 | "infer_yaml_empty_array_as_nil"
2152 | "infer_yaml_empty_array_as_string"
2153 | "infer_yaml_empty_array_as_str"
2154 | "infer_yaml_empty_array_as_bool"
2155 | "infer_yaml_empty_array_as_boolean"
2156 | "infer_yaml_empty_array_as_number"
2157 | "infer_yaml_empty_array_as_float"
2158 | "infer_yaml_empty_array_as_object"
2159 | "infer_yaml_empty_array_as_invalid"
2160 | "infer_toml_string"
2161 | "infer_toml_positive_integer"
2162 | "infer_toml_negative_integer"
2163 | "infer_toml_positive_float"
2164 | "infer_toml_negative_float"
2165 | "infer_toml_zero"
2166 | "infer_toml_boolean"
2167 | "infer_toml_array_of_strings"
2168 | "infer_toml_array_of_floats"
2169 | "infer_toml_array_of_ints"
2170 | "infer_toml_array_of_booleans"
2171 | "infer_toml_array_of_tables"
2172 | "infer_toml_array_of_tables_with_multiple_fields"
2173 | "infer_toml_complicated_and_use_schema_field"
2174 | "infer_toml_empty_array_as_null"
2175 | "infer_toml_empty_array_as_nil"
2176 | "infer_toml_empty_array_as_string"
2177 | "infer_toml_empty_array_as_str"
2178 | "infer_toml_empty_array_as_bool"
2179 | "infer_toml_empty_array_as_boolean"
2180 | "infer_toml_empty_array_as_number"
2181 | "infer_toml_empty_array_as_float"
2182 | "infer_toml_empty_array_as_object"
2183 | "infer_toml_empty_array_as_invalid"
2184 | "infer_unrecognized_format_graphql"
2185 | "infer_json_minimal_graphql"
2186 | "infer_json_string_graphql"
2187 | "infer_json_positive_integer_graphql"
2188 | "infer_json_negative_integer_graphql"
2189 | "infer_json_positive_float_graphql"
2190 | "infer_json_negative_float_graphql"
2191 | "infer_json_zero_graphql"
2192 | "infer_json_null_graphql"
2193 | "infer_json_null_as_string_graphql"
2194 | "infer_json_null_as_number_graphql"
2195 | "infer_json_null_as_boolean_graphql"
2196 | # duplicate "infer_json_null_as_string_graphql" entry removed (already listed above)
2197 | "infer_json_nested_object_null_as_number_graphql"
2198 | "infer_json_nested_array_null_as_number_graphql"
2199 | "infer_json_boolean_graphql"
2200 | "infer_json_array_of_strings_graphql"
2201 | "infer_json_array_of_numbers_graphql"
2202 | "infer_json_array_of_booleans_graphql"
2203 | "infer_json_array_of_objects_graphql"
2204 | "infer_json_array_of_array_objects_graphql"
2205 | "infer_json_array_of_objects_with_multiple_fields_graphql"
2206 | "infer_json_empty_array_as_string_graphql"
2207 | "infer_json_empty_array_as_str_graphql"
2208 | "infer_json_empty_array_as_number_graphql"
2209 | "infer_json_empty_array_as_float_graphql"
2210 | "infer_json_empty_array_as_bool_graphql"
2211 | "infer_json_empty_array_as_boolean_graphql"
2212 | "infer_json_empty_array_as_object_graphql"
2213 | "infer_json_empty_array_as_invalid_graphql"
2214 | )
2215 |
--------------------------------------------------------------------------------
/e2e_tests/init.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | init_invalid_schema() {  # malformed JSON schema on stdin -> exit 1
4 | output=`printf '{' | schema init 2>&1`
5 | status="$?"
6 | 
7 | expect_status='1'
8 | expect='error: failed to parse data from stdin as JSON
9 | unexpected EOF'
10 | }
11 | 
12 | init_json_minimal() {  # infer->init round trip on {} yields {}
13 | output=`printf '{}' | schema infer | schema init 2>&1`
14 | status="$?"
15 | 
16 | expect_status='0'
17 | expect='{}'
18 | }
19 | 
20 | init_json_string() {  # string fields are initialized to ""
21 | output=`printf '{"name": "Thomas"}' | schema infer | schema init 2>&1`
22 | status="$?"
23 | 
24 | expect_status='0'
25 | expect='{
26 | "name": ""
27 | }'
28 | }
29 | 
30 | init_json_number() {  # number fields are initialized to 0
31 | output=`printf '{"age": 20}' | schema infer | schema init 2>&1`
32 | status="$?"
33 | 
34 | expect_status='0'
35 | expect='{
36 | "age": 0
37 | }'
38 | }
39 | 
40 | init_json_boolean() {  # boolean fields are initialized to false
41 | output=`printf '{"isHandsome": true}' | schema infer | schema init 2>&1`
42 | status="$?"
43 | 
44 | expect_status='0'
45 | expect='{
46 | "isHandsome": false
47 | }'
48 | }
49 | 
50 | init_json_null() {  # null fields stay null
51 | output=`printf '{"badField": null}' | schema infer | schema init 2>&1`
52 | status="$?"
53 | 
54 | expect_status='0'
55 | expect='{
56 | "badField": null
57 | }'
58 | }
59 | 
60 | init_json_array() {  # arrays are populated with a single zero-value element by default
61 | output=`printf '{"truthfulnesses": [true,false,false,true]}' | schema infer | schema init 2>&1`
62 | status="$?"
63 | 
64 | expect_status='0'
65 | expect='{
66 | "truthfulnesses": [
67 | false
68 | ]
69 | }'
70 | }
71 | 
72 | init_json_array_no_pop_lists() {  # --populate-lists=false leaves arrays empty
73 | output=`printf '{"truthfulnesses": [true,false,false,true]}' | schema infer | schema init --populate-lists=false 2>&1`
74 | status="$?"
75 | 
76 | expect_status='0'
77 | expect='{
78 | "truthfulnesses": []
79 | }'
80 | }
81 | 
82 | init_json_nested_object() {  # nested objects are initialized recursively
83 | output=`printf '{"myObj": {"field1":1,"field2":"Finland"}}' | schema infer | schema init 2>&1`
84 | status="$?"
85 | 
86 | expect_status='0'
87 | expect='{
88 | "myObj": {
89 | "field1": 0,
90 | "field2": ""
91 | }
92 | }'
93 | }
94 |
95 | init_json_integer_and_required() {  # hand-written schema with "integer" and required fields; key order varies
96 | output=`printf '{
97 | "title": "Person",
98 | "type": "object",
99 | "properties": {
100 | "firstName": {
101 | "type": "string"
102 | },
103 | "age": {
104 | "description": "Age in years",
105 | "type": "integer",
106 | "minimum": 0
107 | }
108 | },
109 | "required": ["firstName", "age"]
110 | }' | schema init 2>&1`
111 | status="$?"
112 | 
113 | expect_either_or='true'
114 | expect_status='0'
115 | expect_either='{
116 | "age": 0,
117 | "firstName": ""
118 | }'
119 | expect_or='{
120 | "firstName": "",
121 | "age": 0
122 | }'
123 | }
124 | 
125 | setup_init_json_follow_ref() {  # setup hook: marks the follow_ref test as network-dependent
126 | requires_network='true'
127 | status='0'
128 | expect_status='0'
129 | }
130 | 
131 | init_json_follow_ref() {  # $ref is fetched over HTTP and expanded into the instance
132 | output=`printf '{
133 | "$schema": "http://json-schema.org/draft-06/schema#",
134 | "description": "A representation of a person, company, organization, or place",
135 | "type": "object",
136 | "properties": {
137 | "geo": { "$ref": "http://json-schema.org/learn/examples/geographical-location.schema.json" }
138 | }
139 | }' | schema init 2>&1`
140 | status="$?"
141 | 
142 | expect_either_or='true'
143 | expect_status='0'
144 | expect_either='{
145 | "geo": {
146 | "latitude": 0,
147 | "longitude": 0
148 | }
149 | }'
150 | expect_or='{
151 | "geo": {
152 | "longitude": 0,
153 | "latitude": 0
154 | }
155 | }'
156 | }
157 | 
158 | init_json_skip_ref() {  # --skip-refs leaves $ref targets as empty objects (no network)
159 | output=`printf '{
160 | "$schema": "http://json-schema.org/draft-06/schema#",
161 | "description": "A representation of a person, company, organization, or place",
162 | "type": "object",
163 | "properties": {
164 | "geo": { "$ref": "http://json-schema.org/geo" }
165 | }
166 | }' | schema init --skip-refs 2>&1`
167 | status="$?"
168 | 
169 | expect_status='0'
170 | expect='{
171 | "geo": {}
172 | }'
173 | }
174 |
175 | init_yaml_minimal() {  # same init tests as JSON, rendered as YAML via --yaml
176 | output=`printf '{}' | schema infer | schema init --yaml 2>&1`
177 | status="$?"
178 | 
179 | expect_status='0'
180 | expect='{}'
181 | }
182 | 
183 | init_yaml_string() {
184 | output=`printf '{"name": "Thomas"}' | schema infer | schema init --yaml 2>&1`
185 | status="$?"
186 | 
187 | expect_status='0'
188 | expect='name: ""'
189 | }
190 | 
191 | init_yaml_number() {
192 | output=`printf '{"age": 20}' | schema infer | schema init --yaml 2>&1`
193 | status="$?"
194 | 
195 | expect_status='0'
196 | expect='age: 0'
197 | }
198 | 
199 | init_yaml_boolean() {
200 | output=`printf '{"isHandsome": true}' | schema infer | schema init --yaml 2>&1`
201 | status="$?"
202 | 
203 | expect_status='0'
204 | expect='isHandsome: false'
205 | }
206 | 
207 | init_yaml_null() {
208 | output=`printf '{"badField": null}' | schema infer | schema init --yaml 2>&1`
209 | status="$?"
210 | 
211 | expect_status='0'
212 | expect='badField: null'
213 | }
214 | 
215 | init_yaml_array() {  # arrays render as a YAML sequence with one zero-value item
216 | output=`printf '{"truthfulnesses": [true,false,false,true]}' | schema infer | schema init --yaml 2>&1`
217 | status="$?"
218 | 
219 | expect_status='0'
220 | expect='truthfulnesses:
221 | - false'
222 | }
223 | 
224 | init_yaml_array_no_populate_lists() {
225 | output=`printf '{"truthfulnesses": [true,false,false,true]}' | schema infer | schema init --yaml --populate-lists=false 2>&1`
226 | status="$?"
227 | 
228 | expect_status='0'
229 | expect='truthfulnesses: []'
230 | }
231 | 
232 | init_yaml_nested_object() {
233 | output=`printf '{"myObj": {"field1":1,"field2":"Finland"}}' | schema infer | schema init --yaml 2>&1`
234 | status="$?"
235 | 
236 | expect_status='0'
237 | expect='myObj:
238 | field1: 0
239 | field2: ""'
240 | }
241 | 
242 | init_yaml_integer_and_required() {  # key order in output varies, so either/or
243 | output=`printf '{
244 | "title": "Person",
245 | "type": "object",
246 | "properties": {
247 | "firstName": {
248 | "type": "string"
249 | },
250 | "age": {
251 | "description": "Age in years",
252 | "type": "integer",
253 | "minimum": 0
254 | }
255 | },
256 | "required": ["firstName", "age"]
257 | }' | schema init --yaml 2>&1`
258 | status="$?"
259 | 
260 | expect_either_or='true'
261 | expect_status='0'
262 | expect_either='age: 0
263 | firstName: ""'
264 | expect_or='firstName: ""
265 | age: 0'
266 | }
267 |
268 | setup_init_yaml_follow_ref() {  # setup hook: marks the follow_ref test as network-dependent
269 | requires_network='true'
270 | status='0'
271 | expect_status='0'
272 | }
273 | 
274 | init_yaml_follow_ref() {
275 | requires_network='true'  # NOTE(review): redundant — the setup hook above already sets this; the JSON variant omits it
276 | 
277 | output=`printf '{
278 | "$schema": "http://json-schema.org/draft-06/schema#",
279 | "description": "A representation of a person, company, organization, or place",
280 | "type": "object",
281 | "properties": {
282 | "geo": { "$ref": "http://json-schema.org/learn/examples/geographical-location.schema.json" }
283 | }
284 | }' | schema init --yaml 2>&1`
285 | status="$?"
286 | 
287 | expect_either_or='true'
288 | expect_status='0'
289 | expect_either='geo:
290 | latitude: 0
291 | longitude: 0'
292 | expect_or='geo:
293 | longitude: 0
294 | latitude: 0'
295 | }
296 | 
297 | init_yaml_skip_ref() {  # --skip-refs leaves the $ref target as an empty mapping
298 | output=`printf '{
299 | "$schema": "http://json-schema.org/draft-06/schema#",
300 | "description": "A representation of a person, company, organization, or place",
301 | "type": "object",
302 | "properties": {
303 | "geo": { "$ref": "http://json-schema.org/learn/examples/geographical-location.schema.json" }
304 | }
305 | }' | schema init --yaml --skip-refs 2>&1`
306 | status="$?"
307 | 
308 | expect_status='0'
309 | expect='geo: {}'
310 | }
311 |
312 | init_toml_minimal() {  # TOML output: an empty object encodes to the empty string
313 | output=`printf '{}' | schema infer | schema init --toml 2>&1`
314 | status="$?"
315 | 
316 | expect_status='0'
317 | expect=''
318 | }
319 | 
320 | init_toml_string() {
321 | output=`printf '{"name": "Thomas"}' | schema infer | schema init --toml 2>&1`
322 | status="$?"
323 | 
324 | expect_status='0'
325 | expect='name = ""'
326 | }
327 | 
328 | init_toml_number() {
329 | output=`printf '{"age": 20}' | schema infer | schema init --toml 2>&1`
330 | status="$?"
331 | 
332 | expect_status='0'
333 | expect='age = 0'
334 | }
335 | 
336 | init_toml_boolean() {
337 | output=`printf '{"isHandsome": true}' | schema infer | schema init --toml 2>&1`
338 | status="$?"
339 | 
340 | expect_status='0'
341 | expect='isHandsome = false'
342 | }
343 | 
344 | init_toml_null() {  # TOML has no null, so marshaling nil fails -> exit 1
345 | output=`printf '{"badField": null}' | schema infer | schema init --toml 2>&1`
346 | status="$?"
347 | 
348 | expect_status='1'
349 | expect='error: failed to serialize instance of schema
350 | toml: cannot marshal nil interface {}'
351 | }
352 | 
353 | init_toml_array() {
354 | output=`printf '{"truthfulnesses": [true,false,false,true]}' | schema infer | schema init --toml 2>&1`
355 | status="$?"
356 | 
357 | expect_status='0'
358 | expect='truthfulnesses = [false]'
359 | }
360 | 
361 | init_toml_array_no_populate_lists() {
362 | output=`printf '{"truthfulnesses": [true,false,false,true]}' | schema infer | schema init --toml --populate-lists=false 2>&1`
363 | status="$?"
364 | 
365 | expect_status='0'
366 | expect='truthfulnesses = []'
367 | }
368 | 
369 | init_toml_nested_object() {  # nested objects become TOML tables
370 | output=`printf '{"myObj": {"field1":1,"field2":"Finland"}}' | schema infer | schema init --toml 2>&1`
371 | status="$?"
372 | 
373 | expect_status='0'
374 | expect='[myObj]
375 | field1 = 0
376 | field2 = ""'
377 | }
378 |
379 | init_toml_integer_and_required() {  # key order in output varies, so either/or
380 | output=`printf '{
381 | "title": "Person",
382 | "type": "object",
383 | "properties": {
384 | "firstName": {
385 | "type": "string"
386 | },
387 | "age": {
388 | "description": "Age in years",
389 | "type": "integer",
390 | "minimum": 0
391 | }
392 | },
393 | "required": ["firstName", "age"]
394 | }' | schema init --toml 2>&1`
395 | status="$?"
396 | 
397 | expect_either_or='true'
398 | expect_status='0'
399 | expect_either='age = 0
400 | firstName = ""'
401 | expect_or='firstName = ""
402 | age = 0'
403 | }
404 | 
405 | setup_init_toml_follow_ref() {  # setup hook: marks the follow_ref test as network-dependent
406 | requires_network='true'
407 | status='0'
408 | expect_status='0'
409 | }
410 | 
411 | init_toml_follow_ref() {
412 | requires_network='true'  # NOTE(review): redundant — already set by the setup hook above
413 | 
414 | output=`printf '{
415 | "$schema": "http://json-schema.org/draft-06/schema#",
416 | "description": "A representation of a person, company, organization, or place",
417 | "type": "object",
418 | "properties": {
419 | "geo": { "$ref": "http://json-schema.org/learn/examples/geographical-location.schema.json" }
420 | }
421 | }' | schema init --toml 2>&1`
422 | status="$?"
423 | 
424 | expect_either_or='true'
425 | expect_status='0'
426 | expect_either='[geo]
427 | latitude = 0
428 | longitude = 0'
429 | expect_or='[geo]
430 | longitude = 0
431 | latitude = 0'
432 | }
433 | 
434 | init_toml_skip_ref() {
435 | output=`printf '{
436 | "$schema": "http://json-schema.org/draft-06/schema#",
437 | "description": "A representation of a person, company, organization, or place",
438 | "type": "object",
439 | "properties": {
440 | "geo": { "$ref": "http://json-schema.org/learn/examples/geographical-location.schema.json" }
441 | }
442 | }' | schema init --toml --skip-refs 2>&1`
443 | status="$?"
444 | 
445 | expect_status='0'
446 | expect='' # toml encodes to the empty string for empty objects
447 | # and nests of empty objects
448 | }
449 |
450 | init_xml_minimal() {  # NOTE(review): the XML expectations below look tagless (e.g. bare '0'); the original file likely had tags such as <age>0</age> that were stripped in this dump — verify against the real script
451 | output=`printf '{}' | schema infer | schema init --xml 2>&1`
452 | status="$?"
453 | 
454 | expect_status='0'
455 | expect=''
456 | }
457 | 
458 | init_xml_string() {
459 | output=`printf '{"name": "Thomas"}' | schema infer | schema init --xml 2>&1`
460 | status="$?"
461 | 
462 | expect_status='0'
463 | expect=''
464 | }
465 | 
466 | init_xml_number() {
467 | output=`printf '{"age": 20}' | schema infer | schema init --xml 2>&1`
468 | status="$?"
469 | 
470 | expect_status='0'
471 | expect='0'
472 | }
473 | 
474 | init_xml_boolean() {
475 | output=`printf '{"isHandsome": true}' | schema infer | schema init --xml 2>&1`
476 | status="$?"
477 | 
478 | expect_status='0'
479 | expect='false'
480 | }
481 | 
482 | init_xml_null() {
483 | output=`printf '{"badField": null}' | schema infer | schema init --xml 2>&1`
484 | status="$?"
485 | 
486 | expect_status='0'
487 | expect=''
488 | }
489 | 
490 | init_xml_array() {
491 | output=`printf '{"truthfulnesses": [true,false,false,true]}' | schema infer | schema init --xml 2>&1`
492 | status="$?"
493 | 
494 | expect_status='0'
495 | expect='
496 | false
497 | '
498 | }
499 | 
500 | init_xml_array_no_populate_lists() {
501 | output=`printf '{"truthfulnesses": [true,false,false,true]}' | schema infer | schema init --xml --populate-lists=false 2>&1`
502 | status="$?"
503 | 
504 | expect_status='0'
505 | expect='
506 | '
507 | }
508 | 
509 | init_xml_nested_object() {
510 | output=`printf '{"myObj": {"field1":1,"field2":"Finland"}}' | schema infer | schema init --xml 2>&1`
511 | status="$?"
512 | 
513 | expect_status='0'
514 | expect='
515 | 0
516 | 
517 | '
518 | }
519 |
520 | init_xml_integer_and_required() {  # NOTE(review): expectations appear tagless here too (see note on init_xml_minimal)
521 | output=`printf '{
522 | "title": "Person",
523 | "type": "object",
524 | "properties": {
525 | "firstName": {
526 | "type": "string"
527 | },
528 | "age": {
529 | "description": "Age in years",
530 | "type": "integer",
531 | "minimum": 0
532 | }
533 | },
534 | "required": ["firstName", "age"]
535 | }' | schema init --xml 2>&1`
536 | status="$?"
537 | 
538 | expect_either_or='true'
539 | expect_status='0'
540 | expect_either='
541 | 0
542 | 
543 | '
544 | expect_or='
545 | 
546 | 0
547 | '
548 | }
549 | 
550 | setup_init_xml_follow_ref() {  # setup hook: marks the follow_ref test as network-dependent
551 | requires_network='true'
552 | status='0'
553 | expect_status='0'
554 | }
555 | 
556 | init_xml_follow_ref() {
557 | requires_network='true'  # NOTE(review): redundant — already set by the setup hook above
558 | 
559 | output=`printf '{
560 | "$schema": "http://json-schema.org/draft-06/schema#",
561 | "description": "A representation of a person, company, organization, or place",
562 | "type": "object",
563 | "properties": {
564 | "geo": { "$ref": "http://json-schema.org/learn/examples/geographical-location.schema.json" }
565 | }
566 | }' | schema init --xml 2>&1`
567 | status="$?"
568 | 
569 | expect_either_or='true'
570 | expect_status='0'
571 | expect_either='
572 | 0
573 | 0
574 | '
575 | expect_or='
576 | 0
577 | 0
578 | '
579 | }
580 | 
581 | init_xml_skip_ref() {
582 | output=`printf '{
583 | "$schema": "http://json-schema.org/draft-06/schema#",
584 | "description": "A representation of a person, company, organization, or place",
585 | "type": "object",
586 | "properties": {
587 | "geo": { "$ref": "http://json-schema.org/learn/examples/geographical-location.schema.json" }
588 | }
589 | }' | schema init --xml --skip-refs 2>&1`
590 | status="$?"
591 | 
592 | expect_status='0'
593 | expect='' # clbanning/mxj has this behavior...
594 | }
595 |
596 |
# --- `schema init --random` tests ------------------------------------------
# Each test infers a schema from a JSON document, instantiates it with random
# values, then re-infers the schema of the result. The round-tripped schema
# must match the expectation, proving random init respects field types.

init_random_minimal() {
output=$(printf '{}' | schema infer | schema init --random | schema infer 2>&1)
status=$?

expect_status='0'
expect='{
"title": "",
"type": "object",
"properties": {}
}'
}

init_random_string() {
output=$(printf '{"name": "Thomas"}' | schema infer | schema init --random | schema infer 2>&1)
status=$?

expect_status='0'
expect='{
"title": "",
"type": "object",
"properties": {
"name": {
"type": "string"
}
}
}'
}

init_random_number() {
output=$(printf '{"age": 20}' | schema infer | schema init --random | schema infer 2>&1)
status=$?

expect_status='0'
expect='{
"title": "",
"type": "object",
"properties": {
"age": {
"type": "number"
}
}
}'
}

init_random_boolean() {
output=$(printf '{"isHandsome": true}' | schema infer | schema init --random | schema infer 2>&1)
status=$?

expect_status='0'
expect='{
"title": "",
"type": "object",
"properties": {
"isHandsome": {
"type": "boolean"
}
}
}'
}

init_random_null() {
output=$(printf '{"badField": null}' | schema infer | schema init --random | schema infer 2>&1)
status=$?

expect_status='0'
expect='{
"title": "",
"type": "object",
"properties": {
"badField": {
"type": "null"
}
}
}'
}

init_random_array() {
output=$(printf '{"truthfulnesses": [true,false,false,true]}' | schema infer | schema init --random | schema infer 2>&1)
status=$?

expect_status='0'
expect='{
"title": "",
"type": "object",
"properties": {
"truthfulnesses": {
"type": "array",
"items": {
"type": "boolean"
}
}
}
}'
}

# No re-inference here: with --populate-lists=false the instance itself
# (an empty array) is the thing under test.
init_random_array_no_pop_lists() {
output=$(printf '{"truthfulnesses": [true,false,false,true]}' | schema infer | schema init --populate-lists=false --random 2>&1)
status=$?

expect_status='0'
expect='{
"truthfulnesses": []
}'
}

init_random_nested_object() {
output=$(printf '{"myObj": {"field1":1,"field2":"Finland"}}' | schema infer | schema init --random | schema infer 2>&1)
status=$?

expect_status='0'
expect='{
"title": "",
"type": "object",
"properties": {
"myObj": {
"title": "",
"type": "object",
"properties": {
"field1": {
"type": "number"
},
"field2": {
"type": "string"
}
}
}
}
}'
}

# A hand-written schema with an integer type and a required list; the
# re-inferred schema reports "number" (inference cannot distinguish ints).
init_random_integer_and_required() {
output=$(printf '{
"title": "Person",
"type": "object",
"properties": {
"firstName": {
"type": "string"
},
"age": {
"description": "Age in years",
"type": "integer",
"minimum": 0
}
},
"required": ["firstName", "age"]
}' | schema init --random | schema infer 2>&1)
status=$?

expect_status='0'
expect='{
"title": "",
"type": "object",
"properties": {
"age": {
"type": "number"
},
"firstName": {
"type": "string"
}
}
}'
}
759 |
# Registry of test functions for this file; the harness (lib.sh) calls
# setup_test/run_test on each name in order.
tests=(
"init_invalid_schema"
"init_json_minimal"
"init_json_string"
"init_json_number"
"init_json_boolean"
"init_json_null"
"init_json_array"
"init_json_array_no_pop_lists"
"init_json_nested_object"
"init_json_integer_and_required"
"init_json_follow_ref"
"init_json_skip_ref"
"init_yaml_minimal"
"init_yaml_string"
"init_yaml_number"
"init_yaml_boolean"
"init_yaml_null"
"init_yaml_array"
"init_yaml_array_no_populate_lists"
"init_yaml_nested_object"
"init_yaml_integer_and_required"
"init_yaml_follow_ref"
"init_yaml_skip_ref"
"init_toml_minimal"
"init_toml_string"
"init_toml_number"
"init_toml_boolean"
"init_toml_null"
"init_toml_array"
"init_toml_array_no_populate_lists"
"init_toml_nested_object"
"init_toml_integer_and_required"
"init_toml_follow_ref"
"init_toml_skip_ref"
"init_xml_minimal"
"init_xml_string"
"init_xml_number"
"init_xml_boolean"
"init_xml_null"
"init_xml_array"
"init_xml_array_no_populate_lists"
"init_xml_nested_object"
"init_xml_integer_and_required"
"init_xml_follow_ref"
"init_xml_skip_ref"
"init_random_minimal"
"init_random_string"
"init_random_number"
"init_random_boolean"
"init_random_null"
"init_random_array"
"init_random_array_no_pop_lists"
"init_random_nested_object"
"init_random_integer_and_required"
)
816 |
--------------------------------------------------------------------------------
/e2e_tests/lib.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
# setup_test NAME
#
# Runs the optional per-test setup hook "setup_NAME", if one is defined.
# The hook sets $status, $expect_status and $output; if the actual status
# does not match the expected one, the whole suite aborts with the output.
setup_test() {
    fn="setup_$1"
    # No setup hook declared for this test: nothing to do.
    if [ ! "$(type -t "$fn")" = "function" ]; then
        return
    fi
    printf "Setting up '%s'..." "$fn"

    "$fn"

    if [ ! "$status" = "$expect_status" ]; then
        # Pass dynamic values as %s arguments, never inside the format
        # string: a '%' in the captured output must not be interpreted
        # as a printf directive.
        printf "FAIL. Expected status %s, but got %s\n" "$expect_status" "$status"
        printf "Output:\n%s" "$output"
        exit 1
    fi
    printf "OK\n"
}
19 |
# run_test NAME
#
# Runs test function NAME and compares $status/$output against the
# expectations it sets: $expect_status plus either $expect, or
# $expect_either/$expect_or when $expect_either_or is 'true' (used for
# serializers whose map-key order is nondeterministic). Tests flagged
# with $requires_network are skipped when $offline_mode is 'true'.
# Aborts the suite on the first failure.
run_test() {
    fn="$1"

    printf "Running '%s'..." "$fn"
    if [ ! "$(type -t "$fn")" = "function" ]; then
        printf "FAIL. '%s' is not a function in the test scripts\n" "$fn"
        exit 1
    fi

    if [ "$requires_network" = 'true' ] && [ "$offline_mode" = 'true' ]; then
        printf "SKIPPING (requires network)\n"
        requires_network='false'
        return
    fi
    requires_network='false'

    "$fn"

    # Pass dynamic values as %s arguments, never inside the format string:
    # a '%' in the captured output must not be treated as a directive.
    if [ ! "$status" = "$expect_status" ]; then
        printf "FAIL. Expected status %s, but got %s\n" "$expect_status" "$status"
        printf "Output:\n%s\n" "$output"
        exit 1
    fi

    if [ "$expect_either_or" = 'true' ]; then
        diffe="$(diff <(printf "%s" "$expect_either") <(printf "%s" "$output"))"
        diffo="$(diff <(printf "%s" "$expect_or") <(printf "%s" "$output"))"
        if [ ! -z "$diffe" ] && [ ! -z "$diffo" ]; then
            printf "FAIL. Expected either:\n%s\n" "$expect_either"
            printf "or:\n%s\n" "$expect_or"
            printf "but got:\n%s\n" "$output"
            printf "diff (either):\n%s\n" "$diffe"
            printf "diff (or):\n%s\n" "$diffo"
            exit 1
        fi
        expect_either_or='false'
    else
        difference="$(diff <(printf "%s" "$expect") <(printf "%s" "$output"))"
        if [ ! -z "$difference" ]; then
            printf "FAIL. Expected output:\n%s\nBut got:\n%s\n" "$expect" "$output"
            exit 1
        fi
    fi
    printf "OK\n"
}
65 |
--------------------------------------------------------------------------------
/e2e_tests/translate.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
# A stream that matches no supported input format must fail with a clear
# error message and a non-zero exit status.
translate_unrecognized_format() {
output=$(printf '{' | schema translate 2>&1)
status=$?

expect_status='1'
expect='error: failed to recognize input data format'
}

# --- JSON -> JSON -----------------------------------------------------------
# With no target flag, `schema translate` pretty-prints back to JSON.

translate_json_to_json_minimal() {
output=$(printf '{}' | schema translate 2>&1)
status=$?

expect_status='0'
expect='{}'
}

# Map key order is nondeterministic, so either rendering is accepted.
translate_json_to_json_basic() {
output=$(printf '{"name":"Bladee","Iceland":42}' | schema translate 2>&1)
status=$?

expect_either_or='true'
expect_status='0'
expect_either='{
"name": "Bladee",
"Iceland": 42
}'
expect_or='{
"Iceland": 42,
"name": "Bladee"
}'
}

translate_json_to_json_bool() {
output=$(printf '{"name":"Bladee","Iceland":true}' | schema translate 2>&1)
status=$?

expect_either_or='true'
expect_status='0'
expect_either='{
"name": "Bladee",
"Iceland": true
}'
expect_or='{
"Iceland": true,
"name": "Bladee"
}'
}

translate_json_to_json_array_of_objects() {
output=$(printf '{"people":[{"name":"Shane MacGowan","ring":"The Snake"}]}' | schema translate 2>&1)
status=$?

expect_either_or='true'
expect_status='0'
expect_either='{
"people": [
{
"name": "Shane MacGowan",
"ring": "The Snake"
}
]
}'
expect_or='{
"people": [
{
"ring": "The Snake",
"name": "Shane MacGowan"
}
]
}'
}
74 |
# --- JSON -> YAML -----------------------------------------------------------

translate_json_to_yaml_minimal() {
output=`printf '{}' | schema translate --yaml 2>&1`
status="$?"

expect_status='0'
expect='{}'
}

# Key order is nondeterministic; either rendering is accepted.
translate_json_to_yaml_basic() {
output=`printf '{"name":"Bladee","Iceland":42}' | schema translate --yaml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='name: Bladee
Iceland: 42'
expect_or='Iceland: 42
name: Bladee'
}

translate_json_to_yaml_bool() {
output=`printf '{"name":"Bladee","Iceland":true}' | schema translate --yaml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='name: Bladee
Iceland: true'
expect_or='Iceland: true
name: Bladee'
}

translate_json_to_yaml_array_of_objects() {
output=`printf '{"people":[{"name":"Shane MacGowan","ring":"The Snake"}]}' | schema translate --yaml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='people:
- name: Shane MacGowan
ring: The Snake'
expect_or='people:
- ring: The Snake
name: Shane MacGowan'
}

# --- JSON -> TOML -----------------------------------------------------------

# An empty document serializes to empty TOML output.
translate_json_to_toml_minimal() {
output=`printf '{}' | schema translate --toml 2>&1`
status="$?"

expect_status='0'
expect=''
}

# NOTE(review): 4.2e+01 suggests the JSON number round-trips through a
# float and the TOML encoder renders it in scientific notation — confirm
# against the decoder before relying on this.
translate_json_to_toml_basic() {
output=`printf '{"name":"Bladee","Iceland":42}' | schema translate --toml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='name = "Bladee"
Iceland = 4.2e+01'
expect_or='Iceland = 4.2e+01
name = "Bladee"'
}

translate_json_to_toml_bool() {
output=`printf '{"name":"Bladee","Iceland":true}' | schema translate --toml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='name = "Bladee"
Iceland = true'
expect_or='Iceland = true
name = "Bladee"'
}

translate_json_to_toml_array_of_objects() {
output=`printf '{"people":[{"name":"Shane MacGowan","ring":"The Snake"}]}' | schema translate --toml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='[[people]]
name = "Shane MacGowan"
ring = "The Snake"'
expect_or='[[people]]
ring = "The Snake"
name = "Shane MacGowan"'
}
166 |
# --- JSON -> XML ------------------------------------------------------------
# NOTE(review): the XML in the expected outputs below appears to have had
# its tags stripped when this file was exported; verify the literal strings
# against the repository copy before editing them.

translate_json_to_xml_minimal() {
output=`printf '{}' | schema translate --xml 2>&1`
status="$?"

expect_status='0'
expect=''
}

# Key order is nondeterministic; either rendering is accepted.
translate_json_to_xml_basic() {
output=`printf '{"name":"Bladee","Iceland":42}' | schema translate --xml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='
42
Bladee
'
expect_or='
Bladee
42
'
}

translate_json_to_xml_bool() {
output=`printf '{"name":"Bladee","Iceland":true}' | schema translate --xml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='
true
Bladee
'
expect_or='
Bladee
true
'
}

translate_json_to_xml_array_of_objects() {
output=`printf '{"people":[{"name":"Shane MacGowan"},{"name":"The Snake"}]}' | schema translate --xml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='

Shane MacGowan


The Snake

'
expect_or='

Shane MacGowan


The Snake

'
}
230 |
# --- YAML -> JSON -----------------------------------------------------------
# '{}' is both valid JSON and valid YAML, so the minimal cases overlap with
# the JSON ones; format detection decides which decoder runs.

translate_yaml_to_json_minimal() {
output=`printf '{}' | schema translate 2>&1`
status="$?"

expect_status='0'
expect='{}'
}

# Key order is nondeterministic; either rendering is accepted.
translate_yaml_to_json_basic() {
output=`printf 'name: Bladee\nIceland: 42' | schema translate 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='{
"name": "Bladee",
"Iceland": 42
}'
expect_or='{
"Iceland": 42,
"name": "Bladee"
}'
}

translate_yaml_to_json_bool() {
output=`printf 'name: Bladee\nIceland: true' | schema translate 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='{
"name": "Bladee",
"Iceland": true
}'
expect_or='{
"Iceland": true,
"name": "Bladee"
}'
}

translate_yaml_to_json_array_of_objects() {
output=`printf 'people:\n- name: "Shane MacGowan"\n  ring: "The Snake"' | schema translate 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='{
"people": [
{
"name": "Shane MacGowan",
"ring": "The Snake"
}
]
}'
expect_or='{
"people": [
{
"ring": "The Snake",
"name": "Shane MacGowan"
}
]
}'
}

# --- YAML -> YAML -----------------------------------------------------------

translate_yaml_to_yaml_minimal() {
output=`printf '{}' | schema translate --yaml 2>&1`
status="$?"

expect_status='0'
expect='{}'
}

translate_yaml_to_yaml_basic() {
output=`printf 'name: Bladee\nIceland: 42' | schema translate --yaml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='name: Bladee
Iceland: 42'
expect_or='Iceland: 42
name: Bladee'
}

translate_yaml_to_yaml_bool() {
output=`printf 'name: Bladee\nIceland: true' | schema translate --yaml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='name: Bladee
Iceland: true'
expect_or='Iceland: true
name: Bladee'
}

translate_yaml_to_yaml_array_of_objects() {
output=`printf 'people:\n- name: "Shane MacGowan"\n  ring: "The Snake"' | schema translate --yaml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='people:
- name: Shane MacGowan
ring: The Snake'
expect_or='people:
- ring: The Snake
name: Shane MacGowan'
}

# --- YAML -> TOML -----------------------------------------------------------

translate_yaml_to_toml_minimal() {
output=`printf '{}' | schema translate --toml 2>&1`
status="$?"

expect_status='0'
expect=''
}

translate_yaml_to_toml_basic() {
output=`printf 'name: Bladee\nIceland: 42' | schema translate --toml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='name = "Bladee"
Iceland = 42'
expect_or='Iceland = 42
name = "Bladee"'
}

translate_yaml_to_toml_bool() {
output=`printf 'name: Bladee\nIceland: true' | schema translate --toml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='name = "Bladee"
Iceland = true'
expect_or='Iceland = true
name = "Bladee"'
}

translate_yaml_to_toml_array_of_objects() {
output=`printf 'people:\n- name: "Shane MacGowan"\n  ring: "The Snake"' | schema translate --toml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='[[people]]
name = "Shane MacGowan"
ring = "The Snake"'
expect_or='[[people]]
ring = "The Snake"
name = "Shane MacGowan"'
}
386 |
# --- YAML -> XML ------------------------------------------------------------
# NOTE(review): XML tags in the expected outputs below appear stripped in
# this export; verify the literal strings against the repository copy.

translate_yaml_to_xml_minimal() {
output=`printf '{}' | schema translate --xml 2>&1`
status="$?"

expect_status='0'
expect=''
}

# Key order is nondeterministic; either rendering is accepted.
translate_yaml_to_xml_basic() {
output=`printf 'name: Bladee\nIceland: 42' | schema translate --xml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='
42
Bladee
'
expect_or='
Bladee
42
'
}

translate_yaml_to_xml_bool() {
output=`printf 'name: Bladee\nIceland: true' | schema translate --xml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='
true
Bladee
'
expect_or='
Bladee
true
'
}

translate_yaml_to_xml_array_of_objects() {
output=`printf 'people:\n- name: "Shane MacGowan"\n- name: "The Snake"' | schema translate --xml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='

Shane MacGowan


The Snake

'
expect_or='

Shane MacGowan


The Snake

'
}
450 |
# --- TOML -> JSON -----------------------------------------------------------
# Empty input is the minimal TOML document and translates to an empty
# JSON object.

translate_toml_to_json_minimal() {
output=`printf '' | schema translate 2>&1`
status="$?"

expect_status='0'
expect='{}'
}

# Key order is nondeterministic; either rendering is accepted.
translate_toml_to_json_basic() {
output=`printf 'name = "Bladee"\nIceland = 42' | schema translate 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='{
"name": "Bladee",
"Iceland": 42
}'
expect_or='{
"Iceland": 42,
"name": "Bladee"
}'
}

translate_toml_to_json_bool() {
output=`printf 'name = "Bladee"\nIceland = true' | schema translate 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='{
"name": "Bladee",
"Iceland": true
}'
expect_or='{
"Iceland": true,
"name": "Bladee"
}'
}

translate_toml_to_json_array_of_objects() {
output=`printf '[[people]]\nname = "Shane MacGowan"\nring = "The Snake"' | schema translate 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='{
"people": [
{
"name": "Shane MacGowan",
"ring": "The Snake"
}
]
}'
expect_or='{
"people": [
{
"ring": "The Snake",
"name": "Shane MacGowan"
}
]
}'
}

# --- TOML -> YAML -----------------------------------------------------------

translate_toml_to_yaml_minimal() {
output=`printf '' | schema translate --yaml 2>&1`
status="$?"

expect_status='0'
expect='{}'
}

translate_toml_to_yaml_basic() {
output=`printf 'name = "Bladee"\nIceland = 42' | schema translate --yaml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='name: Bladee
Iceland: 42'
expect_or='Iceland: 42
name: Bladee'
}

translate_toml_to_yaml_bool() {
output=`printf 'name = "Bladee"\nIceland = true' | schema translate --yaml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='name: Bladee
Iceland: true'
expect_or='Iceland: true
name: Bladee'
}

translate_toml_to_yaml_array_of_objects() {
output=`printf '[[people]]\nname = "Shane MacGowan"\nring = "The Snake"' | schema translate --yaml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='people:
- name: Shane MacGowan
ring: The Snake'
expect_or='people:
- ring: The Snake
name: Shane MacGowan'
}

# --- TOML -> TOML -----------------------------------------------------------

translate_toml_to_toml_minimal() {
output=`printf '' | schema translate --toml 2>&1`
status="$?"

expect_status='0'
expect=''
}

translate_toml_to_toml_basic() {
output=`printf 'name = "Bladee"\nIceland = 42' | schema translate --toml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='name = "Bladee"
Iceland = 42'
expect_or='Iceland = 42
name = "Bladee"'
}

translate_toml_to_toml_bool() {
output=`printf 'name = "Bladee"\nIceland = true' | schema translate --toml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='name = "Bladee"
Iceland = true'
expect_or='Iceland = true
name = "Bladee"'
}

translate_toml_to_toml_array_of_objects() {
output=`printf '[[people]]\nname = "Shane MacGowan"\nring = "The Snake"' | schema translate --toml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='[[people]]
name = "Shane MacGowan"
ring = "The Snake"'
expect_or='[[people]]
ring = "The Snake"
name = "Shane MacGowan"'
}
606 |
607 |
# --- TOML -> XML ------------------------------------------------------------
# NOTE(review): XML tags in the expected outputs below appear stripped in
# this export; verify the literal strings against the repository copy.

# NOTE(review): unlike the other toml_* minimal tests (which pipe ''),
# this one pipes '{}', which format detection would treat as JSON — the
# test may not actually exercise the TOML decoder; confirm intent.
translate_toml_to_xml_minimal() {
output=`printf '{}' | schema translate --xml 2>&1`
status="$?"

expect_status='0'
expect=''
}

# Key order is nondeterministic; either rendering is accepted.
translate_toml_to_xml_basic() {
output=`printf 'name = "Bladee"\nIceland = 42' | schema translate --xml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='
42
Bladee
'
expect_or='
Bladee
42
'
}

translate_toml_to_xml_bool() {
output=`printf 'name = "Bladee"\nIceland = true' | schema translate --xml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='
true
Bladee
'
expect_or='
Bladee
true
'
}

translate_toml_to_xml_array_of_objects() {
output=`printf '[[people]]\nname = "Shane MacGowan"\n\n[[people]]\nname = "The Snake"' | schema translate --xml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='

Shane MacGowan


The Snake

'
expect_or='

Shane MacGowan


The Snake

'
}
671 |
# --- XML -> * ---------------------------------------------------------------
# NOTE(review): the XML input documents (and XML expected outputs) below
# appear to have had their tags stripped in this export; verify every
# literal string against the repository copy before editing. XML decoding
# wraps everything under a root "doc" element, and element text is always
# decoded as strings (see the inline comments kept from the original).

translate_xml_to_json_minimal() {
output=`printf '' | schema translate 2>&1`
status="$?"

expect_status='0'
expect='{}'
}

# Key order is nondeterministic; either rendering is accepted.
translate_xml_to_json_basic() {
output=`printf 'Bladee42' | schema translate 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='{
"doc": {
"Iceland": "42",
"name": "Bladee"
}
}' # cannot infer numbers from XML
expect_or='{
"doc": {
"name": "Bladee",
"Iceland": "42"
}
}' # cannot infer numbers from XML
}

translate_xml_to_json_bool() {
output=`printf 'Bladeetrue' | schema translate 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='{
"doc": {
"Iceland": "true",
"name": "Bladee"
}
}' # cannot infer bools from XML
expect_or='{
"doc": {
"name": "Bladee",
"Iceland": "true"
}
}' # cannot infer bools from XML
}

translate_xml_to_json_array_of_objects() {
output=`printf 'Shane MacGowanThe Snake' | schema translate 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='{
"doc": {
"people": [
{
"name": "Shane MacGowan"
},
{
"name": "The Snake"
}
]
}
}'
expect_or='{
"doc": {
"people": [
{
"name": "The Snake"
},
{
"name": "Shane MacGowan"
}
]
}
}'
}

translate_xml_to_yaml_minimal() {
output=`printf '' | schema translate --yaml 2>&1`
status="$?"

expect_status='0'
expect='{}'
}

translate_xml_to_yaml_basic() {
output=`printf 'Bladee42' | schema translate --yaml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='doc:
name: Bladee
Iceland: "42"' # cannot infer numbers from XML
expect_or='doc:
Iceland: "42"
name: Bladee' # cannot infer numbers from XML
}

translate_xml_to_yaml_bool() {
output=`printf 'Bladeetrue' | schema translate --yaml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='doc:
name: Bladee
Iceland: "true"'
expect_or='doc:
Iceland: "true"
name: Bladee'
}

translate_xml_to_yaml_array_of_objects() {
output=`printf 'Shane MacGowanThe Snake' | schema translate --yaml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='doc:
people:
- name: Shane MacGowan
- name: The Snake'
expect_or='doc:
people:
- name: The Snake
- name: Shane MacGowan'
}

translate_xml_to_toml_minimal() {
output=`printf '' | schema translate --toml 2>&1`
status="$?"

expect_status='0'
expect=''
}

# NOTE(review): expect_either and expect_or are identical here — possibly
# an artifact of the stripped export, or a redundant pair; confirm against
# the repository copy.
translate_xml_to_toml_basic() {
output=`printf 'Bladee42' | schema translate --toml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='[doc]
Iceland = "42"
name = "Bladee"' # cannot infer numbers from XML
expect_or='[doc]
Iceland = "42"
name = "Bladee"' # cannot infer numbers from XML
}

translate_xml_to_toml_bool() {
output=`printf 'Bladeetrue' | schema translate --toml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='[doc]
Iceland = "true"
name = "Bladee"'
expect_or='[doc]
name = "Bladee"
Iceland = "true"'
}

translate_xml_to_toml_array_of_objects() {
output=`printf 'Shane MacGowanThe Snake' | schema translate --toml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='[[doc.people]]
name = "Shane MacGowan"

[[doc.people]]
name = "The Snake"'
expect_or='[[doc.people]]
name = "The Snake"

[[doc.people]]
name = "Shane MacGowan"'
}

translate_xml_to_xml_minimal() {
output=`printf '' | schema translate --xml 2>&1`
status="$?"

expect_status='0'
expect=''
}

translate_xml_to_xml_basic() {
output=`printf 'Bladee42' | schema translate --xml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='
42
Bladee
'
expect_or='
Bladee
42
'
}

translate_xml_to_xml_bool() {
output=`printf 'Bladeetrue' | schema translate --xml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='
true
Bladee
'
expect_or='
Bladee
true
'
}

translate_xml_to_xml_array_of_objects() {
output=`printf 'Shane MacGowanThe Snake' | schema translate --xml 2>&1`
status="$?"

expect_either_or='true'
expect_status='0'
expect_either='

Shane MacGowan


The Snake

'
expect_or='

The Snake


Shane MacGowan

'
}
921 |
# Registry of test functions for this file; the harness (lib.sh) calls
# setup_test/run_test on each name in order.
tests=(
"translate_unrecognized_format"
"translate_json_to_json_minimal"
"translate_json_to_json_basic"
"translate_json_to_json_bool"
"translate_json_to_json_array_of_objects"
"translate_json_to_yaml_minimal"
"translate_json_to_yaml_basic"
"translate_json_to_yaml_bool"
"translate_json_to_yaml_array_of_objects"
"translate_json_to_toml_minimal"
"translate_json_to_toml_basic"
"translate_json_to_toml_bool"
"translate_json_to_toml_array_of_objects"
"translate_json_to_xml_minimal"
"translate_json_to_xml_basic"
"translate_json_to_xml_bool"
"translate_json_to_xml_array_of_objects"
"translate_yaml_to_json_minimal"
"translate_yaml_to_json_basic"
"translate_yaml_to_json_bool"
"translate_yaml_to_json_array_of_objects"
"translate_yaml_to_yaml_minimal"
"translate_yaml_to_yaml_basic"
"translate_yaml_to_yaml_bool"
"translate_yaml_to_yaml_array_of_objects"
"translate_yaml_to_toml_minimal"
"translate_yaml_to_toml_basic"
"translate_yaml_to_toml_bool"
"translate_yaml_to_toml_array_of_objects"
"translate_yaml_to_xml_minimal"
"translate_yaml_to_xml_basic"
"translate_yaml_to_xml_bool"
"translate_yaml_to_xml_array_of_objects"
"translate_toml_to_json_minimal"
"translate_toml_to_json_basic"
"translate_toml_to_json_bool"
"translate_toml_to_json_array_of_objects"
"translate_toml_to_yaml_minimal"
"translate_toml_to_yaml_basic"
"translate_toml_to_yaml_bool"
"translate_toml_to_yaml_array_of_objects"
"translate_toml_to_toml_minimal"
"translate_toml_to_toml_basic"
"translate_toml_to_toml_bool"
"translate_toml_to_toml_array_of_objects"
"translate_toml_to_xml_minimal"
"translate_toml_to_xml_basic"
"translate_toml_to_xml_bool"
"translate_toml_to_xml_array_of_objects"
"translate_xml_to_json_minimal"
"translate_xml_to_json_basic"
"translate_xml_to_json_bool"
"translate_xml_to_json_array_of_objects"
"translate_xml_to_yaml_minimal"
"translate_xml_to_yaml_basic"
"translate_xml_to_yaml_bool"
"translate_xml_to_yaml_array_of_objects"
"translate_xml_to_toml_minimal"
"translate_xml_to_toml_basic"
"translate_xml_to_toml_bool"
"translate_xml_to_toml_array_of_objects"
"translate_xml_to_xml_minimal"
"translate_xml_to_xml_basic"
"translate_xml_to_xml_bool"
"translate_xml_to_xml_array_of_objects"
)
989 |
--------------------------------------------------------------------------------
/example/example.go:
--------------------------------------------------------------------------------
1 | package example
2 |
// Example wraps an arbitrary decoded document as a string-keyed map.
type Example struct {
	// Data holds the document's top-level fields.
	Data map[string]interface{}
}

// New returns an Example wrapping the given data map.
// The map is stored as-is (not copied), so the caller and the
// returned Example share ownership of it.
func New(data map[string]interface{}) *Example {
	return &Example{Data: data}
}
10 |
--------------------------------------------------------------------------------
/go.mod:
--------------------------------------------------------------------------------
1 | module github.com/Confbase/schema
2 |
3 | require (
4 | github.com/clbanning/mxj v0.0.0-20180511113302-7265c0ca7fa0
5 | github.com/inconshreveable/mousetrap v1.0.0
6 | github.com/naoina/go-stringutil v0.1.0
7 | github.com/naoina/toml v0.0.0-20170918210437-9fafd6967416
8 | github.com/spf13/cobra v0.0.3
9 | github.com/spf13/pflag v1.0.1
10 | gopkg.in/yaml.v2 v2.2.1
11 | )
12 |
--------------------------------------------------------------------------------
/go.sum:
--------------------------------------------------------------------------------
1 | github.com/clbanning/mxj v0.0.0-20180511113302-7265c0ca7fa0 h1:1yignAzDJ72Vhc+G2P8sausmNjqdCfCF387krxj4/WY=
2 | github.com/clbanning/mxj v0.0.0-20180511113302-7265c0ca7fa0/go.mod h1:BVjHeAH+rl9rs6f+QIpeRl0tfu10SXn1pUSa5PVGJng=
3 | github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
4 | github.com/naoina/go-stringutil v0.1.0 h1:rCUeRUHjBjGTSHl0VC00jUPLz8/F9dDzYI70Hzifhks=
5 | github.com/naoina/go-stringutil v0.1.0/go.mod h1:XJ2SJL9jCtBh+P9q5btrd/Ylo8XwT/h1USek5+NqSA0=
6 | github.com/naoina/toml v0.0.0-20170918210437-9fafd6967416 h1:9M852Z3gvzUmyFvy+TruhDWCwcIG3cZWg/+Eh8VkR7M=
7 | github.com/naoina/toml v0.0.0-20170918210437-9fafd6967416/go.mod h1:NBIhNtsFMo3G2szEBne+bO4gS192HuIYRqfvOWb4i1E=
8 | github.com/spf13/cobra v0.0.3 h1:ZlrZ4XsMRm04Fr5pSFxBgfND2EBVa1nLpiy1stUsX/8=
9 | github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
10 | github.com/spf13/pflag v1.0.1 h1:aCvUg6QPl3ibpQUxyLkrEkCHtPqYJL4x9AuhqVqFis4=
11 | github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
12 | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
13 | gopkg.in/yaml.v2 v2.2.1 h1:mUhvW9EsL+naU5Q3cakzfE91YhliOondGd6ZrsDBHQE=
14 | gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
15 |
--------------------------------------------------------------------------------
/graphqlsch/graphqlsch.go:
--------------------------------------------------------------------------------
1 | package graphqlsch
2 |
3 | import (
4 | "fmt"
5 | "strings"
6 | )
7 |
8 | type Schema struct {
9 | Types []Type
10 | }
11 |
12 | func New(types []Type) *Schema {
13 | return &Schema{Types: types}
14 | }
15 |
16 | type Type struct {
17 | Name string
18 | Fields []Field
19 | }
20 |
21 | func NewType(name string, fields []Field) Type {
22 | return Type{
23 | Name: name,
24 | Fields: fields,
25 | }
26 | }
27 |
28 | func (t Type) ToString() string {
29 | lines := make([]string, 0)
30 | lines = append(lines, fmt.Sprintf("type %v {", t.Name))
31 | for _, f := range t.Fields {
32 | lines = append(lines, f.ToString())
33 | }
34 | lines = append(lines, "}")
35 | return strings.Join(lines, "\n")
36 | }
37 |
// Field is a single field of a GraphQL object type.
type Field struct {
	Name           string
	Type           PrimitiveType
	IsNullable     bool
	IsArray        bool
	IsElemNullable bool
	ArrayDim       uint
}

// ToString renders the field as one indented GraphQL SDL line,
// adding array brackets per dimension and non-null (!) markers for
// the element and the field itself.
func (f Field) ToString() string {
	typeStr := string(f.Type)
	if f.IsArray {
		if !f.IsElemNullable {
			typeStr += "!"
		}
		// One pair of brackets per array dimension.
		for level := uint(0); level < f.ArrayDim; level++ {
			typeStr = "[" + typeStr + "]"
		}
	}
	if !f.IsNullable {
		typeStr += "!"
	}
	return fmt.Sprintf("  %v: %v", f.Name, typeStr)
}

// PrimitiveType names a GraphQL scalar (or, for nested objects, a
// generated type's name).
type PrimitiveType string

// The built-in GraphQL scalar types.
const (
	String  PrimitiveType = "String"
	Int     PrimitiveType = "Int"
	Float   PrimitiveType = "Float"
	Boolean PrimitiveType = "Boolean"
	ID      PrimitiveType = "ID"
)
76 |
--------------------------------------------------------------------------------
/graphqlsch/serialize.go:
--------------------------------------------------------------------------------
1 | package graphqlsch
2 |
3 | import (
4 | "fmt"
5 | "io"
6 | "strings"
7 | )
8 |
9 | func SerializeSchema(s *Schema, w io.Writer) error {
10 | strs := make([]string, 0)
11 | for _, t := range s.Types {
12 | strs = append(strs, t.ToString())
13 | }
14 | fmt.Fprintf(w, "%v", strings.Join(strs, "\n\n"))
15 | return nil
16 | }
17 |
--------------------------------------------------------------------------------
/images/schema_logo_circle_transparent.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Confbase/schema/d807e2c4a0070a12e46caf1a1a28c52384a2172b/images/schema_logo_circle_transparent.png
--------------------------------------------------------------------------------
/infer/config.go:
--------------------------------------------------------------------------------
1 | package infer
2 |
// Config holds the options for the `infer` subcommand.
type Config struct {
	DoPretty      bool   // pretty-print the serialized schema
	DoMakeReq     bool   // mark every inferred property as required
	DoOmitReq     bool   // omit the "required" field from serialized output
	DoGraphQL     bool   // emit a GraphQL schema instead of a JSON schema
	SchemaField   string // value for the "$schema" field ("" omits it)
	EmptyArraysAs string // element type assumed for empty arrays ("" = error)
	NullAs        string // schema type used for null values ("" = null)
}
12 |
--------------------------------------------------------------------------------
/infer/infer.go:
--------------------------------------------------------------------------------
1 | package infer
2 |
3 | import (
4 | "bytes"
5 | "fmt"
6 | "io"
7 | "io/ioutil"
8 | "os"
9 |
10 | "github.com/Confbase/schema/decode"
11 | "github.com/Confbase/schema/example"
12 | "github.com/Confbase/schema/jsonsch"
13 | )
14 |
15 | func InferEntry(cfg Config, targets []string) {
16 | if len(targets) == 0 {
17 | if err := Infer(os.Stdin, os.Stdout, cfg); err != nil {
18 | fmt.Fprintf(os.Stderr, "error: %v\n", err)
19 | os.Exit(1)
20 | }
21 | return
22 | }
23 |
24 | buf, err := ioutil.ReadAll(os.Stdin)
25 | if err != nil {
26 | fmt.Fprintf(os.Stderr, "error: failed to read from stdin\n%v", err)
27 | os.Exit(1)
28 | }
29 |
30 | for _, t := range targets {
31 | f, err := os.OpenFile(t, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644)
32 | if err != nil {
33 | fmt.Fprintf(os.Stderr, "error: failed to open '%v'\n%v\n", t, err)
34 | os.Exit(1)
35 | }
36 | defer f.Close()
37 |
38 | if err := Infer(bytes.NewReader(buf), f, cfg); err != nil {
39 | fmt.Fprintf(os.Stderr, "error: %v\n", err)
40 | os.Exit(1)
41 | }
42 | }
43 | }
44 |
45 | func Infer(r io.Reader, w io.Writer, cfg Config) error {
46 | data, err := decode.MuxDecode(r)
47 | if err != nil {
48 | return err
49 | }
50 |
51 | ex := example.New(data)
52 | params := jsonsch.FromExampleParams{
53 | DoOmitReq: cfg.DoOmitReq,
54 | DoMakeReq: cfg.DoMakeReq,
55 | EmptyArraysAs: cfg.EmptyArraysAs,
56 | NullAs: cfg.NullAs,
57 | }
58 | js, err := jsonsch.FromExample(ex, ¶ms)
59 | if err != nil {
60 | return fmt.Errorf("failed to infer schema\n%v", err)
61 | }
62 |
63 | js.SetSchemaField(cfg.SchemaField)
64 |
65 | if cfg.DoGraphQL {
66 | if err := jsonsch.SerializeGraphQL(js, w); err != nil {
67 | return fmt.Errorf("failed to serialize schema\n%v", err)
68 | }
69 | } else {
70 | if err := jsonsch.SerializeSchema(js, w, cfg.DoPretty); err != nil {
71 | return fmt.Errorf("failed to serialize schema\n%v", err)
72 | }
73 | }
74 |
75 | return nil
76 | }
77 |
--------------------------------------------------------------------------------
/initcmd/config.go:
--------------------------------------------------------------------------------
1 | package initcmd
2 |
// Config holds the options for the `init` subcommand.
type Config struct {
	SchemaPath string // path to the schema file ("" means read stdin)
	DoJson     bool
	DoYaml     bool
	DoToml     bool
	DoXml      bool
	DoProtobuf bool
	DoGraphQL  bool
	DoPretty   bool // pretty-print serialized output
	DoPopLists bool // populate arrays with one element
	DoSkipRefs bool // replace $ref objects instead of fetching them
	DoRandom   bool // fill primitives with random values
}

// OutFmt maps the output-format flags to a format name. The first
// flag set wins, checked in the order json, yaml, toml, xml,
// protobuf, graphql; json is the default when none is set.
func (cfg *Config) OutFmt() string {
	switch {
	case cfg.DoJson:
		return "json"
	case cfg.DoYaml:
		return "yaml"
	case cfg.DoToml:
		return "toml"
	case cfg.DoXml:
		return "xml"
	case cfg.DoProtobuf:
		return "protobuf"
	case cfg.DoGraphQL:
		return "graphql"
	default:
		return "json"
	}
}
38 |
--------------------------------------------------------------------------------
/initcmd/initcmd.go:
--------------------------------------------------------------------------------
1 | package initcmd
2 |
3 | import (
4 | "encoding/json"
5 | "fmt"
6 | "os"
7 |
8 | "github.com/Confbase/schema/decode"
9 | "github.com/Confbase/schema/jsonsch"
10 | )
11 |
12 | func Init(cfg Config, targets []string) {
13 |
14 | var data map[string]interface{}
15 | if cfg.SchemaPath == "" {
16 | if err := json.NewDecoder(os.Stdin).Decode(&data); err != nil {
17 | fmt.Fprintf(os.Stderr, "error: failed to parse data ")
18 | fmt.Fprintf(os.Stderr, "from stdin as JSON\n%v\n", err)
19 | os.Exit(1)
20 | }
21 | } else {
22 | f, err := os.Open(cfg.SchemaPath)
23 | if err != nil {
24 | fmt.Fprintf(os.Stderr, "error: failed to open ")
25 | fmt.Fprintf(os.Stderr, "'%v'\n%v\n", cfg.SchemaPath, err)
26 | os.Exit(1)
27 | }
28 | defer f.Close()
29 |
30 | if err := json.NewDecoder(f).Decode(&data); err != nil {
31 | fmt.Fprintf(os.Stderr, "error: failed to parse ")
32 | fmt.Fprintf(os.Stderr, "'%v' as JSON\n%v\n", cfg.SchemaPath, err)
33 | os.Exit(1)
34 | }
35 | }
36 | js, err := jsonsch.FromSchema(data, cfg.DoSkipRefs)
37 | if err != nil {
38 | fmt.Fprintf(os.Stderr, "error: input JSON is not a valid schema\n%v\n", err)
39 | os.Exit(1)
40 | }
41 |
42 | if len(targets) == 0 {
43 | inst, err := jsonsch.InitSchema(js, cfg.DoPopLists, cfg.DoRandom)
44 | if err != nil {
45 | fmt.Fprintf(os.Stderr, "error: failed to initialize ")
46 | fmt.Fprintf(os.Stderr, "instance of schema\n%v\n", err)
47 | os.Exit(1)
48 | }
49 | err = decode.DemuxEncode(os.Stdout, inst, cfg.OutFmt(), cfg.DoPretty)
50 | if err != nil {
51 | fmt.Fprintf(os.Stderr, "error: failed to serialize instance ")
52 | fmt.Fprintf(os.Stderr, "of schema\n%v\n", err)
53 | os.Exit(1)
54 | }
55 | return
56 | }
57 |
58 | for _, t := range targets {
59 | f, err := os.OpenFile(t, os.O_RDWR|os.O_CREATE, 0666)
60 | if err != nil {
61 | fmt.Fprintf(os.Stderr, "error: failed to open '%v'\n%v\n", t, err)
62 | os.Exit(1)
63 | }
64 | defer f.Close()
65 |
66 | inst, err := jsonsch.InitSchema(js, cfg.DoPopLists, cfg.DoRandom)
67 | if err != nil {
68 | fmt.Fprintf(os.Stderr, "error: failed to initialize instance of ")
69 | fmt.Fprintf(os.Stderr, "schema\n%v\n", err)
70 | os.Exit(1)
71 | }
72 | err = decode.DemuxEncode(f, inst, cfg.OutFmt(), cfg.DoPretty)
73 | if err != nil {
74 | fmt.Fprintf(os.Stderr, "error: failed to serialize instance of ")
75 | fmt.Fprintf(os.Stderr, "schema\n%v\n", err)
76 | os.Exit(1)
77 | }
78 | }
79 | }
80 |
--------------------------------------------------------------------------------
/jsonsch/from.go:
--------------------------------------------------------------------------------
1 | package jsonsch
2 |
3 | import (
4 | "fmt"
5 |
6 | "github.com/Confbase/schema/example"
7 | )
8 |
9 | func FromSchema(data map[string]interface{}, doSkipRefs bool) (Schema, error) {
10 | var js Schema
11 | js = NewInclReq()
12 |
13 | if err := ReplaceRefs(data, doSkipRefs); err != nil {
14 | return nil, err
15 | }
16 |
17 | // type field
18 | typeInter, ok := data["type"]
19 | if !ok {
20 | return nil, fmt.Errorf("'type' field does not exist")
21 | }
22 | jsType, ok := typeInter.(string)
23 | if !ok {
24 | return nil, fmt.Errorf("'type' field must be a string")
25 | }
26 | js.SetType(Type(jsType))
27 |
28 | // properties field
29 | propsInter, ok := data["properties"]
30 | if !ok {
31 | return nil, fmt.Errorf("'properties' field does not exist")
32 | }
33 | properties, ok := propsInter.(map[string]interface{})
34 | if !ok {
35 | return nil, fmt.Errorf("'properties' field must be an object")
36 |
37 | }
38 | for k, v := range properties {
39 | propObj, ok := v.(map[string]interface{})
40 | if !ok {
41 | return nil, fmt.Errorf("'properties' field must only contain objects")
42 | }
43 |
44 | subSchema, err := fromSchema(propObj, k)
45 | if err != nil {
46 | return nil, err
47 | }
48 | js.SetProperty(k, subSchema)
49 |
50 | }
51 |
52 | if reqInter, ok := data["required"]; ok {
53 | wrongType := false
54 | strSlice := make([]string, 0)
55 | interSlice, ok := reqInter.([]interface{})
56 | if ok {
57 | for _, v := range interSlice {
58 | if s, isStr := v.(string); isStr {
59 | strSlice = append(strSlice, s)
60 | } else {
61 | wrongType = false
62 | break
63 | }
64 | }
65 | }
66 | if !ok || wrongType {
67 | return nil, fmt.Errorf("'required' field must be an array of strings")
68 | }
69 | js.SetRequired(strSlice)
70 | }
71 | if titleInter, ok := data["title"]; ok {
72 | if title, ok := titleInter.(string); ok {
73 | js.SetTitle(title)
74 | } else {
75 | return nil, fmt.Errorf("'title' field must be a string")
76 | }
77 | }
78 | if descInter, ok := data["description"]; ok {
79 | if description, ok := descInter.(string); ok {
80 | js.SetDescription(description)
81 | } else {
82 | return nil, fmt.Errorf("'description' field must be a string")
83 | }
84 | }
85 |
86 | return js, nil
87 | }
88 |
89 | // fromSchema takes a map[string]interface{} and returns a
90 | // jsonsch-Typed schema object. The `k` field is not optional
91 | // and only used in error messages.
92 | func fromSchema(propObj map[string]interface{}, k string) (interface{}, error) {
93 | tInter, ok := propObj["type"]
94 | if !ok {
95 | return nil, fmt.Errorf("field '%v' does not have a 'type' field", k)
96 | }
97 | tStr, ok := tInter.(string)
98 | if !ok {
99 | return nil, fmt.Errorf("field '%v' has a 'type' field, but it's not a string", k)
100 | }
101 |
102 | params := FromExampleParams{
103 | DoOmitReq: false,
104 | DoMakeReq: false,
105 | EmptyArraysAs: "",
106 | NullAs: "",
107 | }
108 |
109 | switch Type(tStr) {
110 | case Null:
111 | return NewNull(¶ms), nil
112 | case Boolean:
113 | return NewBoolean(), nil
114 | case String:
115 | return NewString(), nil
116 |
117 | case Number, "integer", "float":
118 | return NewNumber(), nil
119 |
120 | case Array:
121 | itemsInter, ok := propObj["items"]
122 | if !ok {
123 | return nil, fmt.Errorf("key '%v' has 'type' field of value '%v', but no 'items' field", k, Array)
124 | }
125 | items, ok := itemsInter.(map[string]interface{})
126 | if !ok {
127 | return nil, fmt.Errorf("key '%v' has 'items' field, but it's not a map[string]interface{}", k)
128 | }
129 | arr, err := fromSchema(items, fmt.Sprintf("%v.items", k))
130 | if err != nil {
131 | return nil, err
132 | }
133 | return NewArray(arr), nil
134 |
135 | case Object:
136 | // value is another JSON object
137 | obj, err := FromSchema(propObj, true)
138 | if err != nil {
139 | return nil, err
140 | }
141 | return obj, nil
142 |
143 | default:
144 | return nil, fmt.Errorf("unknown type '%v'", tStr)
145 | }
146 |
147 | }
148 |
// FromExampleParams controls how FromExample builds a schema from an
// example document.
type FromExampleParams struct {
	DoOmitReq     bool   // use the schema variant that omits "required" when empty
	DoMakeReq     bool   // mark every top-level property as required
	EmptyArraysAs string // element type assumed for empty arrays ("" = error)
	NullAs        string // schema type used for null values ("" = null)
}
155 |
156 | func FromExample(ex *example.Example, params *FromExampleParams) (Schema, error) {
157 | var js Schema
158 | if params.DoOmitReq {
159 | js = NewOmitReq()
160 | } else {
161 | js = NewInclReq()
162 | }
163 |
164 | for key, value := range ex.Data {
165 | var childDst interface{}
166 | if err := buildSchema(value, &childDst, params); err != nil {
167 | return nil, err
168 | }
169 | js.SetProperty(key, childDst)
170 |
171 | if params.DoMakeReq {
172 | js.SetRequired(append(js.GetRequired(), key))
173 | }
174 | }
175 | return js, nil
176 | }
177 |
--------------------------------------------------------------------------------
/jsonsch/init.go:
--------------------------------------------------------------------------------
1 | package jsonsch
2 |
3 | import (
4 | "fmt"
5 | "math/rand"
6 | "reflect"
7 | )
8 |
9 | // InitSchema is the only exposed method in this file
10 | // InitSchema is a method on Schema which returns an
11 | // instance of the schema.
12 | //
13 | // InitSchema assumes all $ref fields are already either
14 | // 1) resolved and replaced by a network request
15 | // 2) replaced by an empty object
16 | func InitSchema(s Schema, doPopLists, doRandom bool) (map[string]interface{}, error) {
17 | data := make(map[string]interface{})
18 | for key, value := range s.GetProperties() {
19 | initValue, err := initSchema(value, doPopLists, doRandom, key)
20 | if err != nil {
21 | return nil, err
22 | }
23 | data[key] = initValue
24 | }
25 | return data, nil
26 | }
27 |
28 | func initSchema(schema interface{}, doPopLists, doRandom bool, k string) (interface{}, error) {
29 | switch v := schema.(type) {
30 | case Primitive:
31 | if doRandom {
32 | return initRandomPrimitive(v.Type, k)
33 | }
34 | return initPrimitive(v.Type, k)
35 | case ArraySchema:
36 | return initArray(v, doPopLists, doRandom, k)
37 | case Schema:
38 | return InitSchema(v, doPopLists, doRandom)
39 | default:
40 | return nil, fmt.Errorf("key '%v' has unrecognized type '%v'", k, reflect.TypeOf(schema))
41 | }
42 | }
43 |
44 | func initPrimitive(t Type, k string) (interface{}, error) {
45 | switch t {
46 | case Null:
47 | return nil, nil
48 | case String:
49 | return "", nil
50 | case Boolean:
51 | return false, nil
52 | case Integer, Number:
53 | return 0, nil
54 | default:
55 | return nil, fmt.Errorf("key '%v' (primitive) has unrecognized type '%v'", k, t)
56 | }
57 | }
58 |
59 | func initRandomPrimitive(t Type, k string) (interface{}, error) {
60 | switch t {
61 | case Null:
62 | return nil, nil
63 | case String:
64 | return randomString(), nil
65 | case Boolean:
66 | return rand.Intn(2) != 0, nil
67 | case Integer, Number:
68 | return rand.Int(), nil
69 | default:
70 | return nil, fmt.Errorf("key '%v' (primitive) has unrecognized type '%v'", k, t)
71 | }
72 | }
73 |
74 | func initArray(as ArraySchema, doPopLists, doRandom bool, k string) (interface{}, error) {
75 | arr := make([]interface{}, 0)
76 | if doPopLists {
77 | elem, err := initSchema(as.Items, doPopLists, doRandom, k)
78 | if err != nil {
79 | return nil, err
80 | }
81 | arr = append(arr, elem)
82 | }
83 | return arr, nil
84 | }
85 |
--------------------------------------------------------------------------------
/jsonsch/initrandom.go:
--------------------------------------------------------------------------------
1 | package jsonsch
2 |
3 | import (
4 | "math/rand"
5 | "time"
6 | )
7 |
// randAlphabet is the character set random strings draw from.
const randAlphabet = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890`~!@#$%^&*()-=_+[]{};':/?,.<>"

// randStrMaxLen is the exclusive upper bound on random string length.
const randStrMaxLen = 64

func init() {
	// Seed with nanoseconds: the original second-resolution Unix()
	// produced identical random streams for runs started within the
	// same second.
	rand.Seed(time.Now().UnixNano())
}

// randomString returns a random string of length 0..randStrMaxLen-1
// whose characters are drawn uniformly from randAlphabet.
func randomString() string {
	length := rand.Int() % randStrMaxLen
	randStr := make([]byte, length)
	for i := range randStr {
		randStr[i] = randAlphabet[rand.Intn(len(randAlphabet))]
	}
	return string(randStr)
}
23 |
--------------------------------------------------------------------------------
/jsonsch/jsonsch.go:
--------------------------------------------------------------------------------
1 | package jsonsch
2 |
3 | import (
4 | "fmt"
5 | "strings"
6 |
7 | "github.com/Confbase/schema/graphqlsch"
8 | )
9 |
// Schema is the common interface over the two JSON-schema struct
// variants. Two distinct structs exist only because their `required`
// struct tags differ: one variant tags it `omitempty` so the field is
// dropped during serialization when empty, while the other always
// emits it. Everything else about the two structs is identical.
type Schema interface {
	GetSchemaField() string
	SetSchemaField(string)
	GetTitle() string
	SetTitle(string)
	GetType() Type
	SetType(Type)
	GetDescription() string
	SetDescription(string)
	GetProperties() map[string]interface{}
	SetProperties(map[string]interface{})
	SetProperty(string, interface{})
	GetRequired() []string
	SetRequired([]string)
}
30 |
31 | func ToGraphQLTypes(rootSchema Schema, rootName string) ([]graphqlsch.Type, error) {
32 | types := make([]graphqlsch.Type, 0)
33 | fields := make([]graphqlsch.Field, 0)
34 | for k, inter := range rootSchema.GetProperties() {
35 | switch value := inter.(type) {
36 | case Primitive:
37 | newFields, err := handlePrimitive(k, value, fields)
38 | if err != nil {
39 | return nil, err
40 | }
41 | fields = newFields
42 | case ArraySchema:
43 | params := handleArraySchemaParams{
44 | Key: k,
45 | AS: value,
46 | Fields: fields,
47 | Types: types,
48 | }
49 | newFields, newTypes, err := handleArraySchema(params)
50 | if err != nil {
51 | return nil, err
52 | }
53 | fields = newFields
54 | types = newTypes
55 | case Schema:
56 | newFields, newTypes, err := handleSchema(handleSchemaParams{
57 | Key: k,
58 | ChildSchema: value,
59 | Fields: fields,
60 | Types: types,
61 | })
62 | if err != nil {
63 | return nil, err
64 | }
65 | fields = newFields
66 | types = newTypes
67 | default:
68 | return nil, fmt.Errorf("key '%v' has unexpected type %T", k, value)
69 | }
70 | }
71 | types = append(types, graphqlsch.NewType(rootName, fields))
72 | return types, nil
73 | }
74 |
75 | func ToGraphQLSchema(s Schema) (*graphqlsch.Schema, error) {
76 | title := s.GetTitle()
77 | if title == "" {
78 | title = "Object"
79 | }
80 | types, err := ToGraphQLTypes(s, title)
81 | if err != nil {
82 | return nil, err
83 | }
84 | return graphqlsch.New(types), nil
85 | }
86 |
87 | func handlePrimitive(key string, prim Primitive, fields []graphqlsch.Field) ([]graphqlsch.Field, error) {
88 | f := graphqlsch.Field{
89 | Name: key,
90 | IsNullable: false,
91 | IsArray: false,
92 | }
93 | switch prim.Type {
94 | case Boolean:
95 | f.Type = graphqlsch.Boolean
96 | case String:
97 | f.Type = graphqlsch.String
98 | case Number:
99 | f.Type = graphqlsch.Float
100 | case Null:
101 | return nil, fmt.Errorf("cannot infer type of null value (see key '%v')", key)
102 | default:
103 | return nil, fmt.Errorf("key '%v' has unexpected 'type' field value '%v'", key, prim.Type)
104 | }
105 | return append(fields, f), nil
106 | }
107 |
// handleArraySchemaParams bundles the arguments to handleArraySchema:
// the property key, its array schema, and the field/type lists built
// so far (which are returned extended).
type handleArraySchemaParams struct {
	Key    string
	AS     ArraySchema
	Fields []graphqlsch.Field
	Types  []graphqlsch.Type
}
114 |
// handleArraySchema appends a GraphQL array field for params.AS to
// params.Fields. Multi-dimensional arrays are unwrapped to find the
// element type; an element that is itself an object schema generates
// a new named type (capitalized from the key) appended to
// params.Types. Returns the extended field and type lists.
func handleArraySchema(params handleArraySchemaParams) ([]graphqlsch.Field, []graphqlsch.Type, error) {
	// ArrayDim starts at 1 for the outermost array and grows while
	// unwrapping nested arrays below.
	f := graphqlsch.Field{
		Name:       params.Key,
		IsNullable: false,
		IsArray:    true,
		ArrayDim:   1,
	}

	// unwrap multi-dimensional arrays
	item := params.AS.Items
	for {
		unwrapped, ok := item.(ArraySchema)
		if !ok {
			break
		}
		item = unwrapped.Items
		f.ArrayDim++
	}

	// TODO: ensure all items in array are same type
	switch value := item.(type) {
	case Primitive:
		switch value.Type {
		case Boolean:
			f.Type = graphqlsch.Boolean
		case String:
			f.Type = graphqlsch.String
		case Number:
			f.Type = graphqlsch.Float
		case Null:
			return nil, nil, fmt.Errorf("cannot infer type of null value (in array at key '%v')", params.Key)
		default:
			return nil, nil, fmt.Errorf("array (key '%v') has unexpected type '%v'", params.Key, value.Type)
		}
	case Schema:
		// Object elements become their own named GraphQL type. The
		// returned fields are discarded here: handleSchema has already
		// folded the child's fields into the child type in newTypes.
		_, newTypes, err := handleSchema(handleSchemaParams{
			Key:         params.Key,
			ChildSchema: value,
			Fields:      make([]graphqlsch.Field, 0),
			Types:       params.Types,
		})
		if err != nil {
			return nil, nil, err
		}
		params.Types = newTypes
		f.Type = graphqlsch.PrimitiveType(strings.Title(params.Key))
	default:
		return nil, nil, fmt.Errorf("key '%v' has unexpected type %T", params.Key, value)
	}

	return append(params.Fields, f), params.Types, nil
}
167 |
// handleSchemaParams bundles the arguments to handleSchema: the
// property key, the nested object's schema, and the field/type lists
// built so far (which are returned extended).
type handleSchemaParams struct {
	Key         string
	ChildSchema Schema
	Fields      []graphqlsch.Field
	Types       []graphqlsch.Type
}
174 |
175 | func handleSchema(params handleSchemaParams) ([]graphqlsch.Field, []graphqlsch.Type, error) {
176 | childTypes, err := ToGraphQLTypes(params.ChildSchema, strings.Title(params.Key))
177 | if err != nil {
178 | return nil, nil, err
179 | }
180 | for _, childT := range childTypes {
181 | params.Types = append(params.Types, childT)
182 | }
183 | f := graphqlsch.Field{
184 | Name: params.Key,
185 | Type: graphqlsch.PrimitiveType(strings.Title(params.Key)),
186 | IsNullable: false,
187 | IsArray: false,
188 | }
189 | return append(params.Fields, f), params.Types, nil
190 | }
191 |
--------------------------------------------------------------------------------
/jsonsch/primitives.go:
--------------------------------------------------------------------------------
1 | package jsonsch
2 |
3 | import "fmt"
4 |
// Type is a JSON-schema "type" value.
type Type string

// The JSON-schema primitive and composite type names.
const (
	Object  Type = "object"
	Boolean Type = "boolean"
	Array   Type = "array"
	Number  Type = "number"
	Integer Type = "integer"
	String  Type = "string"
	Null    Type = "null"
)

// Primitive is the schema node for a scalar (non-object, non-array)
// value.
type Primitive struct {
	Type        Type   `json:"type"`
	Description string `json:"description,omitempty"`
}
21 |
22 | func NewNull(params *FromExampleParams) Primitive {
23 | if params.NullAs == "" {
24 | return Primitive{Type: Null}
25 | }
26 |
27 | switch params.NullAs {
28 | case "null", "nil":
29 | return Primitive{Type: Null}
30 | case "bool":
31 | return Primitive{Type: Boolean}
32 | case "string":
33 | return Primitive{Type: String}
34 | case "number", "float":
35 | return Primitive{Type: Number}
36 | case "object":
37 | return Primitive{Type: Object}
38 | default:
39 | return Primitive{Type: Null}
40 | }
41 | }
42 |
// NewBoolean returns the schema node for a boolean value.
func NewBoolean() Primitive {
	return Primitive{Type: Boolean}
}

// NewNumber returns the schema node for a numeric value.
func NewNumber() Primitive {
	return Primitive{Type: Number}
}

// NewString returns the schema node for a string value.
func NewString() Primitive {
	return Primitive{Type: String}
}
54 |
// ArraySchema is the schema node for an array; Items holds the
// schema of its element type.
type ArraySchema struct {
	Type  Type        `json:"type"`
	Items interface{} `json:"items"`
}

// NewArray returns an ArraySchema whose elements match items.
func NewArray(items interface{}) ArraySchema {
	return ArraySchema{Type: Array, Items: items}
}
63 |
64 | func TodoRenameThisFunctionLmao(data []interface{}, params *FromExampleParams) (ArraySchema, error) {
65 | // TODO: incoporate entire array depending on mode
66 | // E.g.,
67 | // - use the first element to infer array type
68 | // - use conjuction of all elements to infer array type
69 | // - verify all elements are same type, otherwise fail
70 |
71 | var elem interface{}
72 |
73 | if len(data) == 0 {
74 | if params.EmptyArraysAs == "" {
75 | return ArraySchema{}, fmt.Errorf("cannot infer type of empty array; consider using --empty-arrays-as")
76 | }
77 | switch params.EmptyArraysAs {
78 | case "null", "nil":
79 | elem = nil
80 | case "bool", "boolean":
81 | elem = false
82 | case "string", "str":
83 | elem = ""
84 | case "number", "float":
85 | elem = 0.0
86 | case "object":
87 | elem = make(map[string]interface{})
88 | default:
89 | return ArraySchema{}, fmt.Errorf("invalid --empty-arrays-as value '%v'", params.EmptyArraysAs)
90 | }
91 | } else {
92 | elem = data[0]
93 | }
94 |
95 | a := ArraySchema{Type: Array}
96 |
97 | if err := buildSchema(elem, &a.Items, params); err != nil {
98 | return ArraySchema{}, err
99 | }
100 |
101 | return a, nil
102 | }
103 |
--------------------------------------------------------------------------------
/jsonsch/replacerefs.go:
--------------------------------------------------------------------------------
1 | package jsonsch
2 |
3 | import (
4 | "encoding/json"
5 | "fmt"
6 | "net/http"
7 | )
8 |
9 | func ReplaceRefs(data map[string]interface{}, doSkipRefs bool) error {
10 |
11 | var subData map[string]interface{}
12 | keyPath := make([]string, 0)
13 | stack := make([][]string, 0)
14 | stack = append(stack, keyPath)
15 |
16 | for len(stack) > 0 {
17 | keyPath := stack[len(stack)-1]
18 | stack = stack[:len(stack)-1]
19 | next, err := subDataFrom(data, keyPath)
20 | if err != nil {
21 | return err
22 | }
23 | subData = next
24 |
25 | refInter, ok := subData["$ref"]
26 | if !ok {
27 | newStack, isTypeObject, err := pushPropsToStack(data, stack, keyPath)
28 | if err != nil {
29 | return err
30 | }
31 | if !isTypeObject {
32 | continue
33 | }
34 | stack = newStack
35 | continue
36 | }
37 | refStr, ok := refInter.(string)
38 | if !ok {
39 | return fmt.Errorf("'$ref' field must be a string")
40 | }
41 | if doSkipRefs {
42 | delete(subData, "$ref")
43 | subData["type"] = string(Object)
44 | subData["properties"] = make(map[string]interface{})
45 | if err := setDataAtKeyPath(data, subData, keyPath); err != nil {
46 | return err
47 | }
48 | continue
49 | }
50 | resp, err := http.Get(refStr)
51 | if err != nil {
52 | return err
53 | }
54 | defer resp.Body.Close()
55 |
56 | if resp.StatusCode != http.StatusOK {
57 | return fmt.Errorf("received %v status code", resp.StatusCode)
58 | }
59 | var refData map[string]interface{}
60 | if err := json.NewDecoder(resp.Body).Decode(&refData); err != nil {
61 | return err
62 | }
63 | subData = refData
64 | if err := setDataAtKeyPath(data, subData, keyPath); err != nil {
65 | return err
66 | }
67 | stack = append(stack, keyPath)
68 | }
69 | return nil
70 | }
71 |
72 | // returns stack, isTypeObject, error
73 | func pushPropsToStack(data map[string]interface{}, stack [][]string, prefix []string) ([][]string, bool, error) {
74 | subData, err := subDataFrom(data, prefix)
75 | if err != nil {
76 | return stack, false, err
77 | }
78 |
79 | typeInter, ok := subData["type"]
80 | if !ok {
81 | return stack, false, nil
82 | }
83 | typeStr, ok := typeInter.(string)
84 | if !ok {
85 | return nil, true, fmt.Errorf("'type' field is not a string")
86 | }
87 | if typeStr != string(Object) {
88 | return stack, false, nil
89 | }
90 |
91 | propsInter, ok := subData["properties"]
92 | if !ok {
93 | return nil, true, fmt.Errorf("'properties' field does not exist")
94 | }
95 | props, ok := propsInter.(map[string]interface{})
96 | if !ok {
97 | return nil, true, fmt.Errorf("'properties' field is not a map[string]interface{}")
98 | }
99 | for k, _ := range props {
100 | keyPath := append([]string(nil), prefix...)
101 | keyPath = append(keyPath, "properties", k)
102 | stack = append(stack, keyPath)
103 | }
104 | return stack, true, nil
105 | }
106 |
// subDataFrom descends into data along keyPath and returns the nested
// map found there. Every intermediate value must itself be a
// map[string]interface{}.
func subDataFrom(data map[string]interface{}, keyPath []string) (map[string]interface{}, error) {
	cur := data
	for _, key := range keyPath {
		nextInter, ok := cur[key]
		if !ok {
			return nil, fmt.Errorf("invalid key path")
		}
		next, ok := nextInter.(map[string]interface{})
		if !ok {
			return nil, fmt.Errorf("invalid type in key path; expected map[string]interface")
		}
		cur = next
	}
	return cur, nil
}
128 |
// setDataAtKeyPath replaces the value at keyPath inside data with
// srcData. An empty keyPath replaces the root map's contents in
// place. BUG FIX: the original assigned srcData to the local `data`
// parameter for an empty path — a silent no-op that left root-level
// $ref resolutions unapplied.
func setDataAtKeyPath(data, srcData map[string]interface{}, keyPath []string) error {
	if len(keyPath) == 0 {
		// Snapshot first so this is safe even when srcData aliases data.
		snapshot := make(map[string]interface{}, len(srcData))
		for k, v := range srcData {
			snapshot[k] = v
		}
		for k := range data {
			delete(data, k)
		}
		for k, v := range snapshot {
			data[k] = v
		}
		return nil
	}
	if len(keyPath) == 1 {
		data[keyPath[0]] = srcData
		return nil
	}
	nextInter, ok := data[keyPath[0]]
	if !ok {
		return fmt.Errorf("invalid key path")
	}
	next, ok := nextInter.(map[string]interface{})
	if !ok {
		return fmt.Errorf("invalid type in key path; expected map[string]interface")
	}
	return setDataAtKeyPath(next, srcData, keyPath[1:])
}
148 |
--------------------------------------------------------------------------------
/jsonsch/schemainclreq.go:
--------------------------------------------------------------------------------
1 | package jsonsch
2 |
3 | import "sort"
4 |
// SchemaInclReq is a JSON schema whose "required" field is always
// serialized, even when empty. A second variant with an `omitempty`
// tag on Required exists elsewhere in this package; the two are
// otherwise identical.
type SchemaInclReq struct {
	SchemaField string                 `json:"$schema,omitempty"`
	Title       string                 `json:"title"`
	Type        Type                   `json:"type"`
	Description string                 `json:"description,omitempty"`
	Properties  map[string]interface{} `json:"properties"`
	Required    []string               `json:"required"`
}

// NewInclReq returns an empty object schema with non-nil Properties
// and Required so serialization emits {} and [] rather than null.
func NewInclReq() *SchemaInclReq {
	return &SchemaInclReq{
		Type:       Object,
		Properties: make(map[string]interface{}),
		Required:   make([]string, 0),
	}
}
21 |
22 | func (s *SchemaInclReq) SetRequired(r []string) {
23 | s.Required = append(r)
24 | sort.Strings(s.Required)
25 | }
26 |
27 | func (s *SchemaInclReq) GetRequired() []string {
28 | return s.Required
29 | }
30 |
31 | func (s *SchemaInclReq) SetProperty(k string, v interface{}) {
32 | s.Properties[k] = v
33 | }
34 |
35 | func (s *SchemaInclReq) SetProperties(ps map[string]interface{}) {
36 | s.Properties = ps
37 | }
38 |
39 | func (s *SchemaInclReq) GetProperties() map[string]interface{} {
40 | return s.Properties
41 | }
42 |
43 | func (s *SchemaInclReq) SetDescription(d string) {
44 | s.Description = d
45 | }
46 |
47 | func (s *SchemaInclReq) GetDescription() string {
48 | return s.Description
49 | }
50 |
51 | func (s *SchemaInclReq) SetType(t Type) {
52 | s.Type = t
53 | }
54 |
55 | func (s *SchemaInclReq) GetType() Type {
56 | return s.Type
57 | }
58 |
59 | func (s *SchemaInclReq) SetTitle(t string) {
60 | s.Title = t
61 | }
62 |
63 | func (s *SchemaInclReq) GetTitle() string {
64 | return s.Title
65 | }
66 |
67 | func (s *SchemaInclReq) SetSchemaField(sf string) {
68 | s.SchemaField = sf
69 | }
70 |
71 | func (s *SchemaInclReq) GetSchemaField() string {
72 | return s.SchemaField
73 | }
74 |
--------------------------------------------------------------------------------
/jsonsch/schemaomitreq.go:
--------------------------------------------------------------------------------
1 | package jsonsch
2 |
3 | import "sort"
4 |
5 | type SchemaOmitReq struct {
6 | SchemaField string `json:"$schema,omitempty"`
7 | Title string `json:"title"`
8 | Type Type `json:"type"`
9 | Description string `json:"description,omitempty"`
10 | Properties map[string]interface{} `json:"properties"`
11 | Required []string `json:"required,omitempty"`
12 | }
13 |
14 | func NewOmitReq() *SchemaOmitReq {
15 | return &SchemaOmitReq{
16 | Type: Object,
17 | Properties: make(map[string]interface{}),
18 | Required: make([]string, 0),
19 | }
20 | }
21 |
22 | func (s *SchemaOmitReq) SetRequired(r []string) {
23 | s.Required = append(r)
24 | sort.Strings(s.Required)
25 | }
26 |
27 | func (s *SchemaOmitReq) GetRequired() []string {
28 | return s.Required
29 | }
30 |
31 | func (s *SchemaOmitReq) SetProperty(k string, v interface{}) {
32 | s.Properties[k] = v
33 | }
34 |
35 | func (s *SchemaOmitReq) SetProperties(ps map[string]interface{}) {
36 | s.Properties = ps
37 | }
38 |
39 | func (s *SchemaOmitReq) GetProperties() map[string]interface{} {
40 | return s.Properties
41 | }
42 |
43 | func (s *SchemaOmitReq) SetDescription(d string) {
44 | s.Description = d
45 | }
46 |
47 | func (s *SchemaOmitReq) GetDescription() string {
48 | return s.Description
49 | }
50 |
51 | func (s *SchemaOmitReq) SetType(t Type) {
52 | s.Type = t
53 | }
54 |
55 | func (s *SchemaOmitReq) GetType() Type {
56 | return s.Type
57 | }
58 |
59 | func (s *SchemaOmitReq) SetTitle(t string) {
60 | s.Title = t
61 | }
62 |
63 | func (s *SchemaOmitReq) GetTitle() string {
64 | return s.Title
65 | }
66 |
67 | func (s *SchemaOmitReq) SetSchemaField(sf string) {
68 | s.SchemaField = sf
69 | }
70 |
71 | func (s *SchemaOmitReq) GetSchemaField() string {
72 | return s.SchemaField
73 | }
74 |
--------------------------------------------------------------------------------
/jsonsch/serialize.go:
--------------------------------------------------------------------------------
1 | package jsonsch
2 |
3 | import (
4 | "encoding/json"
5 | "io"
6 |
7 | "github.com/Confbase/schema/graphqlsch"
8 | )
9 |
10 | func SerializeSchema(s Schema, w io.Writer, doPretty bool) error {
11 | enc := json.NewEncoder(w)
12 | if doPretty {
13 | enc.SetIndent("", " ")
14 | }
15 | if err := enc.Encode(&s); err != nil {
16 | return err
17 | }
18 | return nil
19 | }
20 |
21 | func SerializeGraphQL(s Schema, w io.Writer) error {
22 | gqls, err := ToGraphQLSchema(s)
23 | if err != nil {
24 | return err
25 | }
26 | return graphqlsch.SerializeSchema(gqls, w)
27 | }
28 |
--------------------------------------------------------------------------------
/jsonsch/util.go:
--------------------------------------------------------------------------------
1 | package jsonsch
2 |
3 | import (
4 | "fmt"
5 | "reflect"
6 |
7 | "github.com/Confbase/schema/example"
8 | )
9 |
10 | func buildSchema(fromValue interface{}, dst *interface{}, params *FromExampleParams) error {
11 | switch v := fromValue.(type) {
12 | case nil:
13 | *dst = NewNull(params)
14 | case bool:
15 | *dst = NewBoolean()
16 | case string:
17 | *dst = NewString()
18 |
19 | case uint, uint8, uint16, uint32, uint64, int, int8, int16, int32, int64, float32, float64:
20 | *dst = NewNumber()
21 |
22 | case []interface{}:
23 | arr, err := TodoRenameThisFunctionLmao(v, params)
24 | if err != nil {
25 | return err
26 | }
27 | *dst = arr
28 |
29 | case map[string]interface{}:
30 | // value is another JSON object
31 | obj, err := FromExample(example.New(v), params)
32 | if err != nil {
33 | return err
34 | }
35 | *dst = obj
36 |
37 | case map[interface{}]interface{}:
38 | obj, err := interInterMap2Sch(v, params)
39 | if err != nil {
40 | return err
41 | }
42 | *dst = obj
43 |
44 | default:
45 | return fmt.Errorf("unknown type '%v'", reflect.TypeOf(v))
46 | }
47 | return nil
48 | }
49 |
50 | func interInterMap2Sch(v map[interface{}]interface{}, params *FromExampleParams) (Schema, error) {
51 | if len(v) == 0 {
52 | return nil, fmt.Errorf("cannot infer type of empty map")
53 | }
54 |
55 | data := make(map[string]interface{})
56 | for vKey, vValue := range v {
57 | dataKey, ok := vKey.(string)
58 | if !ok {
59 | return nil, fmt.Errorf("unrecognized map key type '%v'", reflect.TypeOf(vKey))
60 | }
61 | data[dataKey] = vValue
62 | }
63 |
64 | return FromExample(example.New(data), params)
65 | }
66 |
--------------------------------------------------------------------------------
/main.go:
--------------------------------------------------------------------------------
1 | // Copyright © 2018 Thomas Fischer
2 | //
3 | // Permission is hereby granted, free of charge, to any person obtaining a copy
4 | // of this software and associated documentation files (the "Software"), to deal
5 | // in the Software without restriction, including without limitation the rights
6 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7 | // copies of the Software, and to permit persons to whom the Software is
8 | // furnished to do so, subject to the following conditions:
9 | //
10 | // The above copyright notice and this permission notice shall be included in
11 | // all copies or substantial portions of the Software.
12 | //
13 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
19 | // THE SOFTWARE.
20 |
21 | package main
22 |
23 | import "github.com/Confbase/schema/cmd"
24 |
// main is the process entry point; all CLI behavior is delegated to the
// cobra command tree in the cmd package.
func main() {
	cmd.Execute()
}
28 |
--------------------------------------------------------------------------------
/man_pages/schema.1:
--------------------------------------------------------------------------------
1 | .TH SCHEMA 1
2 | .SH NAME
3 | schema \- a schema generator, instantiator, and validator tool.
4 | .SH SYNOPSIS
5 | .B schema
6 | [command] [flags]
7 | .SH DESCRIPTION
8 | .B schema
9 | provides three subcommands
10 | .RS
11 | .TP
12 | .BR infer
13 | Infer and output the schema of example data
14 | .TP
15 | .BR init
16 | Initialize an instance of a schema with default values
17 | .TP
18 | .BR translate
19 | Translate data from one format to another
20 | .RE
21 | .TP
22 | Supported formats: JSON, YAML, TOML, XML, GraphQL schema, JSON Schema
23 | .SH OPTIONS
24 | .TP
25 | .BR schema
26 | infer [output path] [flags]
27 | .RS
28 | Infer and output schemas from example data.
29 | .P
30 | By default, JSON schema (see https://json-schema.org) is output.
31 | .P
32 | GraphQL schemas can be output with the --graphql flag. The --omit-required
33 | and --schema-field flags do nothing when used with the --graphql flag.
34 | .P
35 | If called with no arguments, 'schema infer' reads from stdin and writes the
36 | inferred schema to stdout.
37 | .P
38 | If called with arguments, each argument is interpreted as a file path. The
39 | schema for each path is inferred and written to a new file of the same path,
40 | but with its basename prefixed with the string 'schema.'. For example:
41 | .P
42 | $ schema config1.json config2.json
43 | .P
44 | will write the inferred schemas to schema.config1.json and schema.config2.json,
45 | respectively.
46 | .P
47 | .B
48 | Flags:
49 | .RS
50 | .TP
51 | .BR \-g ", " \-\-graph\-ql
52 | output GraphQL schemas
53 | .TP
54 | .BR \-r ", " \-\-make\-required
55 | make all fields required
56 | .TP
57 | .BR \-\-omit\-required
58 | omit 'required' field if it's empty (default true)
59 | .TP
60 | .BR \-p ", " \-\-pretty
61 | pretty-print the output (default true)
62 | .TP
63 | .BR \-s ", " \-\-schema\-field
64 | (string) specifies the value of the $schema field
65 | .TP
66 | .BR \-\-empty\-arrays\-as
67 | (string) specifies the type of elements of empty arrays; must be one of (null,string,boolean,float,number,object)
68 | .TP
69 | .BR \-h ", " \-\-help
70 | help for infer
71 | .RE
72 | .RE
73 | .P
74 | .BR schema
75 | init [output instance name] [flags]
76 | .RS
77 | .P
78 | Initialize an instance of a schema.
79 | .P
80 | If no schema is specified, stdin is interpreted as the schema.
81 | .P
82 | Multiple instance names may be specified.
83 | .P
84 | If more than one of the (json|yaml|toml|xml|protobuf|graphql) flags are set,
85 | behavior is undefined.
86 | .P
87 | $ref fields are resolved via network requests by default. Network requests can
88 | be avoided with the --skip-refs flag, which resolves the ref to an empty object
89 | ({}).
90 | .P
91 | .B
92 | Flags:
93 | .RS
94 | .TP
95 | .BR \-\-json
96 | initialize as JSON
97 | .TP
98 | .BR \-\-protobuf
99 | initialize as protocol buffer
100 | .TP
101 | .BR \-\-toml
102 | initialize as TOML
103 | .TP
104 | .BR \-\-xml
105 | initialize as xml
106 | .TP
107 | .BR \-\-yaml
108 | initialize as yaml
109 | .TP
110 | .BR \-s ", " \-\-schema
111 | (string) specifies schema to initialize
112 | .TP
113 | .BR \-\-populate\-lists
114 | populate lists with one element (default true)
115 | .TP
116 | .BR \-\-skip-refs
117 | use {} in place of $ref fields
118 | .TP
119 | .BR \-\-pretty
120 | pretty-print the output (default true)
121 | .TP
122 | .BR \-h ", " \-\-help
123 | help for init
124 | .RE
125 | .RE
126 | .P
127 | .BR schema
128 | translate [flags]
129 | .RS
130 | .P
131 | Translate input data into another format.
132 | .P
133 | If no input file is specified, stdin is used as input.
134 | .P
135 | Multiple output paths may be specified. If none are specified, translated data
136 | is written to stdout.
137 | .P
138 | If more than one of the (json|yaml|toml|xml|protobuf|graphql) flags are set,
139 | behavior is undefined.
140 | .P
141 | .B
142 | Flags:
143 | .RS
144 | .TP
145 | .BR \-i ", " \-\-input
146 | (string) path to input data to translate
147 | .TP
148 | .BR \-\-json
149 | output as JSON
150 | .TP
151 | .BR \-\-protobuf
152 | output as protocol buffer
153 | .TP
154 | .BR \-\-toml
155 | output as TOML
156 | .TP
157 | .BR \-\-xml
158 | output as XML
159 | .TP
160 | .BR \-\-yaml
161 | output as YAML
162 | .TP
163 | .BR \-\-pretty
164 | pretty-print the output (default true)
165 | .TP
166 | .BR \-h ", " \-\-help
167 | help for translate
168 | .RE
169 | .RE
170 | .P
171 | .TP
172 | .BR \-h ", " \-\-help
173 | Use "schema [command] --help" for more information about a command
174 |
--------------------------------------------------------------------------------
/test_all:
--------------------------------------------------------------------------------
#!/bin/bash
# Test driver: runs the gofmt check, 'go install', 'go vet', the
# end-to-end shell tests under e2e_tests/, then 'go test -v ./...'.
#
# Usage: ./test_all [--offline]

# Record offline mode. NOTE(review): $offline_mode is not read in this
# script; presumably the sourced e2e test modules consult it to skip
# network-dependent cases — confirm in e2e_tests/.
if [ "$#" -eq 1 ] && [ "$1" = '--offline' ]; then
    printf "Running in offline mode.\n"
    printf "Will skip tests which require network.\n"
    offline_mode='true'
else
    printf "Will NOT skip tests which require network.\n"
    offline_mode='false'
fi

# Fail if any tracked, non-deleted, non-vendored .go file is not gofmt'd.
printf "Running gofmt tests..."
gofmt_output="$(git ls-files | grep -v "^$(git ls-files -d)\$" | grep -v '^vendor/' | grep '.go$' | xargs gofmt -l 2>&1)"
if [ ! -z "$gofmt_output" ]; then
    printf "FAIL. The following files are not gofmt'd:\n$gofmt_output" 1>&2
    exit 1
fi
printf "OK\n"

printf "Running 'go install'..."
go_install_output=`go install 2>&1`
if [ "$?" -ne "0" ]; then
    printf "FAIL. output:\n$go_install_output\n" 1>&2
    exit 1
fi
printf "OK\n"

printf "Running go vet tests..."
go_vet_output="$(go vet -v ./... 2>&1 | grep ':' )"
# grep ':' is a hack-ish way to only match path/to/file.go:lineno:colno: vet msg
if [ ! -z "$go_vet_output" ]; then
    printf "FAIL. output:\n$go_vet_output\n" 1>&2
    exit 1
fi
printf "OK\n"

printf "Running end-to-end tests...\n"
# lib.sh provides the setup_test and run_test helpers used below.
source ./e2e_tests/lib.sh
if [ "$?" -ne 0 ]; then
    printf "FAIL. failed to source './e2e_tests/lib.sh'\n"
    exit 1
fi

# Each non-lib module defines an array of test cases in $tests.
for test_module in `ls e2e_tests/*.sh | grep -v 'lib.sh'`; do

    source "$test_module" # populates the $tests variable
    if [ "$?" -ne 0 ]; then
        printf "FAIL. failed to source '$test_module'\n"
        exit 1
    fi

    for testcase in "${tests[@]}"; do
        setup_test "$testcase"
        run_test "$testcase"
    done
done

printf "Running 'go test -v ./...'\n"
go test -v ./...
60 |
--------------------------------------------------------------------------------
/translate/config.go:
--------------------------------------------------------------------------------
1 | package translate
2 |
// Config holds the options of the translate subcommand: the input path,
// the desired output format (Do* flags), and pretty-printing.
type Config struct {
	InputPath  string
	DoJson     bool
	DoYaml     bool
	DoToml     bool
	DoXml      bool
	DoProtobuf bool
	DoGraphQL  bool
	DoPretty   bool
}

// OutFmt names the selected output format. If several format flags are
// set, the first in the order json, yaml, toml, xml, protobuf, graphql
// wins; with none set, "json" is the default.
func (c *Config) OutFmt() string {
	switch {
	case c.DoJson:
		return "json"
	case c.DoYaml:
		return "yaml"
	case c.DoToml:
		return "toml"
	case c.DoXml:
		return "xml"
	case c.DoProtobuf:
		return "protobuf"
	case c.DoGraphQL:
		return "graphql"
	default:
		return "json"
	}
}
35 |
--------------------------------------------------------------------------------
/translate/translate.go:
--------------------------------------------------------------------------------
1 | package translate
2 |
3 | import (
4 | "bytes"
5 | "fmt"
6 | "io"
7 | "io/ioutil"
8 | "os"
9 |
10 | "github.com/Confbase/schema/decode"
11 | )
12 |
13 | func TranslateEntry(cfg Config, targets []string) {
14 | if len(targets) == 0 {
15 | if err := Translate(os.Stdin, os.Stdout, cfg); err != nil {
16 | fmt.Fprintf(os.Stderr, "error: %v\n", err)
17 | os.Exit(1)
18 | }
19 | return
20 | }
21 |
22 | buf, err := ioutil.ReadAll(os.Stdin)
23 | if err != nil {
24 | fmt.Fprintf(os.Stderr, "error: failed to read from stdin\n%v", err)
25 | os.Exit(1)
26 | }
27 |
28 | for _, t := range targets {
29 | f, err := os.OpenFile(t, os.O_RDWR|os.O_CREATE, 0666)
30 | if err != nil {
31 | fmt.Fprintf(os.Stderr, "error: failed to open '%v'\n%v\n", t, err)
32 | os.Exit(1)
33 | }
34 | defer f.Close()
35 |
36 | if err := Translate(bytes.NewReader(buf), f, cfg); err != nil {
37 | fmt.Fprintf(os.Stderr, "error: %v\n", err)
38 | os.Exit(1)
39 | }
40 | }
41 | }
42 |
43 | func Translate(r io.Reader, w io.Writer, cfg Config) error {
44 | m, err := decode.MuxDecode(r)
45 | if err != nil {
46 | return err
47 | }
48 | if !isAllKeysStrs(m) {
49 | // TODO: fix this horrible hack
50 | interMap := make(map[interface{}]interface{})
51 | for k, v := range m {
52 | interMap[k] = v
53 | }
54 | goodM, err := mkKeysStrsMap(interMap)
55 | if err != nil {
56 | return err
57 | }
58 | return decode.DemuxEncode(w, goodM, cfg.OutFmt(), cfg.DoPretty)
59 | }
60 | return decode.DemuxEncode(w, m, cfg.OutFmt(), cfg.DoPretty)
61 | }
62 |
// elemKeysAreStrs applies the per-element check that was previously
// duplicated in both container cases of isAllKeysStrs: a map element
// must have string keys whose values recursively satisfy
// isAllKeysStrs, a slice element is checked recursively, and any other
// element passes.
func elemKeysAreStrs(value interface{}) bool {
	switch v := value.(type) {
	case map[interface{}]interface{}:
		for subK, subV := range v {
			if _, isStr := subK.(string); !isStr {
				return false
			}
			if !isAllKeysStrs(subV) {
				return false
			}
		}
		return true
	case []interface{}:
		return isAllKeysStrs(v)
	default:
		return true
	}
}

// isAllKeysStrs reports whether every map nested inside the given
// container has only string keys. Only []interface{} and
// map[interface{}]interface{} are accepted at the top level; anything
// else returns false.
//
// NOTE(review): two quirks of the original are deliberately preserved:
// the keys of a top-level map are themselves not checked, and a nested
// map whose value is a scalar fails (the recursive call rejects
// non-containers). Translate relies on the resulting pessimism to route
// data through mkKeysStrsMap, so "fixing" either would change behavior.
func isAllKeysStrs(some interface{}) bool {
	// De-duplicated per the original's TODO: the two formerly identical
	// switch bodies now share elemKeysAreStrs.
	switch c := some.(type) {
	case []interface{}:
		for _, value := range c {
			if !elemKeysAreStrs(value) {
				return false
			}
		}
		return true
	case map[interface{}]interface{}:
		for _, value := range c {
			if !elemKeysAreStrs(value) {
				return false
			}
		}
		return true
	}
	return false
}
114 |
// mkKeysStrsMap returns a copy of m whose keys — and, recursively, the
// keys of every map nested in values or slices — are asserted to be
// strings. It errors as soon as a non-string key is found.
func mkKeysStrsMap(m map[interface{}]interface{}) (map[string]interface{}, error) {
	out := make(map[string]interface{})
	for rawKey, rawVal := range m {
		name, isStr := rawKey.(string)
		if !isStr {
			return nil, fmt.Errorf("found non-str key in object")
		}
		switch typed := rawVal.(type) {
		case map[interface{}]interface{}:
			converted, err := mkKeysStrsMap(typed)
			if err != nil {
				return nil, err
			}
			out[name] = converted
		case []interface{}:
			converted, err := mkKeysStrsSlice(typed)
			if err != nil {
				return nil, err
			}
			out[name] = converted
		default:
			out[name] = rawVal
		}
	}
	return out, nil
}

// mkKeysStrsSlice maps mkKeysStrsMap over every element of xs,
// recursing into nested slices and leaving scalars untouched.
func mkKeysStrsSlice(xs []interface{}) ([]interface{}, error) {
	out := make([]interface{}, 0, len(xs))
	for _, elem := range xs {
		switch typed := elem.(type) {
		case map[interface{}]interface{}:
			converted, err := mkKeysStrsMap(typed)
			if err != nil {
				return nil, err
			}
			out = append(out, converted)
		case []interface{}:
			converted, err := mkKeysStrsSlice(typed)
			if err != nil {
				return nil, err
			}
			out = append(out, converted)
		default:
			out = append(out, typed)
		}
	}
	return out, nil
}
164 |
--------------------------------------------------------------------------------