├── json_typegen
│   ├── README.md
│   ├── Cargo.toml
│   └── src
│       └── lib.rs
├── json_typegen_web
│   ├── src
│   │   ├── examples
│   │   │   ├── point.json
│   │   │   ├── hnStory.json
│   │   │   ├── examples.ts
│   │   │   ├── steamAppNews.json
│   │   │   └── zalandoArticle.json
│   │   ├── vite-env.d.ts
│   │   ├── components
│   │   │   ├── Column.svelte
│   │   │   ├── Container.svelte
│   │   │   ├── Row.svelte
│   │   │   ├── Checkbox.svelte
│   │   │   ├── IconButton.svelte
│   │   │   ├── Input.svelte
│   │   │   ├── Select.svelte
│   │   │   ├── FileInputButton.svelte
│   │   │   ├── Spinner.svelte
│   │   │   ├── HighlightedCode.svelte
│   │   │   ├── Textarea.svelte
│   │   │   ├── Button.svelte
│   │   │   ├── FormField.svelte
│   │   │   └── GithubCorner.svelte
│   │   ├── main.ts
│   │   ├── icons
│   │   │   ├── XCircleIcon.svelte
│   │   │   ├── CodeBracketIcon.svelte
│   │   │   ├── ArrowUpTrayIcon.svelte
│   │   │   ├── ArrowDownTrayIcon.svelte
│   │   │   ├── InfoIcon.svelte
│   │   │   ├── QuestionMarkCircleIcon.svelte
│   │   │   └── ClipboardDocumentIcon.svelte
│   │   ├── lib
│   │   │   ├── file.ts
│   │   │   ├── localstorage.ts
│   │   │   ├── WorkerMessage.ts
│   │   │   ├── download.ts
│   │   │   └── worker.ts
│   │   ├── shape.svg
│   │   └── app.css
│   ├── .prettierrc
│   ├── .vscode
│   │   └── extensions.json
│   ├── .firebaserc
│   ├── firebase.json
│   ├── rsw.toml
│   ├── tsconfig.node.json
│   ├── svelte.config.js
│   ├── README.md
│   ├── .gitignore
│   ├── tsconfig.json
│   ├── vite.config.ts
│   ├── package.json
│   └── index.html
├── .gitignore
├── json_typegen_shared
│   ├── src
│   │   ├── inference.rs
│   │   ├── generation.rs
│   │   ├── inference
│   │   │   ├── jsoninputerr.rs
│   │   │   └── jsonlex.rs
│   │   ├── progress.rs
│   │   ├── generation
│   │   │   ├── value.rs
│   │   │   ├── shape.rs
│   │   │   ├── typescript_type_alias.rs
│   │   │   ├── json_schema.rs
│   │   │   ├── zod_schema.rs
│   │   │   ├── serde_case.rs
│   │   │   ├── typescript.rs
│   │   │   └── python.rs
│   │   ├── to_singular.rs
│   │   ├── sql.rs
│   │   ├── hints.rs
│   │   ├── util.rs
│   │   ├── options.rs
│   │   ├── shape.rs
│   │   └── lib.rs
│   ├── benches
│   │   ├── bench.rs
│   │   └── fixtures
│   │       └── zalando_article.json
│   ├── Cargo.toml
│   ├── README.md
│   └── tests
│       ├── python_generation.rs
│       └── with_defaults.rs
├── .editorconfig
├── json_typegen_wasm
│   ├── README.md
│   ├── LICENSE
│   ├── src
│   │   └── lib.rs
│   └── Cargo.toml
├── json_typegen_demo
│   ├── Cargo.toml
│   └── src
│       └── main.rs
├── Cargo.toml
├── RELEASE.md
├── json_typegen_cli
│   ├── Cargo.toml
│   ├── README.md
│   └── src
│       └── main.rs
├── LICENSE-MIT
├── .github
│   └── workflows
│       ├── publish.yml
│       └── ci.yml
├── CONFIGURATION.md
├── README.md
└── LICENSE-APACHE
/json_typegen/README.md:
--------------------------------------------------------------------------------
1 | ../README.md
--------------------------------------------------------------------------------
/json_typegen_web/src/examples/point.json:
--------------------------------------------------------------------------------
1 | {
2 | "x": 3,
3 | "y": 5
4 | }
5 |
--------------------------------------------------------------------------------
/json_typegen_web/.prettierrc:
--------------------------------------------------------------------------------
1 | {
2 | "plugins": ["prettier-plugin-svelte"]
3 | }
4 |
--------------------------------------------------------------------------------
/json_typegen_web/.vscode/extensions.json:
--------------------------------------------------------------------------------
1 | {
2 | "recommendations": ["svelte.svelte-vscode"]
3 | }
4 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | target
2 | Cargo.lock
3 | *.rs.bk
4 |
5 | json_typegen_wasm/pkg
6 |
7 | flamegraph*.svg
8 |
--------------------------------------------------------------------------------
/json_typegen_web/.firebaserc:
--------------------------------------------------------------------------------
1 | {
2 | "projects": {
3 | "production": "json-typegen"
4 | }
5 | }
6 |
--------------------------------------------------------------------------------
/json_typegen_web/src/vite-env.d.ts:
--------------------------------------------------------------------------------
1 | ///
--------------------------------------------------------------------------------
/json_typegen_shared/src/generation/typescript_type_alias.rs:
--------------------------------------------------------------------------------
101 | let fields: Vec<Code> = map
102 | .iter()
103 | .map(|(name, typ)| {
104 | let (was_optional, collapsed) = collapse_option(typ);
105 |
106 | ctxt.indent_level += 1;
107 | let field_type = type_from_shape(ctxt, collapsed);
108 | ctxt.indent_level -= 1;
109 |
110 | let escape_name = !is_ts_identifier(name);
111 |
112 | format!(
113 | "{}{}{}{}{}: {};",
114 | " ".repeat(ctxt.indent_level),
115 | if escape_name { "\"" } else { "" },
116 | name,
117 | if escape_name { "\"" } else { "" },
118 | if was_optional { "?" } else { "" },
119 | field_type
120 | )
121 | })
122 | .collect();
123 |
124 | let mut code = "{\n".to_string();
125 |
126 | if !fields.is_empty() {
127 | code += &fields.join("\n");
128 | code += "\n";
129 | }
130 | code += &" ".repeat(ctxt.indent_level - 1);
131 | code += "}";
132 |
133 | code
134 | }
135 |
--------------------------------------------------------------------------------
/json_typegen_shared/src/generation/json_schema.rs:
--------------------------------------------------------------------------------
1 | use linked_hash_map::LinkedHashMap;
2 |
3 | use crate::generation::value::{Value, pretty_print_value};
4 | use crate::options::Options;
5 | use crate::shape::{self, Shape};
6 | use crate::to_singular::to_singular;
7 | use crate::util::string_hashmap;
8 |
9 | #[allow(dead_code)]
10 | pub struct Ctxt {
11 | options: Options,
12 | }
13 |
14 | pub type Code = String;
15 |
16 | pub fn json_schema(name: &str, shape: &Shape, options: Options) -> Code {
17 | let mut ctxt = Ctxt { options };
18 |
19 | let value = type_from_shape(&mut ctxt, name, shape);
20 |
21 | let mut schema = string_hashmap! {
22 | "$schema" => Value::Str("http://json-schema.org/draft-07/schema#"),
23 | "title" => Value::String(format!("Generated schema for {}", name)),
24 | };
25 |
26 | if let Value::Object(map) = value {
27 | for (key, val) in map.into_iter() {
28 | schema.insert(key, val);
29 | }
30 | }
31 |
32 | pretty_print_value(0, &Value::Object(schema))
33 | }
34 |
35 | fn type_from_shape(ctxt: &mut Ctxt, path: &str, shape: &Shape) -> Value {
36 | use crate::shape::Shape::*;
37 | match shape {
38 | Null | Any | Bottom => Value::Object(LinkedHashMap::new()),
39 | Bool => Value::Object(string_hashmap! { "type" => Value::Str("boolean") }),
40 | StringT => Value::Object(string_hashmap! { "type" => Value::Str("string") }),
41 | Integer => Value::Object(string_hashmap! { "type" => Value::Str("number") }),
42 | Floating => Value::Object(string_hashmap! { "type" => Value::Str("number") }),
43 | Tuple(shapes, _n) => {
44 | let folded = shape::fold_shapes(shapes.clone());
45 | if folded == Any && shapes.iter().any(|s| s != &Any) {
46 | generate_tuple_type(ctxt, path, shapes)
47 | } else {
48 | generate_vec_type(ctxt, path, &folded)
49 | }
50 | }
51 | VecT { elem_type: e } => generate_vec_type(ctxt, path, e),
52 | Struct { fields: map } => generate_struct_from_field_shapes(ctxt, path, map),
53 | MapT { val_type: v } => generate_map_type(ctxt, path, v),
54 | Opaque(t) => Value::Object(string_hashmap! { "type" => Value::String(t.clone()) }),
55 | Optional(e) => type_from_shape(ctxt, path, e),
56 | Nullable(e) => type_from_shape(ctxt, path, e),
57 | }
58 | }
59 |
60 | fn generate_vec_type(ctxt: &mut Ctxt, path: &str, shape: &Shape) -> Value {
61 | let singular = to_singular(path);
62 | let inner = type_from_shape(ctxt, &singular, shape);
63 | Value::Object(string_hashmap! {
64 | "type" => Value::Str("array"),
65 | "items" => inner
66 | })
67 | }
68 |
69 | fn generate_map_type(ctxt: &mut Ctxt, path: &str, shape: &Shape) -> Value {
70 | let singular = to_singular(path);
71 | let inner = type_from_shape(ctxt, &singular, shape);
72 | Value::Object(string_hashmap! {
73 | "type" => Value::Str("object"),
74 | "additionalProperties" => inner
75 | })
76 | }
77 |
78 | fn generate_tuple_type(ctxt: &mut Ctxt, path: &str, shapes: &[Shape]) -> Value {
79 | let mut types = Vec::new();
80 |
81 | for shape in shapes {
82 | let typ = type_from_shape(ctxt, path, shape);
83 | types.push(typ);
84 | }
85 |
86 | Value::Object(string_hashmap! {
87 | "type" => Value::Str("array"),
88 | "items" => Value::Array(types),
89 | "additionalItems" => Value::Bool(false)
90 | })
91 | }
92 |
93 | fn collapse_option(typ: &Shape) -> (bool, &Shape) {
94 | if let Shape::Optional(inner) = typ {
95 | return (true, &**inner);
96 | }
97 | (false, typ)
98 | }
99 |
100 | fn generate_struct_from_field_shapes(
101 | ctxt: &mut Ctxt,
102 | _path: &str,
103 | map: &LinkedHashMap<String, Shape>,
104 | ) -> Value {
105 | let mut required: Vec<Value> = Vec::new();
106 | let mut properties = LinkedHashMap::new();
107 |
108 | for (name, typ) in map.iter() {
109 | let (was_optional, collapsed) = collapse_option(typ);
110 |
111 | if !was_optional {
112 | required.push(Value::String(name.clone()));
113 | }
114 |
115 | let field_code = type_from_shape(ctxt, name, collapsed);
116 |
117 | properties.insert(name.to_string(), field_code);
118 | }
119 |
120 | Value::Object(string_hashmap! {
121 | "type" => Value::Str("object"),
122 | "properties" => Value::Object(properties),
123 | "required" => Value::Array(required)
124 | })
125 | }
126 |
--------------------------------------------------------------------------------
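For orientation, here is a minimal usage sketch (not a file from this repository) that drives the generator above through the public `codegen` entry point used by the test suites further down; the sample JSON and the binary scaffolding are illustrative only:

```rust
use json_typegen_shared::{codegen, Options, OutputMode};

fn main() {
    // Options is #[non_exhaustive], so configure it field by field, as the tests do.
    let mut options = Options::default();
    options.output_mode = OutputMode::JsonSchema;

    // Emits a draft-07 schema with "title", "type", "properties" and "required"
    // entries for the struct inferred from the sample.
    let schema = codegen("Point", r#"{ "x": 3, "y": 5 }"#, options).unwrap();
    println!("{}", schema);
}
```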
/json_typegen_shared/src/sql.rs:
--------------------------------------------------------------------------------
1 | use crate::Shape;
2 | use sqlparser::ast::{ColumnDef, ColumnOption, DataType, Statement};
3 | use sqlparser::dialect::GenericDialect;
4 | use sqlparser::parser::Parser;
5 |
6 | pub fn sql_to_shape(input: &str) -> Result<Vec<(String, Shape)>, String> {
7 | let dialect = GenericDialect {};
8 | let ast: Vec<Statement> = Parser::parse_sql(&dialect, input).map_err(|e| e.to_string())?;
9 | Ok(ast
10 | .iter()
11 | .filter_map(|stmt| match stmt {
12 | Statement::CreateTable { name, columns, .. } => Some((
13 | name.to_string(),
14 | Shape::Struct {
15 | fields: columns
16 | .iter()
17 | .map(|column: &ColumnDef| {
18 | (column.name.to_string(), shape_for_column(column))
19 | })
20 | .collect(),
21 | },
22 | )),
23 | _ => None,
24 | })
25 | .collect())
26 | }
27 |
28 | fn shape_for_column(column: &ColumnDef) -> Shape {
29 | let base_shape = match column.data_type {
30 | DataType::Character(_) |
31 | DataType::Char(_) |
32 | DataType::CharacterVarying(_) |
33 | DataType::CharVarying(_) |
34 | DataType::Varchar(_) |
35 | DataType::Nvarchar(_) |
36 | DataType::Text |
37 | DataType::String => Shape::StringT,
38 | // DataType::Uuid => {}
39 | // DataType::CharacterLargeObject(_) => {}
40 | // DataType::CharLargeObject(_) => {}
41 | // DataType::Clob(_) => {}
42 | // DataType::Binary(_) => {}
43 | // DataType::Varbinary(_) => {}
44 | // DataType::Blob(_) => {}
45 | // DataType::Numeric(_) => {}
46 | // DataType::Decimal(_) => {}
47 | // DataType::BigNumeric(_) => {}
48 | // DataType::BigDecimal(_) => {}
49 | // DataType::Dec(_) => {}
50 | // DataType::Float(_) => {}
51 | DataType::TinyInt(_) |
52 | DataType::UnsignedTinyInt(_) |
53 | DataType::SmallInt(_) |
54 | DataType::UnsignedSmallInt(_) |
55 | DataType::MediumInt(_) |
56 | DataType::UnsignedMediumInt(_) |
57 | DataType::Int(_) |
58 | DataType::Integer(_) |
59 | DataType::UnsignedInt(_) |
60 | DataType::UnsignedInteger(_) |
61 | DataType::BigInt(_) |
62 | DataType::UnsignedBigInt(_) => Shape::Integer,
63 | // DataType::Real => {}
64 | // DataType::Double => {}
65 | // DataType::DoublePrecision => {}
66 | DataType::Boolean => Shape::Bool,
67 | DataType::Date |
68 | // DataType::Time(_, _) => {}
69 | DataType::Datetime(_) |
70 | DataType::Timestamp(_, _) => Shape::Opaque("Date".to_string()),
71 | // DataType::Interval => {}
72 | // DataType::JSON => {}
73 | // DataType::Regclass => {}
74 | // DataType::Bytea => {}
75 | // DataType::Custom(ObjectName(idents), _) => {}
76 | // DataType::Array(data_type_opt) => {
77 | //
78 | // }
79 | // DataType::Enum(_) => {}
80 | // DataType::Set(_) => {}
81 | _ => { Shape::Any }
82 | };
83 | let nullable = !column
84 | .options
85 | .iter()
86 | .any(|option| matches!(option.option, ColumnOption::NotNull));
87 | if nullable {
88 | base_shape.into_nullable()
89 | } else {
90 | base_shape
91 | }
92 | }
93 |
94 | #[cfg(test)]
95 | mod tests {
96 | use super::*;
97 | use crate::{Options, OutputMode, codegen_from_shape};
98 |
99 | #[test]
100 | fn test_sql_to_shape() {
101 | let output = sql_to_shape(
102 | r#"
103 | CREATE TABLE users (
104 | id SERIAL PRIMARY KEY,
105 | name VARCHAR(255) NOT NULL,
106 | age INT NOT NULL,
107 | created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
108 | );
109 | "#,
110 | )
111 | .unwrap()
112 | .iter()
113 | .map(|(name, shape)| {
114 | codegen_from_shape(
115 | name,
116 | shape,
117 | Options {
118 | output_mode: OutputMode::ZodSchema,
119 | ..Options::default()
120 | },
121 | )
122 | .unwrap()
123 | })
124 | .collect::<Vec<String>>()
125 | .join("\n");
126 | println!("{}", output);
127 | }
128 | }
129 |
--------------------------------------------------------------------------------
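The nullability rule above (a column without `NOT NULL` gets its base shape wrapped in `Nullable`) can be pinned down with a hypothetical extra test in the same module; the table and column names are made up for illustration:

```rust
#[test]
fn nullable_columns_become_nullable_shapes() {
    let shapes = sql_to_shape("CREATE TABLE t (id INT NOT NULL, email VARCHAR(64));").unwrap();
    let (_name, shape) = &shapes[0];
    if let Shape::Struct { fields } = shape {
        // A NOT NULL column maps directly to its base shape...
        assert_eq!(fields.get("id"), Some(&Shape::Integer));
        // ...while a column without NOT NULL is wrapped in Nullable.
        assert_eq!(
            fields.get("email"),
            Some(&Shape::Nullable(Box::new(Shape::StringT)))
        );
    } else {
        panic!("expected a struct shape");
    }
}
```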
/json_typegen_shared/src/generation/zod_schema.rs:
--------------------------------------------------------------------------------
1 | use linked_hash_map::LinkedHashMap;
2 |
3 | use crate::generation::typescript::{collapse_option, is_ts_identifier};
4 | use crate::options::Options;
5 | use crate::shape::{self, Shape, common_shape};
6 | use crate::util::lower_camel_case;
7 |
8 | pub struct Ctxt {
9 | options: Options,
10 | indent_level: usize,
11 | }
12 |
13 | pub type Code = String;
14 |
15 | pub fn zod_schema(name: &str, shape: &Shape, options: Options) -> Code {
16 | let mut ctxt = Ctxt {
17 | options,
18 | indent_level: 1,
19 | };
20 |
21 | let code = type_from_shape(&mut ctxt, shape);
22 | let mut schema_name = lower_camel_case(name);
23 | schema_name.push_str("Schema");
24 |
25 | format!("export const {} = {};\n\n", schema_name, code)
26 | }
27 |
28 | fn type_from_shape(ctxt: &mut Ctxt, shape: &Shape) -> Code {
29 | use crate::shape::Shape::*;
30 | match shape {
31 | Null | Any | Bottom => "z.unknown()".into(),
32 | Bool => "z.boolean()".into(),
33 | StringT => "z.string()".into(),
34 | Integer => "z.number()".into(),
35 | Floating => "z.number()".into(),
36 | Tuple(shapes, _n) => {
37 | let folded = shape::fold_shapes(shapes.clone());
38 | if folded == Any && shapes.iter().any(|s| s != &Any) {
39 | generate_tuple_type(ctxt, shapes)
40 | } else {
41 | generate_vec_type(ctxt, &folded)
42 | }
43 | }
44 | VecT { elem_type: e } => generate_vec_type(ctxt, e),
45 | Struct { fields } => {
46 | if ctxt
47 | .options
48 | .infer_map_threshold
49 | .is_some_and(|lim| fields.len() > lim)
50 | {
51 | let inner = fields
52 | .into_iter()
53 | .map(|(_, value)| value.clone())
54 | .fold(Shape::Bottom, common_shape);
55 | generate_map_type(ctxt, &inner)
56 | } else {
57 | generate_struct_from_field_shapes(ctxt, fields)
58 | }
59 | }
60 | MapT { val_type: v } => generate_map_type(ctxt, v),
61 | Opaque(t) => t.clone(),
62 | Optional(e) => {
63 | let inner = type_from_shape(ctxt, e);
64 | if ctxt.options.use_default_for_missing_fields {
65 | inner
66 | } else {
67 | format!("{}.optional()", inner)
68 | }
69 | }
70 | Nullable(e) => {
71 | let inner = type_from_shape(ctxt, e);
72 | if ctxt.options.use_default_for_missing_fields {
73 | inner
74 | } else {
75 | format!("{}.nullable()", inner)
76 | }
77 | }
78 | }
79 | }
80 |
81 | fn generate_vec_type(ctxt: &mut Ctxt, shape: &Shape) -> Code {
82 | let inner = type_from_shape(ctxt, shape);
83 | format!("{}.array()", inner)
84 | }
85 |
86 | fn generate_map_type(ctxt: &mut Ctxt, shape: &Shape) -> Code {
87 | let (_was_optional, collapsed) = collapse_option(shape);
88 | let inner = type_from_shape(ctxt, collapsed);
89 | format!("z.record(z.string(), {})", inner)
90 | }
91 |
92 | fn generate_tuple_type(ctxt: &mut Ctxt, shapes: &[Shape]) -> Code {
93 | let mut types = Vec::new();
94 |
95 | for shape in shapes {
96 | let typ = type_from_shape(ctxt, shape);
97 | types.push(typ);
98 | }
99 |
100 | format!("z.tuple([{}])", types.join(", "))
101 | }
102 |
103 | fn generate_struct_from_field_shapes(ctxt: &mut Ctxt, map: &LinkedHashMap<String, Shape>) -> Code {
104 | let fields: Vec<Code> = map
105 | .iter()
106 | .map(|(name, typ)| {
107 | ctxt.indent_level += 1;
108 | let field_type = type_from_shape(ctxt, typ);
109 | ctxt.indent_level -= 1;
110 |
111 | let escape_name = !is_ts_identifier(name);
112 |
113 | format!(
114 | "{}{}{}{}: {};",
115 | " ".repeat(ctxt.indent_level),
116 | if escape_name { "\"" } else { "" },
117 | name,
118 | if escape_name { "\"" } else { "" },
119 | field_type
120 | )
121 | })
122 | .collect();
123 |
124 | let mut code = "z.object({\n".to_string();
125 |
126 | if !fields.is_empty() {
127 | code += &fields.join("\n");
128 | code += "\n";
129 | }
130 | code += &" ".repeat(ctxt.indent_level - 1);
131 | code += "})";
132 |
133 | code
134 | }
135 |
--------------------------------------------------------------------------------
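A hypothetical integration test (in the spirit of the ones under tests/) showing how the Optional and Nullable branches above surface in the generated schema; the input JSON is made up:

```rust
use json_typegen_shared::{codegen, Options, OutputMode};

#[test]
fn zod_optional_and_nullable_fields() {
    let mut options = Options::default();
    options.output_mode = OutputMode::ZodSchema;

    // "a" is null in one sample, "b" is missing from one sample.
    let code = codegen("Samples", r#"[{ "a": 1 }, { "a": null, "b": 2 }]"#, options).unwrap();

    assert!(code.contains("z.number().nullable()")); // inferred Nullable
    assert!(code.contains("z.number().optional()")); // inferred Optional
}
```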
/json_typegen_shared/src/hints.rs:
--------------------------------------------------------------------------------
1 | #![allow(dead_code)]
2 |
3 | use std::borrow::Cow;
4 | use std::cell::Cell;
5 |
6 | #[derive(Debug, PartialEq, Clone)]
7 | pub enum HintType {
8 | OpaqueType(String),
9 | MapType(String),
10 | TypeName(String),
11 | }
12 |
13 | #[derive(Debug, PartialEq, Clone)]
14 | pub struct Hint {
15 | pub hint_type: HintType,
16 | pub used: Cell<bool>,
17 | }
18 |
19 | impl Hint {
20 | pub fn new(hint_type: HintType) -> Self {
21 | Hint {
22 | hint_type,
23 | used: Cell::new(false),
24 | }
25 | }
26 |
27 | pub fn default_map() -> Self {
28 | Hint::new(HintType::MapType("::std::collections::HashMap".into()))
29 | }
30 |
31 | pub fn opaque_type(name: impl Into<String>) -> Self {
32 | Hint::new(HintType::OpaqueType(name.into()))
33 | }
34 |
35 | pub fn type_name(name: impl Into<String>) -> Self {
36 | Hint::new(HintType::TypeName(name.into()))
37 | }
38 | }
39 |
40 | pub struct Hints<'a> {
41 | pub hints: Vec<(Cow<'a, [&'a str]>, &'a Hint)>,
42 | pub applicable: Vec<&'a Hint>,
43 | }
44 |
45 | fn is_index(s: &str) -> bool {
46 | s == "-" || s.bytes().all(|b| (b'0'..=b'9').contains(&b))
47 | }
48 |
49 | impl<'a> Hints<'a> {
50 | pub fn new() -> Self {
51 | Hints {
52 | hints: Vec::new(),
53 | applicable: Vec::new(),
54 | }
55 | }
56 |
57 | pub fn add(&mut self, pointer: &'a str, hint: &'a Hint) {
58 | if pointer.is_empty() {
59 | self.applicable.push(hint);
60 | } else {
61 | if !pointer.starts_with('/') {
62 | panic!(
63 | "Invalid JSON pointer: {:?}\n{}",
64 | pointer, "A pointer not referring to the root has to start with '/'",
65 | );
66 | }
67 | let tokens: Vec<_> = pointer.split('/').skip(1).collect();
68 | let pair: (Cow<[&str]>, &Hint) = (tokens.into(), hint);
69 | self.hints.push(pair);
70 | }
71 | }
72 |
73 | /// ([/a/b, /a/c, /d/e], "a") -> [/b, /c]
74 | pub fn step_field(&self, name: &str) -> Hints {
75 | self.step(|first| first == "-" || first == name)
76 | }
77 |
78 | /// [/1/b, /a/c, /-/e] -> [/b, /c, /e]
79 | pub fn step_any(&self) -> Hints {
80 | self.step(|_first| true)
81 | }
82 |
83 | /// [/1/b, /a/c, /-/e] -> [/b, /e]
84 | pub fn step_array(&self) -> Hints {
85 | self.step(is_index)
86 | }
87 |
88 | /// ([/2/b, /a/c, /-/e, /3/d], 3) -> [/e, /d]
89 | pub fn step_index(&self, index: usize) -> Hints {
90 | let i_str = &index.to_string();
91 | self.step(|first| first == "-" || first == i_str)
92 | }
93 |
94 | fn step<F: Fn(&str) -> bool>(&self, pred: F) -> Hints {
95 | let mut filtered = Vec::new();
96 | let mut applicable = Vec::new();
97 |
98 | for &(ref path, hint) in self.hints.iter() {
99 | if pred(path[0]) {
100 | let stepped: &[&str] = &path[1..];
101 | if stepped.is_empty() {
102 | applicable.push(hint);
103 | } else {
104 | filtered.push((stepped.into(), hint));
105 | }
106 | }
107 | }
108 |
109 | Hints {
110 | hints: filtered,
111 | applicable,
112 | }
113 | }
114 | }
115 |
116 | #[cfg(test)]
117 | mod tests {
118 | use super::*;
119 |
120 | #[test]
121 | fn basic_pointers() {
122 | let hint = Hint::default_map();
123 | let mut hints = Hints::new();
124 | hints.add("/a/b", &hint);
125 | hints.add("/foo", &hint);
126 | hints.add("/foo", &hint);
127 | hints.add("", &hint);
128 |
129 | assert_eq!(hints.hints.len(), 3);
130 | assert_eq!(hints.applicable.len(), 1);
131 | }
132 |
133 | #[test]
134 | #[should_panic]
135 | fn invalid_pointer() {
136 | let hint = Hint::default_map();
137 | let mut hints = Hints::new();
138 | hints.add("foo", &hint);
139 | }
140 |
141 | #[test]
142 | fn step_field() {
143 | let hint = Hint::default_map();
144 | let mut hints = Hints::new();
145 | hints.add("/a/b", &hint);
146 | hints.add("/b/c", &hint);
147 | hints.add("/b/", &hint);
148 | hints.add("/b", &hint);
149 |
150 | let hints = hints.step_field("b");
151 |
152 | assert_eq!(hints.hints.len(), 2);
153 | assert_eq!(hints.applicable.len(), 1);
154 | }
155 | }
156 |
--------------------------------------------------------------------------------
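The `step_*` doc comments above are easiest to follow with a concrete trace; a hypothetical extra test for the same module (the opaque type string is just an example):

```rust
#[test]
fn step_traces_a_json_pointer() {
    let hint = Hint::opaque_type("chrono::DateTime<chrono::Utc>");
    let mut hints = Hints::new();
    hints.add("/items/-/created_at", &hint);

    // /items/-/created_at --step_field("items")--> /-/created_at (not yet applicable)
    let at_items = hints.step_field("items");
    assert_eq!(at_items.hints.len(), 1);
    assert_eq!(at_items.applicable.len(), 0);

    // The "-" token matches any array index.
    let at_element = at_items.step_index(3);

    // /created_at --step_field("created_at")--> pointer exhausted, hint applies here.
    let at_field = at_element.step_field("created_at");
    assert_eq!(at_field.applicable.len(), 1);
}
```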
/json_typegen_shared/src/util.rs:
--------------------------------------------------------------------------------
1 | pub fn camel_case(name: &str) -> String {
2 | let mut s = String::new();
3 | let mut last = ' ';
4 | for c in name.chars().skip_while(|c| !c.is_ascii_alphanumeric()) {
5 | if !c.is_ascii_alphanumeric() {
6 | last = c;
7 | continue;
8 | }
9 | if (last.is_ascii() && !last.is_ascii_alphanumeric() && c.is_ascii_alphanumeric())
10 | || (last.is_ascii_lowercase() && c.is_ascii_uppercase())
11 | {
12 | s.push(c.to_ascii_uppercase());
13 | } else if last.is_ascii_alphabetic() {
14 | s.push(c.to_ascii_lowercase());
15 | } else {
16 | s.push(c);
17 | }
18 | last = c;
19 | }
20 | s
21 | }
22 |
23 | pub fn snake_case(name: &str) -> String {
24 | sep_case(name, '_')
25 | }
26 |
27 | pub fn kebab_case(name: &str) -> String {
28 | sep_case(name, '-')
29 | }
30 |
31 | fn sep_case(name: &str, separator: char) -> String {
32 | let mut s = String::new();
33 | let mut last = 'A';
34 | for c in name.chars().skip_while(|c| !c.is_ascii_alphanumeric()) {
35 | if !c.is_ascii_alphanumeric() {
36 | last = c;
37 | continue;
38 | }
39 | if (last.is_ascii() && !last.is_ascii_alphanumeric() && c.is_ascii_alphanumeric())
40 | || (last.is_ascii_lowercase() && c.is_ascii_uppercase())
41 | {
42 | s.push(separator);
43 | }
44 | s.push(c.to_ascii_lowercase());
45 | last = c;
46 | }
47 | s
48 | }
49 |
50 | pub fn type_case(name: &str) -> String {
51 | let s = camel_case(name);
52 | uppercase_first_letter(&s)
53 | }
54 |
55 | pub fn lower_camel_case(name: &str) -> String {
56 | let s = camel_case(name);
57 | lowercase_first_letter(&s)
58 | }
59 |
60 | // from http://stackoverflow.com/questions/38406793/.../38406885
61 | fn uppercase_first_letter(s: &str) -> String {
62 | let mut c = s.chars();
63 | match c.next() {
64 | None => String::new(),
65 | Some(f) => f.to_ascii_uppercase().to_string() + c.as_str(),
66 | }
67 | }
68 |
69 | fn lowercase_first_letter(s: &str) -> String {
70 | let mut c = s.chars();
71 | match c.next() {
72 | None => String::new(),
73 | Some(f) => f.to_ascii_lowercase().to_string() + c.as_str(),
74 | }
75 | }
76 |
77 | // based on hashmap! macro from maplit crate
78 | macro_rules! string_hashmap {
79 | ($($key:expr => $value:expr,)+) => { string_hashmap!($($key => $value),+) };
80 | ($($key:expr => $value:expr),*) => {
81 | {
82 | let mut _map = ::linked_hash_map::LinkedHashMap::new();
83 | $(
84 | _map.insert($key.to_string(), $value);
85 | )*
86 | _map
87 | }
88 | };
89 | }
90 |
91 | pub(crate) use string_hashmap;
92 |
93 | #[cfg(test)]
94 | mod tests {
95 | use super::*;
96 |
97 | #[test]
98 | fn test_camel_case() {
99 | assert_eq!("FooBar", &camel_case("FooBar"));
100 | assert_eq!("FooBar", &camel_case("fooBar"));
101 | assert_eq!("FooBar", &camel_case("foo bar"));
102 | assert_eq!("FooBar", &camel_case("foo_bar"));
103 | assert_eq!("FooBar", &camel_case("_foo_bar"));
104 | assert_eq!("FooBar", &camel_case("åfoo_bar"));
105 | assert_eq!("FooBar", &camel_case("foåo_bar"));
106 | assert_eq!("FooBar", &camel_case("FOO_BAR"));
107 |
108 | assert_eq!("Foo1bar", &camel_case("Foo1bar"));
109 | assert_eq!("Foo2bar", &camel_case("foo_2bar"));
110 | assert_eq!("Foo3Bar", &camel_case("Foo3Bar"));
111 | assert_eq!("Foo4Bar", &camel_case("foo4_bar"));
112 | assert_eq!("1920x1080", &camel_case("1920x1080"));
113 | assert_eq!("19201080", &camel_case("1920*1080"));
114 | }
115 |
116 | #[test]
117 | fn test_snake_case() {
118 | assert_eq!("foo_bar", &snake_case("FooBar"));
119 | assert_eq!("foo_bar", &snake_case("fooBar"));
120 | assert_eq!("foo_bar", &snake_case("foo bar"));
121 | assert_eq!("foo_bar", &snake_case("foo_bar"));
122 | assert_eq!("foo_bar", &snake_case("_foo_bar"));
123 | assert_eq!("foo_bar", &snake_case("åfoo_bar"));
124 | assert_eq!("foo_bar", &snake_case("foåo_bar"));
125 | assert_eq!("foo_bar", &snake_case("FOO_BAR"));
126 |
127 | assert_eq!("foo_5bar", &snake_case("foo_5bar"));
128 | assert_eq!("foo6_bar", &snake_case("foo6_bar"));
129 | assert_eq!("1920x1080", &snake_case("1920x1080"));
130 | assert_eq!("1920_1080", &snake_case("1920*1080"));
131 | }
132 | }
133 |
--------------------------------------------------------------------------------
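`kebab_case`, `type_case` and `lower_camel_case` are not exercised by the tests above; a hypothetical addition covering them:

```rust
#[test]
fn test_other_cases() {
    assert_eq!("foo-bar", &kebab_case("FooBar"));
    assert_eq!("FooBar", &type_case("foo_bar"));
    assert_eq!("fooBar", &lower_camel_case("foo_bar"));
}
```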
/json_typegen_shared/tests/python_generation.rs:
--------------------------------------------------------------------------------
1 | use json_typegen_shared::{ImportStyle, Options, OutputMode, codegen};
2 |
3 | /// Function to test that the generated code matches the expected output (string equality)
4 | fn code_output_test(name: &str, input: &str, expected: &str) {
5 | let mut options = Options::default();
6 | options.import_style = ImportStyle::AssumeExisting;
7 | options.output_mode = OutputMode::PythonPydantic;
8 | let res = codegen(name, input, options);
9 | let output = res.unwrap();
10 | let expected = &expected[1..];
11 | assert_eq!(
12 | output, expected,
13 | "\n\nUnexpected output code:\n input: {}\n output:\n{}\n expected: {}",
14 | input, output, expected
15 | );
16 | }
17 |
18 | #[test]
19 | fn empty_object() {
20 | code_output_test(
21 | "Root",
22 | r##"
23 | {}
24 | "##,
25 | r##"
26 | class Root(BaseModel):
27 | pass
28 | "##,
29 | );
30 | }
31 |
32 | #[test]
33 | fn list_of_numbers() {
34 | code_output_test(
35 | "Numbers",
36 | r##"
37 | [1, 2, 3]
38 | "##,
39 | "
40 | Numbers = list[int]
41 | ",
42 | );
43 | }
44 |
45 | #[test]
46 | fn point() {
47 | code_output_test(
48 | "Point",
49 | r##"
50 | {
51 | "x": 2,
52 | "y": 3
53 | }
54 | "##,
55 | r##"
56 | class Point(BaseModel):
57 | x: int
58 | y: int
59 | "##,
60 | );
61 | }
62 |
63 | #[test]
64 | fn optionals() {
65 | code_output_test(
66 | "Opts",
67 | r##"
68 | [
69 | {
70 | "in_both": 5,
71 | "missing": 5,
72 | "has_null": 5
73 | },
74 | {
75 | "in_both": 5,
76 | "has_null": null,
77 | "added": 5
78 | }
79 | ]
80 | "##,
81 | r##"
82 | class Opt(BaseModel):
83 | in_both: int
84 | missing: Optional[int]
85 | has_null: Optional[int]
86 | added: Optional[int]
87 |
88 |
89 | Opts = list[Opt]
90 | "##,
91 | );
92 | }
93 |
94 | #[test]
95 | fn fallback() {
96 | code_output_test(
97 | "FallbackExamples",
98 | r##"
99 | [
100 | {
101 | "only_null": null,
102 | "conflicting": 5,
103 | "empty_array": []
104 | },
105 | {
106 | "only_null": null,
107 | "conflicting": "five",
108 | "empty_array": []
109 | }
110 | ]
111 | "##,
112 | r##"
113 | class FallbackExample(BaseModel):
114 | only_null: Any
115 | conflicting: Any
116 | empty_array: list[Any]
117 |
118 |
119 | FallbackExamples = list[FallbackExample]
120 | "##,
121 | );
122 | }
123 |
124 | #[test]
125 | fn nesting() {
126 | code_output_test(
127 | "NestedTypes",
128 | r##"
129 | [
130 | {
131 | "nested": {
132 | "a": 5,
133 | "doubly_nested": { "c": 10 }
134 | },
135 | "in_array": [{ "b": 5 }]
136 | }
137 | ]
138 | "##,
139 | r##"
140 | class DoublyNested(BaseModel):
141 | c: int
142 |
143 |
144 | class Nested(BaseModel):
145 | a: int
146 | doubly_nested: DoublyNested
147 |
148 |
149 | class InArray(BaseModel):
150 | b: int
151 |
152 |
153 | class NestedType(BaseModel):
154 | nested: Nested
155 | in_array: list[InArray]
156 |
157 |
158 | NestedTypes = list[NestedType]
159 | "##,
160 | );
161 | }
162 |
163 | #[test]
164 | fn tuple() {
165 | code_output_test(
166 | "Pagination",
167 | r##"
168 | [
169 | {
170 | "pages": 1,
171 | "items": 3
172 | },
173 | [
174 | {
175 | "name": "John"
176 | },
177 | {
178 | "name": "James"
179 | },
180 | {
181 | "name": "Jake"
182 | }
183 | ]
184 | ]
185 | "##,
186 | r##"
187 | class Pagination2(BaseModel):
188 | pages: int
189 | items: int
190 |
191 |
192 | class Pagination3(BaseModel):
193 | name: str
194 |
195 |
196 | Pagination = tuple[Pagination2, list[Pagination3]]
197 | "##,
198 | );
199 | }
200 |
201 | #[test]
202 | fn rename() {
203 | code_output_test(
204 | "Renamed",
205 | r##"
206 | {
207 | "class": 5
208 | }
209 | "##,
210 | r##"
211 | class Renamed(BaseModel):
212 | class_field: int = Field(alias="class")
213 | "##,
214 | );
215 | }
216 |
--------------------------------------------------------------------------------
/json_typegen_shared/src/options.rs:
--------------------------------------------------------------------------------
1 | use crate::hints::Hint;
2 |
3 | /// Options for the code generation
4 | ///
5 | /// Construct with `Options::default()`, and change any settings you care about.
6 | #[non_exhaustive]
7 | #[derive(Debug, PartialEq, Clone)]
8 | pub struct Options {
9 | pub output_mode: OutputMode,
10 | pub input_mode: InputMode,
11 | pub use_default_for_missing_fields: bool,
12 | pub deny_unknown_fields: bool,
13 | pub(crate) allow_option_vec: bool,
14 | pub type_visibility: String,
15 | pub field_visibility: Option<String>,
16 | pub derives: String,
17 | pub property_name_format: Option<StringTransform>,
18 | pub(crate) hints: Vec<(String, Hint)>,
19 | pub unwrap: String,
20 | pub import_style: ImportStyle,
21 | pub collect_additional: bool,
22 | pub infer_map_threshold: Option<usize>,
23 | }
24 |
25 | impl Default for Options {
26 | fn default() -> Options {
27 | Options {
28 | output_mode: OutputMode::Rust,
29 | input_mode: InputMode::Json,
30 | use_default_for_missing_fields: false,
31 | deny_unknown_fields: false,
32 | allow_option_vec: false,
33 | type_visibility: "pub".into(),
34 | field_visibility: Some("pub".into()),
35 | derives: "Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize".into(),
36 | property_name_format: None,
37 | hints: Vec::new(),
38 | unwrap: "".into(),
39 | import_style: ImportStyle::AddImports,
40 | collect_additional: false,
41 | infer_map_threshold: None,
42 | }
43 | }
44 | }
45 |
46 | #[cfg(feature = "option-parsing")]
47 | impl Options {
48 | pub(crate) fn macro_default() -> Options {
49 | Options {
50 | import_style: ImportStyle::QualifiedPaths,
51 | ..Options::default()
52 | }
53 | }
54 | }
55 |
56 | /// How imports/external types should be handled by code generation
57 | #[non_exhaustive]
58 | #[derive(Debug, PartialEq, Clone)]
59 | pub enum ImportStyle {
60 | /// Add import/use statements for any external types used
61 | AddImports,
62 | /// Assume import/use statements already exist where the generated code will be inserted
63 | AssumeExisting,
64 | /// Use fully qualified paths for any external type used
65 | QualifiedPaths,
66 | }
67 |
68 | impl ImportStyle {
69 | pub fn parse(s: &str) -> Option<ImportStyle> {
70 | match s {
71 | "add_imports" => Some(ImportStyle::AddImports),
72 | "assume_existing" => Some(ImportStyle::AssumeExisting),
73 | "qualified_paths" => Some(ImportStyle::QualifiedPaths),
74 | _ => None,
75 | }
76 | }
77 | }
78 |
79 | #[non_exhaustive]
80 | #[derive(Debug, PartialEq, Clone)]
81 | pub enum OutputMode {
82 | Rust,
83 | Typescript,
84 | TypescriptTypeAlias,
85 | KotlinJackson,
86 | KotlinKotlinx,
87 | PythonPydantic,
88 | JsonSchema,
89 | ZodSchema,
90 | Shape,
91 | }
92 |
93 | impl OutputMode {
94 | pub fn parse(s: &str) -> Option<OutputMode> {
95 | match s {
96 | "rust" => Some(OutputMode::Rust),
97 | "typescript" => Some(OutputMode::Typescript),
98 | "typescript/typealias" => Some(OutputMode::TypescriptTypeAlias),
99 | "kotlin" => Some(OutputMode::KotlinJackson),
100 | "kotlin/jackson" => Some(OutputMode::KotlinJackson),
101 | "kotlin/kotlinx" => Some(OutputMode::KotlinKotlinx),
102 | "python" => Some(OutputMode::PythonPydantic),
103 | "json_schema" => Some(OutputMode::JsonSchema),
104 | "zod" => Some(OutputMode::ZodSchema),
105 | "shape" => Some(OutputMode::Shape),
106 | _ => None,
107 | }
108 | }
109 | }
110 |
111 | #[non_exhaustive]
112 | #[derive(Debug, PartialEq, Clone)]
113 | pub enum InputMode {
114 | Json,
115 | Sql,
116 | }
117 |
118 | impl InputMode {
119 | pub fn parse(s: &str) -> Option<InputMode> {
120 | match s {
121 | "json" => Some(InputMode::Json),
122 | "sql" => Some(InputMode::Sql),
123 | _ => None,
124 | }
125 | }
126 | }
127 |
128 | // https://serde.rs/container-attrs.html rename_all:
129 | // "lowercase", "UPPERCASE", "PascalCase", "camelCase", "snake_case",
130 | // "SCREAMING_SNAKE_CASE", "kebab-case", "SCREAMING-KEBAB-CASE"
131 |
132 | // Jackson JsonNaming PropertyNamingStrategy:
133 | // KebabCaseStrategy, LowerCaseStrategy, SnakeCaseStrategy, UpperCamelCaseStrategy
134 | #[allow(clippy::enum_variant_names)]
135 | #[derive(Debug, PartialEq, Clone)]
136 | pub enum StringTransform {
137 | LowerCase,
138 | UpperCase,
139 | PascalCase,
140 | CamelCase,
141 | SnakeCase,
142 | ScreamingSnakeCase,
143 | KebabCase,
144 | ScreamingKebabCase,
145 | }
146 |
147 | impl StringTransform {
148 | pub fn parse(s: &str) -> Option<StringTransform> {
149 | match s {
150 | "lowercase" => Some(StringTransform::LowerCase),
151 | "uppercase" | "UPPERCASE" => Some(StringTransform::UpperCase),
152 | "pascalcase" | "uppercamelcase" | "PascalCase" => Some(StringTransform::PascalCase),
153 | "camelcase" | "camelCase" => Some(StringTransform::CamelCase),
154 | "snakecase" | "snake_case" => Some(StringTransform::SnakeCase),
155 | "screamingsnakecase" | "SCREAMING_SNAKE_CASE" => {
156 | Some(StringTransform::ScreamingSnakeCase)
157 | }
158 | "kebabcase" | "kebab-case" => Some(StringTransform::KebabCase),
159 | "screamingkebabcase" | "SCREAMING-KEBAB-CASE" => {
160 | Some(StringTransform::ScreamingKebabCase)
161 | }
162 | _ => None,
163 | }
164 | }
165 | }
166 |
--------------------------------------------------------------------------------
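Because `Options` is `#[non_exhaustive]`, code outside the crate configures it the way the integration tests below do: start from `Options::default()` and overwrite individual public fields. A sketch with hypothetical values, assuming `StringTransform` is re-exported next to `Options`:

```rust
use json_typegen_shared::{Options, OutputMode, StringTransform};

fn configure() -> Options {
    let mut options = Options::default();
    options.output_mode = OutputMode::parse("kotlin/jackson").expect("known output mode");
    options.property_name_format = StringTransform::parse("SCREAMING_SNAKE_CASE");
    options.infer_map_threshold = Some(20); // treat very wide objects as map types
    options
}
```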
/json_typegen_shared/src/generation/serde_case.rs:
--------------------------------------------------------------------------------
1 | //! Code to convert the Rust-styled field/variant (e.g. `my_field`, `MyType`) to the
2 | //! case of the source (e.g. `my-field`, `MY_FIELD`).
3 | //!
4 | //! Manually vendored from serde_derive/internals
5 | //!
6 | #![allow(dead_code)]
7 | #![allow(clippy::upper_case_acronyms)]
8 |
9 | // See https://users.rust-lang.org/t/psa-dealing-with-warning-unused-import-std-ascii-asciiext-in-today-s-nightly/13726
10 | #[allow(deprecated, unused_imports)]
11 | use std::ascii::AsciiExt;
12 |
13 | use std::str::FromStr;
14 |
15 | use self::RenameRule::*;
16 |
17 | /// The different possible ways to change case of fields in a struct, or variants in an enum.
18 | #[derive(Copy, Clone, PartialEq)]
19 | pub enum RenameRule {
20 | /// Don't apply a default rename rule.
21 | None,
22 | /// Rename direct children to "lowercase" style.
23 | LowerCase,
24 | /// Rename direct children to "UPPERCASE" style.
25 | UPPERCASE,
26 | /// Rename direct children to "PascalCase" style, as typically used for
27 | /// enum variants.
28 | PascalCase,
29 | /// Rename direct children to "camelCase" style.
30 | CamelCase,
31 | /// Rename direct children to "snake_case" style, as commonly used for
32 | /// fields.
33 | SnakeCase,
34 | /// Rename direct children to "SCREAMING_SNAKE_CASE" style, as commonly
35 | /// used for constants.
36 | ScreamingSnakeCase,
37 | /// Rename direct children to "kebab-case" style.
38 | KebabCase,
39 | /// Rename direct children to "SCREAMING-KEBAB-CASE" style.
40 | ScreamingKebabCase,
41 | }
42 |
43 | impl RenameRule {
44 | /// Apply a renaming rule to an enum variant, returning the version expected in the source.
45 | pub fn apply_to_variant(&self, variant: &str) -> String {
46 | match *self {
47 | None | PascalCase => variant.to_owned(),
48 | LowerCase => variant.to_ascii_lowercase(),
49 | UPPERCASE => variant.to_ascii_uppercase(),
50 | CamelCase => variant[..1].to_ascii_lowercase() + &variant[1..],
51 | SnakeCase => {
52 | let mut snake = String::new();
53 | for (i, ch) in variant.char_indices() {
54 | if i > 0 && ch.is_ascii_uppercase() {
55 | snake.push('_');
56 | }
57 | snake.push(ch.to_ascii_lowercase());
58 | }
59 | snake
60 | }
61 | ScreamingSnakeCase => SnakeCase.apply_to_variant(variant).to_ascii_uppercase(),
62 | KebabCase => SnakeCase.apply_to_variant(variant).replace('_', "-"),
63 | ScreamingKebabCase => ScreamingSnakeCase
64 | .apply_to_variant(variant)
65 | .replace('_', "-"),
66 | }
67 | }
68 |
69 | /// Apply a renaming rule to a struct field, returning the version expected in the source.
70 | pub fn apply_to_field(&self, field: &str) -> String {
71 | match *self {
72 | None | LowerCase | SnakeCase => field.to_owned(),
73 | UPPERCASE => field.to_ascii_uppercase(),
74 | PascalCase => {
75 | let mut pascal = String::new();
76 | let mut capitalize = true;
77 | for ch in field.chars() {
78 | if ch == '_' {
79 | capitalize = true;
80 | } else if capitalize {
81 | pascal.push(ch.to_ascii_uppercase());
82 | capitalize = false;
83 | } else {
84 | pascal.push(ch);
85 | }
86 | }
87 | pascal
88 | }
89 | CamelCase => {
90 | let pascal = PascalCase.apply_to_field(field);
91 | pascal[..1].to_ascii_lowercase() + &pascal[1..]
92 | }
93 | ScreamingSnakeCase => field.to_ascii_uppercase(),
94 | KebabCase => field.replace('_', "-"),
95 | ScreamingKebabCase => ScreamingSnakeCase.apply_to_field(field).replace('_', "-"),
96 | }
97 | }
98 | }
99 |
100 | impl FromStr for RenameRule {
101 | type Err = ();
102 |
103 | fn from_str(rename_all_str: &str) -> Result<Self, Self::Err> {
104 | match rename_all_str {
105 | "lowercase" => Ok(LowerCase),
106 | "UPPERCASE" => Ok(UPPERCASE),
107 | "PascalCase" => Ok(PascalCase),
108 | "camelCase" => Ok(CamelCase),
109 | "snake_case" => Ok(SnakeCase),
110 | "SCREAMING_SNAKE_CASE" => Ok(ScreamingSnakeCase),
111 | "kebab-case" => Ok(KebabCase),
112 | "SCREAMING-KEBAB-CASE" => Ok(ScreamingKebabCase),
113 | _ => Err(()),
114 | }
115 | }
116 | }
117 |
118 | #[test]
119 | fn rename_variants() {
120 | for &(original, lower, upper, camel, snake, screaming, kebab, screaming_kebab) in &[
121 | (
122 | "Outcome", "outcome", "OUTCOME", "outcome", "outcome", "OUTCOME", "outcome", "OUTCOME",
123 | ),
124 | (
125 | "VeryTasty",
126 | "verytasty",
127 | "VERYTASTY",
128 | "veryTasty",
129 | "very_tasty",
130 | "VERY_TASTY",
131 | "very-tasty",
132 | "VERY-TASTY",
133 | ),
134 | ("A", "a", "A", "a", "a", "A", "a", "A"),
135 | ("Z42", "z42", "Z42", "z42", "z42", "Z42", "z42", "Z42"),
136 | ] {
137 | assert_eq!(None.apply_to_variant(original), original);
138 | assert_eq!(LowerCase.apply_to_variant(original), lower);
139 | assert_eq!(UPPERCASE.apply_to_variant(original), upper);
140 | assert_eq!(PascalCase.apply_to_variant(original), original);
141 | assert_eq!(CamelCase.apply_to_variant(original), camel);
142 | assert_eq!(SnakeCase.apply_to_variant(original), snake);
143 | assert_eq!(ScreamingSnakeCase.apply_to_variant(original), screaming);
144 | assert_eq!(KebabCase.apply_to_variant(original), kebab);
145 | assert_eq!(
146 | ScreamingKebabCase.apply_to_variant(original),
147 | screaming_kebab
148 | );
149 | }
150 | }
151 |
152 | #[test]
153 | fn rename_fields() {
154 | for &(original, upper, pascal, camel, screaming, kebab, screaming_kebab) in &[
155 | (
156 | "outcome", "OUTCOME", "Outcome", "outcome", "OUTCOME", "outcome", "OUTCOME",
157 | ),
158 | (
159 | "very_tasty",
160 | "VERY_TASTY",
161 | "VeryTasty",
162 | "veryTasty",
163 | "VERY_TASTY",
164 | "very-tasty",
165 | "VERY-TASTY",
166 | ),
167 | ("a", "A", "A", "a", "A", "a", "A"),
168 | ("z42", "Z42", "Z42", "z42", "Z42", "z42", "Z42"),
169 | ] {
170 | assert_eq!(None.apply_to_field(original), original);
171 | assert_eq!(UPPERCASE.apply_to_field(original), upper);
172 | assert_eq!(PascalCase.apply_to_field(original), pascal);
173 | assert_eq!(CamelCase.apply_to_field(original), camel);
174 | assert_eq!(SnakeCase.apply_to_field(original), original);
175 | assert_eq!(ScreamingSnakeCase.apply_to_field(original), screaming);
176 | assert_eq!(KebabCase.apply_to_field(original), kebab);
177 | assert_eq!(ScreamingKebabCase.apply_to_field(original), screaming_kebab);
178 | }
179 | }
180 |
--------------------------------------------------------------------------------
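A hypothetical small addition to the tests in this file, showing the direction of the mapping (Rust-style name in, source-style name out):

```rust
#[test]
fn rename_rule_example() {
    let rule = RenameRule::from_str("SCREAMING-KEBAB-CASE").unwrap();
    assert_eq!(rule.apply_to_field("very_tasty"), "VERY-TASTY");
    assert_eq!(rule.apply_to_variant("VeryTasty"), "VERY-TASTY");
}
```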
/json_typegen_shared/tests/with_defaults.rs:
--------------------------------------------------------------------------------
1 | use testsyn::{Item, parse_str};
2 |
3 | use json_typegen_shared::{ImportStyle, Options, codegen};
4 |
5 | /// Function to test AST equality, not string equality
6 | fn code_output_test(name: &str, input: &str, expected: &str) {
7 | let mut options = Options::default();
8 | options.import_style = ImportStyle::AssumeExisting;
9 | let res = codegen(name, input, options);
10 | let output = res.unwrap();
11 | assert_eq!(
12 | // Wrapping in mod Foo { } since there is no impl Parse for Vec<Item>
13 | parse_str::<Item>(&format!("mod Foo {{ {} }}", &output)).unwrap(),
14 | parse_str::<Item>(&format!("mod Foo {{ {} }}", expected)).unwrap(),
15 | "\n\nUnexpected output code:\n input: {}\n output:\n{}\n expected: {}",
16 | input,
17 | output,
18 | expected
19 | );
20 | }
21 |
22 | #[test]
23 | fn empty_object() {
24 | code_output_test(
25 | "Root",
26 | // language=JSON
27 | r##"
28 | {}
29 | "##,
30 | // language=Rust
31 | r##"
32 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
33 | pub struct Root {}
34 | "##,
35 | );
36 | }
37 |
38 | #[test]
39 | fn list_of_numbers() {
40 | code_output_test(
41 | "Numbers",
42 | // language=JSON
43 | r##"
44 | [1, 2, 3]
45 | "##,
46 | // language=Rust
47 | r##"
48 | pub type Numbers = Vec<i64>;
49 | "##,
50 | );
51 | }
52 |
53 | #[test]
54 | fn point() {
55 | code_output_test(
56 | "Point",
57 | // language=JSON
58 | r##"
59 | {
60 | "x": 2,
61 | "y": 3
62 | }
63 | "##,
64 | // language=Rust
65 | r##"
66 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
67 | pub struct Point {
68 | pub x: i64,
69 | pub y: i64,
70 | }
71 | "##,
72 | );
73 | }
74 |
75 | #[test]
76 | fn pub_crate_point() {
77 | code_output_test(
78 | "pub(crate) Point",
79 | // language=JSON
80 | r##"
81 | {
82 | "x": 2,
83 | "y": 3
84 | }
85 | "##,
86 | // language=Rust
87 | r##"
88 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
89 | pub(crate) struct Point {
90 | pub x: i64,
91 | pub y: i64,
92 | }
93 | "##,
94 | );
95 | }
96 |
97 | #[test]
98 | fn optionals() {
99 | code_output_test(
100 | "Optionals",
101 | // language=JSON
102 | r##"
103 | [
104 | {
105 | "in_both": 5,
106 | "missing": 5,
107 | "has_null": 5
108 | },
109 | {
110 | "in_both": 5,
111 | "has_null": null,
112 | "added": 5
113 | }
114 | ]
115 | "##,
116 | // language=Rust
117 | r##"
118 | pub type Optionals = Vec<Optional>;
119 |
120 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
121 | pub struct Optional {
122 | pub in_both: i64,
123 | pub missing: Option<i64>,
124 | pub has_null: Option<i64>,
125 | pub added: Option<i64>,
126 | }
127 | "##,
128 | );
129 | }
130 |
131 | #[test]
132 | fn fallback() {
133 | code_output_test(
134 | "FallbackExamples",
135 | // language=JSON
136 | r##"
137 | [
138 | {
139 | "only_null": null,
140 | "conflicting": 5,
141 | "empty_array": []
142 | },
143 | {
144 | "only_null": null,
145 | "conflicting": "five",
146 | "empty_array": []
147 | }
148 | ]
149 | "##,
150 | // language=Rust
151 | r##"
152 | pub type FallbackExamples = Vec<FallbackExample>;
153 |
154 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
155 | pub struct FallbackExample {
156 | pub only_null: Value,
157 | pub conflicting: Value,
158 | pub empty_array: Vec<Value>,
159 | }
160 | "##,
161 | );
162 | }
163 |
164 | #[test]
165 | fn nesting() {
166 | code_output_test(
167 | "NestedTypes",
168 | // language=JSON
169 | r##"
170 | [
171 | {
172 | "nested": {
173 | "a": 5,
174 | "doubly_nested": { "c": 10 }
175 | },
176 | "in_array": [{ "b": 5 }]
177 | }
178 | ]
179 | "##,
180 | // language=Rust
181 | r##"
182 | pub type NestedTypes = Vec<NestedType>;
183 |
184 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
185 | pub struct NestedType {
186 | pub nested: Nested,
187 | pub in_array: Vec<InArray>,
188 | }
189 |
190 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
191 | pub struct Nested {
192 | pub a: i64,
193 | pub doubly_nested: DoublyNested,
194 | }
195 |
196 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
197 | pub struct DoublyNested {
198 | pub c: i64,
199 | }
200 |
201 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
202 | pub struct InArray {
203 | pub b: i64,
204 | }
205 | "##,
206 | );
207 | }
208 |
209 | #[test]
210 | fn tuple() {
211 | code_output_test(
212 | "Pagination",
213 | // language=JSON
214 | r##"
215 | [
216 | {
217 | "pages": 1,
218 | "items": 3
219 | },
220 | [
221 | {
222 | "name": "John"
223 | },
224 | {
225 | "name": "James"
226 | },
227 | {
228 | "name": "Jake"
229 | }
230 | ]
231 | ]
232 | "##,
233 | // language=Rust
234 | r##"
235 | pub type Pagination = (Pagination2, Vec<Pagination3>);
236 |
237 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
238 | pub struct Pagination2 {
239 | pub pages: i64,
240 | pub items: i64,
241 | }
242 |
243 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
244 | pub struct Pagination3 {
245 | pub name : String,
246 | }
247 | "##,
248 | );
249 | }
250 |
251 | #[test]
252 | fn rename() {
253 | code_output_test(
254 | "Renamed",
255 | // language=JSON
256 | r##"
257 | {
258 | "type": 5
259 | }
260 | "##,
261 | // language=Rust
262 | r##"
263 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
264 | pub struct Renamed {
265 | #[serde(rename = "type")]
266 | pub type_field: i64,
267 | }
268 | "##,
269 | );
270 | }
271 |
--------------------------------------------------------------------------------
/json_typegen_shared/src/shape.rs:
--------------------------------------------------------------------------------
1 | use linked_hash_map::LinkedHashMap;
2 |
3 | /// The type representing the inferred structure
4 | ///
5 | /// A word of caution: Everything in this crate is "internal API", but for this type in particular,
6 | /// since it is very central to how json_typegen works,
7 | /// be prepared that major breaking changes may need to be made to this in the future.
8 | #[non_exhaustive]
9 | #[derive(Debug, PartialEq, Clone)]
10 | pub enum Shape {
11 | /// `Bottom` represents the absence of any inference information
12 | Bottom,
13 |
14 | /// `Any` represents conflicting inference information that can not be
15 | /// represented by any single shape
16 | Any,
17 |
18 | /// `Optional(T)` represents that a value is not always present
19 | Optional(Box<Shape>),
20 | /// `Nullable(T)` represents that a value is nullable
21 | Nullable(Box<Shape>),
22 | /// Equivalent to `Optional(Bottom)`, `Null` represents optionality with no further information
23 | Null,
24 |
25 | Bool,
26 | StringT,
27 | Integer,
28 | Floating,
29 | VecT {
30 | elem_type: Box<Shape>,
31 | },
32 | Struct {
33 | fields: LinkedHashMap<String, Shape>,
34 | },
35 | Tuple(Vec<Shape>, u64),
36 | MapT {
37 | val_type: Box<Shape>,
38 | },
39 | Opaque(String),
40 | }
41 |
42 | pub fn fold_shapes(shapes: Vec<Shape>) -> Shape {
43 | shapes.into_iter().fold(Shape::Bottom, common_shape)
44 | }
45 |
46 | pub fn common_shape(a: Shape, b: Shape) -> Shape {
47 | if a == b {
48 | return a;
49 | }
50 | use self::Shape::*;
51 | match (a, b) {
52 | (a, Bottom) | (Bottom, a) => a,
53 | (Integer, Floating) | (Floating, Integer) => Floating,
54 | (a, Null) | (Null, a) => a.into_nullable(),
55 | (a, Optional(b)) | (Optional(b), a) => common_shape(a, *b).into_optional(),
56 | (Tuple(shapes1, n1), Tuple(shapes2, n2)) => {
57 | if shapes1.len() == shapes2.len() {
58 | let shapes: Vec<_> = shapes1
59 | .into_iter()
60 | .zip(shapes2.into_iter())
61 | .map(|(a, b)| common_shape(a, b))
62 | .collect();
63 | Tuple(shapes, n1 + n2)
64 | } else {
65 | VecT {
66 | elem_type: Box::new(common_shape(fold_shapes(shapes1), fold_shapes(shapes2))),
67 | }
68 | }
69 | }
70 | (Tuple(shapes, _), VecT { elem_type: e1 }) | (VecT { elem_type: e1 }, Tuple(shapes, _)) => {
71 | VecT {
72 | elem_type: Box::new(common_shape(*e1, fold_shapes(shapes))),
73 | }
74 | }
75 | (VecT { elem_type: e1 }, VecT { elem_type: e2 }) => VecT {
76 | elem_type: Box::new(common_shape(*e1, *e2)),
77 | },
78 | (MapT { val_type: v1 }, MapT { val_type: v2 }) => MapT {
79 | val_type: Box::new(common_shape(*v1, *v2)),
80 | },
81 | (Struct { fields: f1 }, Struct { fields: f2 }) => Struct {
82 | fields: common_field_shapes(f1, f2),
83 | },
84 | (Opaque(t), _) | (_, Opaque(t)) => Opaque(t),
85 | (a, Nullable(b)) | (Nullable(b), a) => common_shape(a, *b).into_nullable(),
86 | _ => Any,
87 | }
88 | }
89 |
90 | fn common_field_shapes(
91 | mut f1: LinkedHashMap<String, Shape>,
92 | mut f2: LinkedHashMap<String, Shape>,
93 | ) -> LinkedHashMap<String, Shape> {
94 | if f1 == f2 {
95 | return f1;
96 | }
97 | for (key, val) in f1.iter_mut() {
98 | let temp = std::mem::replace(val, Shape::Bottom);
99 | match f2.remove(key) {
100 | Some(val2) => {
101 | *val = common_shape(temp, val2);
102 | }
103 | None => {
104 | *val = temp.into_optional();
105 | }
106 | };
107 | }
108 | for (key, val) in f2.into_iter() {
109 | f1.insert(key, val.into_optional());
110 | }
111 | f1
112 | }
113 |
114 | impl Shape {
115 | pub(crate) fn into_optional(self) -> Self {
116 | use self::Shape::*;
117 | match self {
118 | Null => Nullable(Box::new(self)),
119 | Any | Bottom | Optional(_) => self,
120 | non_nullable => Optional(Box::new(non_nullable)),
121 | }
122 | }
123 | pub(crate) fn into_nullable(self) -> Self {
124 | use self::Shape::*;
125 | match self {
126 | Null => Nullable(Box::new(self)),
127 | Any | Bottom | Nullable(_) => self,
128 | Optional(non_nullable) => Optional(Box::new(Nullable(non_nullable))),
129 | non_nullable => Nullable(Box::new(non_nullable)),
130 | }
131 | }
132 |
133 | /// Note: This is asymmetrical because we don't unify based on this,
134 | /// but check if `self` can be used *as is* as a replacement for `other`
135 | pub(crate) fn is_acceptable_substitution_for(&self, other: &Shape) -> bool {
136 | use self::Shape::*;
137 | if self == other {
138 | return true;
139 | }
140 | match (self, other) {
141 | (_, Bottom) => true,
142 | (Optional(_), Null) => true,
143 | (Optional(a), Optional(b)) => a.is_acceptable_substitution_for(b),
144 | (VecT { elem_type: e1 }, VecT { elem_type: e2 }) => {
145 | e1.is_acceptable_substitution_for(e2)
146 | }
147 | (MapT { val_type: v1 }, MapT { val_type: v2 }) => v1.is_acceptable_substitution_for(v2),
148 | (Tuple(a, _), Tuple(b, _)) => {
149 | a.len() == b.len()
150 | && a.iter()
151 | .zip(b.iter())
152 | .all(|(e1, e2)| e1.is_acceptable_substitution_for(e2))
153 | }
154 | (Struct { fields: f1 }, Struct { fields: f2 }) => {
155 | // Require all fields to be the same (but ignore order)
156 | // Could maybe be more lenient, e.g. for missing optional fields
157 | f1.len() == f2.len()
158 | && f1.iter().all(|(key, shape1)| {
159 | if let Some(shape2) = f2.get(key) {
160 | shape1.is_acceptable_substitution_for(shape2)
161 | } else {
162 | false
163 | }
164 | })
165 | }
166 | _ => false,
167 | }
168 | }
169 | }
170 |
171 | #[test]
172 | fn test_unify() {
173 | use self::Shape::*;
174 | assert_eq!(common_shape(Bool, Bool), Bool);
175 | assert_eq!(common_shape(Bool, Integer), Any);
176 | assert_eq!(common_shape(Integer, Floating), Floating);
177 | assert_eq!(common_shape(Null, Any), Any);
178 | assert_eq!(common_shape(Null, Bool), Nullable(Box::new(Bool)));
179 | assert_eq!(
180 | common_shape(Null, Optional(Box::new(Integer))),
181 | Optional(Box::new(Nullable(Box::new(Integer))))
182 | );
183 | assert_eq!(common_shape(Any, Optional(Box::new(Integer))), Any);
184 | assert_eq!(common_shape(Any, Optional(Box::new(Integer))), Any);
185 | assert_eq!(
186 | common_shape(Optional(Box::new(Integer)), Optional(Box::new(Floating))),
187 | Optional(Box::new(Floating))
188 | );
189 | assert_eq!(
190 | common_shape(Optional(Box::new(StringT)), Optional(Box::new(Integer))),
191 | Any
192 | );
193 | }
194 |
195 | #[test]
196 | fn test_common_field_shapes() {
197 | use self::Shape::*;
198 | use crate::util::string_hashmap;
199 | {
200 | let f1 = string_hashmap! {
201 | "a" => Integer,
202 | "b" => Bool,
203 | "c" => Integer,
204 | "d" => StringT,
205 | };
206 | let f2 = string_hashmap! {
207 | "a" => Integer,
208 | "c" => Floating,
209 | "d" => Null,
210 | "e" => Any,
211 | };
212 | assert_eq!(
213 | common_field_shapes(f1, f2),
214 | string_hashmap! {
215 | "a" => Integer,
216 | "b" => Optional(Box::new(Bool)),
217 | "c" => Floating,
218 | "d" => Nullable(Box::new(StringT)),
219 | "e" => Any,
220 | }
221 | );
222 | }
223 | }
224 |
--------------------------------------------------------------------------------
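Struct unification is the heart of the inference: shared fields are unified recursively, and a field seen in only one sample becomes `Optional`. A hypothetical extra test tracing that behaviour:

```rust
#[test]
fn test_struct_unification() {
    use self::Shape::*;
    use crate::util::string_hashmap;

    let first = Struct {
        fields: string_hashmap! { "id" => Integer, "tag" => StringT },
    };
    let second = Struct {
        fields: string_hashmap! { "id" => Floating },
    };

    // "id" widens to Floating; "tag" only appears in one sample, so it becomes Optional.
    assert_eq!(
        common_shape(first, second),
        Struct {
            fields: string_hashmap! {
                "id" => Floating,
                "tag" => Optional(Box::new(StringT)),
            }
        }
    );
}
```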
/json_typegen_shared/src/generation/typescript.rs:
--------------------------------------------------------------------------------
1 | use linked_hash_map::LinkedHashMap;
2 | use std::collections::HashSet;
3 |
4 | use crate::options::Options;
5 | use crate::shape::{self, Shape};
6 | use crate::to_singular::to_singular;
7 | use crate::util::type_case;
8 |
9 | pub struct Ctxt {
10 | options: Options,
11 | type_names: HashSet<String>,
12 | created_interfaces: Vec<(Shape, Ident)>,
13 | }
14 |
15 | pub type Ident = String;
16 | pub type Code = String;
17 |
18 | pub fn typescript_types(name: &str, shape: &Shape, options: Options) -> Code {
19 | let mut ctxt = Ctxt {
20 | options,
21 | type_names: HashSet::new(),
22 | created_interfaces: Vec::new(),
23 | };
24 |
25 | if !matches!(shape, Shape::Struct { .. }) {
26 | // reserve the requested name
27 | ctxt.type_names.insert(name.to_string());
28 | }
29 |
30 | let (ident, code) = type_from_shape(&mut ctxt, name, shape);
31 | let mut code = code.unwrap_or_default();
32 |
33 | if ident != name {
34 | code = format!("export type {} = {};\n\n", name, ident) + &code;
35 | }
36 |
37 | code
38 | }
39 |
40 | fn type_from_shape(ctxt: &mut Ctxt, path: &str, shape: &Shape) -> (Ident, Option<Code>) {
41 | use crate::shape::Shape::*;
42 | match shape {
43 | Null | Any | Bottom => ("any".into(), None),
44 | Bool => ("boolean".into(), None),
45 | StringT => ("string".into(), None),
46 | Integer => ("number".into(), None),
47 | Floating => ("number".into(), None),
48 | Tuple(shapes, _n) => {
49 | let folded = shape::fold_shapes(shapes.clone());
50 | if folded == Any && shapes.iter().any(|s| s != &Any) {
51 | generate_tuple_type(ctxt, path, shapes)
52 | } else {
53 | generate_vec_type(ctxt, path, &folded)
54 | }
55 | }
56 | VecT { elem_type: e } => generate_vec_type(ctxt, path, e),
57 | Struct { fields } => generate_interface_type(ctxt, path, fields, shape),
58 | MapT { val_type: v } => generate_map_type(ctxt, path, v),
59 | Opaque(t) => (t.clone(), None),
60 | Optional(e) => {
61 | let (inner, defs) = type_from_shape(ctxt, path, e);
62 | if ctxt.options.use_default_for_missing_fields {
63 | (inner, defs)
64 | } else {
65 | (format!("{} | undefined", inner), defs)
66 | }
67 | }
68 | Nullable(e) => {
69 | let (inner, defs) = type_from_shape(ctxt, path, e);
70 | (format!("{} | null", inner), defs)
71 | }
72 | }
73 | }
74 |
75 | fn generate_vec_type(ctxt: &mut Ctxt, path: &str, shape: &Shape) -> (Ident, Option<Code>) {
76 | let singular = to_singular(path);
77 | let (inner, defs) = type_from_shape(ctxt, &singular, shape);
78 | (format!("{}[]", inner), defs)
79 | }
80 |
81 | fn generate_map_type(ctxt: &mut Ctxt, path: &str, shape: &Shape) -> (Ident, Option<Code>) {
82 | let singular = to_singular(path);
83 | let (inner, defs) = type_from_shape(ctxt, &singular, shape);
84 | (format!("{{ [key: string]: {} }}", inner), defs)
85 | }
86 |
87 | fn generate_tuple_type(ctxt: &mut Ctxt, path: &str, shapes: &[Shape]) -> (Ident, Option<Code>) {
88 | let mut types = Vec::new();
89 | let mut defs = Vec::new();
90 |
91 | for shape in shapes {
92 | let (typ, def) = type_from_shape(ctxt, path, shape);
93 | types.push(typ);
94 | if let Some(code) = def {
95 | defs.push(code)
96 | }
97 | }
98 |
99 | (format!("[{}]", types.join(", ")), Some(defs.join("\n\n")))
100 | }
101 |
102 | fn type_name(name: &str, used_names: &HashSet<String>) -> Ident {
103 | let name = name.trim();
104 | let mut output_name = if let Some(c) = name.chars().next() {
105 | if c.is_ascii_digit() {
106 | let temp = String::from("n") + name;
107 | type_case(&temp)
108 | } else {
109 | type_case(name)
110 | }
111 | } else {
112 | type_case(name)
113 | };
114 | if output_name.is_empty() {
115 | output_name.push_str("GeneratedType");
116 | }
117 | if !used_names.contains(&output_name) {
118 | return output_name;
119 | }
120 | for n in 2.. {
121 | let temp = format!("{}{}", output_name, n);
122 | if !used_names.contains(&temp) {
123 | return temp;
124 | }
125 | }
126 | unreachable!()
127 | }
128 |
129 | pub fn collapse_option(typ: &Shape) -> (bool, &Shape) {
130 | if let Shape::Optional(inner) = typ {
131 | return (true, &**inner);
132 | }
133 | (false, typ)
134 | }
135 |
136 | #[rustfmt::skip]
137 | const RESERVED_WORDS: &[&str] = &["break", "case", "catch", "class", "const",
138 | "continue", "debugger", "default", "delete", "do", "else", "enum", "export", "extends", "false",
139 | "finally", "for", "function", "if", "import", "in", "instanceof", "new", "null", "return",
140 | "super", "switch", "this", "throw", "true", "try", "typeof", "var", "void", "while", "with",
141 | "implements", "interface", "let", "package", "private", "protected", "public", "static",
142 | "yield"];
143 |
144 | pub fn is_ts_identifier(s: &str) -> bool {
145 | if RESERVED_WORDS.contains(&s) {
146 | return false;
147 | }
148 |
149 | if let Some((first, rest)) = s.as_bytes().split_first() {
150 | let first_valid = (b'a'..=b'z').contains(first)
151 | || (b'A'..=b'Z').contains(first)
152 | || *first == b'_'
153 | || *first == b'$';
154 | return first_valid
155 | && rest.iter().all(|b| {
156 | (b'a'..=b'z').contains(b)
157 | || (b'A'..=b'Z').contains(b)
158 | || *b == b'_'
159 | || *b == b'$'
160 | || (b'0'..=b'9').contains(b)
161 | });
162 | }
163 | false
164 | }
165 |
166 | fn generate_interface_type(
167 | ctxt: &mut Ctxt,
168 | path: &str,
169 |     field_shapes: &LinkedHashMap<String, Shape>,
170 | containing_shape: &Shape,
171 | ) -> (Ident, Option<Code>) {
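    |     // Reuse an interface already generated for a compatible shape, so structurally
    |     // identical nested objects do not produce duplicate declarations.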
172 | for (created_for_shape, ident) in ctxt.created_interfaces.iter() {
173 | if created_for_shape.is_acceptable_substitution_for(containing_shape) {
174 | return (ident.into(), None);
175 | }
176 | }
177 |
178 | let type_name = type_name(path, &ctxt.type_names);
179 | ctxt.type_names.insert(type_name.clone());
180 | ctxt.created_interfaces
181 | .push((containing_shape.clone(), type_name.clone()));
182 |
183 | let mut defs = Vec::new();
184 |
185 |     let fields: Vec<String> = field_shapes
186 | .iter()
187 | .map(|(name, typ)| {
188 | let (was_optional, collapsed) = collapse_option(typ);
189 |
190 | let (field_type, child_defs) = type_from_shape(ctxt, name, collapsed);
191 |
192 | if let Some(code) = child_defs {
193 | defs.push(code);
194 | }
195 |
196 | let escape_name = !is_ts_identifier(name);
197 |
198 | format!(
199 | " {}{}{}{}: {};",
200 | if escape_name { "\"" } else { "" },
201 | name,
202 | if escape_name { "\"" } else { "" },
203 | if was_optional { "?" } else { "" },
204 | field_type
205 | )
206 | })
207 | .collect();
208 |
209 | let mut code = format!("export interface {} {{\n", type_name);
210 |
211 | if !fields.is_empty() {
212 | code += &fields.join("\n");
213 | code += "\n";
214 | }
215 | code += "}";
216 |
217 | if !defs.is_empty() {
218 | code += "\n\n";
219 | code += &defs.join("\n\n");
220 | }
221 |
222 | (type_name, Some(code))
223 | }
224 |
225 | #[cfg(test)]
226 | mod tests {
227 | use super::*;
228 |
229 | #[test]
230 | fn test_is_ts_identifier() {
231 | // Valid:
232 | assert!(is_ts_identifier("foobar"));
233 | assert!(is_ts_identifier("FOOBAR"));
234 | assert!(is_ts_identifier("foo_bar"));
235 | assert!(is_ts_identifier("$"));
236 | assert!(is_ts_identifier("foobar1"));
237 | assert!(is_ts_identifier("zoneId"));
238 |
239 | // Invalid:
240 | assert!(!is_ts_identifier("1foobar"));
241 | assert!(!is_ts_identifier(""));
242 | assert!(!is_ts_identifier(" "));
243 | assert!(!is_ts_identifier(" foobar"));
244 | assert!(!is_ts_identifier("foobar "));
245 | assert!(!is_ts_identifier("foo bar"));
246 | assert!(!is_ts_identifier("foo.bar"));
247 | assert!(!is_ts_identifier("true"));
248 | }
249 | }
250 |
--------------------------------------------------------------------------------
/json_typegen_shared/src/lib.rs:
--------------------------------------------------------------------------------
1 | //! [json_typegen](https://typegen.vestera.as/) as just a library,
2 | //! for use in build scripts and other crates.
3 | //! If you want an actual interface, like a website, CLI or procedural macro, check the repo:
4 | //! [github.com/evestera/json_typegen](https://github.com/evestera/json_typegen)
5 | //!
6 | //! Note: This crate is to a certain extent considered internal API of the `json_typegen` tools.
7 | //! If you want to use this crate directly, be prepared for breaking changes to happen, and consider
8 | //! [opening an issue](https://github.com/evestera/json_typegen/issues/new)
9 | //! to let me know what you are using. (Breaking changes may still happen,
10 | //! but then I'll at least try to keep your use-case in mind if possible.
11 | //! This has happened enough by now that there are parts I already consider public API.)
12 |
13 | use thiserror::Error;
14 |
15 | mod generation;
16 | mod hints;
17 | mod inference;
18 | mod options;
19 | #[cfg(feature = "option-parsing")]
20 | pub mod parse;
21 | #[cfg(feature = "progress")]
22 | mod progress;
23 | mod shape;
24 | mod sql;
25 | mod to_singular;
26 | mod util;
27 |
28 | use crate::hints::Hints;
29 | use crate::inference::shape_from_json;
30 | pub use crate::options::{ImportStyle, Options, OutputMode, StringTransform};
31 | pub use crate::shape::Shape;
32 |
33 | /// The errors that json_typegen_shared may produce
34 | ///
35 | /// No stability guarantees are made for this type
36 | /// except that it is a type that implements `std::error::Error`
37 | #[non_exhaustive]
38 | #[derive(Error, Debug)]
39 | pub enum JTError {
40 | #[cfg(feature = "remote-samples")]
41 | #[error("An error occurred while fetching JSON")]
42 | SampleFetchingError(#[from] ureq::Error),
43 | #[cfg(feature = "local-samples")]
44 | #[error("An error occurred while reading JSON from file")]
45 | SampleReadingError(#[from] std::io::Error),
46 | #[error("An error occurred while parsing JSON")]
47 | JsonParsingError(#[from] inference::JsonInputErr),
48 | #[error("An error occurred while parsing SQL: {0}")]
49 | SqlParsingError(String),
50 | #[error("An error occurred while parsing a macro or macro input: {0}")]
51 | MacroParsingError(String),
52 | }
53 |
54 | /// Utilities exposed only to be available inside the `json_typegen` workspace. Internal API.
55 | pub mod internal_util {
56 | pub fn display_error_with_causes(error: &dyn std::error::Error) -> String {
57 | let mut message = format!("{}", error);
58 | let mut err = error;
59 | while let Some(source) = err.source() {
60 | message += &format!("\n Caused by: {}", source);
61 | err = source;
62 | }
63 | message
64 | }
65 | }
66 |
67 | enum SampleSource<'a> {
68 | #[cfg(feature = "remote-samples")]
69 | Url(&'a str),
70 | #[cfg(feature = "local-samples")]
71 | File(&'a str),
72 | Text(&'a str),
73 | }
74 |
75 | #[cfg(feature = "option-parsing")]
76 | /// Generate code from a `json_typegen` macro invocation
77 | pub fn codegen_from_macro(input: &str) -> Result<String, JTError> {
78 | let macro_input = parse::full_macro(input).map_err(JTError::MacroParsingError)?;
79 |
80 | codegen(
81 |         &macro_input.name,
82 |         &macro_input.sample_source,
83 |         macro_input.options,
84 | )
85 | }
86 |
87 | #[cfg(feature = "option-parsing")]
88 | /// Generate code from the arguments to a `json_typegen` macro invocation
89 | pub fn codegen_from_macro_input(input: &str) -> Result<String, JTError> {
90 | let macro_input = parse::macro_input(input).map_err(JTError::MacroParsingError)?;
91 |
92 | codegen(
93 |         &macro_input.name,
94 |         &macro_input.sample_source,
95 |         macro_input.options,
96 | )
97 | }
98 |
99 | /// The main code generation function for `json_typegen`
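    | ///
    | /// A minimal usage sketch (hypothetical sample input; assumes the default `Options`):
    | ///
    | /// ```no_run
    | /// use json_typegen_shared::{codegen, Options};
    | ///
    | /// let sample = r#"{ "x": 3, "y": 5 }"#;
    | /// let generated = codegen("Point", sample, Options::default()).unwrap();
    | /// println!("{}", generated);
    | /// ```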
100 | pub fn codegen(name: &str, input: &str, mut options: Options) -> Result<String, JTError> {
101 | let source = infer_source_type(input);
102 | let name = handle_pub_in_name(name, &mut options);
103 |
104 | let mut hints_vec = Vec::new();
105 | std::mem::swap(&mut options.hints, &mut hints_vec);
106 |
107 | let mut hints = Hints::new();
108 | for (pointer, hint) in hints_vec.iter() {
109 | hints.add(pointer, hint);
110 | }
111 |
112 | let shape = match options.input_mode {
113 | options::InputMode::Sql => {
114 | let shapes = sql::sql_to_shape(input).map_err(JTError::SqlParsingError)?;
115 | let (_name, shap) = shapes.get(0).unwrap();
116 | shap.clone()
117 | }
118 | options::InputMode::Json => infer_from_sample(&source, &options, &hints)?,
119 | };
120 |
121 | codegen_from_shape(name, &shape, options)
122 | }
123 |
124 | /// Just code generation, no inference
125 | pub fn codegen_from_shape(name: &str, shape: &Shape, options: Options) -> Result<String, JTError> {
126 | let mut generated_code = match options.output_mode {
127 | OutputMode::Rust => generation::rust::rust_types(name, shape, options),
128 | OutputMode::JsonSchema => generation::json_schema::json_schema(name, shape, options),
129 | OutputMode::ZodSchema => generation::zod_schema::zod_schema(name, shape, options),
130 | OutputMode::KotlinJackson | OutputMode::KotlinKotlinx => {
131 | generation::kotlin::kotlin_types(name, shape, options)
132 | }
133 | OutputMode::Shape => generation::shape::shape_string(name, shape, options),
134 | OutputMode::Typescript => generation::typescript::typescript_types(name, shape, options),
135 | OutputMode::TypescriptTypeAlias => {
136 | generation::typescript_type_alias::typescript_type_alias(name, shape, options)
137 | }
138 | OutputMode::PythonPydantic => generation::python::python_types(name, shape, options),
139 | };
140 |
141 | // Ensure generated code ends with exactly one newline
142 | generated_code.truncate(generated_code.trim_end().len());
143 | generated_code.push('\n');
144 |
145 | Ok(generated_code)
146 | }
147 |
148 | /// Parse "names" like `pub(crate) Foo` into a name and a visibility option
149 | fn handle_pub_in_name<'a>(name: &'a str, options: &mut Options) -> &'a str {
150 | if let Some(suffix) = name.strip_prefix("pub ") {
151 | options.type_visibility = "pub".to_string();
152 | return suffix;
153 | }
154 | if name.starts_with("pub(") {
155 | // MSRV: after 1.52 use split_once
156 |         let split = name.splitn(2, ") ").collect::<Vec<_>>();
157 | if split.len() == 2 {
158 | options.type_visibility = format!("{})", split[0]);
159 | return split[1];
160 | }
161 | }
162 | name
163 | }
164 |
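    | /// Heuristic for interpreting the sample input: text starting with `{` or `[` is
    | /// treated as inline JSON, `http(s)://` inputs as URLs to fetch (with the
    | /// `remote-samples` feature), and anything else as a path to a local file (with the
    | /// `local-samples` feature; otherwise it falls back to inline text).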
165 | fn infer_source_type(s: &str) -> SampleSource {
166 | let s = s.trim();
167 | if s.starts_with('{') || s.starts_with('[') {
168 | return SampleSource::Text(s);
169 | }
170 | #[cfg(feature = "remote-samples")]
171 | if s.starts_with("http://") || s.starts_with("https://") {
172 | return SampleSource::Url(s);
173 | }
174 | #[cfg(feature = "local-samples")]
175 | return SampleSource::File(s);
176 | #[cfg(not(feature = "local-samples"))]
177 | return SampleSource::Text(s);
178 | }
179 |
180 | fn infer_from_sample(
181 | source: &SampleSource,
182 | options: &Options,
183 | hints: &Hints,
184 | ) -> Result<Shape, JTError> {
185 | let parse_result = match *source {
186 | #[cfg(feature = "remote-samples")]
187 | SampleSource::Url(url) => {
188 | shape_from_json(ureq::get(url).call()?.into_reader(), options, hints)
189 | }
190 |
191 | #[cfg(all(feature = "local-samples", feature = "progress"))]
192 | SampleSource::File(path) => shape_from_json(
193 | crate::progress::FileWithProgress::open(path)?,
194 | options,
195 | hints,
196 | ),
197 | #[cfg(all(feature = "local-samples", not(feature = "progress")))]
198 | SampleSource::File(path) => shape_from_json(std::fs::File::open(path)?, options, hints),
199 |
200 | SampleSource::Text(text) => shape_from_json(text.as_bytes(), options, hints),
201 | };
202 | Ok(parse_result?)
203 | }
204 |
205 | #[cfg(test)]
206 | mod tests {
207 | use super::*;
208 | #[test]
209 | fn test_handle_pub_in_name() {
210 | let mut options = Options::default();
211 | let name = handle_pub_in_name("Foo", &mut options);
212 | assert_eq!(name, "Foo");
213 | assert_eq!(options.type_visibility, Options::default().type_visibility);
214 | let name = handle_pub_in_name("pub Foo", &mut options);
215 | assert_eq!(name, "Foo");
216 | assert_eq!(options.type_visibility, "pub".to_string());
217 | let name = handle_pub_in_name("pub(crate) Foo Bar", &mut options);
218 | assert_eq!(name, "Foo Bar");
219 | assert_eq!(options.type_visibility, "pub(crate)".to_string());
220 | let name = handle_pub_in_name("pub(some::path) Foo", &mut options);
221 | assert_eq!(name, "Foo");
222 | assert_eq!(options.type_visibility, "pub(some::path)".to_string());
223 | }
224 | }
225 |
--------------------------------------------------------------------------------
/json_typegen_shared/benches/fixtures/zalando_article.json:
--------------------------------------------------------------------------------
1 | {
2 | "id" : "AN621C0S5-K11",
3 | "modelId" : "AN621C0S5",
4 | "name" : "Summer dress - blue/white",
5 | "shopUrl" : "https://www.zalando.co.uk/anna-field-summer-dress-blue-white-an621c0s5-k11.html",
6 | "color" : "Blue",
7 | "available" : true,
8 | "season" : "WINTER",
9 | "seasonYear" : "2016",
10 | "activationDate" : "2015-04-30T16:52:52+02:00",
11 | "additionalInfos" : [ ],
12 | "tags" : [ ],
13 | "genders" : [ "FEMALE" ],
14 | "ageGroups" : [ "ADULT" ],
15 | "brand" : {
16 | "key" : "AN6",
17 | "name" : "Anna Field",
18 | "logoUrl" : "https://i6.ztat.net/brand/anna-field.jpg",
19 | "logoLargeUrl" : "https://i6.ztat.net/brandxl/anna-field.jpg",
20 | "brandFamily" : {
21 | "key" : "AN00",
22 | "name" : "Anna Field",
23 | "shopUrl" : "https://www.zalando.co.uk/anna-field-online-shop"
24 | },
25 | "shopUrl" : "https://www.zalando.co.uk/anna-field"
26 | },
27 | "categoryKeys" : [ "catalog", "women", "womens-sale", "womens-clothing-sale", "womens-dresses-sale", "all", "sale", "summer-dresses-sale" ],
28 | "attributes" : [ {
29 | "name" : "Outer fabric material",
30 | "values" : [ "100% cotton" ]
31 | }, {
32 | "name" : "Total length",
33 | "values" : [ "34.0 \" (Size 8)" ]
34 | }, {
35 | "name" : "Insert material",
36 | "values" : [ "97% viscose, 3% spandex" ]
37 | }, {
38 | "name" : "Fabric",
39 | "values" : [ "Jersey" ]
40 | }, {
41 | "name" : "Details",
42 | "values" : [ "belt included" ]
43 | }, {
44 | "name" : "Length",
45 | "values" : [ "short" ]
46 | }, {
47 | "name" : "Top part material",
48 | "values" : [ "97% viscose, 3% spandex" ]
49 | }, {
50 | "name" : "Fit",
51 | "values" : [ "tailored" ]
52 | }, {
53 | "name" : "Pattern",
54 | "values" : [ "striped" ]
55 | }, {
56 | "name" : "Neckline",
57 | "values" : [ "round neck" ]
58 | }, {
59 | "name" : "Washing instructions",
60 | "values" : [ "do not tumble dry", "machine wash at 30°C", "Machine wash on gentle cycle" ]
61 | }, {
62 | "name" : "Sleeve length",
63 | "values" : [ "Extra short" ]
64 | }, {
65 | "name" : "Our model's height",
66 | "values" : [ "Our model is 70.0 \" tall and is wearing size 8" ]
67 | } ],
68 | "units" : [ {
69 | "id" : "AN621C0S5-K110340000",
70 | "size" : "6",
71 | "price" : {
72 | "currency" : "GBP",
73 | "value" : 14.69,
74 | "formatted" : "£14.69"
75 | },
76 | "originalPrice" : {
77 | "currency" : "GBP",
78 | "value" : 20.99,
79 | "formatted" : "£20.99"
80 | },
81 | "available" : false,
82 | "stock" : 0
83 | }, {
84 | "id" : "AN621C0S5-K110360000",
85 | "size" : "8",
86 | "price" : {
87 | "currency" : "GBP",
88 | "value" : 14.69,
89 | "formatted" : "£14.69"
90 | },
91 | "originalPrice" : {
92 | "currency" : "GBP",
93 | "value" : 20.99,
94 | "formatted" : "£20.99"
95 | },
96 | "available" : false,
97 | "stock" : 0
98 | }, {
99 | "id" : "AN621C0S5-K110440000",
100 | "size" : "16",
101 | "price" : {
102 | "currency" : "GBP",
103 | "value" : 14.69,
104 | "formatted" : "£14.69"
105 | },
106 | "originalPrice" : {
107 | "currency" : "GBP",
108 | "value" : 20.99,
109 | "formatted" : "£20.99"
110 | },
111 | "available" : true,
112 | "stock" : 3
113 | }, {
114 | "id" : "AN621C0S5-K110420000",
115 | "size" : "14",
116 | "price" : {
117 | "currency" : "GBP",
118 | "value" : 14.69,
119 | "formatted" : "£14.69"
120 | },
121 | "originalPrice" : {
122 | "currency" : "GBP",
123 | "value" : 20.99,
124 | "formatted" : "£20.99"
125 | },
126 | "available" : false,
127 | "stock" : 0
128 | }, {
129 | "id" : "AN621C0S5-K110380000",
130 | "size" : "10",
131 | "price" : {
132 | "currency" : "GBP",
133 | "value" : 14.69,
134 | "formatted" : "£14.69"
135 | },
136 | "originalPrice" : {
137 | "currency" : "GBP",
138 | "value" : 20.99,
139 | "formatted" : "£20.99"
140 | },
141 | "available" : false,
142 | "stock" : 0
143 | }, {
144 | "id" : "AN621C0S5-K110400000",
145 | "size" : "12",
146 | "price" : {
147 | "currency" : "GBP",
148 | "value" : 14.69,
149 | "formatted" : "£14.69"
150 | },
151 | "originalPrice" : {
152 | "currency" : "GBP",
153 | "value" : 20.99,
154 | "formatted" : "£20.99"
155 | },
156 | "available" : false,
157 | "stock" : 0
158 | } ],
159 | "media" : {
160 | "images" : [ {
161 | "orderNumber" : 1,
162 | "type" : "NON_MODEL",
163 | "thumbnailHdUrl" : "https://i2.ztat.net/thumb_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@16.jpg",
164 | "smallUrl" : "https://i2.ztat.net/catalog/AN/62/1C/0S/5K/11/AN621C0S5-K11@16.jpg",
165 | "smallHdUrl" : "https://i2.ztat.net/catalog_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@16.jpg",
166 | "mediumUrl" : "https://i2.ztat.net/detail/AN/62/1C/0S/5K/11/AN621C0S5-K11@16.jpg",
167 | "mediumHdUrl" : "https://i2.ztat.net/detail_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@16.jpg",
168 | "largeUrl" : "https://i2.ztat.net/large/AN/62/1C/0S/5K/11/AN621C0S5-K11@16.jpg",
169 | "largeHdUrl" : "https://i2.ztat.net/large_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@16.jpg"
170 | }, {
171 | "orderNumber" : 2,
172 | "type" : "STYLE",
173 | "thumbnailHdUrl" : "https://i3.ztat.net/thumb_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@15.jpg",
174 | "smallUrl" : "https://i3.ztat.net/catalog/AN/62/1C/0S/5K/11/AN621C0S5-K11@15.jpg",
175 | "smallHdUrl" : "https://i3.ztat.net/catalog_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@15.jpg",
176 | "mediumUrl" : "https://i3.ztat.net/detail/AN/62/1C/0S/5K/11/AN621C0S5-K11@15.jpg",
177 | "mediumHdUrl" : "https://i3.ztat.net/detail_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@15.jpg",
178 | "largeUrl" : "https://i3.ztat.net/large/AN/62/1C/0S/5K/11/AN621C0S5-K11@15.jpg",
179 | "largeHdUrl" : "https://i3.ztat.net/large_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@15.jpg"
180 | }, {
181 | "orderNumber" : 3,
182 | "type" : "PREMIUM",
183 | "thumbnailHdUrl" : "https://i6.ztat.net/thumb_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@14.jpg",
184 | "smallUrl" : "https://i6.ztat.net/catalog/AN/62/1C/0S/5K/11/AN621C0S5-K11@14.jpg",
185 | "smallHdUrl" : "https://i6.ztat.net/catalog_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@14.jpg",
186 | "mediumUrl" : "https://i6.ztat.net/detail/AN/62/1C/0S/5K/11/AN621C0S5-K11@14.jpg",
187 | "mediumHdUrl" : "https://i6.ztat.net/detail_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@14.jpg",
188 | "largeUrl" : "https://i6.ztat.net/large/AN/62/1C/0S/5K/11/AN621C0S5-K11@14.jpg",
189 | "largeHdUrl" : "https://i6.ztat.net/large_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@14.jpg"
190 | }, {
191 | "orderNumber" : 4,
192 | "type" : "PREMIUM",
193 | "thumbnailHdUrl" : "https://i5.ztat.net/thumb_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@13.jpg",
194 | "smallUrl" : "https://i5.ztat.net/catalog/AN/62/1C/0S/5K/11/AN621C0S5-K11@13.jpg",
195 | "smallHdUrl" : "https://i5.ztat.net/catalog_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@13.jpg",
196 | "mediumUrl" : "https://i5.ztat.net/detail/AN/62/1C/0S/5K/11/AN621C0S5-K11@13.jpg",
197 | "mediumHdUrl" : "https://i5.ztat.net/detail_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@13.jpg",
198 | "largeUrl" : "https://i5.ztat.net/large/AN/62/1C/0S/5K/11/AN621C0S5-K11@13.jpg",
199 | "largeHdUrl" : "https://i5.ztat.net/large_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@13.jpg"
200 | }, {
201 | "orderNumber" : 5,
202 | "type" : "PREMIUM",
203 | "thumbnailHdUrl" : "https://i4.ztat.net/thumb_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@12.jpg",
204 | "smallUrl" : "https://i4.ztat.net/catalog/AN/62/1C/0S/5K/11/AN621C0S5-K11@12.jpg",
205 | "smallHdUrl" : "https://i4.ztat.net/catalog_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@12.jpg",
206 | "mediumUrl" : "https://i4.ztat.net/detail/AN/62/1C/0S/5K/11/AN621C0S5-K11@12.jpg",
207 | "mediumHdUrl" : "https://i4.ztat.net/detail_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@12.jpg",
208 | "largeUrl" : "https://i4.ztat.net/large/AN/62/1C/0S/5K/11/AN621C0S5-K11@12.jpg",
209 | "largeHdUrl" : "https://i4.ztat.net/large_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@12.jpg"
210 | }, {
211 | "orderNumber" : 6,
212 | "type" : "PREMIUM",
213 | "thumbnailHdUrl" : "https://i1.ztat.net/thumb_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@11.jpg",
214 | "smallUrl" : "https://i1.ztat.net/catalog/AN/62/1C/0S/5K/11/AN621C0S5-K11@11.jpg",
215 | "smallHdUrl" : "https://i1.ztat.net/catalog_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@11.jpg",
216 | "mediumUrl" : "https://i1.ztat.net/detail/AN/62/1C/0S/5K/11/AN621C0S5-K11@11.jpg",
217 | "mediumHdUrl" : "https://i1.ztat.net/detail_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@11.jpg",
218 | "largeUrl" : "https://i1.ztat.net/large/AN/62/1C/0S/5K/11/AN621C0S5-K11@11.jpg",
219 | "largeHdUrl" : "https://i1.ztat.net/large_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@11.jpg"
220 | }, {
221 | "orderNumber" : 7,
222 | "type" : "PREMIUM",
223 | "thumbnailHdUrl" : "https://i6.ztat.net/thumb_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@10.jpg",
224 | "smallUrl" : "https://i6.ztat.net/catalog/AN/62/1C/0S/5K/11/AN621C0S5-K11@10.jpg",
225 | "smallHdUrl" : "https://i6.ztat.net/catalog_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@10.jpg",
226 | "mediumUrl" : "https://i6.ztat.net/detail/AN/62/1C/0S/5K/11/AN621C0S5-K11@10.jpg",
227 | "mediumHdUrl" : "https://i6.ztat.net/detail_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@10.jpg",
228 | "largeUrl" : "https://i6.ztat.net/large/AN/62/1C/0S/5K/11/AN621C0S5-K11@10.jpg",
229 | "largeHdUrl" : "https://i6.ztat.net/large_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@10.jpg"
230 | }, {
231 | "orderNumber" : 8,
232 | "type" : "PREMIUM",
233 | "thumbnailHdUrl" : "https://i5.ztat.net/thumb_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@9.jpg",
234 | "smallUrl" : "https://i5.ztat.net/catalog/AN/62/1C/0S/5K/11/AN621C0S5-K11@9.jpg",
235 | "smallHdUrl" : "https://i5.ztat.net/catalog_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@9.jpg",
236 | "mediumUrl" : "https://i5.ztat.net/detail/AN/62/1C/0S/5K/11/AN621C0S5-K11@9.jpg",
237 | "mediumHdUrl" : "https://i5.ztat.net/detail_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@9.jpg",
238 | "largeUrl" : "https://i5.ztat.net/large/AN/62/1C/0S/5K/11/AN621C0S5-K11@9.jpg",
239 | "largeHdUrl" : "https://i5.ztat.net/large_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@9.jpg"
240 | } ]
241 | }
242 | }
243 |
--------------------------------------------------------------------------------
/LICENSE-APACHE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/json_typegen_web/src/examples/zalandoArticle.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "AN621C0S5-K11",
3 | "modelId": "AN621C0S5",
4 | "name": "Summer dress - blue/white",
5 | "shopUrl": "https://www.zalando.co.uk/anna-field-summer-dress-blue-white-an621c0s5-k11.html",
6 | "color": "Blue",
7 | "available": true,
8 | "season": "WINTER",
9 | "seasonYear": "2016",
10 | "activationDate": "2015-04-30T16:52:52+02:00",
11 | "additionalInfos": [],
12 | "tags": [],
13 | "genders": ["FEMALE"],
14 | "ageGroups": ["ADULT"],
15 | "brand": {
16 | "key": "AN6",
17 | "name": "Anna Field",
18 | "logoUrl": "https://i6.ztat.net/brand/anna-field.jpg",
19 | "logoLargeUrl": "https://i6.ztat.net/brandxl/anna-field.jpg",
20 | "brandFamily": {
21 | "key": "AN00",
22 | "name": "Anna Field",
23 | "shopUrl": "https://www.zalando.co.uk/anna-field-online-shop"
24 | },
25 | "shopUrl": "https://www.zalando.co.uk/anna-field"
26 | },
27 | "categoryKeys": [
28 | "catalog",
29 | "women",
30 | "womens-sale",
31 | "womens-clothing-sale",
32 | "womens-dresses-sale",
33 | "all",
34 | "sale",
35 | "summer-dresses-sale"
36 | ],
37 | "attributes": [
38 | {
39 | "name": "Outer fabric material",
40 | "values": ["100% cotton"]
41 | },
42 | {
43 | "name": "Total length",
44 | "values": ["34.0 \" (Size 8)"]
45 | },
46 | {
47 | "name": "Insert material",
48 | "values": ["97% viscose, 3% spandex"]
49 | },
50 | {
51 | "name": "Fabric",
52 | "values": ["Jersey"]
53 | },
54 | {
55 | "name": "Details",
56 | "values": ["belt included"]
57 | },
58 | {
59 | "name": "Length",
60 | "values": ["short"]
61 | },
62 | {
63 | "name": "Top part material",
64 | "values": ["97% viscose, 3% spandex"]
65 | },
66 | {
67 | "name": "Fit",
68 | "values": ["tailored"]
69 | },
70 | {
71 | "name": "Pattern",
72 | "values": ["striped"]
73 | },
74 | {
75 | "name": "Neckline",
76 | "values": ["round neck"]
77 | },
78 | {
79 | "name": "Washing instructions",
80 | "values": [
81 | "do not tumble dry",
82 | "machine wash at 30°C",
83 | "Machine wash on gentle cycle"
84 | ]
85 | },
86 | {
87 | "name": "Sleeve length",
88 | "values": ["Extra short"]
89 | },
90 | {
91 | "name": "Our model's height",
92 | "values": ["Our model is 70.0 \" tall and is wearing size 8"]
93 | }
94 | ],
95 | "units": [
96 | {
97 | "id": "AN621C0S5-K110340000",
98 | "size": "6",
99 | "price": {
100 | "currency": "GBP",
101 | "value": 14.69,
102 | "formatted": "£14.69"
103 | },
104 | "originalPrice": {
105 | "currency": "GBP",
106 | "value": 20.99,
107 | "formatted": "£20.99"
108 | },
109 | "available": false,
110 | "stock": 0
111 | },
112 | {
113 | "id": "AN621C0S5-K110360000",
114 | "size": "8",
115 | "price": {
116 | "currency": "GBP",
117 | "value": 14.69,
118 | "formatted": "£14.69"
119 | },
120 | "originalPrice": {
121 | "currency": "GBP",
122 | "value": 20.99,
123 | "formatted": "£20.99"
124 | },
125 | "available": false,
126 | "stock": 0
127 | },
128 | {
129 | "id": "AN621C0S5-K110440000",
130 | "size": "16",
131 | "price": {
132 | "currency": "GBP",
133 | "value": 14.69,
134 | "formatted": "£14.69"
135 | },
136 | "originalPrice": {
137 | "currency": "GBP",
138 | "value": 20.99,
139 | "formatted": "£20.99"
140 | },
141 | "available": true,
142 | "stock": 3
143 | },
144 | {
145 | "id": "AN621C0S5-K110420000",
146 | "size": "14",
147 | "price": {
148 | "currency": "GBP",
149 | "value": 14.69,
150 | "formatted": "£14.69"
151 | },
152 | "originalPrice": {
153 | "currency": "GBP",
154 | "value": 20.99,
155 | "formatted": "£20.99"
156 | },
157 | "available": false,
158 | "stock": 0
159 | },
160 | {
161 | "id": "AN621C0S5-K110380000",
162 | "size": "10",
163 | "price": {
164 | "currency": "GBP",
165 | "value": 14.69,
166 | "formatted": "£14.69"
167 | },
168 | "originalPrice": {
169 | "currency": "GBP",
170 | "value": 20.99,
171 | "formatted": "£20.99"
172 | },
173 | "available": false,
174 | "stock": 0
175 | },
176 | {
177 | "id": "AN621C0S5-K110400000",
178 | "size": "12",
179 | "price": {
180 | "currency": "GBP",
181 | "value": 14.69,
182 | "formatted": "£14.69"
183 | },
184 | "originalPrice": {
185 | "currency": "GBP",
186 | "value": 20.99,
187 | "formatted": "£20.99"
188 | },
189 | "available": false,
190 | "stock": 0
191 | }
192 | ],
193 | "media": {
194 | "images": [
195 | {
196 | "orderNumber": 1,
197 | "type": "NON_MODEL",
198 | "thumbnailHdUrl": "https://i2.ztat.net/thumb_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@16.jpg",
199 | "smallUrl": "https://i2.ztat.net/catalog/AN/62/1C/0S/5K/11/AN621C0S5-K11@16.jpg",
200 | "smallHdUrl": "https://i2.ztat.net/catalog_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@16.jpg",
201 | "mediumUrl": "https://i2.ztat.net/detail/AN/62/1C/0S/5K/11/AN621C0S5-K11@16.jpg",
202 | "mediumHdUrl": "https://i2.ztat.net/detail_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@16.jpg",
203 | "largeUrl": "https://i2.ztat.net/large/AN/62/1C/0S/5K/11/AN621C0S5-K11@16.jpg",
204 | "largeHdUrl": "https://i2.ztat.net/large_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@16.jpg"
205 | },
206 | {
207 | "orderNumber": 2,
208 | "type": "STYLE",
209 | "thumbnailHdUrl": "https://i3.ztat.net/thumb_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@15.jpg",
210 | "smallUrl": "https://i3.ztat.net/catalog/AN/62/1C/0S/5K/11/AN621C0S5-K11@15.jpg",
211 | "smallHdUrl": "https://i3.ztat.net/catalog_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@15.jpg",
212 | "mediumUrl": "https://i3.ztat.net/detail/AN/62/1C/0S/5K/11/AN621C0S5-K11@15.jpg",
213 | "mediumHdUrl": "https://i3.ztat.net/detail_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@15.jpg",
214 | "largeUrl": "https://i3.ztat.net/large/AN/62/1C/0S/5K/11/AN621C0S5-K11@15.jpg",
215 | "largeHdUrl": "https://i3.ztat.net/large_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@15.jpg"
216 | },
217 | {
218 | "orderNumber": 3,
219 | "type": "PREMIUM",
220 | "thumbnailHdUrl": "https://i6.ztat.net/thumb_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@14.jpg",
221 | "smallUrl": "https://i6.ztat.net/catalog/AN/62/1C/0S/5K/11/AN621C0S5-K11@14.jpg",
222 | "smallHdUrl": "https://i6.ztat.net/catalog_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@14.jpg",
223 | "mediumUrl": "https://i6.ztat.net/detail/AN/62/1C/0S/5K/11/AN621C0S5-K11@14.jpg",
224 | "mediumHdUrl": "https://i6.ztat.net/detail_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@14.jpg",
225 | "largeUrl": "https://i6.ztat.net/large/AN/62/1C/0S/5K/11/AN621C0S5-K11@14.jpg",
226 | "largeHdUrl": "https://i6.ztat.net/large_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@14.jpg"
227 | },
228 | {
229 | "orderNumber": 4,
230 | "type": "PREMIUM",
231 | "thumbnailHdUrl": "https://i5.ztat.net/thumb_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@13.jpg",
232 | "smallUrl": "https://i5.ztat.net/catalog/AN/62/1C/0S/5K/11/AN621C0S5-K11@13.jpg",
233 | "smallHdUrl": "https://i5.ztat.net/catalog_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@13.jpg",
234 | "mediumUrl": "https://i5.ztat.net/detail/AN/62/1C/0S/5K/11/AN621C0S5-K11@13.jpg",
235 | "mediumHdUrl": "https://i5.ztat.net/detail_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@13.jpg",
236 | "largeUrl": "https://i5.ztat.net/large/AN/62/1C/0S/5K/11/AN621C0S5-K11@13.jpg",
237 | "largeHdUrl": "https://i5.ztat.net/large_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@13.jpg"
238 | },
239 | {
240 | "orderNumber": 5,
241 | "type": "PREMIUM",
242 | "thumbnailHdUrl": "https://i4.ztat.net/thumb_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@12.jpg",
243 | "smallUrl": "https://i4.ztat.net/catalog/AN/62/1C/0S/5K/11/AN621C0S5-K11@12.jpg",
244 | "smallHdUrl": "https://i4.ztat.net/catalog_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@12.jpg",
245 | "mediumUrl": "https://i4.ztat.net/detail/AN/62/1C/0S/5K/11/AN621C0S5-K11@12.jpg",
246 | "mediumHdUrl": "https://i4.ztat.net/detail_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@12.jpg",
247 | "largeUrl": "https://i4.ztat.net/large/AN/62/1C/0S/5K/11/AN621C0S5-K11@12.jpg",
248 | "largeHdUrl": "https://i4.ztat.net/large_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@12.jpg"
249 | },
250 | {
251 | "orderNumber": 6,
252 | "type": "PREMIUM",
253 | "thumbnailHdUrl": "https://i1.ztat.net/thumb_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@11.jpg",
254 | "smallUrl": "https://i1.ztat.net/catalog/AN/62/1C/0S/5K/11/AN621C0S5-K11@11.jpg",
255 | "smallHdUrl": "https://i1.ztat.net/catalog_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@11.jpg",
256 | "mediumUrl": "https://i1.ztat.net/detail/AN/62/1C/0S/5K/11/AN621C0S5-K11@11.jpg",
257 | "mediumHdUrl": "https://i1.ztat.net/detail_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@11.jpg",
258 | "largeUrl": "https://i1.ztat.net/large/AN/62/1C/0S/5K/11/AN621C0S5-K11@11.jpg",
259 | "largeHdUrl": "https://i1.ztat.net/large_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@11.jpg"
260 | },
261 | {
262 | "orderNumber": 7,
263 | "type": "PREMIUM",
264 | "thumbnailHdUrl": "https://i6.ztat.net/thumb_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@10.jpg",
265 | "smallUrl": "https://i6.ztat.net/catalog/AN/62/1C/0S/5K/11/AN621C0S5-K11@10.jpg",
266 | "smallHdUrl": "https://i6.ztat.net/catalog_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@10.jpg",
267 | "mediumUrl": "https://i6.ztat.net/detail/AN/62/1C/0S/5K/11/AN621C0S5-K11@10.jpg",
268 | "mediumHdUrl": "https://i6.ztat.net/detail_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@10.jpg",
269 | "largeUrl": "https://i6.ztat.net/large/AN/62/1C/0S/5K/11/AN621C0S5-K11@10.jpg",
270 | "largeHdUrl": "https://i6.ztat.net/large_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@10.jpg"
271 | },
272 | {
273 | "orderNumber": 8,
274 | "type": "PREMIUM",
275 | "thumbnailHdUrl": "https://i5.ztat.net/thumb_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@9.jpg",
276 | "smallUrl": "https://i5.ztat.net/catalog/AN/62/1C/0S/5K/11/AN621C0S5-K11@9.jpg",
277 | "smallHdUrl": "https://i5.ztat.net/catalog_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@9.jpg",
278 | "mediumUrl": "https://i5.ztat.net/detail/AN/62/1C/0S/5K/11/AN621C0S5-K11@9.jpg",
279 | "mediumHdUrl": "https://i5.ztat.net/detail_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@9.jpg",
280 | "largeUrl": "https://i5.ztat.net/large/AN/62/1C/0S/5K/11/AN621C0S5-K11@9.jpg",
281 | "largeHdUrl": "https://i5.ztat.net/large_hd/AN/62/1C/0S/5K/11/AN621C0S5-K11@9.jpg"
282 | }
283 | ]
284 | }
285 | }
286 |
--------------------------------------------------------------------------------
/json_typegen_shared/src/inference/jsonlex.rs:
--------------------------------------------------------------------------------
1 | use crate::inference::jsoninputerr::JsonInputErr;
2 | use std::io::{BufReader, Bytes, Read};
3 | use std::iter::Peekable;
4 |
5 | #[derive(Debug, PartialEq)]
6 | pub enum JsonToken {
7 | True,
8 | False,
9 | Null,
10 | Number(String),
11 | String(String),
12 | ObjectStart,
13 | ObjectEnd,
14 | ArrayStart,
15 | ArrayEnd,
16 | Comma,
17 | Colon,
18 | }
19 |
20 | pub struct JsonLexer<R: Read> {
21 |     bytes: Peekable<Bytes<BufReader<R>>>, // TODO: Keep position info
22 |     failed: bool,
23 |     buffer: Vec<u8>,
24 | }
25 |
26 | impl<R: Read> JsonLexer<R> {
27 | pub fn new(source: R) -> Self {
28 | JsonLexer {
29 | bytes: BufReader::with_capacity(128 * 1024, source)
30 | .bytes()
31 | .peekable(),
32 | failed: false,
33 | buffer: Vec::new(),
34 | }
35 | }
36 |
37 |     fn get_next_token(&mut self) -> Option<Result<JsonToken, JsonInputErr>> {
38 | loop {
39 | let byte = match self.bytes.peek() {
40 | Some(Ok(byte)) => *byte,
41 | Some(Err(_)) => return Some(Err(JsonInputErr::IoErr)),
42 | None => return None,
43 | };
44 |
45 | return Some(match byte {
46 | b' ' | b'\t' | b'\n' | b'\r' => {
47 | self.bytes.next();
48 | continue;
49 | }
50 | b'{' => self.skip_and_produce(JsonToken::ObjectStart),
51 | b'}' => self.skip_and_produce(JsonToken::ObjectEnd),
52 | b'[' => self.skip_and_produce(JsonToken::ArrayStart),
53 | b']' => self.skip_and_produce(JsonToken::ArrayEnd),
54 | b',' => self.skip_and_produce(JsonToken::Comma),
55 | b':' => self.skip_and_produce(JsonToken::Colon),
56 | b't' => self.match_token("true", JsonToken::True),
57 | b'f' => self.match_token("false", JsonToken::False),
58 | b'n' => self.match_token("null", JsonToken::Null),
59 | b'"' => self.match_string(),
60 | b'0'..=b'9' | b'-' => self.match_number(),
61 | _ => Err(JsonInputErr::InvalidJson),
62 | });
63 | }
64 | }
65 |
66 |     fn skip_and_produce(&mut self, token: JsonToken) -> Result<JsonToken, JsonInputErr> {
67 | self.bytes.next();
68 | Ok(token)
69 | }
70 |
71 |     fn expect_byte(&mut self) -> Result<u8, JsonInputErr> {
72 | match self.bytes.next() {
73 | Some(Ok(byte)) => Ok(byte),
74 | Some(Err(_)) => Err(JsonInputErr::IoErr),
75 | None => Err(JsonInputErr::UnexpectedEndOfInput),
76 | }
77 | }
78 |
79 | fn skip_byte(&mut self, target_byte: u8) -> Result<(), JsonInputErr> {
80 | let byte = self.expect_byte()?;
81 | if byte == target_byte {
82 | Ok(())
83 | } else {
84 | Err(JsonInputErr::InvalidJson)
85 | }
86 | }
87 |
88 | fn match_token(
89 | &mut self,
90 | target_str: &'static str,
91 | token: JsonToken,
92 |     ) -> Result<JsonToken, JsonInputErr> {
93 | for target_byte in target_str.bytes() {
94 | self.skip_byte(target_byte)?;
95 | }
96 | Ok(token)
97 | }
98 |
99 |     fn match_string(&mut self) -> Result<JsonToken, JsonInputErr> {
100 | self.skip_byte(b'"')?;
101 | self.buffer.clear();
102 | loop {
103 | let byte = self.expect_byte()?;
104 |
105 | if byte == b'"' {
106 | return Ok(JsonToken::String(
107 | String::from_utf8(self.buffer.clone())
108 | .map_err(|_| JsonInputErr::InvalidUtf8)?,
109 | ));
110 | } else if byte == b'\\' {
111 | let escaped = self.expect_byte()?;
112 |
113 | match escaped {
114 | b'"' => self.buffer.push(b'"'),
115 | b'\\' => self.buffer.push(b'\\'),
116 | b'/' => self.buffer.push(b'/'),
117 | b'b' => self.buffer.push(8), // backspace
118 | b'f' => self.buffer.push(12), // form feed
119 | b'n' => self.buffer.push(b'\n'),
120 | b'r' => self.buffer.push(b'\r'),
121 | b't' => self.buffer.push(b'\t'),
122 | b'u' => {
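    |                             // Decode a \uXXXX escape. If the first code unit is a UTF-16
    |                             // surrogate (0xD800..=0xDFFF), read the following \uXXXX escape
    |                             // and combine the pair into a single code point before encoding
    |                             // it as UTF-8.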
123 | let surrogate_offset: u32 = (0xD800 << 10) + 0xDC00 - 0x10000;
124 |
125 | let mut codepoint = self.parse_codepoint()? as u32;
126 | if (0xD800..=0xDFFF).contains(&codepoint) {
127 | // first codepoint was the start of a surrogate pair
128 | self.skip_byte(b'\\')?;
129 | self.skip_byte(b'u')?;
130 | let codepoint2 = self.parse_codepoint()? as u32;
131 | codepoint = ((codepoint << 10) + codepoint2) - surrogate_offset;
132 | };
133 | let mut buf = [0u8; 4];
134 | let encoded_bytes = std::char::from_u32(codepoint)
135 | .unwrap_or(std::char::REPLACEMENT_CHARACTER)
136 | .encode_utf8(&mut buf)
137 | .bytes();
138 | for encoded_byte in encoded_bytes {
139 | self.buffer.push(encoded_byte);
140 | }
141 | }
142 | _ => return Err(JsonInputErr::InvalidEscape(escaped)),
143 | };
144 | } else {
145 | self.buffer.push(byte)
146 | }
147 | }
148 | }
149 |
150 | // "ab03..." -> 0xab03
151 |     fn parse_codepoint(&mut self) -> Result<u16, JsonInputErr> {
152 | let mut codepoint: u16 = 0;
153 | for _ in 0..4 {
154 | codepoint <<= 4;
155 | let byte2 = self.expect_byte()?;
156 | codepoint += match byte2 {
157 | b'0'..=b'9' => byte2 - b'0',
158 | b'a'..=b'f' => byte2 - b'a' + 10,
159 | b'A'..=b'F' => byte2 - b'A' + 10,
160 | _ => return Err(JsonInputErr::InvalidEscape(byte2)),
161 | } as u16;
162 | }
163 | Ok(codepoint)
164 | }
165 |
166 |     fn match_number(&mut self) -> Result<JsonToken, JsonInputErr> {
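    |         // Greedily consume number-like bytes (digits, sign, '.', 'e'/'E'); the collected
    |         // text is not validated as a well-formed JSON number here (see TODO below).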
167 | let mut buffer = Vec::new();
168 | loop {
169 | let byte = match self.bytes.peek() {
170 | Some(Ok(byte)) => *byte,
171 | Some(Err(_)) => return Err(JsonInputErr::IoErr),
172 | None => break,
173 | };
174 |
175 | match byte {
176 | b'0'..=b'9' | b'-' | b'+' | b'.' | b'e' | b'E' => {
177 | buffer.push(byte);
178 | self.bytes.next();
179 | }
180 | _ => break,
181 | }
182 | }
183 | // TODO: Actually parse numbers
184 | Ok(JsonToken::Number(
185 | String::from_utf8(buffer).map_err(|_err| JsonInputErr::InvalidUtf8)?,
186 | ))
187 | }
188 | }
189 |
190 | impl<R: Read> Iterator for JsonLexer<R> {
191 |     type Item = Result<JsonToken, JsonInputErr>;
192 |
193 |     fn next(&mut self) -> Option<Self::Item> {
194 | if self.failed {
195 | return None;
196 | }
197 |
198 | let res = self.get_next_token();
199 | if let Some(Err(_)) = res {
200 | self.failed = true;
201 | }
202 | res
203 | }
204 | }
205 |
206 | #[cfg(test)]
207 | mod tests {
208 | use super::*;
209 | use crate::inference::jsoninputerr::JsonInputErr;
210 | use std::fmt::Debug;
211 |
212 | #[test]
213 | fn empty_input() {
214 | assert_eq!(tokens_from_str(""), Ok(vec![]));
215 | assert_eq!(tokens_from_str(" \t\r\n"), Ok(vec![]));
216 | }
217 |
218 | #[test]
219 | fn bare_number() {
220 | assert_eq!(
221 | tokens_from_str("123"),
222 | Ok(vec![JsonToken::Number("123".to_string())])
223 | );
224 | }
225 |
226 | #[test]
227 | fn object() {
228 | assert_eq!(
229 | tokens_from_str("{}"),
230 | Ok(vec![JsonToken::ObjectStart, JsonToken::ObjectEnd])
231 | );
232 | assert_eq!(
233 | tokens_from_str(" { } "),
234 | Ok(vec![JsonToken::ObjectStart, JsonToken::ObjectEnd])
235 | );
236 | }
237 |
238 | #[test]
239 | fn string() {
240 | assert_eq!(
241 | tokens_from_str(r#" "hello world" "#),
242 | Ok(vec![JsonToken::String("hello world".to_string())])
243 | );
244 | }
245 |
246 | #[test]
247 | fn escapes() {
248 | assert_eq!(
249 | tokens_from_str(r#" "foo\nbar" "#),
250 | Ok(vec![JsonToken::String("foo\nbar".to_string())])
251 | );
252 |
253 | assert_eq!(
254 | tokens_from_str(r#" "John says \"Hello\"" "#),
255 | Ok(vec![JsonToken::String(r#"John says "Hello""#.to_string())])
256 | );
257 | }
258 |
259 | #[test]
260 | fn unicode_escapes() {
261 | assert_eq!(
262 | tokens_from_str(r#" "\u00e6" "#),
263 | Ok(vec![JsonToken::String("æ".to_string())])
264 | );
265 |
266 | assert_eq!(
267 | tokens_from_str(r#" "\uD83D\uDE00" "#),
268 | Ok(vec![JsonToken::String("😀".to_string())])
269 | );
270 |
271 | assert_eq!(
272 | tokens_from_str(r#" "\uD83D" "#),
273 | Err(JsonInputErr::InvalidJson)
274 | );
275 | }
276 |
277 | #[test]
278 | fn number() {
279 | assert_eq!(
280 | tokens_from_str(r#" 14.5 "#),
281 | Ok(vec![JsonToken::Number("14.5".to_string())])
282 | );
283 | }
284 |
285 | #[test]
286 | fn complex() {
287 | assert_eq!(
288 | tokens_from_str(
289 | r#"
290 | {
291 | "foo": [1, true]
292 | }
293 | "#
294 | ),
295 | Ok(vec![
296 | JsonToken::ObjectStart,
297 | JsonToken::String("foo".to_string()),
298 | JsonToken::Colon,
299 | JsonToken::ArrayStart,
300 | JsonToken::Number("1".to_string()),
301 | JsonToken::Comma,
302 | JsonToken::True,
303 | JsonToken::ArrayEnd,
304 | JsonToken::ObjectEnd
305 | ])
306 | );
307 | }
308 |
309 | #[test]
310 | fn invalid() {
311 | assert_eq!(tokens_from_str("foo"), Err(JsonInputErr::InvalidJson));
312 | assert_eq!(tokens_from_str(" [ foo ] "), Err(JsonInputErr::InvalidJson));
313 | }
314 |
315 |     fn tokens_from_str(s: &'static str) -> Result<Vec<JsonToken>, JsonInputErr> {
316 |         let collected: Vec<Result<JsonToken, JsonInputErr>> =
317 |             JsonLexer::new(s.as_bytes()).collect();
318 | coalesce_err(collected)
319 | }
320 |
321 |     fn coalesce_err<T: Debug, E: Debug>(vec: Vec<Result<T, E>>) -> Result<Vec<T>, E> {
322 | let error_count = vec.iter().filter(|res| res.is_err()).count();
323 | match error_count {
324 | 0 => Ok(vec.into_iter().map(|res| res.unwrap()).collect()),
325 | 1 => Err(vec
326 | .into_iter()
327 | .find(|res| res.is_err())
328 | .unwrap()
329 | .unwrap_err()),
330 | _ => panic!("More than one error: {:?}", vec),
331 | }
332 | }
333 | }
334 |
--------------------------------------------------------------------------------
/json_typegen_shared/src/generation/python.rs:
--------------------------------------------------------------------------------
1 | use linked_hash_map::LinkedHashMap;
2 | use std::collections::HashSet;
3 |
4 | use crate::options::{ImportStyle, Options, StringTransform};
5 | use crate::shape::{self, Shape};
6 | use crate::to_singular::to_singular;
7 | use crate::util::{kebab_case, lower_camel_case, snake_case, type_case};
8 |
9 | #[derive(PartialEq, PartialOrd, Ord, Eq, Hash, Clone, Copy)]
10 | enum Import {
11 | Any,
12 | Optional,
13 | BaseModel,
14 | Field,
15 | }
16 |
17 | impl Import {
18 | fn pair(&self) -> (&'static str, &'static str) {
19 | match self {
20 | Import::Any => ("typing", "Any"),
21 | Import::Optional => ("typing", "Optional"),
22 | Import::BaseModel => ("pydantic", "BaseModel"),
23 | Import::Field => ("pydantic", "Field"),
24 | }
25 | }
26 | fn module(&self) -> &'static str {
27 | self.pair().0
28 | }
29 | fn identifier(&self) -> &'static str {
30 | self.pair().1
31 | }
32 | fn qualified(&self) -> String {
33 | let (module, identifier) = self.pair();
34 | format!("{}.{}", module, identifier)
35 | }
36 | }
37 |
38 | struct Ctxt {
39 | options: Options,
40 |     type_names: HashSet<String>,
41 |     imports: HashSet<Import>,
42 | created_classes: Vec<(Shape, Ident)>,
43 | }
44 |
45 | pub type Ident = String;
46 | pub type Code = String;
47 |
48 | pub fn python_types(name: &str, shape: &Shape, options: Options) -> Code {
49 | let mut ctxt = Ctxt {
50 | options,
51 | type_names: HashSet::new(),
52 | imports: HashSet::new(),
53 | created_classes: Vec::new(),
54 | };
55 |
56 | if !matches!(shape, Shape::Struct { .. }) {
57 | // reserve the requested name
58 | ctxt.type_names.insert(name.to_string());
59 | }
60 |
61 | let (ident, code) = type_from_shape(&mut ctxt, name, shape);
62 | let mut code = code.unwrap_or_default();
63 |
64 | if !ctxt.imports.is_empty() {
65 | let mut imports: Vec<_> = ctxt.imports.drain().collect();
66 | imports.sort();
67 | let mut import_code = String::new();
68 | match ctxt.options.import_style {
69 | ImportStyle::AssumeExisting => {}
70 | ImportStyle::AddImports => {
71 | for import in imports {
72 | let (module, identifier) = import.pair();
73 | import_code += &format!("from {} import {}\n", module, identifier);
74 | }
75 | }
76 | ImportStyle::QualifiedPaths => {
77 | let mut seen = HashSet::new();
78 | for import in imports {
79 | let module = import.module();
80 | if seen.insert(module) {
81 | import_code += &format!("import {}\n", module);
82 | }
83 | }
84 | }
85 | }
86 | if !import_code.is_empty() {
87 | import_code += "\n\n";
88 | code = import_code + &code;
89 | }
90 | }
91 |
92 | if ident != name {
93 | if !code.is_empty() {
94 | code += "\n\n";
95 | }
96 | code += &format!("{} = {}", name, ident);
97 | }
98 | code
99 | }
100 |
101 | fn type_from_shape(ctxt: &mut Ctxt, path: &str, shape: &Shape) -> (Ident, Option<Code>) {
102 | use crate::shape::Shape::*;
103 | match shape {
104 | Null | Any | Bottom => (import(ctxt, Import::Any), None),
105 | Bool => ("bool".into(), None),
106 | StringT => ("str".into(), None),
107 | Integer => ("int".into(), None),
108 | Floating => ("float".into(), None),
109 | Tuple(shapes, _n) => {
110 | let folded = shape::fold_shapes(shapes.clone());
111 | if folded == Any && shapes.iter().any(|s| s != &Any) {
112 | generate_tuple_type(ctxt, path, shapes)
113 | } else {
114 | generate_vec_type(ctxt, path, &folded)
115 | }
116 | }
117 | VecT { elem_type: e } => generate_vec_type(ctxt, path, e),
118 | Struct { fields } => generate_data_class(ctxt, path, fields, shape),
119 | MapT { val_type: v } => generate_map_type(ctxt, path, v),
120 | Opaque(t) => (t.clone(), None),
121 | Optional(e) => {
122 | let (inner, defs) = type_from_shape(ctxt, path, e);
123 | if ctxt.options.use_default_for_missing_fields {
124 | (inner, defs)
125 | } else {
126 | let optional = import(ctxt, Import::Optional);
127 | (format!("{}[{}]", optional, inner), defs)
128 | }
129 | }
130 | Nullable(e) => {
131 | let (inner, defs) = type_from_shape(ctxt, path, e);
132 | if ctxt.options.use_default_for_missing_fields {
133 | (inner, defs)
134 | } else {
135 | let optional = import(ctxt, Import::Optional);
136 | (format!("{}[{}]", optional, inner), defs)
137 | }
138 | }
139 | }
140 | }
141 |
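// Arrays become `list[T]`; the path is singularized so that any class
// generated for the element type gets a singular name.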
142 | fn generate_vec_type(ctxt: &mut Ctxt, path: &str, shape: &Shape) -> (Ident, Option<Code>) {
143 | let singular = to_singular(path);
144 | let (inner, defs) = type_from_shape(ctxt, &singular, shape);
145 | (format!("list[{}]", inner), defs)
146 | }
147 |
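// Objects inferred as maps become `dict[str, V]`, with the value type named
// after the singularized path.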
148 | fn generate_map_type(ctxt: &mut Ctxt, path: &str, shape: &Shape) -> (Ident, Option<Code>) {
149 | let singular = to_singular(path);
150 | let (inner, defs) = type_from_shape(ctxt, &singular, shape);
151 | (format!("dict[str, {}]", inner), defs)
152 | }
153 |
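// Heterogeneous arrays become `tuple[...]` with one type per position; class
// definitions generated for the element types are collected and returned
// alongside the tuple expression.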
154 | fn generate_tuple_type(ctxt: &mut Ctxt, path: &str, shapes: &[Shape]) -> (Ident, Option<Code>) {
155 | let mut types = Vec::new();
156 | let mut defs = Vec::new();
157 |
158 | for shape in shapes {
159 | let (typ, def) = type_from_shape(ctxt, path, shape);
160 | types.push(typ);
161 | if let Some(code) = def {
162 | if !code.is_empty() {
163 | defs.push(code)
164 | }
165 | }
166 | }
167 |
168 | (
169 | format!("tuple[{}]", types.join(", ")),
170 | Some(defs.join("\n\n")),
171 | )
172 | }
173 |
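// Helpers that turn JSON member names into valid, unique Python field and
// class identifiers.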
174 | fn field_name(name: &str, used_names: &HashSet<String>) -> Ident {
175 | type_or_field_name(name, used_names, "field", snake_case)
176 | }
177 |
178 | fn type_name(name: &str, used_names: &HashSet<String>) -> Ident {
179 | type_or_field_name(name, used_names, "GeneratedType", type_case)
180 | }
181 |
182 | // https://docs.python.org/3/reference/lexical_analysis.html#keywords
183 | #[rustfmt::skip]
184 | const PYTHON_KEYWORDS: &[&str] = &[
185 | "False", "None", "True",
186 | "and", "as", "assert", "async", "await", "break", "class", "continue",
187 | "def", "del", "elif", "else", "except", "finally", "for", "from", "global",
188 | "if", "import", "in", "is", "lambda", "nonlocal", "not", "or", "pass",
189 | "raise", "return", "try", "while", "with", "yield",
190 | ];
191 |
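// Normalizes a name with the given case function, then makes it a legal Python
// identifier: keywords get a `_field` suffix, empty names fall back to
// `default_name`, names starting with a digit are prefixed with `n`, and
// collisions with `used_names` are resolved by appending a numeric suffix.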
192 | fn type_or_field_name(
193 | name: &str,
194 | used_names: &HashSet<String>,
195 | default_name: &str,
196 | case_fn: fn(&str) -> String,
197 | ) -> Ident {
198 | let name = name.trim();
199 | let mut output_name = case_fn(name);
200 | if PYTHON_KEYWORDS.contains(&&*output_name) {
201 | output_name.push_str("_field");
202 | }
203 | if output_name.is_empty() {
204 | output_name.push_str(default_name);
205 | }
206 | if let Some(c) = output_name.chars().next() {
207 | if c.is_ascii_digit() {
208 | output_name = String::from("n") + &output_name;
209 | }
210 | }
211 | if !used_names.contains(&output_name) {
212 | return output_name;
213 | }
214 | for n in 2.. {
215 | let temp = format!("{}{}", output_name, n);
216 | if !used_names.contains(&temp) {
217 | return temp;
218 | }
219 | }
220 | unreachable!()
221 | }
222 |
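// Records that `import` is needed and returns the name to use at the reference
// site: the fully qualified `module.identifier` for QualifiedPaths, otherwise
// the bare identifier.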
223 | fn import(ctxt: &mut Ctxt, import: Import) -> String {
224 | ctxt.imports.insert(import);
225 | match ctxt.options.import_style {
226 | ImportStyle::QualifiedPaths => import.qualified(),
227 | _ => import.identifier().into(),
228 | }
229 | }
230 |
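// Emits a pydantic `BaseModel` subclass for a struct shape. If an already
// generated class is an acceptable substitution for this shape, it is reused.
// When a property name transform is configured, the serialized name is
// attached via `Field(alias=...)`; nested class definitions are placed before
// the class that references them.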
231 | fn generate_data_class(
232 | ctxt: &mut Ctxt,
233 | path: &str,
234 | field_shapes: &LinkedHashMap<String, Shape>,
235 | containing_shape: &Shape,
236 | ) -> (Ident, Option<Code>) {
237 | for (created_for_shape, ident) in ctxt.created_classes.iter() {
238 | if created_for_shape.is_acceptable_substitution_for(containing_shape) {
239 | return (ident.into(), None);
240 | }
241 | }
242 |
243 | let type_name = type_name(path, &ctxt.type_names);
244 | ctxt.type_names.insert(type_name.clone());
245 | ctxt.created_classes
246 | .push((containing_shape.clone(), type_name.clone()));
247 |
248 | let mut field_names = HashSet::new();
249 | let mut defs = Vec::new();
250 |
251 | let fields: Vec<Code> = field_shapes
252 | .iter()
253 | .map(|(name, typ)| {
254 | let field_name = field_name(name, &field_names);
255 | field_names.insert(field_name.clone());
256 |
257 | let (field_type, child_defs) = type_from_shape(ctxt, name, typ);
258 |
259 | if let Some(code) = child_defs {
260 | if !code.is_empty() {
261 | defs.push(code);
262 | }
263 | }
264 |
265 | let mut field_code = String::new();
266 | let transformed = apply_transform(ctxt, &field_name, name);
267 | if transformed != field_name {
268 | field_code += &format!(
269 | " = {}(alias=\"{}\")",
270 | import(ctxt, Import::Field),
271 | transformed
272 | )
273 | }
274 |
275 | format!("    {}: {}{}", field_name, field_type, field_code)
276 | })
277 | .collect();
278 |
279 | let mut code = String::new();
280 |
281 | code += &format!(
282 | "class {}({}):\n",
283 | type_name,
284 | import(ctxt, Import::BaseModel)
285 | );
286 |
287 | if fields.is_empty() {
288 | code += "    pass\n";
289 | } else {
290 | code += &fields.join("\n");
291 | code += "\n";
292 | }
293 |
294 | if !defs.is_empty() {
295 | let mut d = defs.join("\n\n");
296 | d += "\n\n";
297 | d += &code;
298 | code = d;
299 | }
300 |
301 | (type_name, Some(code))
302 | }
303 |
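// Applies the configured `property_name_format` to derive the serialized
// property name used as the pydantic alias; with no transform configured, the
// original JSON name is kept.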
304 | fn apply_transform(ctxt: &Ctxt, field_name: &str, name: &str) -> String {
305 | match ctxt.options.property_name_format {
306 | Some(StringTransform::LowerCase) => field_name.to_ascii_lowercase(),
307 | Some(StringTransform::PascalCase) => type_case(field_name),
308 | Some(StringTransform::SnakeCase) => snake_case(field_name),
309 | Some(StringTransform::KebabCase) => kebab_case(field_name),
310 | Some(StringTransform::UpperCase) => field_name.to_ascii_uppercase(),
311 | Some(StringTransform::CamelCase) => lower_camel_case(field_name),
312 | Some(StringTransform::ScreamingSnakeCase) => snake_case(field_name).to_ascii_uppercase(),
313 | Some(StringTransform::ScreamingKebabCase) => kebab_case(field_name).to_ascii_uppercase(),
314 | None => name.to_string(),
315 | }
316 | }
317 |
318 | #[cfg(test)]
319 | mod python_codegen_tests {
320 | use super::*;
321 |
322 | #[test]
323 | fn field_names_test() {
324 | fn field_name_test(from: &str, to: &str) {
325 | assert_eq!(
326 | field_name(from, &HashSet::new()),
327 | to.to_string(),
328 | r#"From "{}" to "{}""#,
329 | from,
330 | to
331 | );
332 | }
333 |
334 | field_name_test("valid", "valid");
335 | field_name_test("1", "n1");
336 | field_name_test("+1", "n1");
337 | field_name_test("", "field");
338 | field_name_test("def", "def_field");
339 | }
340 | }
341 |
--------------------------------------------------------------------------------