├── .github
│   └── workflows
│       └── release.yml
├── .gitignore
├── Cargo.toml
├── LICENSE
├── README.md
├── src
│   ├── lib.rs
│   └── main.rs
└── tests
    ├── fixtures
    │   ├── mixed.jf.json
    │   └── mixed.json
    └── test.rs

/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | # Copyright 2022-2024, axodotdev
2 | # SPDX-License-Identifier: MIT or Apache-2.0
3 | #
4 | # CI that:
5 | #
6 | # * checks for a Git Tag that looks like a release
7 | # * builds artifacts with cargo-dist (archives, installers, hashes)
8 | # * uploads those artifacts to temporary workflow zip
9 | # * on success, uploads the artifacts to a GitHub Release
10 | #
11 | # Note that the GitHub Release will be created with a generated
12 | # title/body based on your changelogs.
13 | 
14 | name: Release
15 | 
16 | permissions:
17 |   contents: write
18 | 
19 | # This task will run whenever you push a git tag that looks like a version
20 | # like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc.
21 | # Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where
22 | # PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION
23 | # must be a Cargo-style SemVer Version (must have at least major.minor.patch).
24 | #
25 | # If PACKAGE_NAME is specified, then the announcement will be for that
26 | # package (erroring out if it doesn't have the given version or isn't cargo-dist-able).
27 | #
28 | # If PACKAGE_NAME isn't specified, then the announcement will be for all
29 | # (cargo-dist-able) packages in the workspace with that version (this mode is
30 | # intended for workspaces with only one dist-able package, or with all dist-able
31 | # packages versioned/released in lockstep).
32 | #
33 | # If you push multiple tags at once, separate instances of this workflow will
34 | # spin up, creating an independent announcement for each one. However, GitHub
35 | # will hard limit this to 3 tags per commit, as it will assume more tags is a
36 | # mistake.
37 | #
38 | # If there's a prerelease-style suffix to the version, then the release(s)
39 | # will be marked as a prerelease.
40 | on:
41 |   push:
42 |     tags:
43 |       - '**[0-9]+.[0-9]+.[0-9]+*'
44 |   pull_request:
45 | 
46 | jobs:
47 |   # Run 'cargo dist plan' (or host) to determine what tasks we need to do
48 |   plan:
49 |     runs-on: ubuntu-latest
50 |     outputs:
51 |       val: ${{ steps.plan.outputs.manifest }}
52 |       tag: ${{ !github.event.pull_request && github.ref_name || '' }}
53 |       tag-flag: ${{ !github.event.pull_request && format('--tag={0}', github.ref_name) || '' }}
54 |       publishing: ${{ !github.event.pull_request }}
55 |     env:
56 |       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
57 |     steps:
58 |       - uses: actions/checkout@v4
59 |         with:
60 |           submodules: recursive
61 |       - name: Install cargo-dist
62 |         # we specify bash to get pipefail; it guards against the `curl` command
63 |         # failing. otherwise `sh` won't catch that `curl` returned non-0
64 |         shell: bash
65 |         run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.13.3/cargo-dist-installer.sh | sh"
66 |       # sure would be cool if github gave us proper conditionals...
67 |       # so here's a doubly-nested ternary-via-truthiness to try to provide the best possible
68 |       # functionality based on whether this is a pull_request, and whether it's from a fork.
69 | # (PRs run on the *source* but secrets are usually on the *target* -- that's *good* 70 | # but also really annoying to build CI around when it needs secrets to work right.) 71 | - id: plan 72 | run: | 73 | cargo dist ${{ (!github.event.pull_request && format('host --steps=create --tag={0}', github.ref_name)) || 'plan' }} --output-format=json > plan-dist-manifest.json 74 | echo "cargo dist ran successfully" 75 | cat plan-dist-manifest.json 76 | echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT" 77 | - name: "Upload dist-manifest.json" 78 | uses: actions/upload-artifact@v4 79 | with: 80 | name: artifacts-plan-dist-manifest 81 | path: plan-dist-manifest.json 82 | 83 | # Build and packages all the platform-specific things 84 | build-local-artifacts: 85 | name: build-local-artifacts (${{ join(matrix.targets, ', ') }}) 86 | # Let the initial task tell us to not run (currently very blunt) 87 | needs: 88 | - plan 89 | if: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix.include != null && (needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload') }} 90 | strategy: 91 | fail-fast: false 92 | # Target platforms/runners are computed by cargo-dist in create-release. 93 | # Each member of the matrix has the following arguments: 94 | # 95 | # - runner: the github runner 96 | # - dist-args: cli flags to pass to cargo dist 97 | # - install-dist: expression to run to install cargo-dist on the runner 98 | # 99 | # Typically there will be: 100 | # - 1 "global" task that builds universal installers 101 | # - N "local" tasks that build each platform's binaries and platform-specific installers 102 | matrix: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix }} 103 | runs-on: ${{ matrix.runner }} 104 | env: 105 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 106 | BUILD_MANIFEST_NAME: target/distrib/${{ join(matrix.targets, '-') }}-dist-manifest.json 107 | steps: 108 | - name: enable windows longpaths 109 | run: | 110 | git config --global core.longpaths true 111 | - uses: actions/checkout@v4 112 | with: 113 | submodules: recursive 114 | - uses: swatinem/rust-cache@v2 115 | with: 116 | key: ${{ join(matrix.targets, '-') }} 117 | - name: Install cargo-dist 118 | run: ${{ matrix.install_dist }} 119 | # Get the dist-manifest 120 | - name: Fetch local artifacts 121 | uses: actions/download-artifact@v4 122 | with: 123 | pattern: artifacts-* 124 | path: target/distrib/ 125 | merge-multiple: true 126 | - name: Install dependencies 127 | run: | 128 | ${{ matrix.packages_install }} 129 | - name: Build artifacts 130 | run: | 131 | # Actually do builds and make zips and whatnot 132 | cargo dist build ${{ needs.plan.outputs.tag-flag }} --print=linkage --output-format=json ${{ matrix.dist_args }} > dist-manifest.json 133 | echo "cargo dist ran successfully" 134 | - id: cargo-dist 135 | name: Post-build 136 | # We force bash here just because github makes it really hard to get values up 137 | # to "real" actions without writing to env-vars, and writing to env-vars has 138 | # inconsistent syntax between shell and powershell. 
139 |         shell: bash
140 |         run: |
141 |           # Parse out what we just built and upload it to scratch storage
142 |           echo "paths<<EOF" >> "$GITHUB_OUTPUT"
143 |           jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT"
144 |           echo "EOF" >> "$GITHUB_OUTPUT"
145 | 
146 |           cp dist-manifest.json "$BUILD_MANIFEST_NAME"
147 |       - name: "Upload artifacts"
148 |         uses: actions/upload-artifact@v4
149 |         with:
150 |           name: artifacts-build-local-${{ join(matrix.targets, '_') }}
151 |           path: |
152 |             ${{ steps.cargo-dist.outputs.paths }}
153 |             ${{ env.BUILD_MANIFEST_NAME }}
154 | 
155 |   # Build and package all the platform-agnostic(ish) things
156 |   build-global-artifacts:
157 |     needs:
158 |       - plan
159 |       - build-local-artifacts
160 |     runs-on: "ubuntu-20.04"
161 |     env:
162 |       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
163 |       BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
164 |     steps:
165 |       - uses: actions/checkout@v4
166 |         with:
167 |           submodules: recursive
168 |       - name: Install cargo-dist
169 |         shell: bash
170 |         run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.13.3/cargo-dist-installer.sh | sh"
171 |       # Get all the local artifacts for the global tasks to use (for e.g. checksums)
172 |       - name: Fetch local artifacts
173 |         uses: actions/download-artifact@v4
174 |         with:
175 |           pattern: artifacts-*
176 |           path: target/distrib/
177 |           merge-multiple: true
178 |       - id: cargo-dist
179 |         shell: bash
180 |         run: |
181 |           cargo dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
182 |           echo "cargo dist ran successfully"
183 | 
184 |           # Parse out what we just built and upload it to scratch storage
185 |           echo "paths<<EOF" >> "$GITHUB_OUTPUT"
186 |           jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT"
187 |           echo "EOF" >> "$GITHUB_OUTPUT"
188 | 
189 |           cp dist-manifest.json "$BUILD_MANIFEST_NAME"
190 |       - name: "Upload artifacts"
191 |         uses: actions/upload-artifact@v4
192 |         with:
193 |           name: artifacts-build-global
194 |           path: |
195 |             ${{ steps.cargo-dist.outputs.paths }}
196 |             ${{ env.BUILD_MANIFEST_NAME }}
197 |   # Determines if we should publish/announce
198 |   host:
199 |     needs:
200 |       - plan
201 |       - build-local-artifacts
202 |       - build-global-artifacts
203 |     # Only run if we're "publishing", and only if local and global didn't fail (skipped is fine)
204 |     if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.build-local-artifacts.result == 'skipped' || needs.build-local-artifacts.result == 'success') }}
205 |     env:
206 |       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
207 |     runs-on: "ubuntu-20.04"
208 |     outputs:
209 |       val: ${{ steps.host.outputs.manifest }}
210 |     steps:
211 |       - uses: actions/checkout@v4
212 |         with:
213 |           submodules: recursive
214 |       - name: Install cargo-dist
215 |         run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.13.3/cargo-dist-installer.sh | sh"
216 |       # Fetch artifacts from scratch-storage
217 |       - name: Fetch artifacts
218 |         uses: actions/download-artifact@v4
219 |         with:
220 |           pattern: artifacts-*
221 |           path: target/distrib/
222 |           merge-multiple: true
223 |       # This is a harmless no-op for GitHub Releases, hosting for that happens in "announce"
224 |       - id: host
225 |         shell: bash
226 |         run: |
227 |           cargo dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
228 |           echo
"artifacts uploaded and released successfully" 229 | cat dist-manifest.json 230 | echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT" 231 | - name: "Upload dist-manifest.json" 232 | uses: actions/upload-artifact@v4 233 | with: 234 | # Overwrite the previous copy 235 | name: artifacts-dist-manifest 236 | path: dist-manifest.json 237 | 238 | publish-homebrew-formula: 239 | needs: 240 | - plan 241 | - host 242 | runs-on: "ubuntu-20.04" 243 | env: 244 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 245 | PLAN: ${{ needs.plan.outputs.val }} 246 | GITHUB_USER: "axo bot" 247 | GITHUB_EMAIL: "admin+bot@axo.dev" 248 | if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }} 249 | steps: 250 | - uses: actions/checkout@v4 251 | with: 252 | repository: "say4n/homebrew-tap" 253 | token: ${{ secrets.HOMEBREW_TAP_TOKEN }} 254 | # So we have access to the formula 255 | - name: Fetch local artifacts 256 | uses: actions/download-artifact@v4 257 | with: 258 | pattern: artifacts-* 259 | path: Formula/ 260 | merge-multiple: true 261 | # This is extra complex because you can make your Formula name not match your app name 262 | # so we need to find releases with a *.rb file, and publish with that filename. 263 | - name: Commit formula files 264 | run: | 265 | git config --global user.name "${GITHUB_USER}" 266 | git config --global user.email "${GITHUB_EMAIL}" 267 | 268 | for release in $(echo "$PLAN" | jq --compact-output '.releases[] | select([.artifacts[] | endswith(".rb")] | any)'); do 269 | filename=$(echo "$release" | jq '.artifacts[] | select(endswith(".rb"))' --raw-output) 270 | name=$(echo "$filename" | sed "s/\.rb$//") 271 | version=$(echo "$release" | jq .app_version --raw-output) 272 | 273 | git add "Formula/${filename}" 274 | git commit -m "${name} ${version}" 275 | done 276 | git push 277 | 278 | # Create a GitHub Release while uploading all files to it 279 | announce: 280 | needs: 281 | - plan 282 | - host 283 | - publish-homebrew-formula 284 | # use "always() && ..." to allow us to wait for all publish jobs while 285 | # still allowing individual publish jobs to skip themselves (for prereleases). 286 | # "host" however must run to completion, no skipping allowed! 
287 | if: ${{ always() && needs.host.result == 'success' && (needs.publish-homebrew-formula.result == 'skipped' || needs.publish-homebrew-formula.result == 'success') }} 288 | runs-on: "ubuntu-20.04" 289 | env: 290 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 291 | steps: 292 | - uses: actions/checkout@v4 293 | with: 294 | submodules: recursive 295 | - name: "Download GitHub Artifacts" 296 | uses: actions/download-artifact@v4 297 | with: 298 | pattern: artifacts-* 299 | path: artifacts 300 | merge-multiple: true 301 | - name: Cleanup 302 | run: | 303 | # Remove the granular manifests 304 | rm -f artifacts/*-dist-manifest.json 305 | - name: Create GitHub Release 306 | uses: ncipollo/release-action@v1 307 | with: 308 | tag: ${{ needs.plan.outputs.tag }} 309 | name: ${{ fromJson(needs.host.outputs.val).announcement_title }} 310 | body: ${{ fromJson(needs.host.outputs.val).announcement_github_body }} 311 | prerelease: ${{ fromJson(needs.host.outputs.val).announcement_is_prerelease }} 312 | artifacts: "artifacts/*" 313 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | debug/ 4 | target/ 5 | 6 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 7 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 8 | Cargo.lock 9 | 10 | # These are backup files generated by rustfmt 11 | **/*.rs.bk 12 | 13 | # MSVC Windows builds of rustc generate these, which store debugging information 14 | *.pdb 15 | 16 | 17 | # Added by cargo 18 | 19 | /target 20 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "jf" 3 | version = "0.2.2" 4 | edition = "2021" 5 | repository = "https://github.com/say4n/jf/" 6 | description = "flatten them json" 7 | license = "MIT" 8 | authors = ["say4n"] 9 | 10 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 11 | 12 | [dependencies] 13 | clap = { version = "4.5.4", features = ["derive"] } 14 | serde_json = "1.0.116" 15 | 16 | # The profile that 'cargo dist' will build with 17 | [profile.dist] 18 | inherits = "release" 19 | lto = "thin" 20 | 21 | # Config for 'cargo dist' 22 | [workspace.metadata.dist] 23 | # The preferred cargo-dist version to use in CI (Cargo.toml SemVer syntax) 24 | cargo-dist-version = "0.13.3" 25 | # CI backends to support 26 | ci = ["github"] 27 | # The installers to generate for each app 28 | installers = ["shell", "powershell", "homebrew"] 29 | # A GitHub repo to push Homebrew formulas to 30 | tap = "say4n/homebrew-tap" 31 | # Target platforms to build apps for (Rust target-triple syntax) 32 | targets = ["aarch64-apple-darwin", "x86_64-apple-darwin", "x86_64-unknown-linux-gnu", "x86_64-pc-windows-msvc"] 33 | # The archive format to use for windows builds (defaults .zip) 34 | windows-archive = ".tar.gz" 35 | # The archive format to use for non-windows builds (defaults .tar.xz) 36 | unix-archive = ".tar.gz" 37 | # Publish jobs to run in CI 38 | publish-jobs = ["homebrew"] 39 | # Publish jobs to run in CI 40 | pr-run-mode = "plan" 41 | # Whether to install an updater program 42 | install-updater = true 43 | -------------------------------------------------------------------------------- /LICENSE: 
-------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Sayan Goswami 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # `$ jf` 2 | flatten them json 3 | 4 | ## usage 5 | 6 | grab the latest `$ jf` from the [releases tab](https://github.com/say4n/jf/releases/) for your platform 7 | 8 | ``` 9 | $ jf -f foo.json 10 | {"foo.bar.0":"baz"} 11 | 12 | $ jf --filename foo.json 13 | {"foo.bar.0":"baz"} 14 | 15 | $ echo '{"foo": {"bar": ["baz"]}}' | jf 16 | {"foo.bar.0":"baz"} 17 | ``` 18 | 19 | ## options 20 | 21 | ``` 22 | -f, --filename 23 | -s, --separator [default: .] 24 | -p, --pretty 25 | -h, --help Print help 26 | -V, --version Print version 27 | ``` 28 | 29 | ## intent 30 | 31 | handling nested json data is messy, `$ jf` comes to the rescue. 
32 | 
33 | it can turn highly nested json blobs like:
34 | 
35 | ```json
36 | {
37 |   "this": {
38 |     "is": {
39 |       "nested": {
40 |         "quite": {
41 |           "deep": ["but", "that", "is", "fine"]
42 |         }
43 |       }
44 |     }
45 |   },
46 |   "some" : [
47 |     {
48 |       "other": ["stuff"]
49 |     }
50 |   ],
51 |   "even": {
52 |     "more": "data",
53 |     "boolean": true,
54 |     "number": 2
55 |   }
56 | }
57 | ```
58 | 
59 | into
60 | 
61 | ```json
62 | {
63 |   "even.boolean": true,
64 |   "even.more": "data",
65 |   "even.number": 2,
66 |   "some.0.other.0": "stuff",
67 |   "this.is.nested.quite.deep.0": "but",
68 |   "this.is.nested.quite.deep.1": "that",
69 |   "this.is.nested.quite.deep.2": "is",
70 |   "this.is.nested.quite.deep.3": "fine"
71 | }
72 | ```
73 | 
--------------------------------------------------------------------------------
/src/lib.rs:
--------------------------------------------------------------------------------
1 | use serde_json::{json, Map, Value};
2 | 
3 | pub fn flatten(root: Value, separator: &String) -> Value {
4 |     let _flattened = _flatten_impl(root, &String::from(""), separator);
5 |     let mut flattened = Map::new();
6 | 
7 |     match _flattened {
8 |         Value::Object(items) => {
9 |             for (k, v) in items {
10 |                 flattened.insert(k[separator.len()..].to_string(), v); // strip the leading separator prepended by _flatten_impl
11 |             }
12 |         }
13 |         _ => {}
14 |     }
15 | 
16 |     return Value::Object(flattened);
17 | }
18 | 
19 | fn _flatten_impl(root: Value, key: &String, separator: &String) -> Value {
20 |     // println!("key: {:?}", key);
21 | 
22 |     match root {
23 |         Value::Null => return root,
24 |         Value::Bool(value) => return json!({key: value}),
25 |         Value::Number(value) => return json!({key: value}),
26 |         Value::String(value) => return json!({key: value}),
27 |         Value::Array(items) => {
28 |             let mut flattened_array = Map::new();
29 | 
30 |             for (index, item) in items.iter().enumerate() {
31 |                 let subkey = format!("{}{}{}", key, &separator, index);
32 |                 let parsed_subtree = _flatten_impl(item.clone(), &subkey, &separator);
33 | 
34 |                 match parsed_subtree {
35 |                     Value::Bool(value) => flattened_array.insert(subkey, Value::Bool(value)),
36 |                     Value::Number(value) => flattened_array.insert(subkey, Value::Number(value)),
37 |                     Value::String(value) => flattened_array.insert(subkey, Value::String(value)),
38 |                     Value::Object(parsed_subtree_items) => {
39 |                         for (k, v) in parsed_subtree_items {
40 |                             flattened_array.insert(k, v);
41 |                         }
42 |                         Some(Value::Null)
43 |                     }
44 |                     Value::Null | Value::Array(_) => Some(Value::Null),
45 |                 };
46 |             }
47 | 
48 |             // println!("key: {:?}, flattened_array: {:#}\n\n", key, Value::Object(flattened_array.clone()));
49 |             return Value::Object(flattened_array);
50 |         }
51 |         Value::Object(items) => {
52 |             let mut flattened_dict = Map::new();
53 | 
54 |             for (nested_key, nested_value) in items {
55 |                 let subkey = format!("{}{}{}", key, &separator, nested_key);
56 |                 let parsed_subtree = _flatten_impl(nested_value.clone(), &subkey, &separator);
57 | 
58 |                 match parsed_subtree {
59 |                     Value::Bool(value) => flattened_dict.insert(subkey, Value::Bool(value)),
60 |                     Value::Number(value) => flattened_dict.insert(subkey, Value::Number(value)),
61 |                     Value::String(value) => flattened_dict.insert(subkey, Value::String(value)),
62 |                     Value::Object(parsed_subtree_items) => {
63 |                         for (k, v) in parsed_subtree_items {
64 |                             flattened_dict.insert(k, v);
65 |                         }
66 |                         Some(Value::Null)
67 |                     }
68 |                     Value::Null | Value::Array(_) => Some(Value::Null),
69 |                 };
70 |             }
71 | 
72 |             // println!("key: {:?}, flattened_dict: {:#}\n\n", key, Value::Object(flattened_dict.clone()));
73 |             return Value::Object(flattened_dict);
74 |         }
75 |     }
76 | }
77 | 
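The recursion above builds each flattened key by appending the separator and the child's map key or array index at every level, so every generated key carries one leading separator that `flatten` strips before returning the result. A minimal sketch of driving the library directly, assuming `jf` and `serde_json` as dependencies (the integration test further below does the same thing against the fixtures):

```rust
use jf::flatten;
use serde_json::{json, Value};

fn main() {
    // A small nested document, as in the README example.
    let nested = json!({"foo": {"bar": ["baz"]}, "n": 2});

    // Flatten with the default "." separator; each leaf gets its full path as its key.
    let flat: Value = flatten(nested, &String::from("."));

    assert_eq!(flat["foo.bar.0"], json!("baz"));
    assert_eq!(flat["n"], json!(2));

    // Prints the single-level object, e.g. {"foo.bar.0":"baz","n":2}.
    println!("{}", flat);
}
```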
--------------------------------------------------------------------------------
/src/main.rs:
--------------------------------------------------------------------------------
1 | use std::{
2 |     fs::read,
3 |     io::{self, IsTerminal, Read},
4 |     process::exit,
5 | };
6 | 
7 | use clap::{command, ArgAction, CommandFactory, Parser};
8 | use jf::flatten;
9 | 
10 | #[derive(Parser, Debug)]
11 | #[command(
12 |     version,
13 |     about="flatten them json",
14 |     long_about=None,
15 |     override_usage="\t$ jf -f foo.json
16 | \t{\"foo.bar.0\":\"baz\"}
17 | 
18 | \t$ jf --filename foo.json
19 | \t{\"foo.bar.0\":\"baz\"}
20 | 
21 | \t$ echo '{\"foo\": {\"bar\": [\"baz\"]}}' | jf
22 | \t{\"foo.bar.0\":\"baz\"}
23 | "
24 | )]
25 | struct Args {
26 |     #[arg(short, long, value_name = "path_to_file.json")]
27 |     filename: Option<String>,
28 | 
29 |     #[arg(short, long, default_value = ".")]
30 |     separator: String,
31 | 
32 |     #[arg(short, long, action=ArgAction::SetTrue)]
33 |     pretty: bool,
34 | }
35 | 
36 | fn main() -> io::Result<()> {
37 |     let mut cmd = Args::command();
38 |     let args = Args::parse();
39 | 
40 |     let mut buffer = Vec::<u8>::new();
41 | 
42 |     match args.filename {
43 |         Some(filename) => {
44 |             buffer = read(filename).expect("Failed to read file");
45 |         }
46 |         None => {
47 |             let input = std::io::stdin();
48 | 
49 |             if input.is_terminal() {
50 |                 let _ = cmd.print_long_help();
51 |                 exit(1)
52 |             } else {
53 |                 io::stdin().read_to_end(&mut buffer).expect("Failed to read from stdin");
54 |             }
55 |         }
56 |     };
57 | 
58 |     let json_tree = serde_json::from_slice(&buffer).expect("Failed to parse JSON");
59 |     let flat_json_tree = flatten(json_tree, &args.separator);
60 | 
61 |     if !args.pretty {
62 |         println!("{}", &flat_json_tree);
63 |     } else {
64 |         println!("{}", serde_json::to_string_pretty(&flat_json_tree).expect("Failed to pretty print JSON"));
65 |     }
66 |     Ok(())
67 | }
--------------------------------------------------------------------------------
/tests/fixtures/mixed.jf.json:
--------------------------------------------------------------------------------
1 | {
2 |   "even.boolean": true,
3 |   "even.more": "data",
4 |   "even.number": 2,
5 |   "some.0.other.0": "stuff",
6 |   "this.is.nested.quite.deep.0": "but",
7 |   "this.is.nested.quite.deep.1": "that",
8 |   "this.is.nested.quite.deep.2": "is",
9 |   "this.is.nested.quite.deep.3": "fine"
10 | }
--------------------------------------------------------------------------------
/tests/fixtures/mixed.json:
--------------------------------------------------------------------------------
1 | {
2 |   "this": {
3 |     "is": {
4 |       "nested": {
5 |         "quite": {
6 |           "deep": ["but", "that", "is", "fine"]
7 |         }
8 |       }
9 |     }
10 |   },
11 |   "some" : [
12 |     {
13 |       "other": ["stuff"]
14 |     }
15 |   ],
16 |   "even": {
17 |     "more": "data",
18 |     "boolean": true,
19 |     "number": 2
20 |   }
21 | }
--------------------------------------------------------------------------------
/tests/test.rs:
--------------------------------------------------------------------------------
1 | mod tests {
2 |     // Note this useful idiom: importing names from outer (for mod tests) scope.
3 | use std::fs::read_to_string; 4 | 5 | use jf::flatten; 6 | use serde_json::Value; 7 | 8 | #[test] 9 | fn test_mixed_json() { 10 | let mixed = read_to_string("tests/fixtures/mixed.json").unwrap(); 11 | let mixed_jf = read_to_string("tests/fixtures/mixed.jf.json").unwrap(); 12 | 13 | let json_tree: Value = serde_json::from_str(&mixed).unwrap(); 14 | let jf_tree: Value = serde_json::from_str(&mixed_jf).unwrap(); 15 | 16 | let flat_json_tree = flatten(json_tree, &String::from(".")); 17 | 18 | assert_eq!(flat_json_tree, jf_tree); 19 | } 20 | } 21 | --------------------------------------------------------------------------------
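The test above pins the default "." separator via the fixtures. Since the library threads the separator through every level, a companion test for a custom separator could look like the following sketch (hypothetical, not part of the repo; it mirrors what the CLI's `-s/--separator` flag exposes):

```rust
// Hypothetical extra test, not present in tests/test.rs.
#[test]
fn test_custom_separator() {
    use jf::flatten;
    use serde_json::json;

    let nested = json!({"some": [{"other": ["stuff"]}]});

    // Join object keys and array indices with "/" instead of ".".
    let flat = flatten(nested, &String::from("/"));

    assert_eq!(flat, json!({"some/0/other/0": "stuff"}));
}
```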