├── .github ├── tl-packages.txt └── workflows │ ├── benchmark.yaml │ ├── l3build-check-dev.yaml │ └── l3build-check.yaml ├── .gitignore ├── LICENCE ├── README.md ├── benchmark ├── .latexmkrc ├── 4KB.json ├── README.md ├── benchmark.pdf └── benchmark.tex ├── build-luatex.lua ├── build.lua ├── jason ├── jason.pdf ├── jason.svg └── jason.tex ├── jsonparse-doc.pdf ├── jsonparse-doc.tex ├── jsonparse.sty └── testfiles ├── README.md ├── test001.lvt ├── test001.tlg ├── test002.lvt ├── test002.tlg ├── test003.lve ├── test003.lvt ├── test004.lve ├── test004.lvt ├── test005.lvt ├── test005.tlg ├── test006.lvt ├── test006.tlg ├── test007.lvt ├── test007.tlg ├── test008.lvt ├── test008.tlg ├── test009.lvt ├── test009.tlg ├── test010.lve ├── test010.lvt ├── test011.lve ├── test011.lvt ├── test012.lvt ├── test012.tlg ├── test100.lvt ├── test100.tlg ├── test200.lvt └── test200.tlg /.github/tl-packages.txt: -------------------------------------------------------------------------------- 1 | l3build 2 | 3 | latex-bin 4 | pdftex 5 | luatex 6 | xetex 7 | 8 | ec 9 | cm-super 10 | tools 11 | 12 | latex-bin-dev 13 | latex-base-dev 14 | latex-firstaid-dev 15 | 16 | geometry 17 | booktabs 18 | -------------------------------------------------------------------------------- /.github/workflows/benchmark.yaml: -------------------------------------------------------------------------------- 1 | name: Check benchmark of jsonparse 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | benchmark-check: 7 | runs-on: ubuntu-latest 8 | permissions: 9 | contents: write 10 | steps: 11 | - name: Checkout repository 12 | uses: actions/checkout@v4 13 | - name: Install TeX Live 14 | uses: zauguin/install-texlive@v4 15 | with: 16 | package_file: .github/tl-packages.txt 17 | - name: Run benchmark check 18 | run: | 19 | pdflatex benchmark.tex 20 | working-directory: benchmark/ 21 | - name: Check for changes 22 | id: verify_diff 23 | run: | 24 | git diff --quiet . 
|| echo "changed=true" >> $GITHUB_OUTPUT 25 | - name: Commit and push changes 26 | if: steps.verify_diff.outputs.changed == 'true' 27 | run: | 28 | git config --global user.name "github-actions[bot]" 29 | git config --global user.email "github-actions[bot]@users.noreply.github.com" 30 | git add -f benchmark/README.md 31 | git add -f benchmark/benchmark.pdf 32 | git commit -m "Update benchmark results (Action)" 33 | git push 34 | -------------------------------------------------------------------------------- /.github/workflows/l3build-check-dev.yaml: -------------------------------------------------------------------------------- 1 | name: Test jsonparse using l3build -dev 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | l3build-check-dev: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - name: Checkout repository 10 | uses: actions/checkout@v4 11 | - name: Install TeX Live 12 | uses: zauguin/install-texlive@v4 13 | with: 14 | package_file: .github/tl-packages.txt 15 | - name: Run l3build check -dev 16 | run: | 17 | l3build check --dev -q -H --show-log-on-error -------------------------------------------------------------------------------- /.github/workflows/l3build-check.yaml: -------------------------------------------------------------------------------- 1 | name: Test jsonparse using l3build 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | l3build-check: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - name: Checkout repository 10 | uses: actions/checkout@v4 11 | - name: Install TeX Live 12 | uses: zauguin/install-texlive@v4 13 | with: 14 | package_file: .github/tl-packages.txt 15 | - name: Run l3build check 16 | run: | 17 | l3build check -q -H --show-log-on-error -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.aux 2 | *.fdb_latexmk 3 | *.fls 4 | *.listing 5 | *.log 6 | *.out 7 | *.synctex.gz 8 | *.zip 9 | 10 | build/* 11 | benchmark/*.pdf 12 | benchmark/*.md 13 | jsonparse/* -------------------------------------------------------------------------------- /LICENCE: -------------------------------------------------------------------------------- 1 | The LaTeX Project Public License 2 | =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- 3 | 4 | LPPL Version 1.3c 2008-05-04 5 | 6 | Copyright 1999 2002-2008 LaTeX3 Project 7 | Everyone is allowed to distribute verbatim copies of this 8 | license document, but modification of it is not allowed. 9 | 10 | 11 | PREAMBLE 12 | ======== 13 | 14 | The LaTeX Project Public License (LPPL) is the primary license under 15 | which the LaTeX kernel and the base LaTeX packages are distributed. 16 | 17 | You may use this license for any work of which you hold the copyright 18 | and which you wish to distribute. This license may be particularly 19 | suitable if your work is TeX-related (such as a LaTeX package), but 20 | it is written in such a way that you can use it even if your work is 21 | unrelated to TeX. 22 | 23 | The section `WHETHER AND HOW TO DISTRIBUTE WORKS UNDER THIS LICENSE', 24 | below, gives instructions, examples, and recommendations for authors 25 | who are considering distributing their works under this license. 26 | 27 | This license gives conditions under which a work may be distributed 28 | and modified, as well as conditions under which modified versions of 29 | that work may be distributed. 
30 | 31 | We, the LaTeX3 Project, believe that the conditions below give you 32 | the freedom to make and distribute modified versions of your work 33 | that conform with whatever technical specifications you wish while 34 | maintaining the availability, integrity, and reliability of 35 | that work. If you do not see how to achieve your goal while 36 | meeting these conditions, then read the document `cfgguide.tex' 37 | and `modguide.tex' in the base LaTeX distribution for suggestions. 38 | 39 | 40 | DEFINITIONS 41 | =========== 42 | 43 | In this license document the following terms are used: 44 | 45 | `Work' 46 | Any work being distributed under this License. 47 | 48 | `Derived Work' 49 | Any work that under any applicable law is derived from the Work. 50 | 51 | `Modification' 52 | Any procedure that produces a Derived Work under any applicable 53 | law -- for example, the production of a file containing an 54 | original file associated with the Work or a significant portion of 55 | such a file, either verbatim or with modifications and/or 56 | translated into another language. 57 | 58 | `Modify' 59 | To apply any procedure that produces a Derived Work under any 60 | applicable law. 61 | 62 | `Distribution' 63 | Making copies of the Work available from one person to another, in 64 | whole or in part. Distribution includes (but is not limited to) 65 | making any electronic components of the Work accessible by 66 | file transfer protocols such as FTP or HTTP or by shared file 67 | systems such as Sun's Network File System (NFS). 68 | 69 | `Compiled Work' 70 | A version of the Work that has been processed into a form where it 71 | is directly usable on a computer system. This processing may 72 | include using installation facilities provided by the Work, 73 | transformations of the Work, copying of components of the Work, or 74 | other activities. Note that modification of any installation 75 | facilities provided by the Work constitutes modification of the Work. 76 | 77 | `Current Maintainer' 78 | A person or persons nominated as such within the Work. If there is 79 | no such explicit nomination then it is the `Copyright Holder' under 80 | any applicable law. 81 | 82 | `Base Interpreter' 83 | A program or process that is normally needed for running or 84 | interpreting a part or the whole of the Work. 85 | 86 | A Base Interpreter may depend on external components but these 87 | are not considered part of the Base Interpreter provided that each 88 | external component clearly identifies itself whenever it is used 89 | interactively. Unless explicitly specified when applying the 90 | license to the Work, the only applicable Base Interpreter is a 91 | `LaTeX-Format' or in the case of files belonging to the 92 | `LaTeX-format' a program implementing the `TeX language'. 93 | 94 | 95 | 96 | CONDITIONS ON DISTRIBUTION AND MODIFICATION 97 | =========================================== 98 | 99 | 1. Activities other than distribution and/or modification of the Work 100 | are not covered by this license; they are outside its scope. In 101 | particular, the act of running the Work is not restricted and no 102 | requirements are made concerning any offers of support for the Work. 103 | 104 | 2. You may distribute a complete, unmodified copy of the Work as you 105 | received it. Distribution of only part of the Work is considered 106 | modification of the Work, and no right to distribute such a Derived 107 | Work may be assumed under the terms of this clause. 108 | 109 | 3. 
You may distribute a Compiled Work that has been generated from a 110 | complete, unmodified copy of the Work as distributed under Clause 2 111 | above, as long as that Compiled Work is distributed in such a way that 112 | the recipients may install the Compiled Work on their system exactly 113 | as it would have been installed if they generated a Compiled Work 114 | directly from the Work. 115 | 116 | 4. If you are the Current Maintainer of the Work, you may, without 117 | restriction, modify the Work, thus creating a Derived Work. You may 118 | also distribute the Derived Work without restriction, including 119 | Compiled Works generated from the Derived Work. Derived Works 120 | distributed in this manner by the Current Maintainer are considered to 121 | be updated versions of the Work. 122 | 123 | 5. If you are not the Current Maintainer of the Work, you may modify 124 | your copy of the Work, thus creating a Derived Work based on the Work, 125 | and compile this Derived Work, thus creating a Compiled Work based on 126 | the Derived Work. 127 | 128 | 6. If you are not the Current Maintainer of the Work, you may 129 | distribute a Derived Work provided the following conditions are met 130 | for every component of the Work unless that component clearly states 131 | in the copyright notice that it is exempt from that condition. Only 132 | the Current Maintainer is allowed to add such statements of exemption 133 | to a component of the Work. 134 | 135 | a. If a component of this Derived Work can be a direct replacement 136 | for a component of the Work when that component is used with the 137 | Base Interpreter, then, wherever this component of the Work 138 | identifies itself to the user when used interactively with that 139 | Base Interpreter, the replacement component of this Derived Work 140 | clearly and unambiguously identifies itself as a modified version 141 | of this component to the user when used interactively with that 142 | Base Interpreter. 143 | 144 | b. Every component of the Derived Work contains prominent notices 145 | detailing the nature of the changes to that component, or a 146 | prominent reference to another file that is distributed as part 147 | of the Derived Work and that contains a complete and accurate log 148 | of the changes. 149 | 150 | c. No information in the Derived Work implies that any persons, 151 | including (but not limited to) the authors of the original version 152 | of the Work, provide any support, including (but not limited to) 153 | the reporting and handling of errors, to recipients of the 154 | Derived Work unless those persons have stated explicitly that 155 | they do provide such support for the Derived Work. 156 | 157 | d. You distribute at least one of the following with the Derived Work: 158 | 159 | 1. A complete, unmodified copy of the Work; 160 | if your distribution of a modified component is made by 161 | offering access to copy the modified component from a 162 | designated place, then offering equivalent access to copy 163 | the Work from the same or some similar place meets this 164 | condition, even though third parties are not compelled to 165 | copy the Work along with the modified component; 166 | 167 | 2. Information that is sufficient to obtain a complete, 168 | unmodified copy of the Work. 169 | 170 | 7. 
If you are not the Current Maintainer of the Work, you may 171 | distribute a Compiled Work generated from a Derived Work, as long as 172 | the Derived Work is distributed to all recipients of the Compiled 173 | Work, and as long as the conditions of Clause 6, above, are met with 174 | regard to the Derived Work. 175 | 176 | 8. The conditions above are not intended to prohibit, and hence do not 177 | apply to, the modification, by any method, of any component so that it 178 | becomes identical to an updated version of that component of the Work as 179 | it is distributed by the Current Maintainer under Clause 4, above. 180 | 181 | 9. Distribution of the Work or any Derived Work in an alternative 182 | format, where the Work or that Derived Work (in whole or in part) is 183 | then produced by applying some process to that format, does not relax or 184 | nullify any sections of this license as they pertain to the results of 185 | applying that process. 186 | 187 | 10. a. A Derived Work may be distributed under a different license 188 | provided that license itself honors the conditions listed in 189 | Clause 6 above, in regard to the Work, though it does not have 190 | to honor the rest of the conditions in this license. 191 | 192 | b. If a Derived Work is distributed under a different license, that 193 | Derived Work must provide sufficient documentation as part of 194 | itself to allow each recipient of that Derived Work to honor the 195 | restrictions in Clause 6 above, concerning changes from the Work. 196 | 197 | 11. This license places no restrictions on works that are unrelated to 198 | the Work, nor does this license place any restrictions on aggregating 199 | such works with the Work by any means. 200 | 201 | 12. Nothing in this license is intended to, or may be used to, prevent 202 | complete compliance by all parties with all applicable laws. 203 | 204 | 205 | NO WARRANTY 206 | =========== 207 | 208 | There is no warranty for the Work. Except when otherwise stated in 209 | writing, the Copyright Holder provides the Work `as is', without 210 | warranty of any kind, either expressed or implied, including, but not 211 | limited to, the implied warranties of merchantability and fitness for a 212 | particular purpose. The entire risk as to the quality and performance 213 | of the Work is with you. Should the Work prove defective, you assume 214 | the cost of all necessary servicing, repair, or correction. 215 | 216 | In no event unless required by applicable law or agreed to in writing 217 | will The Copyright Holder, or any author named in the components of the 218 | Work, or any other party who may distribute and/or modify the Work as 219 | permitted above, be liable to you for damages, including any general, 220 | special, incidental or consequential damages arising out of any use of 221 | the Work or out of inability to use the Work (including, but not limited 222 | to, loss of data, data being rendered inaccurate, or losses sustained by 223 | anyone as a result of any failure of the Work to operate with any other 224 | programs), even if the Copyright Holder or said author or said other 225 | party has been advised of the possibility of such damages. 
226 | 227 | 228 | MAINTENANCE OF THE WORK 229 | ======================= 230 | 231 | The Work has the status `author-maintained' if the Copyright Holder 232 | explicitly and prominently states near the primary copyright notice in 233 | the Work that the Work can only be maintained by the Copyright Holder 234 | or simply that it is `author-maintained'. 235 | 236 | The Work has the status `maintained' if there is a Current Maintainer 237 | who has indicated in the Work that they are willing to receive error 238 | reports for the Work (for example, by supplying a valid e-mail 239 | address). It is not required for the Current Maintainer to acknowledge 240 | or act upon these error reports. 241 | 242 | The Work changes from status `maintained' to `unmaintained' if there 243 | is no Current Maintainer, or the person stated to be Current 244 | Maintainer of the work cannot be reached through the indicated means 245 | of communication for a period of six months, and there are no other 246 | significant signs of active maintenance. 247 | 248 | You can become the Current Maintainer of the Work by agreement with 249 | any existing Current Maintainer to take over this role. 250 | 251 | If the Work is unmaintained, you can become the Current Maintainer of 252 | the Work through the following steps: 253 | 254 | 1. Make a reasonable attempt to trace the Current Maintainer (and 255 | the Copyright Holder, if the two differ) through the means of 256 | an Internet or similar search. 257 | 258 | 2. If this search is successful, then enquire whether the Work 259 | is still maintained. 260 | 261 | a. If it is being maintained, then ask the Current Maintainer 262 | to update their communication data within one month. 263 | 264 | b. If the search is unsuccessful or no action to resume active 265 | maintenance is taken by the Current Maintainer, then announce 266 | within the pertinent community your intention to take over 267 | maintenance. (If the Work is a LaTeX work, this could be 268 | done, for example, by posting to comp.text.tex.) 269 | 270 | 3a. If the Current Maintainer is reachable and agrees to pass 271 | maintenance of the Work to you, then this takes effect 272 | immediately upon announcement. 273 | 274 | b. If the Current Maintainer is not reachable and the Copyright 275 | Holder agrees that maintenance of the Work be passed to you, 276 | then this takes effect immediately upon announcement. 277 | 278 | 4. If you make an `intention announcement' as described in 2b. above 279 | and after three months your intention is challenged neither by 280 | the Current Maintainer nor by the Copyright Holder nor by other 281 | people, then you may arrange for the Work to be changed so as 282 | to name you as the (new) Current Maintainer. 283 | 284 | 5. If the previously unreachable Current Maintainer becomes 285 | reachable once more within three months of a change completed 286 | under the terms of 3b) or 4), then that Current Maintainer must 287 | become or remain the Current Maintainer upon request provided 288 | they then update their communication data within one month. 289 | 290 | A change in the Current Maintainer does not, of itself, alter the fact 291 | that the Work is distributed under the LPPL license. 292 | 293 | If you become the Current Maintainer of the Work, you should 294 | immediately provide, within the Work, a prominent and unambiguous 295 | statement of your status as Current Maintainer. You should also 296 | announce your new status to the same pertinent community as 297 | in 2b) above. 
298 | 299 | 300 | WHETHER AND HOW TO DISTRIBUTE WORKS UNDER THIS LICENSE 301 | ====================================================== 302 | 303 | This section contains important instructions, examples, and 304 | recommendations for authors who are considering distributing their 305 | works under this license. These authors are addressed as `you' in 306 | this section. 307 | 308 | Choosing This License or Another License 309 | ---------------------------------------- 310 | 311 | If for any part of your work you want or need to use *distribution* 312 | conditions that differ significantly from those in this license, then 313 | do not refer to this license anywhere in your work but, instead, 314 | distribute your work under a different license. You may use the text 315 | of this license as a model for your own license, but your license 316 | should not refer to the LPPL or otherwise give the impression that 317 | your work is distributed under the LPPL. 318 | 319 | The document `modguide.tex' in the base LaTeX distribution explains 320 | the motivation behind the conditions of this license. It explains, 321 | for example, why distributing LaTeX under the GNU General Public 322 | License (GPL) was considered inappropriate. Even if your work is 323 | unrelated to LaTeX, the discussion in `modguide.tex' may still be 324 | relevant, and authors intending to distribute their works under any 325 | license are encouraged to read it. 326 | 327 | A Recommendation on Modification Without Distribution 328 | ----------------------------------------------------- 329 | 330 | It is wise never to modify a component of the Work, even for your own 331 | personal use, without also meeting the above conditions for 332 | distributing the modified component. While you might intend that such 333 | modifications will never be distributed, often this will happen by 334 | accident -- you may forget that you have modified that component; or 335 | it may not occur to you when allowing others to access the modified 336 | version that you are thus distributing it and violating the conditions 337 | of this license in ways that could have legal implications and, worse, 338 | cause problems for the community. It is therefore usually in your 339 | best interest to keep your copy of the Work identical with the public 340 | one. Many works provide ways to control the behavior of that work 341 | without altering any of its licensed components. 342 | 343 | How to Use This License 344 | ----------------------- 345 | 346 | To use this license, place in each of the components of your work both 347 | an explicit copyright notice including your name and the year the work 348 | was authored and/or last substantially modified. Include also a 349 | statement that the distribution and/or modification of that 350 | component is constrained by the conditions in this license. 351 | 352 | Here is an example of such a notice and statement: 353 | 354 | %% pig.dtx 355 | %% Copyright 2005 M. Y. Name 356 | % 357 | % This work may be distributed and/or modified under the 358 | % conditions of the LaTeX Project Public License, either version 1.3 359 | % of this license or (at your option) any later version. 360 | % The latest version of this license is in 361 | % http://www.latex-project.org/lppl.txt 362 | % and version 1.3 or later is part of all distributions of LaTeX 363 | % version 2005/12/01 or later. 364 | % 365 | % This work has the LPPL maintenance status `maintained'. 366 | % 367 | % The Current Maintainer of this work is M. Y. Name. 
368 | % 369 | % This work consists of the files pig.dtx and pig.ins 370 | % and the derived file pig.sty. 371 | 372 | Given such a notice and statement in a file, the conditions 373 | given in this license document would apply, with the `Work' referring 374 | to the three files `pig.dtx', `pig.ins', and `pig.sty' (the last being 375 | generated from `pig.dtx' using `pig.ins'), the `Base Interpreter' 376 | referring to any `LaTeX-Format', and both `Copyright Holder' and 377 | `Current Maintainer' referring to the person `M. Y. Name'. 378 | 379 | If you do not want the Maintenance section of LPPL to apply to your 380 | Work, change `maintained' above into `author-maintained'. 381 | However, we recommend that you use `maintained', as the Maintenance 382 | section was added in order to ensure that your Work remains useful to 383 | the community even when you can no longer maintain and support it 384 | yourself. 385 | 386 | Derived Works That Are Not Replacements 387 | --------------------------------------- 388 | 389 | Several clauses of the LPPL specify means to provide reliability and 390 | stability for the user community. They therefore concern themselves 391 | with the case that a Derived Work is intended to be used as a 392 | (compatible or incompatible) replacement of the original Work. If 393 | this is not the case (e.g., if a few lines of code are reused for a 394 | completely different task), then clauses 6b and 6d shall not apply. 395 | 396 | 397 | Important Recommendations 398 | ------------------------- 399 | 400 | Defining What Constitutes the Work 401 | 402 | The LPPL requires that distributions of the Work contain all the 403 | files of the Work. It is therefore important that you provide a 404 | way for the licensee to determine which files constitute the Work. 405 | This could, for example, be achieved by explicitly listing all the 406 | files of the Work near the copyright notice of each file or by 407 | using a line such as: 408 | 409 | % This work consists of all files listed in manifest.txt. 410 | 411 | in that place. In the absence of an unequivocal list it might be 412 | impossible for the licensee to determine what is considered by you 413 | to comprise the Work and, in such a case, the licensee would be 414 | entitled to make reasonable conjectures as to which files comprise 415 | the Work. 416 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![Version 1.6.2](https://img.shields.io/badge/version-1.6.2-blue) 2 | 3 | ![Jason, the JSON parsing horse](https://github.com/jasperhabicht/jsonparse/assets/6378801/ddfddc70-bf5f-4121-ba45-4b9128875d85) 4 | 5 | # The `jsonparse` package 6 | 7 | The `jsonparse` package provides a handy way to read in JSON data from files 8 | or strings in LaTeX documents, parse the data and store it in a user-defined 9 | token variable. The package allows accessing the stored data via a 10 | JavaScript-flavored syntax. 11 | 12 | Using the commands `\JSONParseFromFile` or `\JSONParse`, JSON data can be 13 | stored in a token variable. Using the command `\JSONParseValue`, entries can 14 | be extracted from the stored data. 
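As a minimal quick-start sketch (the file name `data.json`, the token variable names and the exact inline form of `\JSONParse` are illustrative assumptions here; the detailed example below shows the commands as documented):

```tex
\documentclass{article}
\usepackage{jsonparse}

\begin{document}

% Parse JSON data from a file into a user-defined token variable.
% (Assumes a file data.json exists in the working directory.)
\JSONParseFromFile{\myJSONdata}{data.json}

% Parse a JSON string given inline. The two-argument form used here is
% an assumption, analogous to \JSONParseFromFile above.
\JSONParse{\myInlineJSONdata}{{"givenName": "Joe", "points": 1.7}}

% Extract single values via the JavaScript-flavored path syntax.
\JSONParseValue{\myInlineJSONdata}{givenName} has
\JSONParseValue{\myInlineJSONdata}{points} points.

\end{document}
```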
15 |
16 | ---
17 |
18 | Let's assume a file with the name `example.json` is stored in the working
19 | directory with the following contents:
20 |
21 | ```json
22 | {
23 | "givenName": "Joe",
24 | "familyName": "Public",
25 | "points": 1.7,
26 | "hasCertification": true,
27 | "address": {
28 | "streetAddress": "10 Main St",
29 | "locality": "Cityville",
30 | "postalCode": "12345"
31 | },
32 | "contactPoint": [
33 | {
34 | "contactType": "office",
35 | "telephone": "+1 (555) 555-1234"
36 | },
37 | {
38 | "contactType": "mobile",
39 | "telephone": "+1 (555) 555-6789"
40 | }
41 | ],
42 | "children": [
43 | "Tom",
44 | "Doug",
45 | "Harry"
46 | ],
47 | "memberOf": null
48 | }
49 | ```
50 |
51 | We can store it in the token variable `\myJSONdata` using the command
52 | `\JSONParseFromFile{\myJSONdata}{example.json}`. Calling the command
53 | `\JSONParseValue{\myJSONdata}{contactPoint[0].telephone}` would then result in
54 | the output `+1 (555) 555-1234` (indices are zero-based by default). The
55 | package allows for parsing JSON data inline as well.
56 |
57 | The package also offers several commands for looping through arrays and
58 | accessing individual elements, for example to typeset them in tabular form or
59 | to plot their values using packages such as PGFPlots.
60 |
61 | Adding commas between the items of the `children` array can be done with:
62 | ```tex
63 | \JSONParseArrayUse{\myJSONdata}{children}{, }
64 | ```
65 |
66 | A table listing the contact points of the above JSON file can be created with:
67 | ```tex
68 | \begin{tabular}{cc}
69 | \textbf{Contact Type} & \textbf{Telephone} \\
70 | \JSONParseArrayMapInline{\myJSONdata}{contactPoint}{%
71 | \JSONParseValue{\myJSONdata}{contactPoint[#1].contactType} &
72 | \JSONParseValue{\myJSONdata}{contactPoint[#1].telephone} \\
73 | }
74 | \end{tabular}
75 | ```
76 |
77 | The package also provides a few helper commands, for example to validate a JSON
78 | number or to convert Unicode surrogate pairs to the corresponding Unicode code point.
79 |
80 | ---
81 |
82 | **Frequently asked questions**
83 |
84 | *Why does a comparison of two equal strings return false?*
85 |
86 | 1) Note that you probably need to use `\JSONParseExpandableValue` if you want
87 | to compare a value from parsed JSON data with the contents of another token
88 | list.
89 |
90 | 2) Also note that `\JSONParseExpandableValue` returns a string with specific
91 | category codes, namely category code 12 ("other") for all characters except
92 | spaces. Therefore, you need to test against another string. You can use
93 | `\detokenize` for the conversion, for example comparing against `\detokenize{Joe}` rather than against the plain letter tokens `Joe`.
94 |
95 | *Why does parsing take so long?*
96 |
97 | 1) Make sure that you use the most recent version of the package. Parsing speed
98 | is constantly monitored, and updates typically increase parsing speed.
99 |
100 | 2) If you don't need to access whole objects or arrays from the JSON data, you
101 | can set the key `skip structures`, which should increase parsing speed.
102 |
103 | 3) If the JSON data contains a considerable number of numeric values, parsing speed can
104 | be increased slightly by setting `validate numbers=false`.
105 |
106 | 4) If the JSON data does not change often, you can externalise the parsed result
107 | by setting the key `externalize`.
108 |
109 | ---
110 |
111 | This package, including all files, is subject to the LPPL 1.3c license.
112 | Copyright 2024–2025 Jasper Habicht (mail(at)jasperhabicht.de).
113 |
114 | Jason, the JSON parsing horse: Copyright 2024–2025 Hannah Klöber.
115 | -------------------------------------------------------------------------------- /benchmark/.latexmkrc: -------------------------------------------------------------------------------- 1 | $hash_calc_ignore_pattern{'md'} = '^'; -------------------------------------------------------------------------------- /benchmark/4KB.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name": "Adeel Solangi", 4 | "language": "Sindhi", 5 | "id": "V59OF92YF627HFY0", 6 | "bio": "Donec lobortis eleifend condimentum. Cras dictum dolor lacinia lectus vehicula rutrum. Maecenas quis nisi nunc. Nam tristique feugiat est vitae mollis. Maecenas quis nisi nunc.", 7 | "version": 6.1 8 | }, 9 | { 10 | "name": "Afzal Ghaffar", 11 | "language": "Sindhi", 12 | "id": "ENTOCR13RSCLZ6KU", 13 | "bio": "Aliquam sollicitudin ante ligula, eget malesuada nibh efficitur et. Pellentesque massa sem, scelerisque sit amet odio id, cursus tempor urna. Etiam congue dignissim volutpat. Vestibulum pharetra libero et velit gravida euismod.", 14 | "version": 1.88 15 | }, 16 | { 17 | "name": "Aamir Solangi", 18 | "language": "Sindhi", 19 | "id": "IAKPO3R4761JDRVG", 20 | "bio": "Vestibulum pharetra libero et velit gravida euismod. Quisque mauris ligula, efficitur porttitor sodales ac, lacinia non ex. Fusce eu ultrices elit, vel posuere neque.", 21 | "version": 7.27 22 | }, 23 | { 24 | "name": "Abla Dilmurat", 25 | "language": "Uyghur", 26 | "id": "5ZVOEPMJUI4MB4EN", 27 | "bio": "Donec lobortis eleifend condimentum. Morbi ac tellus erat.", 28 | "version": 2.53 29 | }, 30 | { 31 | "name": "Adil Eli", 32 | "language": "Uyghur", 33 | "id": "6VTI8X6LL0MMPJCC", 34 | "bio": "Vivamus id faucibus velit, id posuere leo. Morbi vitae nisi lacinia, laoreet lorem nec, egestas orci. Suspendisse potenti.", 35 | "version": 6.49 36 | }, 37 | { 38 | "name": "Adile Qadir", 39 | "language": "Uyghur", 40 | "id": "F2KEU5L7EHYSYFTT", 41 | "bio": "Duis commodo orci ut dolor iaculis facilisis. Morbi ultricies consequat ligula posuere eleifend. Aenean finibus in tortor vel aliquet. Fusce eu ultrices elit, vel posuere neque.", 42 | "version": 1.9 43 | }, 44 | { 45 | "name": "Abdukerim Ibrahim", 46 | "language": "Uyghur", 47 | "id": "LO6DVTZLRK68528I", 48 | "bio": "Vivamus id faucibus velit, id posuere leo. Nunc aliquet sodales nunc a pulvinar. Nunc aliquet sodales nunc a pulvinar. Ut viverra quis eros eu tincidunt.", 49 | "version": 5.9 50 | }, 51 | { 52 | "name": "Adil Abro", 53 | "language": "Sindhi", 54 | "id": "LJRIULRNJFCNZJAJ", 55 | "bio": "Etiam malesuada blandit erat, nec ultricies leo maximus sed. Fusce congue aliquam elit ut luctus. Etiam malesuada blandit erat, nec ultricies leo maximus sed. Cras dictum dolor lacinia lectus vehicula rutrum. Integer vehicula, arcu sit amet egestas efficitur, orci justo interdum massa, eget ullamcorper risus ligula tristique libero.", 56 | "version": 9.32 57 | }, 58 | { 59 | "name": "Afonso Vilarchán", 60 | "language": "Galician", 61 | "id": "JMCL0CXNXHPL1GBC", 62 | "bio": "Fusce eu ultrices elit, vel posuere neque. Morbi ac tellus erat. Nunc tincidunt laoreet laoreet.", 63 | "version": 5.21 64 | }, 65 | { 66 | "name": "Mark Schembri", 67 | "language": "Maltese", 68 | "id": "KU4T500C830697CW", 69 | "bio": "Nam laoreet, nunc non suscipit interdum, justo turpis vestibulum massa, non vulputate ex urna at purus. Morbi ultricies consequat ligula posuere eleifend. Vivamus id faucibus velit, id posuere leo. Sed laoreet posuere sapien, ut feugiat nibh gravida at. 
Ut maximus, libero nec facilisis fringilla, ex sem sollicitudin leo, non congue tortor ligula in eros.", 70 | "version": 3.17 71 | }, 72 | { 73 | "name": "Antía Sixirei", 74 | "language": "Galician", 75 | "id": "XOF91ZR7MHV1TXRS", 76 | "bio": "Pellentesque massa sem, scelerisque sit amet odio id, cursus tempor urna. Phasellus massa ligula, hendrerit eget efficitur eget, tincidunt in ligula. Morbi finibus dui sed est fringilla ornare. Duis pellentesque ultrices convallis. Morbi ultricies consequat ligula posuere eleifend.", 77 | "version": 6.44 78 | }, 79 | { 80 | "name": "Aygul Mutellip", 81 | "language": "Uyghur", 82 | "id": "FTSNV411G5MKLPDT", 83 | "bio": "Duis commodo orci ut dolor iaculis facilisis. Nam semper gravida nunc, sit amet elementum ipsum. Donec pellentesque ultrices mi, non consectetur eros luctus non. Pellentesque massa sem, scelerisque sit amet odio id, cursus tempor urna.", 84 | "version": 9.1 85 | }, 86 | { 87 | "name": "Awais Shaikh", 88 | "language": "Sindhi", 89 | "id": "OJMWMEEQWMLDU29P", 90 | "bio": "Nunc aliquet sodales nunc a pulvinar. Ut dictum, ligula eget sagittis maximus, tellus mi varius ex, a accumsan justo tellus vitae leo. Donec pellentesque ultrices mi, non consectetur eros luctus non. Nulla finibus massa at viverra facilisis. Nunc tincidunt laoreet laoreet.", 91 | "version": 1.59 92 | }, 93 | { 94 | "name": "Ambreen Ahmed", 95 | "language": "Sindhi", 96 | "id": "5G646V7E6TJW8X2M", 97 | "bio": "Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia curae; Etiam consequat enim lorem, at tincidunt velit ultricies et. Ut maximus, libero nec facilisis fringilla, ex sem sollicitudin leo, non congue tortor ligula in eros.", 98 | "version": 2.35 99 | } 100 | ] -------------------------------------------------------------------------------- /benchmark/README.md: -------------------------------------------------------------------------------- 1 | 2 | | timestamp | version | parsing time | parsing time (skip structures) | 3 | | ----- | ----- | ----- | ----- | 4 | |D:20250409132739+02'00'|v1.3.1|22.07600402832031| 5 | |D:20250409132811+02'00'|v1.4.0|1.44000244140625| 6 | |D:20250409142700+02'00'|v1.4.0|0.6510009765625| 7 | |D:20250410184157Z|v1.4.1|0.91845703125| 8 | |D:20250413204749Z|v1.5.0|0.9999084472656247| 9 | |D:20250415125642Z|v1.5.0a|1.052703857421875| 10 | |D:20250415130739Z|v1.5.0b|1.026265462239583| 11 | |D:20250415172251Z|v1.5.0c|1.032521565755208| 12 | |D:20250415175900Z|v1.5.0d|0.9537404378255207| 13 | |D:20250415204736Z|v1.5.0e|0.956476847330729| 14 | |D:20250418081056Z|v1.5.0f|0.9522298177083333| 15 | |D:20250421162939Z|v1.5.0|0.96490478515625| 16 | |D:20250422163352Z|v1.5.1|1.000879923502604| 17 | |D:20250422165226Z|v1.5.1a|1.016632080078125| 18 | |D:20250422170117Z|v1.5.1b|0.9851633707682293| 19 | |D:20250505205620Z|v1.5.1|1.011739095052083| 20 | |D:20250506064657Z|v1.5.2|0.998672485351562| 21 | |D:20250508165748Z|v1.6.0|1.006790161132813| 22 | |D:20250525220516Z|v1.6.1|0.9986012776692707| 23 | |D:20250526174334Z|v1.6.1a|0.996190388997396| 24 | |D:20250528181628Z|v1.6.1b|0.9743347167968747|0.4514821370442707| 25 | |D:20250531131502Z|v1.6.2|0.9620157877604167|0.4517873128255207| 26 | -------------------------------------------------------------------------------- /benchmark/benchmark.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jasperhabicht/jsonparse/c8a9c3d264c86e685ed314e9e072b4ff14f5e3fa/benchmark/benchmark.pdf 
-------------------------------------------------------------------------------- /benchmark/benchmark.tex: -------------------------------------------------------------------------------- 1 | \documentclass{article} 2 | \usepackage{geometry, longtable, booktabs} 3 | \geometry{margin=15mm} 4 | 5 | \ExplSyntaxOn 6 | 7 | \str_new:N \g_jsonparse_benchmark_version_str 8 | 9 | \cs_set_eq:NN \ProvidesExplPackageOrig \ProvidesExplPackage 10 | \RenewDocumentCommand{\ProvidesExplPackage}{ m m m m }{ 11 | \ProvidesExplPackageOrig{#1}{#2}{#3}{#4} 12 | \str_set:Nn \g_jsonparse_benchmark_version_str { v #3 } 13 | } 14 | \usepackage[debug]{../jsonparse} 15 | \cs_set_eq:NN \ProvidesExplPackage \ProvidesExplPackageOrig 16 | 17 | \seq_new:N \g_jsonparse_benchmark_file_data_seq 18 | \str_new:N \g_jsonparse_benchmark_last_version_str 19 | \ior_new:N \g_jsonparse_benchmark_ior 20 | \iow_new:N \g_jsonparse_benchmark_iow 21 | \fp_new:N \g_jsonparse_benchmark_time_fp 22 | \fp_new:N \g_jsonparse_benchmark_time_average_fp 23 | \fp_new:N \g_jsonparse_benchmark_nostruct_time_fp 24 | \fp_new:N \g_jsonparse_benchmark_nostruct_time_average_fp 25 | \tl_new:N \g_jsonparse_benchmark_temp_tl 26 | 27 | \cs_generate_variant:Nn \tl_greplace_all:Nnn { Nno } 28 | \cs_generate_variant:Nn \seq_use:Nn { No } 29 | 30 | \cs_new_protected:Npn \__jsonparse_benchmark_get_version:w | #1 | #2 | #3 \q_stop { 31 | \str_gset:Nn \g_jsonparse_benchmark_last_version_str {#2} 32 | } 33 | 34 | \cs_new:Npn \__jsonparse_benchmark_get_tabular_data:w | #1 | #2 | #3 | #4 \q_stop { 35 | \tl_if_head_eq_charcode:nNT {#1} D { 36 | \tl_gset:Nn \g_jsonparse_benchmark_temp_tl {#4} 37 | \tl_gremove_once:Nn \g_jsonparse_benchmark_temp_tl { | } 38 | #1 & #2 & #3 & \tl_use:N \g_jsonparse_benchmark_temp_tl \\ 39 | } 40 | } 41 | 42 | \cs_new:Npn \__jsonparse_benchmark_get_tabular_data:n #1 { 43 | \tl_if_blank:nF {#1} { 44 | \__jsonparse_benchmark_get_tabular_data:w #1 \q_stop 45 | } 46 | } 47 | 48 | \NewDocumentCommand { \printtabulardata } { } { 49 | \seq_map_function:NN 50 | \g_jsonparse_benchmark_file_data_seq 51 | \__jsonparse_benchmark_get_tabular_data:n 52 | } 53 | 54 | \cs_new:Npn \__jsonparse_benchmark_run: { 55 | \int_step_inline:nn { 3 } { 56 | \benchmark_tic: 57 | 58 | \JSONParseFromFile{\theJSONdata}{4KB.json} 59 | 60 | \benchmark_toc: 61 | 62 | \tl_set_eq:NN \theJSONdata \undefined 63 | \fp_gadd:Nn \g_jsonparse_benchmark_time_fp { \g_benchmark_time_fp } 64 | } 65 | 66 | \fp_gset:Nn \g_jsonparse_benchmark_time_average_fp 67 | { \g_jsonparse_benchmark_time_fp / 3 } 68 | } 69 | 70 | \cs_new:Npn \__jsonparse_benchmark_nostruct_run: { 71 | \int_step_inline:nn { 3 } { 72 | \benchmark_tic: 73 | 74 | \JSONParseFromFile[skip ~ structures]{\theJSONdata}{4KB.json} 75 | 76 | \benchmark_toc: 77 | 78 | \tl_set_eq:NN \theJSONdata \undefined 79 | \fp_gadd:Nn \g_jsonparse_benchmark_nostruct_time_fp { \g_benchmark_time_fp } 80 | } 81 | 82 | \fp_gset:Nn \g_jsonparse_benchmark_nostruct_time_average_fp 83 | { \g_jsonparse_benchmark_nostruct_time_fp / 3 } 84 | } 85 | 86 | \ior_open:NnTF \g_jsonparse_benchmark_ior { README.md } { 87 | \ior_str_map_inline:Nn \g_jsonparse_benchmark_ior { 88 | \seq_gput_right:Nn \g_jsonparse_benchmark_file_data_seq {#1} 89 | \tl_if_blank:nF {#1} { 90 | \__jsonparse_benchmark_get_version:w #1 \q_stop 91 | } 92 | } 93 | \ior_close:N \g_jsonparse_benchmark_ior 94 | } { 95 | \seq_gput_right:Nn \g_jsonparse_benchmark_file_data_seq 96 | { } 97 | \seq_gput_right:Ne \g_jsonparse_benchmark_file_data_seq 98 | { | ~ timestamp ~ | ~ version ~ | ~ parsing 
~ time ~ | ~ parsing ~ time ~ (skip ~ structures) ~ | }
99 | \seq_gput_right:Nn \g_jsonparse_benchmark_file_data_seq
100 | { | ~ ----- ~ | ~ ----- ~ | ~ ----- ~ | ~ ----- ~ | }
101 | }
102 |
103 | \str_if_eq:VVTF \g_jsonparse_benchmark_version_str \g_jsonparse_benchmark_last_version_str {
104 | \tex_end:D
105 | } {
106 | \__jsonparse_benchmark_run:
107 | \__jsonparse_benchmark_nostruct_run:
108 | \iow_open:Nn \g_jsonparse_benchmark_iow { README.md }
109 | \iow_now:Ne \g_jsonparse_benchmark_iow {
110 | \seq_use:No \g_jsonparse_benchmark_file_data_seq { \iow_newline: }
111 | \iow_newline:
112 | | \c_sys_timestamp_str | \g_jsonparse_benchmark_version_str
113 | | \fp_use:N \g_jsonparse_benchmark_time_average_fp
114 | | \fp_use:N \g_jsonparse_benchmark_nostruct_time_average_fp |
115 | }
116 | \iow_close:N \g_jsonparse_benchmark_iow
117 | \seq_gput_right:Ne \g_jsonparse_benchmark_file_data_seq {
118 | | \c_sys_timestamp_str | \g_jsonparse_benchmark_version_str
119 | | \fp_use:N \g_jsonparse_benchmark_time_average_fp
120 | | \fp_use:N \g_jsonparse_benchmark_nostruct_time_average_fp |
121 | }
122 | }
123 |
124 | \ExplSyntaxOff
125 |
126 | \begin{document}
127 |
128 | \begin{longtable}{ l l l l }
129 | \toprule
130 | \textbf{timestamp} & \textbf{version} & \textbf{parsing time} & \textbf{parsing time (skip structures)} \\
131 | \midrule
132 | \printtabulardata
133 | \bottomrule
134 | \end{longtable}
135 |
136 | \end{document}
137 |
--------------------------------------------------------------------------------
/build-luatex.lua:
--------------------------------------------------------------------------------
1 | module = "jsonparse"
2 |
3 | sourcefiles = {"jsonparse.sty"}
4 |
5 | checkengines = {"luatex"}
6 | excludetests = {}
7 | includetests = {"test002", "test007"}
--------------------------------------------------------------------------------
/build.lua:
--------------------------------------------------------------------------------
1 | module = "jsonparse"
2 |
3 | sourcefiles = {"jsonparse.sty"}
4 |
5 | checkconfigs = {"build", "build-luatex"}
6 |
7 | checkengines = {"pdftex"}
8 | excludetests = {"test002"}
--------------------------------------------------------------------------------
/jason/jason.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jasperhabicht/jsonparse/c8a9c3d264c86e685ed314e9e072b4ff14f5e3fa/jason/jason.pdf
--------------------------------------------------------------------------------
/jason/jason.svg:
--------------------------------------------------------------------------------
1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 |
--------------------------------------------------------------------------------
/jason/jason.tex:
--------------------------------------------------------------------------------
1 | % File: jason/jason.tex
2 | % Copyright 2024-2025 Jasper Habicht (mail(at)jasperhabicht.de).
3 | %
4 | % This work may be distributed and/or modified under the
5 | % conditions of the LaTeX Project Public License version 1.3c,
6 | % available at http://www.latex-project.org/lppl/.
7 | %
8 | % This file is part of the `jsonparse' package (The Work in LPPL)
9 | % and all files in that bundle must be distributed together.
10 | %
11 | % This work has the LPPL maintenance status `maintained'.
12 | % 13 | \documentclass{standalone} 14 | \usepackage{l3draw} 15 | 16 | \begin{document} 17 | \ExplSyntaxOn 18 | 19 | \color_set:nnn { color_i } { rgb } { 0.356863 , 0.639216 , 0.870588 } 20 | \color_set:nnn { color_ii } { rgb } { 0.729412 , 0.552941 , 0.364706 } 21 | \color_set:nnn { color_iii } { rgb } { 0.964706 , 0.52549 , 0.137255 } 22 | \color_set:nnn { color_iv } { rgb } { 0.592157 , 0.427451 , 0.27451 } 23 | \color_set:nnn { color_v } { rgb } { 0.317647 , 0.172549 , 0.0823529 } 24 | \color_set:nnn { color_vi } { rgb } { 0.203922 , 0.101961 , 0.0745098 } 25 | \color_set:nnn { color_vii } { rgb } { 1 , 1 , 1 } 26 | 27 | \draw_begin: 28 | 29 | % sky 30 | \cs_new_protected:Npn \__jsonparse_jason_path_i: { 31 | \draw_path_moveto:n { 433.418 , 221.836 } 32 | \draw_path_curveto:nnn { 433.418 , 335.695 } { 341.117 , 428 } { 227.254 , 428 } 33 | \draw_path_curveto:nnn { 113.391 , 428 } { 21.086 , 335.695 } { 21.086 , 221.836 } 34 | \draw_path_curveto:nnn { 21.086 , 107.973 } { 113.391 , 15.668 } { 227.254 , 15.668 } 35 | \draw_path_curveto:nnn { 341.117 , 15.668 } { 433.418 , 107.973 } { 433.418 , 221.836 } 36 | \draw_path_close: 37 | } 38 | 39 | % head 40 | \cs_new_protected:Npn \__jsonparse_jason_path_ii: { 41 | \draw_path_moveto:n { 406.586 , 385.5 } 42 | \draw_path_curveto:nnn { 406.586 , 385.5 } { 396.254 , 373.168 } { 370.586 , 298.836 } 43 | \draw_path_curveto:nnn { 344.922 , 224.5 } { 332.254 , 185.168 } { 321.918 , 158.836 } 44 | \draw_path_curveto:nnn { 316.367 , 144.684 } { 309.375 , 136.508 } { 304.09 , 131.965 } 45 | \draw_path_curveto:nnn { 311.859 , 126.008 } { 319.867 , 118.781 } { 323.918 , 111.836 } 46 | \draw_path_curveto:nnn { 330.82 , 100 } { 335.586 , 87.336 } { 339.586 , 73.336 } 47 | \draw_path_curveto:nnn { 341.984 , 64.941 } { 349.254 , 59.168 } { 349.254 , 59.168 } 48 | \draw_path_curveto:nnn { 349.254 , 59.168 } { 294.254 , 46.168 } { 270.586 , 88.668 } 49 | \draw_path_curveto:nnn { 269.051 , 91.422 } { 267.551 , 94.051 } { 266.047 , 96.559 } 50 | \draw_path_lineto:n { 140.461 , 121.891 } 51 | \draw_path_curveto:nnn { 129.957 , 133 } { 118.902 , 147.27 } { 114.254 , 161.836 } 52 | \draw_path_curveto:nnn { 110.125 , 174.766 } { 102.918 , 177.836 } { 87.918 , 189.836 } 53 | \draw_path_curveto:nnn { 72.918 , 201.836 } { 40.52 , 233.609 } { 40.52 , 233.609 } 54 | \draw_path_lineto:n { 52.629 , 282.461 } 55 | \draw_path_lineto:n { 108.742 , 351.707 } 56 | \draw_path_curveto:nnn { 122.176 , 342.695 } { 136.945 , 331.641 } { 155.254 , 324.586 } 57 | \draw_path_curveto:nnn { 169.125 , 319.234 } { 188.387 , 316.105 } { 206.367 , 312.504 } 58 | \draw_path_curveto:nnn { 208.922 , 318.824 } { 215.195 , 335.516 } { 217.754 , 352.836 } 59 | \draw_path_curveto:nnn { 221.004 , 374.836 } { 222.254 , 444.336 } { 222.254 , 444.336 } 60 | \draw_path_close: 61 | } 62 | 63 | % mane 64 | \cs_new_protected:Npn \__jsonparse_jason_path_iii: { 65 | \draw_path_moveto:n { 453.754 , 303.168 } 66 | \draw_path_curveto:nnn { 453.754 , 303.168 } { 444.086 , 288.336 } { 432.086 , 282.168 } 67 | \draw_path_curveto:nnn { 420.676 , 276.305 } { 407.586 , 275 } { 407.586 , 275 } 68 | \draw_path_curveto:nnn { 407.586 , 275 } { 410.438 , 263.355 } { 418.754 , 252.168 } 69 | \draw_path_curveto:nnn { 427.918 , 239.836 } { 444.254 , 229.5 } { 444.254 , 229.5 } 70 | \draw_path_lineto:n { 392.754 , 212 } 71 | \draw_path_curveto:nnn { 392.754 , 212 } { 409.918 , 205.5 } { 417.918 , 198.168 } 72 | \draw_path_curveto:nnn { 427.016 , 189.832 } { 431.254 , 168.168 } { 431.254 , 168.168 } 73 | 
\draw_path_curveto:nnn { 431.254 , 168.168 } { 383.633 , 170.973 } { 376.918 , 153 } 74 | \draw_path_curveto:nnn { 371.754 , 139.168 } { 390.254 , 109.836 } { 390.254 , 109.836 } 75 | \draw_path_curveto:nnn { 390.254 , 109.836 } { 362.918 , 103.5 } { 349.086 , 103.668 } 76 | \draw_path_curveto:nnn { 341.516 , 103.758 } { 333.258 , 104.543 } { 326.984 , 106.223 } 77 | \draw_path_curveto:nnn { 326.004 , 108.113 } { 324.996 , 109.988 } { 323.918 , 111.836 } 78 | \draw_path_curveto:nnn { 319.867 , 118.781 } { 311.859 , 126.008 } { 304.09 , 131.965 } 79 | \draw_path_curveto:nnn { 309.375 , 136.508 } { 316.367 , 144.684 } { 321.918 , 158.836 } 80 | \draw_path_curveto:nnn { 332.254 , 185.168 } { 344.922 , 224.5 } { 370.586 , 298.836 } 81 | \draw_path_curveto:nnn { 396.254 , 373.168 } { 406.586 , 385.5 } { 406.586 , 385.5 } 82 | \draw_path_lineto:n { 439.086 , 349 } 83 | \draw_path_curveto:nnn { 423.918 , 346 } { 416.754 , 328.5 } { 416.754 , 328.5 } 84 | \draw_path_close: 85 | } 86 | 87 | % bangs 88 | \cs_new_protected:Npn \__jsonparse_jason_path_iv: { 89 | \draw_path_moveto:n { 255.586 , 71 } 90 | \draw_path_curveto:nnn { 244.922 , 61.336 } { 233.754 , 60.168 } { 224.918 , 59.836 } 91 | \draw_path_curveto:nnn { 209.812 , 59.266 } { 199.918 , 33.168 } { 199.918 , 33.168 } 92 | \draw_path_curveto:nnn { 199.918 , 33.168 } { 199.918 , 48.336 } { 196.918 , 51.5 } 93 | \draw_path_curveto:nnn { 190.754 , 42.168 } { 177.586 , 39.668 } { 167.254 , 40.5 } 94 | \draw_path_curveto:nnn { 152.656 , 41.68 } { 143.254 , 52.168 } { 143.254 , 52.168 } 95 | \draw_path_curveto:nnn { 143.254 , 52.168 } { 157.754 , 49.336 } { 167.418 , 59.336 } 96 | \draw_path_curveto:nnn { 177.086 , 69.336 } { 177.586 , 74.5 } { 177.586 , 74.5 } 97 | \draw_path_curveto:nnn { 170.418 , 65.5 } { 151.754 , 68.168 } { 135.418 , 79.836 } 98 | \draw_path_curveto:nnn { 119.086 , 91.5 } { 123.918 , 122.836 } { 123.918 , 122.836 } 99 | \draw_path_curveto:nnn { 123.918 , 122.836 } { 127.586 , 110 } { 144.586 , 102.168 } 100 | \draw_path_curveto:nnn { 151.156 , 99.141 } { 168.586 , 98.5 } { 168.586 , 98.5 } 101 | \draw_path_curveto:nnn { 168.586 , 98.5 } { 143.754 , 99.336 } { 138.918 , 128.336 } 102 | \draw_path_curveto:nnn { 136.012 , 145.789 } { 142.586 , 160.668 } { 142.586 , 160.668 } 103 | \draw_path_curveto:nnn { 142.586 , 160.668 } { 144.086 , 145.168 } { 158.918 , 136 } 104 | \draw_path_curveto:nnn { 169.23 , 129.629 } { 180.754 , 126.668 } { 180.754 , 126.668 } 105 | \draw_path_curveto:nnn { 175.586 , 130.5 } { 177.586 , 141.5 } { 177.586 , 141.5 } 106 | \draw_path_curveto:nnn { 177.586 , 141.5 } { 186.586 , 126.336 } { 210.418 , 127.836 } 107 | \draw_path_curveto:nnn { 240.898 , 129.754 } { 253.441 , 117.625 } { 266.047 , 96.559 } 108 | \draw_path_curveto:nnn { 265.492 , 90.465 } { 263.41 , 78.09 } { 255.586 , 71 } 109 | \draw_path_close: 110 | } 111 | 112 | % muzzle 113 | \cs_new_protected:Npn \__jsonparse_jason_path_v: { 114 | \draw_path_moveto:n { 108.738 , 351.707 } 115 | \draw_path_curveto:nnn { 117.086 , 341.312 } { 128.648 , 326.078 } { 131.504 , 318.086 } 116 | \draw_path_curveto:nnn { 139.004 , 297.086 } { 130.254 , 287.336 } { 125.504 , 280.086 } 117 | \draw_path_curveto:nnn { 120.754 , 272.836 } { 128.754 , 268.586 } { 115.504 , 255.836 } 118 | \draw_path_curveto:nnn { 102.254 , 243.086 } { 61.754 , 264.086 } { 56.254 , 261.586 } 119 | \draw_path_curveto:nnn { 50.754 , 259.086 } { 51.254 , 255.836 } { 49.754 , 244.836 } 120 | \draw_path_curveto:nnn { 48.254 , 233.836 } { 42.57 , 232.652 } { 33.254 , 233.836 } 121 | 
\draw_path_curveto:nnn { 20.605 , 235.438 } { 14.754 , 258.336 } { 11.504 , 274.086 } 122 | \draw_path_curveto:nnn { 8.254 , 289.836 } { -0.047 , 289.59 } { 1.754 , 303.086 } 123 | \draw_path_curveto:nnn { 2.605 , 309.484 } { 6.199 , 316.066 } { 17.652 , 318.48 } 124 | \draw_path_curveto:nnn { 16.93 , 324.238 } { 16.715 , 331.266 } { 17.934 , 338.922 } 125 | \draw_path_curveto:nnn { 21.098 , 358.809 } { 52.738 , 368.754 } { 76.242 , 366.492 } 126 | \draw_path_curveto:nnn { 87 , 365.461 } { 97.395 , 359.312 } { 108.738 , 351.707 } 127 | \draw_path_close: 128 | } 129 | 130 | % head outline 131 | \cs_new_protected:Npn \__jsonparse_jason_path_vi: { 132 | \draw_path_moveto:n { 108.738 , 351.707 } 133 | \draw_path_curveto:nnn { 122.176 , 342.695 } { 136.945 , 331.641 } { 155.254 , 324.586 } 134 | \draw_path_curveto:nnn { 179.297 , 315.316 } { 219.527 , 312.707 } { 241.223 , 302.762 } 135 | \draw_path_curveto:nnn { 262.918 , 292.82 } { 265.629 , 259.371 } { 265.629 , 259.371 } 136 | 137 | \draw_path_moveto:n { 222.254 , 444.336 } 138 | \draw_path_curveto:nnn { 222.254 , 444.336 } { 221.004 , 374.836 } { 217.754 , 352.836 } 139 | \draw_path_curveto:nnn { 215.195 , 335.516 } { 208.922 , 318.824 } { 206.363 , 312.504 } 140 | \draw_path_curveto:nnn { 206.18 , 312.539 } { 205.996 , 312.574 } { 205.812 , 312.613 } 141 | 142 | \draw_path_moveto:n { 284.254 , 145.336 } 143 | \draw_path_curveto:nnn { 284.254 , 145.336 } { 314.586 , 127.836 } { 323.918 , 111.836 } 144 | \draw_path_curveto:nnn { 330.82 , 100 } { 335.586 , 87.336 } { 339.586 , 73.336 } 145 | \draw_path_curveto:nnn { 341.984 , 64.941 } { 349.254 , 59.168 } { 349.254 , 59.168 } 146 | \draw_path_curveto:nnn { 349.254 , 59.168 } { 294.254 , 46.168 } { 270.586 , 88.668 } 147 | \draw_path_curveto:nnn { 269.051 , 91.422 } { 267.551 , 94.051 } { 266.047 , 96.559 } 148 | 149 | \draw_path_moveto:n { 140.461 , 121.891 } 150 | \draw_path_curveto:nnn { 129.957 , 133 } { 118.898 , 147.27 } { 114.254 , 161.836 } 151 | \draw_path_curveto:nnn { 110.125 , 174.766 } { 102.918 , 177.836 } { 87.918 , 189.836 } 152 | \draw_path_curveto:nnn { 72.918 , 201.836 } { 40.52 , 233.609 } { 40.52 , 233.609 } 153 | } 154 | 155 | % ear 156 | \cs_new_protected:Npn \__jsonparse_jason_path_vii: { 157 | \draw_path_moveto:n { 177.586 , 74.5 } 158 | \draw_path_curveto:nnn { 177.586 , 74.5 } { 177.086 , 69.336 } { 167.418 , 59.336 } 159 | \draw_path_curveto:nnn { 165.707 , 57.562 } { 163.84 , 56.199 } { 161.93 , 55.145 } 160 | \draw_path_curveto:nnn { 162.246 , 61.227 } { 162.852 , 66.336 } { 163.234 , 69.152 } 161 | \draw_path_curveto:nnn { 169.391 , 68.957 } { 174.535 , 70.668 } { 177.586 , 74.5 } 162 | \draw_path_close: 163 | 164 | \draw_path_moveto:n { 167.254 , 40.5 } 165 | \draw_path_curveto:nnn { 177.586 , 39.668 } { 190.754 , 42.168 } { 196.918 , 51.5 } 166 | \draw_path_curveto:nnn { 199.918 , 48.336 } { 199.918 , 33.168 } { 199.918 , 33.168 } 167 | \draw_path_curveto:nnn { 199.918 , 33.168 } { 207.445 , 53.012 } { 219.391 , 58.465 } 168 | \draw_path_curveto:nnn { 214.809 , 42.316 } { 198.312 , 27.348 } { 191.504 , 21.836 } 169 | \draw_path_curveto:nnn { 184.504 , 16.168 } { 174.879 , 2.5 } { 174.879 , 2.5 } 170 | \draw_path_curveto:nnn { 174.879 , 2.5 } { 165.898 , 13.836 } { 162.898 , 31.5 } 171 | \draw_path_curveto:nnn { 162.355 , 34.695 } { 162.035 , 38.043 } { 161.867 , 41.383 } 172 | \draw_path_curveto:nnn { 163.57 , 40.965 } { 165.367 , 40.652 } { 167.254 , 40.5 } 173 | \draw_path_close: 174 | } 175 | 176 | % eye 177 | \cs_new_protected:Npn 
\__jsonparse_jason_path_viii: { 178 | \draw_path_moveto:n { 230.922 , 186.793 } 179 | \draw_path_curveto:nnn { 230.922 , 196.391 } { 222.188 , 204.168 } { 211.422 , 204.168 } 180 | \draw_path_curveto:nnn { 200.648 , 204.168 } { 191.922 , 196.391 } { 191.922 , 186.793 } 181 | \draw_path_curveto:nnn { 191.922 , 177.195 } { 200.648 , 169.418 } { 211.422 , 169.418 } 182 | \draw_path_curveto:nnn { 222.188 , 169.418 } { 230.922 , 177.195 } { 230.922 , 186.793 } 183 | \draw_path_close: 184 | } 185 | 186 | % pupil 187 | \cs_new_protected:Npn \__jsonparse_jason_path_ix: { 188 | \draw_path_moveto:n { 217.922 , 188.836 } 189 | \draw_path_curveto:nnn { 217.922 , 192.699 } { 214.375 , 195.836 } { 210.004 , 195.836 } 190 | \draw_path_curveto:nnn { 205.633 , 195.836 } { 202.086 , 192.699 } { 202.086 , 188.836 } 191 | \draw_path_curveto:nnn { 202.086 , 184.969 } { 205.633 , 181.836 } { 210.004 , 181.836 } 192 | \draw_path_curveto:nnn { 214.375 , 181.836 } { 217.922 , 184.969 } { 217.922 , 188.836 } 193 | \draw_path_close: 194 | } 195 | 196 | % nostril 197 | \cs_new_protected:Npn \__jsonparse_jason_path_x: { 198 | \draw_path_moveto:n { 68.77 , 289.297 } 199 | \draw_path_curveto:nnn { 68.77 , 289.297 } { 71.215 , 285.078 } { 74.879 , 280.211 } 200 | \draw_path_curveto:nnn { 79.305 , 274.324 } { 87.262 , 269.184 } { 91.254 , 268.836 } 201 | \draw_path_curveto:nnn { 98.379 , 268.211 } { 103.449 , 269.938 } { 100.551 , 275.527 } 202 | \draw_path_curveto:nnn { 97.652 , 281.117 } { 88.188 , 287.027 } { 81.254 , 287.961 } 203 | \draw_path_curveto:nnn { 74.316 , 288.891 } { 68.77 , 289.297 } { 68.77 , 289.297 } 204 | \draw_path_close: 205 | } 206 | 207 | % mouth 208 | \cs_new_protected:Npn \__jsonparse_jason_path_xi: { 209 | \draw_path_moveto:n { 27.488 , 319.328 } 210 | \draw_path_curveto:nnn { 26.766 , 323.887 } { 26.395 , 330.605 } { 29.504 , 334.336 } 211 | \draw_path_curveto:nnn { 34.504 , 340.336 } { 44.004 , 349.336 } { 65.504 , 336.336 } 212 | \draw_path_curveto:nnn { 83.551 , 325.422 } { 106.699 , 303.945 } { 113.672 , 297.328 } 213 | \draw_path_curveto:nnn { 109.645 , 300.484 } { 98.965 , 306.266 } { 74.754 , 306.086 } 214 | \draw_path_curveto:nnn { 48.09 , 305.883 } { 41.801 , 318.445 } { 27.488 , 319.328 } 215 | \draw_path_close: 216 | } 217 | 218 | % mouth outline 219 | \cs_new_protected:Npn \__jsonparse_jason_path_xii: { 220 | \draw_path_moveto:n { 17.652 , 318.48 } 221 | \draw_path_curveto:nnn { 19.613 , 318.891 } { 21.801 , 319.188 } { 24.254 , 319.336 } 222 | \draw_path_curveto:nnn { 25.383 , 319.402 } { 26.453 , 319.395 } { 27.488 , 319.328 } 223 | } 224 | 225 | % teeth 226 | \cs_new_protected:Npn \__jsonparse_jason_path_xiii: { 227 | \draw_path_moveto:n { 96.598 , 304.234 } 228 | \draw_path_curveto:nnn { 80.379 , 312.863 } { 52.5 , 327.137 } { 44.855 , 327.496 } 229 | \draw_path_curveto:nnn { 37.57 , 327.84 } { 31.289 , 322.75 } { 27.891 , 319.293 } 230 | \draw_path_curveto:nnn { 27.758 , 319.305 } { 27.625 , 319.32 } { 27.488 , 319.328 } 231 | } 232 | 233 | \draw_transform_yscale:n { -1 } 234 | \draw_linewidth:n { 3pt } 235 | \draw_miterlimit:n { 10 } 236 | \color_stroke:n { color_v } 237 | 238 | \draw_scope_begin: 239 | 240 | \color_fill:n { color_i } 241 | \__jsonparse_jason_path_i: 242 | \draw_path_use_clear:n { fill , clip } 243 | 244 | \color_fill:n { color_ii } 245 | \__jsonparse_jason_path_ii: 246 | \draw_path_use_clear:n { fill } 247 | 248 | \color_fill:n { color_iii } 249 | \__jsonparse_jason_path_iii: 250 | \draw_path_use_clear:n { fill , draw } 251 | 252 | \__jsonparse_jason_path_vi: 
253 | \draw_path_use_clear:n { draw } 254 | 255 | \draw_scope_end: 256 | 257 | \color_fill:n { color_ii } 258 | \__jsonparse_jason_path_vii: 259 | \draw_path_use_clear:n { fill , draw } 260 | 261 | \color_fill:n { color_iii } 262 | \__jsonparse_jason_path_iv: 263 | \draw_path_use_clear:n { fill , draw } 264 | 265 | \color_fill:n { color_iv } 266 | \__jsonparse_jason_path_v: 267 | \draw_path_use_clear:n { fill , draw } 268 | 269 | \color_fill:n { color_vii } 270 | \__jsonparse_jason_path_viii: 271 | \draw_path_use_clear:n { fill , draw } 272 | 273 | \color_fill:n { color_vi } 274 | \__jsonparse_jason_path_ix: 275 | \draw_path_use_clear:n { fill } 276 | 277 | \__jsonparse_jason_path_x: 278 | \draw_path_use_clear:n { fill } 279 | 280 | \color_fill:n { color_vii } 281 | \__jsonparse_jason_path_xi: 282 | \draw_path_use_clear:n { fill , draw } 283 | 284 | \__jsonparse_jason_path_xii: 285 | \draw_path_use_clear:n { draw } 286 | 287 | \__jsonparse_jason_path_xiii: 288 | \draw_path_use_clear:n { draw } 289 | 290 | \draw_end: 291 | 292 | \ExplSyntaxOff 293 | \end{document} -------------------------------------------------------------------------------- /jsonparse-doc.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jasperhabicht/jsonparse/c8a9c3d264c86e685ed314e9e072b4ff14f5e3fa/jsonparse-doc.pdf -------------------------------------------------------------------------------- /jsonparse-doc.tex: -------------------------------------------------------------------------------- 1 | % File: jsonparse-doc.tex 2 | % Copyright 2024-2025 Jasper Habicht (mail(at)jasperhabicht.de). 3 | % 4 | % This work may be distributed and/or modified under the 5 | % conditions of the LaTeX Project Public License version 1.3c, 6 | % available at http://www.latex-project.org/lppl/. 7 | % 8 | % This file is part of the `jsonparse' package (The Work in LPPL) 9 | % and all files in that bundle must be distributed together. 10 | % 11 | % This work has the LPPL maintenance status `maintained'. 
12 | % 13 | \documentclass[a4paper]{article} 14 | \def\jsonparsefileversion{1.6.2} 15 | \def\jsonparsefiledate{3 June 2025} 16 | 17 | \usepackage[T1]{fontenc} 18 | \usepackage{Alegreya} 19 | \usepackage{AlegreyaSans} 20 | \usepackage{FiraMono} 21 | 22 | \usepackage{geometry, longtable, booktabs, siunitx, tcolorbox, hyperref} 23 | \geometry{margin=25mm, left=45mm} 24 | \urlstyle{same} 25 | 26 | \usepackage{pgfplots} 27 | \pgfplotsset{compat=1.18} 28 | 29 | \usepackage{fancyhdr} 30 | \pagestyle{fancy} 31 | \fancyhf{} 32 | \renewcommand{\headrulewidth}{0pt} 33 | \fancyfoot[R]{\thepage} 34 | 35 | \ExplSyntaxOn 36 | \NewDocumentCommand{\fnstars}{m}{ 37 | \int_step_inline:nn { \value{#1} } { * } 38 | } 39 | \ExplSyntaxOff 40 | 41 | \linespread{1.1}\selectfont 42 | \addtolength{\skip\footins}{1em plus 5pt} 43 | \makeatletter 44 | \renewcommand*{\thefootnote}{\fnstars{footnote}} 45 | \renewcommand{\@makefntext}[1]{% 46 | \noindent\hbox to 1em{\normalfont\@thefnmark\hss}~#1% 47 | } 48 | \makeatother 49 | 50 | \tcbuselibrary{skins,listings} 51 | \lstdefinestyle{jsonparsedocmacro}{ 52 | basicstyle=\small\ttfamily, 53 | literate=*{<}{{{\color{black!50}\guilsinglleft}}}1 54 | {>}{{{\color{black!50}\guilsinglright}}}1, 55 | keywords={}, 56 | moredelim=[is][\bfseries]{|}{|}, 57 | moredelim=[is][\bfseries\itshape]{?}{?}, 58 | moredelim=[is][\color{black!50}]{!}{!}, 59 | } 60 | \lstdefinestyle{jsonparsedoccodeexample}{ 61 | basicstyle=\small\ttfamily, 62 | keywords={}, 63 | } 64 | 65 | \hypersetup{colorlinks} 66 | 67 | \ExplSyntaxOn 68 | \int_new:N \l_jsonparse_doc_change_int 69 | \NewDocumentCommand{\changes}{ m m m }{ 70 | \int_incr:N \l_jsonparse_doc_change_int 71 | \prop_new:c { l_jsonparse_doc_change_ 72 | \int_to_roman:n { \l_jsonparse_doc_change_int } _prop } 73 | \prop_put:cnn { l_jsonparse_doc_change_ 74 | \int_to_roman:n { \l_jsonparse_doc_change_int } _prop } { version } {#1} 75 | \prop_put:cnn { l_jsonparse_doc_change_ 76 | \int_to_roman:n { \l_jsonparse_doc_change_int } _prop } { date } {#2} 77 | \prop_put:cnn { l_jsonparse_doc_change_ 78 | \int_to_roman:n { \l_jsonparse_doc_change_int } _prop } { changes } {#3} 79 | } 80 | \NewDocumentCommand{\printchanges}{ }{ 81 | \section{Changes} 82 | \begin{description} 83 | \setlength\itemsep{0pt} 84 | \int_step_inline:nn { \l_jsonparse_doc_change_int } { 85 | \item[ 86 | \prop_item:cn { l_jsonparse_doc_change_ 87 | \int_to_roman:n { ##1 } _prop } { version } ~ 88 | \normalfont{ ( 89 | \prop_item:cn { l_jsonparse_doc_change_ 90 | \int_to_roman:n { ##1 } _prop } { date } 91 | ) } 92 | ] 93 | \prop_item:cn { l_jsonparse_doc_change_ 94 | \int_to_roman:n { ##1 } _prop } { changes } 95 | } 96 | \end{description} 97 | } 98 | 99 | \tl_new:N \l_jsonparse_doc_doctitle_tl 100 | \NewExpandableDocumentCommand{\makedoctitle}{ o m o m m o m }{ 101 | \hypersetup{ 102 | pdfauthor={#5}, 103 | pdftitle={\IfValueTF{#1}{#1}{#2}}, 104 | pdfsubject={\IfValueTF{#3}{#3}{#4}} 105 | } 106 | \tl_set:Nn \l_jsonparse_doc_doctitle_tl { 107 | \group_begin: 108 | \tcbset{ 109 | title ~ style ~ hook/.style={ 110 | boxrule=2pt, 111 | fontupper=\huge\ttfamily 112 | } 113 | } 114 | \setlength{\parindent}{0pt}\sffamily 115 | \Huge{\bfseries #2}\par\bigskip 116 | \Large #4\par\bigskip 117 | \large #5 118 | \IfValueT{#6}{ 119 | \,\footnote{#6}\par\bigskip 120 | } 121 | #7\par\bigskip 122 | \rule{\textwidth}{.08em} 123 | \group_end: 124 | } 125 | } 126 | 127 | \NewExpandableDocumentCommand{\printdoctitle}{ }{ 128 | \tl_use:N \l_jsonparse_doc_doctitle_tl 129 | } 130 | \ExplSyntaxOff 131 | 132 
| \NewDocumentCommand{\expandablestarcode}{ O{0,0} }{ 133 | \fill[black!50, shift={([xshift={-10pt}, yshift={-10pt-0.5em}]#1)}] 134 | (90:3.75pt) 135 | \foreach \i in {1,...,4} { 136 | -- ({90+360/5*\i*2}:3.75pt) 137 | } -- cycle; 138 | } 139 | 140 | \NewDocumentCommand{\expandablestar}{}{ 141 | \tikz{\expandablestarcode} 142 | } 143 | 144 | \NewDocumentCommand{\warning}{}{% 145 | \begin{tikzpicture}[overlay, baseline={(w.base)}] 146 | \node[circle, fill, black!25!red!10, text=black!25!red, outer sep=0pt] 147 | (w) at (-3em,0pt) {\bfseries !}; 148 | \fill[black!25!red!10] 149 | (w.east) |- (w.south) -- cycle; 150 | \end{tikzpicture}% 151 | } 152 | 153 | \tcbset{ 154 | size=small, 155 | arc=2.5pt, 156 | outer arc=2.5pt, 157 | colframe=black!10, 158 | colback=black!10, 159 | title style hook/.style={}, 160 | } 161 | 162 | \NewTCBListing{macrodef}{ s o }{ 163 | listing only, 164 | listing style={jsonparsedocmacro}, 165 | grow to left by=2cm, 166 | boxrule=0pt, 167 | after={\par\smallskip\noindent}, 168 | enhanced, 169 | IfValueT={#2}{ 170 | hypertarget=#2 171 | }, 172 | IfBooleanT={#1}{ 173 | overlay={ 174 | \expandablestarcode[frame.north east] 175 | } 176 | } 177 | } 178 | 179 | \NewTCBListing{codeexample}{ }{ 180 | listing only, 181 | listing style={jsonparsedoccodeexample}, 182 | after={\par\smallskip\noindent} 183 | } 184 | 185 | \NewTCBListing{codeexamplecolumns}{ }{ 186 | text side listing, 187 | lefthand width=4.5cm, 188 | listing style={jsonparsedoccodeexample}, 189 | after={\par\smallskip\noindent} 190 | } 191 | 192 | \NewTCBListing{macrodeprecated}{ s }{ 193 | listing only, 194 | listing style={jsonparsedocmacro}, 195 | boxrule=0pt, 196 | width={0.5\linewidth-2.5pt}, 197 | box align=top, 198 | after=\hfill, 199 | colframe=black!25!red!10, 200 | colback=black!25!red!10, 201 | enhanced, 202 | IfBooleanT={#1}{ 203 | overlay={ 204 | \fill[black!25!red!10] 205 | ([yshift={5pt-1.5em}]frame.north east) 206 | -- ++(5pt,-5pt) -- ++(-5pt,-5pt) 207 | -- cycle; 208 | } 209 | } 210 | } 211 | 212 | \NewTCBListing{macroreplacement}{ }{ 213 | listing only, 214 | listing style={jsonparsedocmacro}, 215 | boxrule=0pt, 216 | width={0.5\linewidth-2.5pt}, 217 | box align=top, 218 | before={}, 219 | colframe=black!25!green!10, 220 | colback=black!25!green!10 221 | } 222 | 223 | \NewTotalTCBox{\macro}{ O{} v }{ 224 | verbatim, 225 | size=tight, 226 | arc=1pt, 227 | outer arc=1pt, 228 | top=0.5pt, 229 | bottom=0.5pt, 230 | left=2pt, 231 | right=2pt, 232 | boxrule=0pt, 233 | colframe=black!10, 234 | colback=black!10, 235 | fontupper=\small\ttfamily, 236 | #1 237 | }{\vphantom{/g}\lstinline^#2^} 238 | 239 | \NewTotalTCBox{\titlemacro}{ s O{} m }{ 240 | verbatim, 241 | size=tight, 242 | arc=1pt, 243 | outer arc=1pt, 244 | top=0.5pt, 245 | bottom=0.5pt, 246 | left=2pt, 247 | right=2pt, 248 | boxrule=0pt, 249 | colframe=black!10, 250 | colback=black!10, 251 | fontupper=\small\ttfamily, 252 | title style hook, 253 | #2 254 | }{\vphantom{/g}\IfBooleanT{#1}{\textbackslash}#3} 255 | 256 | \usepackage{jsonparse} 257 | 258 | \makedoctitle 259 | [The jsonparse package] 260 | {The \titlemacro{jsonparse} package} 261 | {A handy way to parse, store and access JSON data from files or strings in LaTeX documents} 262 | {Jasper Habicht}[E-mail: \href{mailto:mail@jasperhabicht.de}{mail@jasperhabicht.de}. I am grateful to Joseph Wright, Jonathan P. Spratte and David Carlisle who helped me navigating the peculiarities of TeX and optimizing the code. Jason, the JSON parsing horse: \textcopyright{} 2024--2025 Hannah Klöber.] 
263 | {Version \jsonparsefileversion, released on \jsonparsefiledate} 264 | 265 | %\changes{v0.3.0}{2024/04/08}{First public beta release.} 266 | \changes{v0.5.0}{2024/04/09}{Changed from string token variables to token lists to support Unicode.} 267 | %\changes{v0.5.5}{2024/04/09}{Bug fixes, introduction and enhancement of user functions.} 268 | \changes{v0.5.6}{2024/04/11}{Bug fixes, escaping of special chars added.} 269 | \changes{v0.5.7}{2024/04/14}{Bug fixes, key-value option setting added.} 270 | %\changes{v0.6.0}{2024/04/15}{Bug fixes, renaming of several commands.} 271 | \changes{v0.7.0}{2024/04/18}{Renaming and rearranging of keys, escaping of special JSON escape sequences added.} 272 | \changes{v0.7.1}{2024/04/20}{Access to top-level keys of object added.} 273 | \changes{v0.8.0}{2024/04/24}{Internal rewrite, escaping procedures changed.} 274 | \changes{v0.8.2}{2024/04/26}{Bug fixes, externalizing parsed data.} 275 | \changes{v0.8.3}{2024/04/28}{Escaping of characters with special meaning in TeX.} 276 | %\changes{v0.8.5}{2024/05/05}{Enhanced key management.} 277 | %\changes{v0.8.6}{2024/05/09}{Bug fix in nesting function.} 278 | %\changes{v0.8.7}{2024/08/08}{Corrections in documentation, error messages.} 279 | \changes{v0.9.0}{2024/08/27}{Adaption to updated verbatim tokenization.} 280 | \changes{v0.9.1}{2024/09/21}{Added functions to test for valid JSON numbers.} 281 | \changes{v0.9.3}{2024/10/24}{Fixed a bug that prevented tabs in source from being parsed properly.} 282 | %\changes{v0.9.5}{2024/10/27}{Streamlining of code, clarification of explanations in documentation.} 283 | \changes{v0.9.6}{2024/10/31}{Allowing for multiple return values when mapping over arrays.} 284 | %\changes{v0.9.7}{2024/11/05}{Streamlining of code, ensuring backward compatibility.} 285 | \changes{v0.9.8}{2024/11/19}{Bug fixes; adding possibility to store value in token list.} 286 | %\changes{v0.9.10}{2024/12/10}{Enhanced backward compatibility; switching to linked property lists.} 287 | %\changes{v0.9.11}{2025/01/15}{Bug fixes; adding additional command to loop through arrays.} 288 | \changes{v0.9.12}{2025/01/17}{Bug fixes; adding commands to access items in arrays.} 289 | %\changes{v1.0.0}{2025/01/20}{Streamlining of code, unification of command structure.} 290 | \changes{v1.0.1}{2025/01/21}{Fixes in documentation. Added user command for filtering.} 291 | \changes{v1.0.2}{2025/01/23}{Support for Unicode surrogate pairs.} 292 | %\changes{v1.0.3}{2025/01/26}{Test for valid JSON numbers expandable.} 293 | \changes{v1.1.0}{2025/01/30}{Unified names of user functions; renaming key for keywords.} 294 | \changes{v1.1.1}{2025/02/03}{Added option to store result of mapped inline function.} 295 | \changes{v1.1.2}{2025/02/08}{Added option to store result of array function.} 296 | %\changes{v1.1.3}{2025/02/13}{Bug fixes.} 297 | %\changes{v1.2.0}{2025/02/22}{Unified option to store result.} 298 | \changes{v1.2.1}{2025/02/24}{Unified functions, added option to store result globally.} 299 | %\changes{v1.2.2}{2025/02/25}{Fixes in documentation.} 300 | \changes{v1.2.3}{2025/03/23}{Enabled nesting of mapped inline function.} 301 | \changes{v1.3.0}{2025/03/25}{Enhancements in key setting mechanism.} 302 | %\changes{v1.3.1}{2025/03/30}{Fixes in documentation. Bug fixes.} 303 | %\changes{v1.3.2}{2025/04/07}{Bug fixes and code improvements.} 304 | \changes{v1.4.0}{2025/04/10}{Enhancements in parsing speed.} 305 | \changes{v1.5.0}{2025/04/20}{Enhancements in parsing speed. 
Bug fixes.}
306 | %\changes{v1.5.1}{2025/05/05}{Bug fixes.}
307 | %\changes{v1.5.2}{2025/05/06}{Bug fixes.}
308 | \changes{v1.6.0}{2025/05/10}{Renaming of L3 functions. Bug fixes.}
309 | %\changes{v1.6.1}{2025/06/03}{Bug fixes. FAQ in Doc.}
310 | 
311 | \begin{document}
312 | \vspace*{-1cm}
313 | \noindent\hspace*{-3.5mm}\includegraphics[width=5cm]{jason/jason.pdf}
314 | 
315 | \printdoctitle
316 | 
317 | \bigskip
318 | 
319 | \section{Introduction}
320 | 
321 | Hello guys, I am Jason, the JSON parsing horse. JSON data is my favorite thing to parse! But I found that converting JSON to TeX can be a bit tricky. Therefore, I created this package which I am happy to introduce to you.\footnote{Credit for the idea of Jason, the JSON parsing horse, goes to Paulo Cereda and the circumstance of his presumably insufficiently illuminated workplace (or his late working hours).}
322 | 
323 | The \macro{jsonparse} package provides a handy way to read in JSON data from files or strings in LaTeX documents, parse the data and store it in a user-defined token variable. The package allows accessing the stored data via a JavaScript-flavored syntax.
324 | 
325 | The package is continuously being tested, but bugs cannot be ruled out. The author is grateful for any bug reports via GitHub at \url{https://github.com/jasperhabicht/jsonparse/issues}. A site for asking questions about the package and for suggestions for improvement is available at \url{https://github.com/jasperhabicht/jsonparse/discussions}.
326 | 
327 | \section{Frequently asked questions}
328 | 
329 | \subsection{Why does a comparison of two equal strings return false?}
330 | 
331 | Note that you probably need to use \macro{\JSONParseExpandableValue} if you want to compare a value from parsed JSON data with the contents of another token list. See the explanation \hyperlink{macro:expandablevalue}{below} for more details.
332 | 
333 | Also note that \macro{\JSONParseExpandableValue} returns a string with specific category codes, namely category code 12 (``other'') for all characters except spaces. Therefore, you need to test against another string. You can use \macro{\detokenize} for conversion. See section \ref{sec:general} for more information.
334 | 
335 | \subsection{Why does parsing take so long?}
336 | 
337 | Make sure that you use the most recent version of the package. Parsing speed is constantly being monitored and updates typically improve it.
338 | 
339 | If you don't need to access whole objects or arrays from the JSON data, you can set the key \macro{skip structures}, which should increase parsing speed. See the explanation \hyperlink{key:skipstructures}{below} for more details.
340 | 
341 | If the JSON data contains a considerable amount of numbers, parsing speed can be increased slightly by setting \macro{validate numbers=false}. See the explanation \hyperlink{key:validatenumbers}{below} for more details.
342 | 
343 | If the JSON data does not change often, you can externalize the parsed result by setting the key \macro{externalize}. See section \ref{sec:externalizing} for more information.
344 | 
345 | \section{Loading the package}
346 | 
347 | To install the package, copy the package file \macro{jsonparse.sty} into the working directory or into the \macro{texmf} directory. After the package has been installed, the \macro{jsonparse} package is loaded by calling \macro{\usepackage{jsonparse}} in the preamble of the document.
348 | 
349 | The package can be used with PDFLaTeX, LuaLaTeX or XeLaTeX. It should also work with upTeX.
The package does not load any dependencies, but it needs a LaTeX kernel from 1 June 2022 or newer. It is recommended to use the package with an up-to-date TeX distribution.
350 | 
351 | \begin{macrodef}
352 | |debug|
353 | \end{macrodef}
354 | The package can be loaded with the option \macro{debug}. It will then output to the log file every instance of a string, a boolean (true or false) value, a null value, a number as well as the start and end of every object and the start and end of every array that is found while parsing the JSON string or JSON file. It will also show the relevant keys associated with the values. If the key \macro{skip structures} is not set (or not set to \macro{true}), objects and arrays will be shown as values to the respective keys, including the pseudo key \macro{.} (or the string defined using the key \macro{separator/child}) that represents the complete JSON string.
355 | 
356 | Let us assume that the following JSON data is parsed:
357 | 
358 | \begin{codeexample}
359 | {
360 | "string" : "a" ,
361 | "boolean true" : true ,
362 | "boolean false" : false ,
363 | "null" : null ,
364 | "number" : -1.1e-1 ,
365 | "array" : [ "a" , "b" , "c" ]
366 | }
367 | \end{codeexample}
368 | 
369 | This will then result in the following output to the log:
370 | 
371 | \begin{codeexample}
372 | Parsing JSON ...
373 | (obj begin)
374 | (key) string:
375 | (str) a
376 | (key) boolean true:
377 | (tru) true
378 | (key) boolean false:
379 | (fal) false
380 | (key) null:
381 | (nul) null
382 | (key) number:
383 | (num) -1.1e-1
384 | (arr begin)
385 | (key) array[0]:
386 | (str) a
387 | (key) array[1]:
388 | (str) b
389 | (key) array[2]:
390 | (str) c
391 | (key) array:
392 | (arr) [ "a" , "b" , "c" ]
393 | (arr end)
394 | (key) .:
395 | (obj) { "string" : "a" , "boolean true" : true , "boolean false" :
396 | false , "null" : null , "number" : -1.1e-1 , "array" :
397 | [ "a" , "b" , "c" ] }
398 | (obj end)
399 | JSON parsing done.
400 | \end{codeexample}
401 | 
402 | The \macro{debug} key can be set either as package option or using \macro{\JSONParseSet}. It can also be set locally as option to the commands \macro{\JSONParse} and \macro{\JSONParseFromFile}.
403 | 
404 | \section{General remarks on the parsing procedure}\label{sec:general}
405 | 
406 | In general, the package will read and store the JSON source and data as string, which means that all characters have category code 12 (``other''), except for spaces and (horizontal) tabs which have category code 10 (``space''). The \macro{\endlinechar} value is set to $-1$ which means that linefeeds and carriage returns are ignored by TeX. These settings are in line with the way the JSON specification handles whitespace. Furthermore, if PDFLaTeX is used, the upper half of the 8-bit range is set to ``active''. Additionally, JSON defines a small set of escape sequences and in order to be able to process these, the category code of the backslash is set to 0 (``escape'').
407 | 
408 | During parsing, the package identifies JSON objects, arrays, strings, numbers, boolean values and null values from the JSON data. It stores all these values together with the relevant keys in a property list. Once the parsing process is done, every value can be retrieved from the property list by calling the relevant key. The package ignores whitespace in the JSON data. In general, the package accepts any valid JSON data. If a key is defined multiple times, the later definition will silently overwrite the earlier one.
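For example, the following sketch (the token variable \macro{\myJSONduplicate} is only illustrative) parses an object in which the key \macro{key} appears twice; \macro{\JSONParseValue} then returns the second value:

\begin{codeexample}
\JSONParse{\myJSONduplicate}{ { "key" : "first" , "key" : "second" } }

% typesets `second', because the later definition of "key"
% silently overwrites the earlier one
\JSONParseValue{\myJSONduplicate}{key}
\end{codeexample}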
409 | 410 | \section{Escaping and special treatment of the input}\label{sec:escaping} 411 | 412 | JSON strings cannot contain the two characters \macro{"} and \macro{\}. These two characters need to be escaped with a preceding backslash (\macro{\}). This package therefore redefines locally the TeX control symbols \macro{\"}, \macro{\/}, \macro{\\}, \macro{\b}, \macro{\f}, \macro{\n}, \macro{\r}, \macro{\t} and \macro{\u}. These control symbols are prevented from expanding during parsing. For example, \macro{\"} is first defined as \macro{\exp_not:N \"} and only when typeset, \macro{\"} is expanded to \macro{"}, which ensures that strings are parsed properly. 413 | 414 | Similarly, the control symbol \macro{\/} expands eventually to \macro{/} and \macro{\\} to \macro{\c_backslash_str} (i.\,e.\ a backslash with category code 12). 415 | 416 | The escape sequence \macro{\u} followed by a hex value consisting of four digits eventually expands to \macro{\codepoint_generate:nn} that creates the character represented by the relevant four hex digits with category code 12 (``other''). If two escape sequences \macro{\u} with four hex digits each follow each other and together represent a Unicode surrogate pair, this surrogate pair is converted into the relevant Unicode codepoint. 417 | 418 | The JSON escape sequences \macro{\b}, \macro{\f}, \macro{\n}, \macro{\r} and \macro{\t} eventually expand to token variables of which the contents can be set using the relevant \macro{replace} key. See more on setting options below in section \ref{sec:options}. 419 | 420 | It is possible to insert TeX macros to the JSON source that will eventually be parsed when typesetting. Backslashes of TeX macros need to be escaped by another backslash. The TeX macros \macro{\"} and \macro{\\} must be escaped twice in the JSON source so that they become \macro{\\\"} and \macro{\\\\} respectively. 421 | 422 | \begin{macrodef} 423 | |\$|{}{} 424 | \end{macrodef} 425 | Using the control sequence \macro{\$}, it is possible to nest JSON strings into each other. Used inside the \macro{\JSONParse} command, the control sequence takes two arguments delimited by curly braces. The first argument represents the name of the token variable that holds the parsed JSON data where the inserted JSON string should be taken from. The second argument sets the key that should be selected. The following example shows a simple use case: 426 | 427 | \begin{codeexamplecolumns} 428 | \JSONParse{\myJSONdataA}{ 429 | { "a" : { "b" : "c" } } 430 | } 431 | 432 | \JSONParse{\myJSONdataB}{ 433 | { "d" : \${myJSONdataA}{a} } 434 | } 435 | 436 | \JSONParseValue{\myJSONdataB}{d.b} 437 | \end{codeexamplecolumns} 438 | 439 | \warning Note that the control sequence \macro{\$} is replaced by the value exactly. Therefore, if the value happens to be a string, the control sequence \macro{\$} should be placed between quotation marks (\macro{"}) in order for the resulting string to be valid JSON. The control sequence \macro{\$} is only available inside the \macro{\JSONParse} command, but not inside the \macro{\JSONParseFromFile} command. 440 | 441 | \begin{macrodef} 442 | |escape|={all} 443 | |escape|={none} 444 | |escape|={number sign} 445 | |escape|={dollar sign} 446 | |escape|={percent sign} 447 | |escape|={ampersand} 448 | |escape|={circumflex accent} 449 | |escape|={low line} 450 | |escape|={tilde} 451 | \end{macrodef} 452 | The key \macro{escape} can be used to convert characters that don't require escaping in JSON but in TeX into the relevant TeX escape sequences. 
Apart from the backslash and curly braces that need to be escaped anyways, these are the number sign, the dollar sign, the percent sign, the ampersand, the circumflex accent, the low line and the tilde. The characters can be selected individually separated by a comma (for example \macro{escape={dollar sign, circumflex accent, low line}}. With \macro{escape={all}}, all escaping sequences are selected, with \macro{escape={none}}, none is selected. 453 | 454 | The naming of the relevant characters follows their Unicode names. However, \macro{hash} exists as alias for \macro{number sign}, \macro{dollar} as alias for \macro{dollar sign}, \macro{percent} for \macro{percent sign}, \macro{circumflex} for \macro{circumflex accent} and \macro{underscore} for \macro{low line}. 455 | 456 | This key can be set using \macro{\JSONParseSet}. It can also be set locally as option to the commands \macro{\JSONParseValue}, \macro{\JSONParseArrayUse} and \macro{\JSONParseArrayMapFunction}. 457 | 458 | \begin{macrodef} 459 | |rescan| 460 | |rescan|={} 461 | \end{macrodef} 462 | The key \macro{rescan} can be used to activate and deactivate rescanning of the output. This key is active per default. Rescanning converts all tokens to their default category codes and TeX control sequences are expanded before typesetting. Further, during the rescanning process, JSON escape sequences are replaced and characters that don't require escaping in JSON but in TeX are replaced by the relevant TeX escape sequences. 463 | 464 | This key can be set using \macro{\JSONParseSet}. It can also be set locally as option to the commands \macro{\JSONParseValue}, \macro{\JSONParseArrayUse} and \macro{\JSONParseArrayMapFunction}. 465 | 466 | \section{Main user commands} 467 | 468 | The first part of this section describes the basic commands for parsing JSON data and retrieving values from parsed JSON data. The second part of this section describes the various commands for handling arrays provided by this package. Commands marked with a star (\expandablestar) are fully expandable. 469 | 470 | \subsection{Basic parsing commands} 471 | 472 | \begin{macrodef} 473 | |\JSONParse|[]{}{} 474 | \end{macrodef} 475 | The command \macro{\JSONParse} is used to parse a JSON string and globally store the parsed result in a token variable (a property list). The second argument takes the name of the token variable that is created by the command. The third argument takes the JSON string to be parsed. 476 | 477 | For example, using \macro{\JSONParse{\myJSONdata}{ { "key" : "value" } }}, the relevant JSON string will be parsed and the result stored in the token variable \macro{\myJSONdata} as property list. In this case, the property list only consists of two entries of which one has the key \macro{key} and the value \macro{value} and the other represents the whole object. Once the JSON string has been parsed, the command \macro{\JSONParseValue{\myJSONdata}{key}}, for example, can be used to extract the relevant value from this property list (see the description below). 478 | 479 | The first optional argument of the command \macro{\JSONParse} can be used to pass options to the command that are then applied locally. 480 | 481 | \warning The command \macro{\JSONParse} takes the JSON string as verbatim argument which means that the command can't be used inside a macro argument. 
One consequence of this for example is that when using the \macro{beamer} document class, the command \macro{\JSONParse} can only be used inside the \macro{frame} environment if the \macro{fragile} option is set. 482 | 483 | \begin{macrodef} 484 | |\JSONParseFromFile|[]{}{} 485 | \end{macrodef} 486 | The command \macro{\JSONParseFromFile} is used to parse a JSON file and store the parsed result in a token variable (a property list). It works the same way as \macro{\JSONParse}, but instead of a JSON string, it takes as third argument the path to the JSON file relative to the working directory. 487 | 488 | \begin{macrodef} 489 | |\JSONParseValue|[]{}{} 490 | \end{macrodef} 491 | The command \macro{\JSONParseValue} is used to select values from the token variable (property list) that has been created using the commands \macro{\JSONParse} or \macro{\JSONParseFromFile}. The second argument takes the token variable that holds the parsed JSON data. The third argument takes the key to select the relevant entry from the parsed JSON data using JavaScript syntax. 492 | 493 | If the JSON string \macro{{ "key" : "value" }} is parsed into the token variable \macro{\myJSONdata}, using \macro{\JSONParseValue{\myJSONdata}{key}} would extract the value associated with the key \macro{key}, which in this case is \macro{value}, and typeset it to the document. 494 | 495 | Nested objects and arrays are assigned keys that adhere to JavaScript syntax. For example, if the JSON string \macro{{ "outer_key" : { "inner_key" : "value" } }} is parsed into the token variable \macro{\myJSONdata}, to select the value associated with the key \macro{inner_key}, the command \macro{\JSONParseValue{\myJSONdata}{outer_key.inner_key}} can be used. To give an example for an array, the command \macro{\JSONParseValue{\myJSONdata}{key[0]}} selects the first value of the array associated with the key \macro{key} in the JSON string \macro{{ "key" : [ "one" , "two" ] }}. 496 | 497 | The first optional argument can be used to pass options to the command, such as \macro{escape} or \macro{rescan}, that are then applied locally. When the option \macro{rescan} is used, the token list is rescanned before it is typeset (which means that all category codes that may have been changed before are set to the default values). This is the default behavior. If rescanning is not desired, pass the option \macro{rescan=false} to the command. 498 | 499 | When a key is associated with an object or array (and the key \macro{skip structures} is not set), the whole object or array is output as JSON string. Additionally, the special key \macro{.} (or the string defined using the key \macro{separator/child}) returns the whole JSON object (or the whole JSON array if the JSON data only consists of one array) as string where all characters (except for spaces and tabs) have category code 12 (``other''). 500 | 501 | \warning The command \macro{\JSONParseValue} is not expandable and can therefore not be used as argument of certain other arguments where expansion is needed. In such cases, the expandable command \macro{\JSONParseExpandableValue} should be used. 502 | 503 | \begin{macrodef}[key:storein] 504 | |store in|={} 505 | \end{macrodef} 506 | The command \macro{\JSONParseValue} accepts the key \macro{store in} that can be used to store the return value in another token variable. If the token variable given as option to the \macro{store in} key has not yet been defined, it will be created by this command. 
507 | 508 | The token list returned by this command is a string variable where all characters have category code 12 (``other''), except for spaces and (horizontal) tabs that have category code 10 (``space''). 509 | 510 | The key \macro{store in} can be used together with the key \macro{rescan} to rescan the return value before storing it in the token variable. This means that the value stored in the token list will have the category codes TeX uses per default. Option settings such as the \macro{escape} option are taken into consideration during the rescan process. 511 | 512 | This can, for example, be necessary when numbers stored in the JSON data in scientific format should be formatted using the \macro{siunitx} package. The rescan is needed here, because otherwise the character \macro{e} would have the wrong category code and would hence not be recognized by the formatting parser as exponent marker. Let us assume the key \macro{number} in some JSON source parsed into the token variable \macro{\myJSONnumber} represents the value \macro{-1.1e-1}, then the following could be used to format the output: 513 | 514 | \JSONParse{\myJSONnumber}{ { "number" : -1.1e-1 } } 515 | \begin{codeexamplecolumns} 516 | \JSONParseValue 517 | [rescan, store in=\mynumber] 518 | {\myJSONnumber}{number} 519 | \num{\mynumber} 520 | \end{codeexamplecolumns} 521 | 522 | The key \macro{store in} can also be set using \macro{\JSONParseSet}. Calling \macro{store in={}} will reset it to its default (empty) value. 523 | 524 | \begin{macrodef}*[macro:expandablevalue] 525 | |\JSONParseExpandableValue|{}{} 526 | \end{macrodef} 527 | Whole objects or arrays can be output as JSON string for further use in other macros using the expandable command \macro{\JSONParseExpandableValue}. The value that is returned by this command is typically a string variable where all characters have category code 12 (``other''), except for spaces and (horizontal) tabs that have category code 10 (``space''). This should be kept in mind if string comparisons should be made. A comparison against a token list with the default category codes used by TeX won't work, since letters will have category code 11 (``letter''), but it is possible to use \macro{\detokenize} to set the category codes of the token list in such a way that the comparison works. 528 | 529 | For example, if the JSON string \macro{{ "key" : "value" }} has been parsed into the token variable \macro{\myJSONdata}, the command \macro{\JSONParseExpandableValue{\myJSONdata}{key}} will have the same meaning as \macro{\detokenize{value}} and expand to a token list with all characters having category code 12 (``other''). 530 | 531 | \begin{macrodef} 532 | |\JSONParseKeys|[]{}{} 533 | \end{macrodef} 534 | The command \macro{\JSONParseKeys} is used to get all top-level keys of a JSON object as JSON array and return this array as string where all characters (except for spaces and tabs) have category code 12 (``other''). The first argument of the command takes the token variable that holds the parsed JSON data. The second argument takes the key to select the relevant entry from the parsed JSON data using JavaScript syntax. 535 | 536 | The command \macro{\JSONParseKeys} accepts as option the key \macro{store in} to get all top-level keys of a JSON object as JSON array and parse this array into a token variable. Note that the return value is stored as property list, not as string. The token variable to store the keys as array is created if it does not exist. 
537 | 538 | As an example, let us assume that the following JSON data structure is parsed into the token variable \macro{\myJSONdata}: 539 | 540 | \begin{codeexample} 541 | { 542 | "array" : [ 543 | { 544 | "key_a" : "one" , 545 | "key_b" : "two" 546 | } , 547 | { 548 | "key_a" : "three" , 549 | "key_b" : "four" 550 | } 551 | ] 552 | } 553 | \end{codeexample} 554 | \JSONParse{\myJSONdata}{ { "array" : [ { "key_a" : "one" , "key_b" : "two" } , { "key_a" : "three" , "key_b" : "four" } ] } } 555 | 556 | We can then generate an array consisting of all keys of the object that is the first item of the array using \macro{\JSONParseKeys{\myJSONdata}{array[0]}}. We can also generate an array consisting of all top-level keys of the parsed JSON data using \macro{\JSONParseKeys{\myJSONdata}{.}} where \macro{.} is the pseudo key representing the complete parsed JSON string. Note that in addition to the key of an array, keys for each item of this array will be added to the array of keys: 557 | 558 | \begin{codeexamplecolumns} 559 | \JSONParseKeys{\myJSONdata}{array[0]} 560 | 561 | \JSONParseKeys{\myJSONdata}{.} 562 | \end{codeexamplecolumns} 563 | 564 | If we store such an array of keys in the token variable \macro{\myJSONkeys}, we can for example access the first item of this array of keys using \macro{\JSONParseValue{\myJSONkeys}{[0]}}: 565 | 566 | \begin{codeexamplecolumns} 567 | \JSONParseKeys[store in=\myJSONkeys] 568 | {\myJSONdata}{.} 569 | \JSONParseValue{\myJSONkeys}{[0]} 570 | \end{codeexamplecolumns} 571 | 572 | Note that the underscores in the names of the keys can be printed without changing to math mode in the above example because they are stored as strings where all characters (except for spaces and tabs) have category code 12 (``other''). 573 | 574 | \begin{macrodef} 575 | |\JSONParseFilter|{}{}{} 576 | \end{macrodef} 577 | The command \macro{\JSONParseFilter} is used to select a part (such as an object or an array) of a JSON object or JSON array and parse this into a token variable (a property list). The first argument denotes the token variable where the value should be stored into. The second argument of the command takes the token variable that holds the parsed JSON data. The third argument takes the key to select the relevant entry from the parsed JSON data using JavaScript syntax. 578 | 579 | \subsection{Commands for handling arrays} 580 | 581 | The package offers a variety of commands that can be used to process JSON arrays. Three commands are provided to loop through arrays, \macro{\JSONParseArrayUse}, \macro{\JSONParseArrayMapFunction} and \macro{\JSONParseArrayMapInline} which offer different functionality for different use cases. All three commands are implemented in a unique way and it should not be expected that what works with one of these commands also works with another. The commands differ in various respects, for example: 582 | 583 | \begin{itemize} 584 | \item With \macro{\JSONParseArrayUse} and \macro{\JSONParseArrayMapInline}, it is possible to store the result in a token list for later use via the option key \macro{store in}, but such is not possible with \macro{\JSONParseArrayMapFunction}. 585 | \item It is possible to store non-expandable commands (such as \macro{\emph} or \macro{\textbf}) in a token list using \macro{\JSONParseArrayUse}, but not using \macro{\JSONParseArrayMapInline}. 
586 | \end{itemize} 587 | 588 | \begin{macrodef} 589 | |\JSONParseArrayCount|[]{}{} 590 | \end{macrodef} 591 | The command \macro{\JSONParseArrayCount} takes as first argument a token variable holding a parsed JSON string or JSON file and as second argument a key to select an array in the JSON data. It returns an integer representing the number of items contained in the selected array. 592 | 593 | The command \macro{\JSONParseArrayCount} accepts the use of the key \macro{store in} to store the number of items contained in the selected array in a token variable. 594 | 595 | \begin{macrodef} 596 | |\JSONParseArrayUse|[]{}{}[]{} 597 | \end{macrodef} 598 | The command \macro{\JSONParseArrayUse} is used to select all values from an array from a parsed JSON string or JSON file. The second argument takes the token variable that holds the parsed JSON data. The first argument takes the key to select the relevant entry from the parsed JSON data using JavaScript syntax. The third argument is optional and can be used to pass a subkey, i.\,e.\ a key that is used to select a value for every item. The last argument takes a string that is inserted between all values when they are typeset. 599 | 600 | Let us again assume the following JSON data structure being parsed into the token variable \macro{\myJSONdata}: 601 | 602 | \begin{codeexample} 603 | { 604 | "array" : [ 605 | { 606 | "key_a" : "one" , 607 | "key_b" : "two" 608 | } , 609 | { 610 | "key_a" : "three" , 611 | "key_b" : "four" 612 | } 613 | ] 614 | } 615 | \end{codeexample} 616 | 617 | When using \macro{\JSONParseArrayUse{\myJSONdata}{array}[key_a]{, }}, `\JSONParseArrayUse{\myJSONdata}{array}[key_a]{, }' is then typeset to the document. 618 | 619 | The first optional argument can be used to pass options to the command, such as \macro{escape} or \macro{rescan}, that are then applied locally. 620 | 621 | The command \macro{\JSONParseArrayUse} accepts as option set in the optional argument the key \macro{store in} which takes a token variable into which the result of the command should be stored. Storing the result of the mapped inline function can be helpful if JSON data should be reformatted for use in another function. 622 | 623 | \begin{codeexamplecolumns} 624 | \JSONParseArrayUse[store in=\myJSONitems] 625 | {\myJSONdata}{array}[key_a]{,} 626 | 627 | \begin{tikzpicture} 628 | \foreach \x [count=\i] in \myJSONitems { 629 | \fill[blue] (\i,0) circle[radius=2pt] 630 | node[above=5pt, black] {\x}; 631 | } 632 | \end{tikzpicture} 633 | \end{codeexamplecolumns} 634 | 635 | \begin{macrodef}[macro:arraymapfunction] 636 | |\JSONParseArrayMapFunction|[]{}{}[] 637 | {} 638 | \end{macrodef} 639 | The command \macro{\JSONParseArrayMapFunction} works in a similar way and takes the same first three arguments as the command \macro{\JSONParseArrayUse}. However, instead of a string that is added between the array items, it takes a command (a token list) as fourth argument. This command can be defined beforehand and will be called for every array item. Inside its definition, the commands \macro{\JSONParseArrayIndex}, \macro{\JSONParseArrayKey} and \macro{\JSONParseArrayValue} can be used which are updated for each item and output the index, the key and the value of the current item respectively. Note that these commands are defined globally to make accessing them as easy as possible. 640 | 641 | For example, let us assume the same JSON data structure as defined above parsed into the token variable \macro{\myJSONdata}. 
Then, the following can be done: 642 | 643 | \begin{codeexamplecolumns} 644 | \newcommand{\myJSONitem}{ 645 | \item \emph{\JSONParseArrayValue} 646 | } 647 | 648 | \begin{itemize} 649 | \JSONParseArrayMapFunction{\myJSONdata} 650 | {array}[key_a]{\myJSONitem} 651 | \end{itemize} 652 | \end{codeexamplecolumns} 653 | 654 | It is possible to make use of multiple subkeys by passing them as a comma separated list as third argument to the command. Inside the command that is called for every array item, the different keys and values can be access via commands numbered with uppercase Roman numerals such as \macro{\JSONParseArrayKeyI}, \macro{\JSONParseArrayKeyII}, \macro{\JSONParseArrayKeyIII} etc.\ and \macro{\JSONParseArrayValueI}, \macro{\JSONParseArrayValueII}, \macro{\JSONParseArrayValueIII} etc. 655 | 656 | We can extend the above example in the following way: 657 | 658 | \begin{codeexamplecolumns} 659 | \newcommand{\myJSONitem}{ 660 | \item \emph{\JSONParseArrayValueI :} 661 | \JSONParseArrayValueII 662 | } 663 | 664 | \begin{itemize} 665 | \JSONParseArrayMapFunction{\myJSONdata} 666 | {array}[key_a,key_b]{\myJSONitem} 667 | \end{itemize} 668 | \end{codeexamplecolumns} 669 | 670 | \begin{macrodef} 671 | |code before|={} 672 | |code after|={} 673 | \end{macrodef} 674 | 675 | The \macro{\JSONParseArrayMapFunction} command also accepts the options \macro{code before} and \macro{code after}. These options can be used to place code before and after the output that is generated by the command called for every array item, for example for typesetting tabular contents. 676 | 677 | Typesetting the above example in a tabular way can be achieved as follows: 678 | 679 | \begin{codeexamplecolumns} 680 | \newcommand{\myJSONitem}{ 681 | \JSONParseArrayValueI & 682 | \JSONParseArrayValueII \\ 683 | } 684 | 685 | \JSONParseArrayMapFunction[ 686 | code before={ 687 | \begin{tabular}{ c c } 688 | \textbf{key a} & 689 | \textbf{key b} \\ \hline 690 | }, 691 | code after={ 692 | \hline 693 | \end{tabular} 694 | } 695 | ]{\myJSONdata}{array}[key_a,key_b] 696 | {\myJSONitem} 697 | \end{codeexamplecolumns} 698 | 699 | Finally, the first optional argument of the command can be used to pass options to the command, such as \macro{escape} or \macro{rescan}, that are then applied locally. 700 | 701 | \begin{macrodef} 702 | |\JSONParseArrayMapInline|[]{}{}{} 703 | \end{macrodef} 704 | The command \macro{\JSONParseArrayMapInline} takes as first mandatory argument a token variable holding a parsed JSON string or JSON file and as second mandatory argument a key to select an array in the JSON data. The last argument can contain any code where the index of the current item is represented by \macro{#1}. The code may contain another instance of \macro{\JSONParseArrayMapInline}, which means that the command can be nested. 705 | 706 | Using the above example, the mechanism could be implemented as follows: 707 | 708 | \begin{codeexamplecolumns} 709 | \begin{itemize} 710 | \JSONParseArrayMapInline{\myJSONdata} 711 | {array}{ 712 | \item \JSONParseValue{\myJSONdata} 713 | {array[#1].key_a} 714 | } 715 | \end{itemize} 716 | \end{codeexamplecolumns} 717 | 718 | Making use of the commands \macro{\JSONParseKeys} and \macro{\JSONParseValue} together with the \macro{store in} option, keys and values can be accessed. 
Due to the fact that cells create scopes, we need to repeat the part of the code that selects the current key: 719 | 720 | \begin{codeexamplecolumns} 721 | \JSONParseArrayMapInline{\myJSONdata} 722 | {array}{ 723 | \JSONParseKeys[store in=\mykeys] 724 | {\myJSONdata}{array[#1]} 725 | \JSONParseValue 726 | [store in=\mykeya, rescan=false] 727 | {\mykeys}{[0]} 728 | \JSONParseValue 729 | [store in=\mykeyb, rescan=false] 730 | {\mykeys}{[1]} 731 | 732 | \emph{\mykeya :} 733 | \JSONParseValue{\myJSONdata} 734 | {array[#1].\mykeya}\par 735 | 736 | \emph{\mykeyb :} 737 | \JSONParseValue{\myJSONdata} 738 | {array[#1].\mykeyb}\par\bigskip 739 | } 740 | \end{codeexamplecolumns} 741 | 742 | Note that the underscores in the names of the keys can be printed without changing to math mode in the above example by switching off rescanning via \macro{rescan=false}. This is possible because all JSON data is stored as string where all characters (except for spaces and tabs) have category code 12 (``other''). 743 | 744 | The command \macro{\JSONParseArrayMapInline} accepts as option set in the optional argument the key \macro{store in} which takes a token variable into which the result of the mapped inline function should be stored. Refer to the relevant explanations to command \macro{\JSONParseArrayUse} \hyperlink{macro:arrayuse}{above} for more information. 745 | 746 | \warning In order for the result to be stored in a token variable, the inline function needs to be fully expandable. For example, it is not possible to use the command \macro{\JSONParseValue} in the code of the inline function while using the command \macro{\JSONParseExpandableValue} is allowed. Note that the command \macro{\JSONParseArrayMapInline} itself is not expandable which means that nested use of this command prevents storing the result in a token variable. 747 | 748 | Storing the result of the mapped inline function can be helpful if JSON data should be reformatted for use in a plotting functions. An example for a use case with PGFplots is shown below. In this example, the parsed JSON string \macro{{ "data": [ [0,0], [1,-1], [2,1] ] }} was stored in the token variable \macro{\myJSONplotdata}. 749 | 750 | \JSONParse{\myJSONplotdata}{ { "data": [ [0,0], [1,-1], [2,1] ] } } 751 | \pgfplotsset{width=5.25cm} 752 | 753 | \begin{codeexamplecolumns} 754 | \JSONParseArrayMapInline 755 | [store in={\myJSONplotcoords}] 756 | {\myJSONplotdata}{data}{ 757 | ( 758 | \JSONParseExpandableValue 759 | {\myJSONplotdata}{data[#1][0]} 760 | , 761 | \JSONParseExpandableValue 762 | {\myJSONplotdata}{data[#1][1]} 763 | ) 764 | } 765 | 766 | \begin{tikzpicture} 767 | \begin{axis} 768 | \addplot coordinates 769 | {\myJSONplotcoords}; 770 | \end{axis} 771 | \end{tikzpicture} 772 | \end{codeexamplecolumns} 773 | 774 | \section{Externalizing parsed JSON data}\label{sec:externalizing} 775 | 776 | Parsing large and complex JSON files can take quite a while. In order to speed up follow-up compilation runs, this package provides a way to store parsed JSON data for future use. Once a file for externalization has been created, the package will try to load the data from this file instead of parsing the JSON data again. 777 | 778 | \begin{macrodef} 779 | |externalize| 780 | |externalize|={} 781 | \end{macrodef} 782 | With the key \macro{externalize} set (or set to true), a file will be created in the working directory that stores the externalization of the parsed JSON data. The file name gets the extension \macro{.jsonparse}. 
The file name is created automatically and consists of the name of the current file followed by an underscore and the name of the token variable where the JSON data is stored into. If a file with the same name and file extension already exists, an error will be issued. 783 | 784 | This key can be set using \macro{\JSONParseSet}. It can also be set locally as option to the commands \macro{\JSONParse} and \macro{\JSONParseFromFile}. 785 | 786 | \begin{macrodef} 787 | |externalize prefix|={} 788 | \end{macrodef} 789 | With the key \macro{externalize prefix}, a prefix can be defined that is added to the file name. Per default this is an empty string. 790 | 791 | This key can be set using \macro{\JSONParseSet}. It can also be set locally as option to the commands \macro{\JSONParse} and \macro{\JSONParseFromFile}. 792 | 793 | \begin{macrodef} 794 | |externalize file name|={} 795 | \end{macrodef} 796 | The key \macro{externalize file name} sets the schema for the file name. The default schema is as follows: 797 | 798 | \begin{codeexample} 799 | \l_jsonparse_externalize_prefix_str \c_sys_jobname_str 800 | \c_underscore_str \l_jsonparse_current_prop_str 801 | \end{codeexample} 802 | 803 | The token variable \macro{\l_jsonparse_externalize_prefix_str} contains the prefix that is set using the key \macro{externalize prefix}. \macro{\c_sys_jobname_str} holds the name of the current file (the current job name), \macro{\c_underscore_str} is an underscore and the token variable \macro{\l_jsonparse_current_prop_str} contains the name of the property list where the relevant JSON data is stored into (without the leading backslash). 804 | 805 | This key can be set using \macro{\JSONParseSet}. It can also be set locally as option to the commands \macro{\JSONParse} and \macro{\JSONParseFromFile}. 806 | 807 | \begin{macrodef} 808 | |\JSONParsePut|{}{}[] 809 | \end{macrodef} 810 | The command \macro{\JSONParsePut} is used by the externalization procedure to re-read already parsed JSON data to the main file. It just adds a key-value pair to the property list (where the value part is read as string). Hence, it can also be used to append more entries to an already existing property list containing parsed JSON data. 811 | 812 | \section{Changing parsing and typesetting behavior via option keys}\label{sec:options} 813 | 814 | The package provides a set of keys that can be set to change the separators used to select the relevant value in the JSON structure, the output that is generated from the JSON data as well as other things. 815 | 816 | \begin{macrodef} 817 | |\JSONParseSet|{} 818 | \end{macrodef} 819 | The command \macro{\JSONParseSet} can be used to specify options via key-value pairs (separated by commas). Keys that are presented here as a subkey (i.\,e.\ preceded by another key and a slash such as \macro{key/subkey}) can also be set using the syntax \macro{key={subkey}} and multiple subkeys belonging to one key can be combined using commas as separator. Several user commands allow to pass keys directly which are then applied locally. 820 | 821 | Not every key takes effect in every situation. Some keys affect the parsing procedure and thus need to be set before parsing. Some keys affect the typeset result and some keys only affect the typeset result when used in combination with specific commands. 
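As a brief sketch (the particular values are chosen for illustration only), a few options could be set globally as follows:

\begin{codeexample}
\JSONParseSet{
  escape = {dollar sign, low line} ,
  separator/child = {>} ,
  keyword/null = {N/A}
}
\end{codeexample}

Here, the key \macro{escape} is set using the \macro{key={subkey}} syntax mentioned above, while \macro{separator/child} and \macro{keyword/null} are addressed via their full key paths.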
822 | 823 | \subsection{Keys affecting the parsing procedure} 824 | 825 | Information about the key \macro{externalize} as well as about the related keys \macro{externalize prefix} and \macro{externalize file name} can be found above in section \ref{sec:externalizing}. 826 | 827 | \begin{macrodef} 828 | separator/|child|={} 829 | separator/|array left|={} 830 | separator/|array right|={} 831 | \end{macrodef} 832 | With the key \macro{separator/child}, the separator for child objects that is used in the key to select a specific value in the JSON data structure can be changed. Per default, the child separator is a dot (\macro{.}). 833 | 834 | With the keys \macro{separator/array left} and \macro{separator/array right}, the separators for arrays that are used in the key to select a specific value in the JSON data structure can be changed. Per default, the separators are square brackets (\macro{[} and \macro{]}). Changing these separators to curly braces (\macro{{}}) is not supported due to their grouping function in TeX. 835 | 836 | Changing the separators can be useful if keys in the JSON structure already use these characters. These settings take place already during parsing. 837 | 838 | These keys can be set using \macro{\JSONParseSet}. They can also be set locally as option to the commands \macro{\JSONParse} and \macro{\JSONParseFromFile}. When set using \macro{\JSONParseSet}, these keys only take effect when set before parsing. 839 | 840 | \begin{macrodef} 841 | |zero-based| 842 | |zero-based|={} 843 | \end{macrodef} 844 | If the key \macro{zero-based} is set (or explicitly set to \macro{true}), the index of array items starts with zero. If set to false, the indexing starts with one instead. Per default, the package uses zero-based indexing to match JavaScript notation. This setting affects indexing already during parsing. 845 | 846 | This key can be set using \macro{\JSONParseSet}. It can also be set locally as option to the commands \macro{\JSONParse} and \macro{\JSONParseFromFile}. When set using \macro{\JSONParseSet}, this key only takes effect when set before parsing. 847 | 848 | \begin{macrodef}[key:validatenumbers] 849 | |validate numbers| 850 | |validate numbers|={} 851 | \end{macrodef} 852 | If set to \macro{false}, the key \macro{validate numbers} omits an internal validation of numerical expressions against the JSON specification for numbers. Turning off this feature can increase the parsing speed if many numbers are to be parsed. Validations are carried out per default. 853 | 854 | This key can be set using \macro{\JSONParseSet}. It can also be set locally as option to the commands \macro{\JSONParse} and \macro{\JSONParseFromFile}. 855 | 856 | \begin{macrodef}[key:skipstructures] 857 | |skip structures| 858 | |skip structures|={} 859 | \end{macrodef} 860 | If set or explicitly set to \macro{true}, the key \macro{skip structures} deactivates the storage of arrays and objects as values to the relative keys. Also, it omits storing of the whole JSON string as value with the pseudo key \macro{.} (or the string defined using the key \macro{separator/child}). Skipping structures is deactivated per default. Setting this key to true can speed up the parsing process and circumvent memory limitations. 861 | 862 | This key can be set using \macro{\JSONParseSet}. It can also be set locally as option to the commands \macro{\JSONParse} and \macro{\JSONParseFromFile}. 
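The following sketch (the token variable \macro{\myJSONlocal} and the data are only illustrative) combines some of these keys locally for a single parsing run:

\begin{codeexample}
\JSONParse[zero-based=false, validate numbers=false, skip structures]
  {\myJSONlocal}{ { "list" : [ 10 , 20 , 30 ] } }

% with one-based indexing, the first item is selected with index 1
\JSONParseValue{\myJSONlocal}{list[1]}
\end{codeexample}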
863 | 
864 | \subsection{Keys affecting the typesetting}
865 | 
866 | Some keys that change the typesetting behavior are explained in other parts of this documentation.
867 | 
868 | \begin{itemize}
869 | \item Information about the keys \macro{escape} and \macro{rescan} can be found above in section \ref{sec:escaping}.
870 | \item Information about the key \macro{store in} can be found \hyperlink{key:storein}{above} in the context of the description of the command \macro{\JSONParseValue} as well as in the descriptions of \macro{\JSONParseKeys}, \macro{\JSONParseArrayCount}, \macro{\JSONParseArrayUse} and \macro{\JSONParseArrayMapInline}.
871 | \item Information about the keys \macro{code before} and \macro{code after} can be found in the description of the command \macro{\JSONParseArrayMapFunction} \hyperlink{macro:arraymapfunction}{above}.
872 | \end{itemize}
873 | 
874 | \begin{macrodef}
875 | keyword/|true|={}
876 | keyword/|false|={}
877 | keyword/|null|={}
878 | \end{macrodef}
879 | With the keys \macro{keyword/true}, \macro{keyword/false} and \macro{keyword/null}, the string that is typeset for true, false and null values can be changed. The default strings that are typeset are \macro{true}, \macro{false} and \macro{null} respectively. Only strings should be used as replacement. These replacements take place already during parsing.
880 | 
881 | These keys can be set using \macro{\JSONParseSet}. They can also be set locally as option to the commands \macro{\JSONParse} and \macro{\JSONParseFromFile}. When set using \macro{\JSONParseSet}, these keys only take effect when set before parsing.
882 | 
883 | \begin{macrodef}
884 | replace/|backspace|={}
885 | replace/|formfeed|={}
886 | replace/|linefeed|={}
887 | replace/|carriage return|={}
888 | replace/|horizontal tab|={}
889 | \end{macrodef}
890 | These keys can be used to set the replacement for the JSON escape sequences \macro{\b} (backspace), \macro{\f} (formfeed), \macro{\n} (linefeed), \macro{\r} (carriage return) and \macro{\t} (horizontal tab). The default replacement is a space in each case. These replacements take place only during typesetting.
891 | 
892 | \warning Note that commands such as \macro{\par} need to be masked (for example by using a copy created via \macro{\let}) in order to be used as replacement.
893 | 
894 | These keys can be set using \macro{\JSONParseSet}. They can also be set locally as option to the commands \macro{\JSONParseValue}, \macro{\JSONParseArrayUse} and \macro{\JSONParseArrayMapFunction}.
895 | 
896 | \begin{macrodef}
897 | |global|
898 | |global|={}
899 | \end{macrodef}
900 | The key \macro{global} can be used together with the key \macro{store in} to globally set the value of the relevant token list. Detailed information about the key \macro{store in} can be found \hyperlink{key:storein}{above}.
901 | 
902 | \section{L3 commands}
903 | 
904 | The following token variables and commands are provided for defining user functions by package authors. For the conditional functions described below, apart from the variant that provides both a true and a false branch, the variants that only provide an argument for the true or for the false branch respectively are defined as well, which is indicated by the letters {\itshape\macro{TF}} printed in italics. Commands marked with a star (\expandablestar) are fully expandable.
905 | 906 | \begin{macrodef} 907 | |\g_jsonparse_entries_prop| 908 | \end{macrodef} 909 | The token variable \macro{\g_jsonparse_entries_prop} holds, as a property list, all elements of a JSON data structure that was parsed by the command \macro{\jsonparse_parse:n}. 910 | 911 | \begin{macrodef} 912 | |\l_jsonparse_current_prop_str| 913 | \end{macrodef} 914 | The token variable \macro{\l_jsonparse_current_prop_str} holds the name of the property list as defined by the user when calling the command \macro{\JSONParse} or \macro{\JSONParseFromFile} (without the leading backslash). 915 | 916 | \begin{macrodef} 917 | |\l_jsonparse_externalize_prefix_str| 918 | \end{macrodef} 919 | The token variable \macro{\l_jsonparse_externalize_prefix_str} holds the prefix for externalization file names as defined by the user via the key \macro{externalize prefix}. 920 | 921 | \begin{macrodef} 922 | |\jsonparse_parse:n| {} 923 | |\jsonparse_parse:o| {} 924 | |\jsonparse_parse:e| {} 925 | \end{macrodef} 926 | The command \macro{\jsonparse_parse:n} takes as argument a JSON string and populates the token variable (property list) \macro{\g_jsonparse_entries_prop} with key-value pairs representing all elements of the JSON data structure described by this string. This command does not escape the input in any way. 927 | 928 | \begin{macrodef} 929 | |\jsonparse_set_parse:Nn| {} 930 | |\jsonparse_set_parse:No| {} 931 | |\jsonparse_set_parse:Ne| {} 932 | \end{macrodef} 933 | The command \macro{\jsonparse_set_parse:Nn} creates the token variable given as the first argument as a property list and, after having called \macro{\jsonparse_parse:n} using the second argument, locally sets this newly created property list equal to \macro{\g_jsonparse_entries_prop}. If escaping is activated, this command will pre-process the input according to the selected escaping mode before forwarding it to \macro{\jsonparse_parse:n}. See section \ref{sec:escaping} above for more on escaping. 934 | 935 | \begin{macrodef} 936 | |\jsonparse_gset_parse:Nn| {} 937 | |\jsonparse_gset_parse:No| {} 938 | |\jsonparse_gset_parse:Ne| {} 939 | \end{macrodef} 940 | The command \macro{\jsonparse_gset_parse:Nn} is identical to \macro{\jsonparse_set_parse:Nn} but with the property list being set globally. 941 | 942 | \begin{macrodef} 943 | |\jsonparse_set_parse_keys:NN| 944 | \end{macrodef} 945 | The command \macro{\jsonparse_set_parse_keys:NN} processes the token variable given as the second argument as a property list and selects all top-level keys, which are then stored as a JSON array in the string variable given as the first argument. The pseudo key \macro{.} (or the string defined using the key \macro{separator/child}), which selects the complete JSON data, is ignored. If the JSON data is an array, the indices of the items (wrapped into the separators defined by \macro{separator/array left} and \macro{separator/array right}) are used as keys. 946 | 947 | \begin{macrodef} 948 | |\jsonparse_rescan:n| {} 949 | |\jsonparse_rescan:e| {} 950 | \end{macrodef} 951 | The command \macro{\jsonparse_rescan:n} rescans the JSON value given in the argument. Rescanning converts all tokens to their default category codes and expands TeX control sequences. Further, during the rescanning process, JSON escape sequences are replaced, and characters that do not require escaping in JSON but do in TeX are replaced by the relevant TeX escape sequences.
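As a minimal, hedged sketch of the parsing commands above (the variable name \macro{\l_my_json_prop} and the data are illustrative), \macro{\jsonparse_set_parse:Nn} can be combined with the standard property list functions of expl3, for example to list all parsed entries on the terminal:

\begin{verbatim}
\ExplSyntaxOn
% Illustrative sketch: parse a JSON string into a local property
% list (created by \jsonparse_set_parse:Nn itself) and write each
% parsed key-value pair to the terminal.
\jsonparse_set_parse:Nn \l_my_json_prop
  { ~ { ~ "a" ~ : ~ [ ~ 1 ~ , ~ 2 ~ ] ~ , ~ "b" ~ : ~ "foo" ~ } ~ }
\prop_map_inline:Nn \l_my_json_prop
  { \iow_term:n { #1 ~ = ~ #2 } }
\ExplSyntaxOff
\end{verbatim}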
952 | 953 | \begin{macrodef} 954 | |\jsonparse_set_rescan:Nn| {} 955 | |\jsonparse_set_rescan:Ne| {} 956 | \end{macrodef} 957 | The command \macro{\jsonparse_set_rescan:Nn} rescans the JSON value given in the second argument and stores the result in the token variable specified in the first argument. 958 | 959 | \begin{macrodef} 960 | |\jsonparse_gset_rescan:Nn| {} 961 | |\jsonparse_gset_rescan:Ne| {} 962 | \end{macrodef} 963 | The command \macro{\jsonparse_gset_rescan:Nn} rescans the JSON value given in the second argument and stores the result globally in the token variable specified in the first argument. 964 | 965 | \begin{macrodef} 966 | |\jsonparse_put_right_rescan:Nn| {} 967 | |\jsonparse_put_right_rescan:Ne| {} 968 | \end{macrodef} 969 | The command \macro{\jsonparse_put_right_rescan:Nn} rescans the JSON value given in the second argument and adds the result to the end of the token variable specified in the first argument. 970 | 971 | \begin{macrodef} 972 | |\jsonparse_gput_right_rescan:Nn| {} 973 | |\jsonparse_gput_right_rescan:Ne| {} 974 | \end{macrodef} 975 | The command \macro{\jsonparse_gput_right_rescan:Nn} rescans the JSON value given in the second argument and adds the result globally to the end of the token variable specified in the first argument. 976 | 977 | \begin{macrodef} 978 | |\jsonparse_set_filter:Nn| {} 979 | \end{macrodef} 980 | The command \macro{\jsonparse_set_filter:Nn} processes the token variable given as the first argument as a property list and filters it according to the key given as the second argument. Filtering means that for every entry in the property list, the key of this entry is compared against the key given to the command. If the key in the property list starts with the given key, the matching part is removed from the key in the property list. If the keys do not match, the entry is completely removed from the property list. If the second argument matches the pseudo key \macro{.} (or the string defined using the key \macro{separator/child}) exactly, the complete property list except for this key is returned. 981 | 982 | \begin{macrodef} 983 | |\jsonparse_set_array_count:NN| 984 | \end{macrodef} 985 | The command \macro{\jsonparse_set_array_count:NN} processes the token variable given as the first argument as a property list and, assuming that it is an array, counts its items and stores the result in the given integer variable. If the token variable does not expand to a key that represents an array item, that is, if the key does not start with the character defined by \macro{separator/array left}, the command will raise an error. The command \macro{\JSONParseArrayCount} serves as a wrapper of this command. 986 | 987 | \begin{macrodef}* 988 | |\jsonparse_if_num:n|?TF? {} {} {} 989 | |\jsonparse_if_num:V|?TF? {} {} {} 990 | |\jsonparse_if_num_p:n| {} 991 | |\jsonparse_if_num_p:V| 992 | \end{macrodef} 993 | The expandable conditional function \macro{\jsonparse_if_num:nTF} checks whether a string is a valid JSON number according to the relevant specification. It executes the true code if the string is a valid JSON number and the false code if not. The variants that only provide an argument for the true or false case work accordingly. The command \macro{\jsonparse_if_num_p:n} returns a boolean true or false (i.\,e.\ \macro{\c_true_bool} or \macro{\c_false_bool}). 994 | 995 | \begin{macrodef}* 996 | |\jsonparse_unicode_if_high_surrogate:n|?TF? {} 997 | {} {} 998 | |\jsonparse_unicode_if_high_surrogate:e|?TF?
{} 999 | {} {} 1000 | |\jsonparse_unicode_if_high_surrogate_p:n| {} 1001 | |\jsonparse_unicode_if_high_surrogate_p:e| {} 1002 | \end{macrodef} 1003 | The expandable conditional function \macro{\jsonparse_unicode_if_high_surrogate:nTF} checks whether the codepoint entered as argument (an integer that can be hexadecimal if preceded by \macro{"}) is in the range from \macro{"D800} to \macro{"DBFF}, which means that it is the first part of a surrogate pair (a high surrogate). The conditional function executes the true or false code depending on the evaluation. The variants that only provide an argument for the true or false case work accordingly. The command \macro{\jsonparse_unicode_if_high_surrogate_p:n} returns a boolean true or false (i.\,e.\ \macro{\c_true_bool} or \macro{\c_false_bool}). 1004 | 1005 | \begin{macrodef}* 1006 | |\jsonparse_unicode_if_low_surrogate:n|?TF? {} 1007 | {} {} 1008 | |\jsonparse_unicode_if_low_surrogate:e|?TF? {} 1009 | {} {} 1010 | |\jsonparse_unicode_if_low_surrogate_p:n| {} 1011 | |\jsonparse_unicode_if_low_surrogate_p:e| {} 1012 | \end{macrodef} 1013 | The expandable conditional function \macro{\jsonparse_unicode_if_low_surrogate:nTF} checks whether the codepoint entered as argument (an integer that can be hexadecimal if preceded by \macro{"}) is in the range from \macro{"DC00} to \macro{"DFFF}, which means that it is the last part of a surrogate pair (a low surrogate). The conditional function executes the true or false code depending on the evaluation. The variants that only provide an argument for the true or false case work accordingly. The command \macro{\jsonparse_unicode_if_low_surrogate_p:n} returns a boolean true or false (i.\,e.\ \macro{\c_true_bool} or \macro{\c_false_bool}). 1014 | 1015 | \begin{macrodef}* 1016 | |\jsonparse_unicode_convert_surrogate_pair:nn| {} {} 1017 | |\jsonparse_unicode_convert_surrogate_pair:ee| {} {} 1018 | \end{macrodef} 1019 | The expandable command \macro{\jsonparse_unicode_convert_surrogate_pair:nn} converts a surrogate pair to the relevant Unicode codepoint. The returned value is an integer. It takes as its first argument the codepoint of the high surrogate and as its second argument the codepoint of the low surrogate. It does not check whether the codepoints actually belong to the relevant ranges of codepoints for high and low surrogates. 1020 | 1021 | \section{Deprecated commands} 1022 | 1023 | The following commands, displayed in red boxes on the left, have been deprecated; the relevant replacement, displayed in the green box to the right, should be used instead. To simplify the presentation and to clarify how the arguments are to be used, numbers are used to identify the arguments. 1024 | 1025 | To ensure backward compatibility, the deprecated commands are still supported, but their use is not recommended.
1026 | 1027 | \subsection{Deprecated user commands} 1028 | 1029 | \begin{macrodeprecated}* 1030 | |\JSONParseSetValue| 1031 | {<1>}{<2>}{<3>} 1032 | \end{macrodeprecated} 1033 | \hfill 1034 | \begin{macroreplacement} 1035 | |\JSONParseValue|[store in={<1>}, 1036 | rescan=false]{<2>}{<3>} 1037 | \end{macroreplacement} 1038 | 1039 | \begin{macrodeprecated}* 1040 | |\JSONParseSetRescanValue| 1041 | {<1>}{<2>}{<3>} 1042 | \end{macrodeprecated} 1043 | \hfill 1044 | \begin{macroreplacement} 1045 | |\JSONParseValue|[store in={<1>}, 1046 | rescan]{<2>}{<3>} 1047 | \end{macroreplacement} 1048 | 1049 | \begin{macrodeprecated}* 1050 | |\JSONParseSetKeys| 1051 | {<1>}{<2>}{<3>} 1052 | \end{macrodeprecated} 1053 | \hfill 1054 | \begin{macroreplacement} 1055 | |\JSONParseKeys| 1056 | [store in={<1>}]{<2>}{<3>} 1057 | \end{macroreplacement} 1058 | 1059 | \begin{macrodeprecated}* 1060 | |\JSONParseSetArrayCount| 1061 | {<1>}{<2>}{<3>} 1062 | \end{macrodeprecated} 1063 | \hfill 1064 | \begin{macroreplacement} 1065 | |\JSONParseArrayCount| 1066 | [store in={<1>}]{<2>}{<3>} 1067 | \end{macroreplacement} 1068 | 1069 | \begin{macrodeprecated}* 1070 | |\JSONParseArrayValues| 1071 | [<1>]{<2>}{<3>}[<4>]{<5>} 1072 | \end{macrodeprecated} 1073 | \hfill 1074 | \begin{macroreplacement} 1075 | |\JSONParseArrayUse| 1076 | [<1>]{<2>}{<3>}[<4>]{<5>} 1077 | \end{macroreplacement} 1078 | 1079 | \begin{macrodeprecated}* 1080 | |\JSONParseArrayValuesMap| 1081 | [<1>]{<2>}{<3>}[<4>]{<5>} 1082 | [<6>][<7>] 1083 | \end{macrodeprecated} 1084 | \hfill 1085 | \begin{macroreplacement} 1086 | |\JSONParseArrayMapFunction| 1087 | [<1>, code before={<6>}, 1088 | code after={<7>}] 1089 | {<2>}{<3>}[<4>]{\<5>} 1090 | \end{macroreplacement} 1091 | 1092 | \begin{macrodeprecated}* 1093 | |\x|{<1>}{<2>} 1094 | \end{macrodeprecated} 1095 | \hfill 1096 | \begin{macroreplacement} 1097 | |\$|{<1>}{<2>} 1098 | \end{macroreplacement} 1099 | 1100 | The command \macro{\JSONParseArrayMapFunction} takes as last argument a command denoting the relevant mapping function including the preceding backslash, while the deprecated command \macro{\JSONParseArrayValuesMap} required the name of this function without preceding backslash. 1101 | 1102 | The commands \macro{\JSONParseSetRescanValue} and \macro{\JSONParseSetKeys} will locally set the relevant token variable. 
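For example, a typical call of the deprecated \macro{\JSONParseSetValue} (the macro names and the key are illustrative) translates to the current interface as follows:

\begin{verbatim}
% Deprecated form (macro names and key are illustrative):
\JSONParseSetValue{\myValue}{\myJSONdata}{a[0].foo}
% Current replacement:
\JSONParseValue[store in={\myValue}, rescan=false]{\myJSONdata}{a[0].foo}
\end{verbatim}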
1103 | 1104 | \subsection{Deprecated L3 commands} 1105 | 1106 | \begin{macrodeprecated}* 1107 | |\jsonparse_to_prop:Nn| 1108 | <1> {<2>} 1109 | \end{macrodeprecated} 1110 | \hfill 1111 | \begin{macroreplacement} 1112 | |\jsonparse_gset_parse:Nn| 1113 | <1> {<2>} 1114 | \end{macroreplacement} 1115 | 1116 | \begin{macrodeprecated}* 1117 | |\jsonparse_to_prop_local:Nn| 1118 | <1> {<2>} 1119 | \end{macrodeprecated} 1120 | \hfill 1121 | \begin{macroreplacement} 1122 | |\jsonparse_set_parse:Nn| 1123 | <1> {<2>} 1124 | \end{macroreplacement} 1125 | 1126 | \begin{macrodeprecated}* 1127 | |\jsonparse_filter:Nn| 1128 | <1> {<2>} 1129 | \end{macrodeprecated} 1130 | \hfill 1131 | \begin{macroreplacement} 1132 | |\jsonparse_set_filter:Nn| 1133 | <1> {<2>} 1134 | \end{macroreplacement} 1135 | 1136 | \begin{macrodeprecated}* 1137 | |\jsonparse_parse_keys:NN| 1138 | <1> <2> 1139 | \end{macrodeprecated} 1140 | \hfill 1141 | \begin{macroreplacement} 1142 | |\jsonparse_set_parse_keys:NN| 1143 | <2> <1> 1144 | \end{macroreplacement} 1145 | 1146 | \begin{macrodeprecated}* 1147 | |\jsonparse_array_count:NN| 1148 | <1> <2> 1149 | \end{macrodeprecated} 1150 | \hfill 1151 | \begin{macroreplacement} 1152 | |\jsonparse_set_array_count:NN| 1153 | <2> <1> 1154 | \end{macroreplacement} 1155 | 1156 | Note that the order of the arguments of the command \macro{\jsonparse_set_parse_keys:NN} is reversed compared to the deprecated command \macro{\jsonparse_parse_keys:NN}. The same holds for the command \macro{\jsonparse_set_array_count:NN} compared to the deprecated command \macro{\jsonparse_array_count:NN}. 1157 | 1158 | % ===== 1159 | 1160 | \printchanges 1161 | 1162 | \end{document} 1163 | 1164 | %% End of file `jsonparse-doc.tex`. 1165 | -------------------------------------------------------------------------------- /testfiles/README.md: -------------------------------------------------------------------------------- 1 | # Test files 2 | 3 | The following tests are performed on every push or pull request. The first column gives the name of the `.lvt` file. All tests are compared against `.tlg` files, except for `test003`, `test004`, `test010` and `test011`, which are compared against `.lve` files. Most tests are run with PDFTeX, while some tests (involving Unicode) are run with LuaTeX.
4 | 5 | | Test file | Compared against | PDFTeX | LuaTeX | Tests performed | 6 | | :--- | :--- | :---: | :---: | :--- | 7 | | `test001` | `.tlg` | ✅ | | Parsing of JSON strings, inline and from file; parsing of strings, numbers, null, booleans, arrays and objects | 8 | | `test002` | `.tlg` | | ✅ | Parsing of Unicode in JSON strings, inline and from file; parsing of Unicode (BMP and other planes) as key and value | 9 | | `test003` | `.lve` | ✅ | | Parsing of arrays: `\JSONParseArrayCount`, `\JSONParseArrayUse`, `\JSONParseArrayMapFunction` and `\JSONParseArrayMapInline` | 10 | | `test004` | `.lve` | ✅ | | Parsing of JSON escape sequences (except `\u`) | 11 | | `test005` | `.tlg` | ✅ | | Parsing of nested JSON strings via `\x`; indexing not zero-based | 12 | | `test006` | `.tlg` | ✅ | | Parsing of replacements for JSON escape sequences | 13 | | `test007` | `.tlg` | ✅ | ✅ | Parsing of JSON escape sequences for Unicode (`\u`) including conversion of surrogate pairs | 14 | | `test008` | `.tlg` | ✅ | | Parsing of custom separators for arrays and children; parsing of custom replacements for boolean and null values | 15 | | `test009` | `.tlg` | ✅ | | Externalizing of parsed data | 16 | | `test010` | `.lve` | ✅ | | Use of `\JSONParseKeys` and `\JSONParseFilter` | 17 | | `test011` | `.lve` | ✅ | | Use of TeX escape sequences | 18 | | `test012` | `.tlg` | ✅ | | Error handling of misplaced commas or colons in arrays and objects | 19 | | `test100` | `.tlg` | ✅ | | Validation of JSON numbers | 20 | | `test200` | `.tlg` | ✅ | | Checking for high and low surrogates; checking conversion of surrogate pairs | 21 | -------------------------------------------------------------------------------- /testfiles/test001.lvt: -------------------------------------------------------------------------------- 1 | \documentclass{article} 2 | \usepackage[T1]{fontenc} 3 | \input{regression-test} 4 | \usepackage[debug]{jsonparse} 5 | 6 | \begin{document} 7 | 8 | \START 9 | 10 | \SEPARATOR 11 | 12 | \JSONParse{\myJSONdata}{ { "A" : true , "B" : false , "C" : null } } 13 | 14 | \SEPARATOR 15 | 16 | \JSONParse{\myJSONdataA}{ { "A" : [ { "foo" : "bar" , "a" : 1 } , -1.23 , null ] , "B" : "baz" } } 17 | 18 | \ExplSyntaxOn 19 | 20 | \TEST { parsing , ~ from ~ string } { 21 | \str_if_eq:enTF { \JSONParseExpandableValue{\myJSONdataA}{A[0].foo} } { bar } { 22 | \TRUE 23 | } { 24 | \FALSE 25 | } 26 | 27 | \str_if_eq:enTF { \JSONParseExpandableValue{\myJSONdataA}{A[2]} } { null } { 28 | \TRUE 29 | } { 30 | \FALSE 31 | } 32 | 33 | \str_if_eq:enTF { \JSONParseExpandableValue{\myJSONdataA}{B} } { baz } { 34 | \TRUE 35 | } { 36 | \FALSE 37 | } 38 | } 39 | 40 | \ExplSyntaxOff 41 | 42 | \OMIT 43 | \begin{filecontents}[noheader,force]{test-1.json} 44 | { "A" : [ { "foo" : "bar" , "a" : 1 } , -1.23 , null ] , "B" : "baz" } 45 | \end{filecontents} 46 | \TIMO 47 | 48 | \JSONParseFromFile{\myJSONdataB}{test-1.json} 49 | 50 | \ExplSyntaxOn 51 | 52 | \TEST { parsing , ~ from ~ file } { 53 | \str_if_eq:enTF { \JSONParseExpandableValue{\myJSONdataB}{A[0].foo} } { bar } { 54 | \TRUE 55 | } { 56 | \FALSE 57 | } 58 | 59 | \str_if_eq:enTF { \JSONParseExpandableValue{\myJSONdataB}{A[2]} } { null } { 60 | \TRUE 61 | } { 62 | \FALSE 63 | } 64 | 65 | \str_if_eq:enTF { \JSONParseExpandableValue{\myJSONdataB}{B} } { baz } { 66 | \TRUE 67 | } { 68 | \FALSE 69 | } 70 | } 71 | 72 | \ExplSyntaxOff 73 | 74 | \END 75 | 76 | \end{document} 77 | -------------------------------------------------------------------------------- /testfiles/test001.tlg:
-------------------------------------------------------------------------------- 1 | This is a generated file for the l3build validation system. 2 | Don't change this file in any respect. 3 | ============================================================ 4 | Parsing JSON ... 5 | (obj begin) 6 | (key) A: 7 | (tru) true 8 | (key) B: 9 | (fal) false 10 | (key) C: 11 | (nul) null 12 | (key) .: 13 | (obj) { "A" : true , "B" : false , "C" : null } 14 | (obj end) 15 | JSON parsing done. 16 | ============================================================ 17 | Parsing JSON ... 18 | (obj begin) 19 | (arr begin) 20 | (obj begin) 21 | (key) A[0].foo: 22 | (str) bar 23 | (key) A[0].a: 24 | (num) 1 25 | (key) A[0]: 26 | (obj) { "foo" : "bar" , "a" : 1 } 27 | (obj end) 28 | (key) A[1]: 29 | (num) -1.23 30 | (key) A[2]: 31 | (nul) null 32 | (key) A: 33 | (arr) [ { "foo" : "bar" , "a" : 1 } , -1.23 , null ] 34 | (arr end) 35 | (key) B: 36 | (str) baz 37 | (key) .: 38 | (obj) { "A" : [ { "foo" : "bar" , "a" : 1 } , -1.23 , null ] , "B" : "baz" } 39 | (obj end) 40 | JSON parsing done. 41 | ============================================================ 42 | TEST 1: parsing, from string 43 | ============================================================ 44 | TRUE 45 | TRUE 46 | TRUE 47 | ============================================================ 48 | (test-1.json) 49 | Parsing JSON ... 50 | (obj begin) 51 | (arr begin) 52 | (obj begin) 53 | (key) A[0].foo: 54 | (str) bar 55 | (key) A[0].a: 56 | (num) 1 57 | (key) A[0]: 58 | (obj) { "foo" : "bar" , "a" : 1 } 59 | (obj end) 60 | (key) A[1]: 61 | (num) -1.23 62 | (key) A[2]: 63 | (nul) null 64 | (key) A: 65 | (arr) [ { "foo" : "bar" , "a" : 1 } , -1.23 , null ] 66 | (arr end) 67 | (key) B: 68 | (str) baz 69 | (key) .: 70 | (obj) { "A" : [ { "foo" : "bar" , "a" : 1 } , -1.23 , null ] , "B" : "baz" } 71 | (obj end) 72 | JSON parsing done. 
73 | ============================================================ 74 | TEST 2: parsing, from file 75 | ============================================================ 76 | TRUE 77 | TRUE 78 | TRUE 79 | ============================================================ 80 | -------------------------------------------------------------------------------- /testfiles/test002.lvt: -------------------------------------------------------------------------------- 1 | \documentclass{article} 2 | \usepackage[T1]{fontenc} 3 | \input{regression-test} 4 | \usepackage{jsonparse} 5 | 6 | \begin{document} 7 | 8 | \START 9 | 10 | \BEGINTEST{parsing Unicode: from string} 11 | 12 | \OMIT 13 | \JSONParse{\myJSONdataA}{ { "A" : [ { "😀" : "😎" , "a" : "中" } , "🛹" , "🚗" ] , "B" : "日" } } 14 | \TIMO 15 | 16 | \ExplSyntaxOn 17 | 18 | \str_if_eq:eeTF { 19 | \JSONParseExpandableValue{\myJSONdataA}{A[0].😀} 20 | } { 21 | \codepoint_generate:nn { "1F60E } { 12 } 22 | } { \TRUE } { \FALSE } 23 | 24 | \str_if_eq:eeTF { 25 | \JSONParseExpandableValue{\myJSONdataA}{A[1]} 26 | } { 27 | \codepoint_generate:nn { "1F6F9 } { 12 } 28 | } { \TRUE } { \FALSE } 29 | 30 | \str_if_eq:eeTF { 31 | \JSONParseExpandableValue{\myJSONdataA}{B} 32 | } { 33 | \codepoint_generate:nn { "65E5 } { 12 } 34 | } { \TRUE } { \FALSE } 35 | 36 | \ExplSyntaxOff 37 | 38 | \ENDTEST 39 | 40 | \BEGINTEST{parsing Unicode: from file} 41 | 42 | \OMIT 43 | \begin{filecontents}[noheader,force]{test-2.json} 44 | { "A" : [ { "😀" : "😎" , "a" : "中"} , "🛹" , "🚗" ] , "B" : "日" } 45 | \end{filecontents} 46 | \TIMO 47 | 48 | \JSONParseFromFile{\myJSONdataB}{test-2.json} 49 | 50 | \ExplSyntaxOn 51 | 52 | \str_if_eq:eeTF { 53 | \JSONParseExpandableValue{\myJSONdataB}{A[0].😀} 54 | } { 55 | \codepoint_generate:nn { "1F60E } { 12 } 56 | } { \TRUE } { \FALSE } 57 | 58 | \str_if_eq:eeTF { 59 | \JSONParseExpandableValue{\myJSONdataB}{A[1]} 60 | } { 61 | \codepoint_generate:nn { "1F6F9 } { 12 } 62 | } { \TRUE } { \FALSE } 63 | 64 | \str_if_eq:eeTF { 65 | \JSONParseExpandableValue{\myJSONdataB}{B} 66 | } { 67 | \codepoint_generate:nn { "65E5 } { 12 } 68 | } { \TRUE } { \FALSE } 69 | 70 | \ExplSyntaxOff 71 | 72 | \ENDTEST 73 | 74 | \END 75 | 76 | \end{document} 77 | -------------------------------------------------------------------------------- /testfiles/test002.tlg: -------------------------------------------------------------------------------- 1 | This is a generated file for the l3build validation system. 2 | Don't change this file in any respect. 
3 | ============================================================ 4 | TEST 1: parsing Unicode: from string 5 | ============================================================ 6 | TRUE 7 | TRUE 8 | TRUE 9 | ============================================================ 10 | ============================================================ 11 | TEST 2: parsing Unicode: from file 12 | ============================================================ 13 | (test-2.json) 14 | TRUE 15 | TRUE 16 | TRUE 17 | ============================================================ 18 | -------------------------------------------------------------------------------- /testfiles/test003.lve: -------------------------------------------------------------------------------- 1 | \documentclass{article} 2 | \usepackage[T1]{fontenc} 3 | \input{regression-test} 4 | \usepackage{jsonparse} 5 | 6 | \begin{document} 7 | 8 | \ExplSyntaxOn 9 | \box_new:N \l_jsonparse_test_box 10 | \ExplSyntaxOff 11 | 12 | \START 13 | 14 | \BEGINTEST{arrays: array count} 15 | 16 | \ExplSyntaxOn 17 | 18 | \int_compare:nNnTF { 4 } = { 4 } { 19 | \TRUE 20 | } { 21 | \FALSE 22 | } 23 | 24 | \ExplSyntaxOff 25 | 26 | \ENDTEST 27 | 28 | 29 | \BEGINTEST{arrays: array use} 30 | 31 | \ExplSyntaxOn 32 | 33 | \hbox_set:Nn \l_jsonparse_test_box { 2 , ~ 4 , ~ 5 , ~ 8 } 34 | \box_show:N \l_jsonparse_test_box 35 | 36 | \hbox_set:Nn \l_jsonparse_test_box { a , ~ b , ~ c , ~ d } 37 | \box_show:N \l_jsonparse_test_box 38 | 39 | \str_if_eq:onTF { a , ~ b , ~ c , ~ d } { a , ~ b , ~ c , ~ d } { 40 | \TRUE 41 | } { 42 | \FALSE 43 | } 44 | 45 | \hbox_set:Nn \l_jsonparse_test_box { a , ~ b , ~ \emph{c} , ~ d } 46 | \box_show:N \l_jsonparse_test_box 47 | 48 | \ExplSyntaxOff 49 | 50 | \ENDTEST 51 | 52 | 53 | \BEGINTEST{arrays: array map function} 54 | 55 | \ExplSyntaxOn 56 | 57 | \hbox_set:Nn \l_jsonparse_test_box { ~ 58 | 0 : ~ [0].y : ~ 2 ~ 59 | 1 : ~ [1].y : ~ 4 ~ 60 | 2 : ~ [2].y : ~ 5 ~ 61 | 3 : ~ [3].y : ~ 8 62 | } 63 | \box_show:N \l_jsonparse_test_box 64 | 65 | \hbox_set:Nn \l_jsonparse_test_box { ~ 66 | 0 : ~ [0].y : ~ 2 ~ 67 | 1 : ~ [1].y : ~ 4 ~ 68 | 2 : ~ [2].y : ~ 5 ~ 69 | 3 : ~ [3].y : ~ 8 70 | } 71 | \box_show:N \l_jsonparse_test_box 72 | 73 | \ExplSyntaxOff 74 | 75 | \ENDTEST 76 | 77 | 78 | \BEGINTEST{arrays: array map inline} 79 | 80 | \ExplSyntaxOn 81 | 82 | \hbox_set:Nn \l_jsonparse_test_box { 83 | 1 : 2 : 3 : 4 : 84 | } 85 | \box_show:N \l_jsonparse_test_box 86 | 87 | \str_if_eq:nnTF { 1 : 2 : 3 : 4 : } { 1 : 2 : 3 : 4 : } { 88 | \TRUE 89 | } { 90 | \FALSE 91 | } 92 | 93 | \ExplSyntaxOff 94 | 95 | \ENDTEST 96 | 97 | 98 | \BEGINTEST{arrays: nested array map inline} 99 | 100 | \ExplSyntaxOn 101 | 102 | \hbox_set:Nn \l_jsonparse_test_box { 103 | 1 : a + b + c + d + \par 104 | 2 : a + b + c + d + \par 105 | 3 : a + b + c + d + \par 106 | 4 : a + b + c + d + \par 107 | } 108 | \box_show:N \l_jsonparse_test_box 109 | 110 | \ExplSyntaxOff 111 | 112 | \ENDTEST 113 | 114 | \END 115 | 116 | \end{document} 117 | -------------------------------------------------------------------------------- /testfiles/test003.lvt: -------------------------------------------------------------------------------- 1 | \documentclass{article} 2 | \usepackage[T1]{fontenc} 3 | \input{regression-test} 4 | \usepackage{jsonparse} 5 | 6 | \begin{document} 7 | 8 | \ExplSyntaxOn 9 | \box_new:N \l_jsonparse_test_box 10 | \ExplSyntaxOff 11 | 12 | \JSONParse{\JSONdataC}{ 13 | { 14 | "data" : [ 15 | { "x" : 1 , "y" : 2 } , 16 | { "x" : 2 , "y" : 4 } , 17 | { "x" : 3 , "y" : 5 } , 18 | { "x" : 4 , "y" : 8 } 19 
| ] , 20 | "items" : [ "a" , "b" , "c" , "d" ] , 21 | "items_e" : [ "a" , "b" , "\\emph{c}" , "d" ] 22 | } 23 | } 24 | 25 | \START 26 | 27 | \BEGINTEST{arrays: array count} 28 | 29 | \ExplSyntaxOn 30 | 31 | \JSONParseSetArrayCount{\myJSONdataCount}{\JSONdataC}{data} 32 | 33 | \int_compare:nNnTF { \myJSONdataCount } = { 4 } { 34 | \TRUE 35 | } { 36 | \FALSE 37 | } 38 | 39 | \ExplSyntaxOff 40 | 41 | \ENDTEST 42 | 43 | 44 | \BEGINTEST{arrays: array use} 45 | 46 | \ExplSyntaxOn 47 | 48 | \hbox_set:Nn \l_jsonparse_test_box { \JSONParseArrayUse{\JSONdataC}{data}[y]{, ~ } } 49 | \box_show:N \l_jsonparse_test_box 50 | 51 | \hbox_set:Nn \l_jsonparse_test_box { \JSONParseArrayUse{\JSONdataC}{items}{, ~ } } 52 | \box_show:N \l_jsonparse_test_box 53 | 54 | \JSONParseArrayUse[store ~ in=\JSONvaluesA]{\JSONdataC}{items}{, ~ } 55 | 56 | \str_if_eq:onTF { \JSONvaluesA } { a , ~ b , ~ c , ~ d } { 57 | \TRUE 58 | } { 59 | \FALSE 60 | } 61 | 62 | \JSONParseArrayUse[store ~ in=\JSONvaluesB]{\JSONdataC}{items_e}{, ~ } 63 | 64 | \hbox_set:Nn \l_jsonparse_test_box { \JSONvaluesB } 65 | \box_show:N \l_jsonparse_test_box 66 | 67 | \ExplSyntaxOff 68 | 69 | \ENDTEST 70 | 71 | 72 | \BEGINTEST{arrays: array map function} 73 | 74 | \newcommand{\useJSONvalue}{ 75 | \JSONParseArrayIndex : \JSONParseArrayKey : \JSONParseArrayValue 76 | } 77 | 78 | \ExplSyntaxOn 79 | 80 | \hbox_set:Nn \l_jsonparse_test_box { \JSONParseArrayMapFunction{\JSONdataC}{data}[y]{\useJSONvalue} } 81 | \box_show:N \l_jsonparse_test_box 82 | 83 | \hbox_set:Nn \l_jsonparse_test_box { \JSONParseArrayValuesMap{\JSONdataC}{data}[y]{useJSONvalue} } 84 | \box_show:N \l_jsonparse_test_box 85 | 86 | \ExplSyntaxOff 87 | 88 | \ENDTEST 89 | 90 | 91 | \BEGINTEST{arrays: array map inline} 92 | 93 | \ExplSyntaxOn 94 | 95 | \hbox_set:Nn \l_jsonparse_test_box { 96 | \JSONParseArrayMapInline{\JSONdataC}{data}{ 97 | \JSONParseValue{\JSONdataC}{data[#1].x} : 98 | } 99 | } 100 | \box_show:N \l_jsonparse_test_box 101 | 102 | \JSONParseArrayMapInline[store ~ in={\JSONvaluesC}]{\JSONdataC}{data}{ 103 | \JSONParseExpandableValue{\JSONdataC}{data[#1].x} : 104 | } 105 | 106 | \str_if_eq:onTF { \JSONvaluesC } { 1 : 2 : 3 : 4 : } { 107 | \TRUE 108 | } { 109 | \FALSE 110 | } 111 | 112 | \ExplSyntaxOff 113 | 114 | \ENDTEST 115 | 116 | 117 | \BEGINTEST{arrays: nested array map inline} 118 | 119 | \ExplSyntaxOn 120 | 121 | \hbox_set:Nn \l_jsonparse_test_box { 122 | \JSONParseArrayMapInline{\JSONdataC}{data}{ 123 | \JSONParseValue{\JSONdataC}{data[#1].x} : 124 | \JSONParseArrayMapInline{\JSONdataC}{items}{ 125 | \JSONParseValue{\JSONdataC}{items[##1]} + 126 | } \par 127 | } 128 | } 129 | \box_show:N \l_jsonparse_test_box 130 | 131 | \ExplSyntaxOff 132 | 133 | \ENDTEST 134 | 135 | \END 136 | 137 | \end{document} 138 | -------------------------------------------------------------------------------- /testfiles/test004.lve: -------------------------------------------------------------------------------- 1 | \documentclass{article} 2 | \usepackage[T1]{fontenc} 3 | \input{regression-test} 4 | \usepackage{jsonparse} 5 | 6 | \begin{document} 7 | 8 | \ExplSyntaxOn 9 | \box_new:N \l_jsonparse_test_box 10 | \ExplSyntaxOff 11 | 12 | \START 13 | 14 | \newcommand{\foo}{*foo*} 15 | 16 | \BEGINTEST{escape sequences} 17 | 18 | \ExplSyntaxOn 19 | 20 | \hbox_set:Nn \l_jsonparse_test_box { 21 | + foo\/ba\\emph\{r\"b\}az\\foo + f\\\"oo + bar\\\\baz + 22 | } 23 | \box_show:N \l_jsonparse_test_box 24 | 25 | \hbox_set:Nn \l_jsonparse_test_box { 26 | + foo/ba\emph{r"b}az\foo + f\"oo + bar\\baz + 27 | } 28 
| \box_show:N \l_jsonparse_test_box 29 | 30 | \ExplSyntaxOff 31 | 32 | \ENDTEST 33 | 34 | 35 | \BEGINTEST{escape sequences: arrays} 36 | 37 | \ExplSyntaxOn 38 | 39 | \hbox_set:Nn \l_jsonparse_test_box { 40 | foo/ba\emph{r"b}az\foo, f\"oo, bar\\baz 41 | } 42 | \box_show:N \l_jsonparse_test_box 43 | 44 | \hbox_set:Nn \l_jsonparse_test_box { ~ 45 | [0] : ~ foo/ba\emph{r"b}az\foo :: ~ { } ~ [1] : ~ f\"oo :: ~ { } ~ [2] : ~ bar\\baz :: ~ 46 | } 47 | \box_show:N \l_jsonparse_test_box 48 | 49 | \ExplSyntaxOff 50 | 51 | \ENDTEST 52 | 53 | 54 | \BEGINTEST{escape sequences: nesting} 55 | 56 | \ExplSyntaxOn 57 | 58 | \hbox_set:Nn \l_jsonparse_test_box { 59 | foo\\emph\{bar\}baz + foo\emph{bar}baz 60 | } 61 | \box_show:N \l_jsonparse_test_box 62 | 63 | \hbox_set:Nn \l_jsonparse_test_box { 64 | + foo\\emph\{bar\}baz + f\\\"oo + 65 | } 66 | \box_show:N \l_jsonparse_test_box 67 | 68 | \hbox_set:Nn \l_jsonparse_test_box { 69 | foo\emph{bar}baz + f\"oo + f\%oo 70 | } 71 | \box_show:N \l_jsonparse_test_box 72 | 73 | \ExplSyntaxOff 74 | 75 | \ENDTEST 76 | 77 | \END 78 | 79 | \end{document} -------------------------------------------------------------------------------- /testfiles/test004.lvt: -------------------------------------------------------------------------------- 1 | \documentclass{article} 2 | \usepackage[T1]{fontenc} 3 | \input{regression-test} 4 | \usepackage{jsonparse} 5 | 6 | \begin{document} 7 | 8 | \ExplSyntaxOn 9 | \box_new:N \l_jsonparse_test_box 10 | \ExplSyntaxOff 11 | 12 | \START 13 | 14 | \JSONParse{\myJSONdataX}{ 15 | { 16 | "a" : [ "foo\/ba\\emph{r\"b}az\\foo" , "f\\\"oo" , "bar\\\\baz" ] 17 | } 18 | } 19 | 20 | \newcommand{\foo}{*foo*} 21 | 22 | \BEGINTEST{escape sequences} 23 | 24 | \ExplSyntaxOn 25 | 26 | % + foo\/ba\\emph\{r\"b\}az\\foo + f\\\"oo + bar\\\\baz + 27 | 28 | \hbox_set:Nn \l_jsonparse_test_box { 29 | + \JSONParseValue[rescan=false]{\myJSONdataX}{a[0]} + \JSONParseValue[rescan=false]{\myJSONdataX}{a[1]} + \JSONParseValue[rescan=false]{\myJSONdataX}{a[2]} + 30 | } 31 | \box_show:N \l_jsonparse_test_box 32 | 33 | % + foo/ba\emph{r"b}az\foo + f\"oo + bar\\baz + 34 | 35 | \hbox_set:Nn \l_jsonparse_test_box { 36 | + \JSONParseValue{\myJSONdataX}{a[0]} + \JSONParseValue{\myJSONdataX}{a[1]} + \JSONParseValue{\myJSONdataX}{a[2]} + 37 | } 38 | \box_show:N \l_jsonparse_test_box 39 | 40 | \ExplSyntaxOff 41 | 42 | \ENDTEST 43 | 44 | 45 | \BEGINTEST{escape sequences: arrays} 46 | 47 | \newcommand{\useJSONvalue}{ 48 | \JSONParseArrayKey : \JSONParseArrayValue :: 49 | } 50 | 51 | \ExplSyntaxOn 52 | 53 | % foo/ba\emph{r"b}az\foo, f\"oo, bar\\baz 54 | 55 | \hbox_set:Nn \l_jsonparse_test_box { 56 | \JSONParseArrayUse{\myJSONdataX}{a}{, } 57 | } 58 | \box_show:N \l_jsonparse_test_box 59 | 60 | % [0] : foo/ba\emph{r"b}az\foo :: [1] : f\"oo :: [2] : bar\\baz :: 61 | 62 | \hbox_set:Nn \l_jsonparse_test_box { 63 | \JSONParseArrayMapFunction{\myJSONdataX}{a}{\useJSONvalue} 64 | } 65 | \box_show:N \l_jsonparse_test_box 66 | 67 | \ExplSyntaxOff 68 | 69 | \ENDTEST 70 | 71 | 72 | \BEGINTEST{escape sequences: nesting} 73 | 74 | \JSONParse{\myJSONdataY}{ { "a" : "foo\\emph{bar}baz" , "b" : { "one" : "foo\\emph{bar}baz" , "two" : "f\\\"oo" , "three" : "f\\%oo"} } } 75 | 76 | \ExplSyntaxOn 77 | 78 | % foo\\emph\{bar\}baz + foo\emph{bar}baz 79 | 80 | \hbox_set:Nn \l_jsonparse_test_box { 81 | \JSONParseValue[rescan=false]{\myJSONdataY}{a} + \JSONParseValue{\myJSONdataY}{a} 82 | } 83 | \box_show:N \l_jsonparse_test_box 84 | 85 | \JSONParse{\myJSONdataZ}{ \x{myJSONdataY}{b} } 86 | 87 | % + 
foo\\emph\{bar\}baz + f\\\"oo + 88 | 89 | \hbox_set:Nn \l_jsonparse_test_box { 90 | + \JSONParseValue[rescan=false]{\myJSONdataZ}{one} + \JSONParseValue[rescan=false]{\myJSONdataZ}{two} + 91 | } 92 | \box_show:N \l_jsonparse_test_box 93 | 94 | % foo\emph{bar}baz + f\"oo + f\%oo 95 | 96 | \hbox_set:Nn \l_jsonparse_test_box { 97 | \JSONParseValue{\myJSONdataZ}{one} + \JSONParseValue{\myJSONdataZ}{two} + \JSONParseValue{\myJSONdataZ}{three} 98 | } 99 | \box_show:N \l_jsonparse_test_box 100 | 101 | \ExplSyntaxOff 102 | 103 | \ENDTEST 104 | 105 | \END 106 | 107 | \end{document} -------------------------------------------------------------------------------- /testfiles/test005.lvt: -------------------------------------------------------------------------------- 1 | \documentclass{article} 2 | \usepackage[T1]{fontenc} 3 | \input{regression-test} 4 | \usepackage[debug]{jsonparse} 5 | 6 | \begin{document} 7 | 8 | \START 9 | 10 | \SEPARATOR 11 | 12 | \JSONParse{\theJSONdataA}{ { "a" : { "b" : [ "c" , "d" ] } } } 13 | 14 | \JSONParse{\theJSONdataB}{ { "d" : "\${theJSONdataA}{a.b[0]}" } } 15 | 16 | \ExplSyntaxOn 17 | 18 | \TEST { parsing , ~ nested } { 19 | 20 | \str_if_eq:enTF { \JSONParseExpandableValue{\theJSONdataB}{d} } { c } { 21 | \TRUE 22 | } { 23 | \FALSE 24 | } 25 | 26 | } 27 | 28 | \ExplSyntaxOff 29 | 30 | 31 | \JSONParseSet{ 32 | zero-based=false 33 | } 34 | 35 | \JSONParse{\theJSONdataC}{ { "a" : [ "b" , "c" , "d" ] } } 36 | 37 | \ExplSyntaxOn 38 | 39 | \TEST { parsing , ~ not ~ zero-based } { 40 | \str_if_eq:enTF { \JSONParseExpandableValue{\theJSONdataC}{a[2]} } { c } { 41 | \TRUE 42 | } { 43 | \FALSE 44 | } 45 | } 46 | 47 | \ExplSyntaxOff 48 | 49 | \END 50 | 51 | \end{document} 52 | -------------------------------------------------------------------------------- /testfiles/test005.tlg: -------------------------------------------------------------------------------- 1 | This is a generated file for the l3build validation system. 2 | Don't change this file in any respect. 3 | ============================================================ 4 | Parsing JSON ... 5 | (obj begin) 6 | (obj begin) 7 | (arr begin) 8 | (key) a.b[0]: 9 | (str) c 10 | (key) a.b[1]: 11 | (str) d 12 | (key) a.b: 13 | (arr) [ "c" , "d" ] 14 | (arr end) 15 | (key) a: 16 | (obj) { "b" : [ "c" , "d" ] } 17 | (obj end) 18 | (key) .: 19 | (obj) { "a" : { "b" : [ "c" , "d" ] } } 20 | (obj end) 21 | JSON parsing done. 22 | Parsing JSON ... 23 | (obj begin) 24 | (key) d: 25 | (str) c 26 | (key) .: 27 | (obj) { "d" : "c" } 28 | (obj end) 29 | JSON parsing done. 30 | ============================================================ 31 | TEST 1: parsing, nested 32 | ============================================================ 33 | TRUE 34 | ============================================================ 35 | Parsing JSON ... 36 | (obj begin) 37 | (arr begin) 38 | (key) a[1]: 39 | (str) b 40 | (key) a[2]: 41 | (str) c 42 | (key) a[3]: 43 | (str) d 44 | (key) a: 45 | (arr) [ "b" , "c" , "d" ] 46 | (arr end) 47 | (key) .: 48 | (obj) { "a" : [ "b" , "c" , "d" ] } 49 | (obj end) 50 | JSON parsing done. 
51 | ============================================================ 52 | TEST 2: parsing, not zero-based 53 | ============================================================ 54 | TRUE 55 | ============================================================ 56 | -------------------------------------------------------------------------------- /testfiles/test006.lvt: -------------------------------------------------------------------------------- 1 | \documentclass{article} 2 | \usepackage[T1]{fontenc} 3 | \input{regression-test} 4 | \usepackage{jsonparse} 5 | 6 | \begin{document} 7 | 8 | \START 9 | 10 | \JSONParseSet{ 11 | replace={ 12 | backspace={<
>}, 13 | formfeed={<>}, 14 | linefeed={<>}, 15 | carriage return={<>}, 16 | horizontal tab={<>} 17 | } 18 | } 19 | 20 | \JSONParse{\myJSONdataX}{ { "a" : "ab\bcd\fef" , "b" : "ab\ncd\ref" , "c" : "ab\tcd" } } 21 | 22 | \JSONParseSetRescanValue{\myJSONvalueA}{\myJSONdataX}{a} 23 | \JSONParseSetRescanValue{\myJSONvalueB}{\myJSONdataX}{b} 24 | \JSONParseSetRescanValue{\myJSONvalueC}{\myJSONdataX}{c} 25 | 26 | \ExplSyntaxOn 27 | 28 | \TEST { parsing , ~ custom ~ keyword ~ replacements } { 29 | \str_if_eq:enTF { \myJSONvalueA } { ab<
>cd<>ef } { 30 | \TRUE 31 | } { 32 | \FALSE 33 | } 34 | 35 | \str_if_eq:enTF { \myJSONvalueB } { ab<>cd<>ef } { 36 | \TRUE 37 | } { 38 | \FALSE 39 | } 40 | 41 | \str_if_eq:enTF { \myJSONvalueC } { ab<>cd } { 42 | \TRUE 43 | } { 44 | \FALSE 45 | } 46 | } 47 | 48 | \ExplSyntaxOff 49 | 50 | \END 51 | 52 | \end{document} 53 | -------------------------------------------------------------------------------- /testfiles/test006.tlg: -------------------------------------------------------------------------------- 1 | This is a generated file for the l3build validation system. 2 | Don't change this file in any respect. 3 | ============================================================ 4 | TEST 1: parsing, custom keyword replacements 5 | ============================================================ 6 | TRUE 7 | TRUE 8 | TRUE 9 | ============================================================ 10 | -------------------------------------------------------------------------------- /testfiles/test007.lvt: -------------------------------------------------------------------------------- 1 | \documentclass{article} 2 | \usepackage[T1]{fontenc} 3 | \input{regression-test} 4 | \usepackage{jsonparse} 5 | 6 | \begin{document} 7 | 8 | \JSONParse{\myJSONdata}{ 9 | { 10 | "a" : "ab\u0063d" , 11 | "b" : "ab\ud83d\ude0ed" , 12 | "c" : "ab\u65e5d" , 13 | "d" : "\u65e5" , 14 | "e" : "\u0062\ud83d\ude0e\u0063\u0063\u0062" , 15 | "f" : "\u0062\u0063\ud83d\ude0e\u0063\u0062" , 16 | "g" : "\u0062\u0063\u0063\ud83d\ude0e\u0062" , 17 | "h" : "\ud83d\ude0e" 18 | } 19 | } 20 | 21 | \START 22 | 23 | \BEGINTEST{parsing Unicode: from JSON Unicode} 24 | 25 | \ExplSyntaxOn 26 | 27 | \OMIT 28 | \JSONParseSetRescanValue{\myJSONa}{\myJSONdata}{a} 29 | \TIMO 30 | 31 | \str_if_eq:enTF { 32 | \myJSONa 33 | } { 34 | abcd 35 | } { \TRUE } { \FALSE } 36 | 37 | \OMIT 38 | \JSONParseSetRescanValue{\myJSONb}{\myJSONdata}{b} 39 | \TIMO 40 | 41 | \str_if_eq:eeTF { 42 | \myJSONb 43 | } { 44 | ab \codepoint_generate:nn { "1F60E } { 12 } d 45 | } { \TRUE } { \FALSE } 46 | 47 | \OMIT 48 | \JSONParseSetRescanValue{\myJSONc}{\myJSONdata}{c} 49 | \TIMO 50 | 51 | \str_if_eq:eeTF { 52 | \myJSONc 53 | } { 54 | ab \codepoint_generate:nn { "65E5 } { 12 } d 55 | } { \TRUE } { \FALSE } 56 | 57 | \OMIT 58 | \JSONParseSetRescanValue{\myJSONd}{\myJSONdata}{d} 59 | \TIMO 60 | 61 | \str_if_eq:eeTF { 62 | \myJSONd 63 | } { 64 | \codepoint_generate:nn { "65E5 } { 12 } 65 | } { \TRUE } { \FALSE } 66 | 67 | \OMIT 68 | \JSONParseSetRescanValue{\myJSONe}{\myJSONdata}{e} 69 | \TIMO 70 | 71 | \str_if_eq:eeTF { 72 | \myJSONe 73 | } { 74 | b \codepoint_generate:nn { "1F60E } { 12 } ccb 75 | } { \TRUE } { \FALSE } 76 | 77 | \OMIT 78 | \JSONParseSetRescanValue{\myJSONf}{\myJSONdata}{f} 79 | \TIMO 80 | 81 | \str_if_eq:eeTF { 82 | \myJSONf 83 | } { 84 | bc \codepoint_generate:nn { "1F60E } { 12 } cb 85 | } { \TRUE } { \FALSE } 86 | 87 | \OMIT 88 | \JSONParseSetRescanValue{\myJSONg}{\myJSONdata}{g} 89 | \TIMO 90 | 91 | \str_if_eq:eeTF { 92 | \myJSONg 93 | } { 94 | bcc \codepoint_generate:nn { "1F60E } { 12 } b 95 | } { \TRUE } { \FALSE } 96 | 97 | \OMIT 98 | \JSONParseSetRescanValue{\myJSONh}{\myJSONdata}{h} 99 | \TIMO 100 | 101 | \str_if_eq:eeTF { 102 | \myJSONh 103 | } { 104 | \codepoint_generate:nn { "1F60E } { 12 } 105 | } { \TRUE } { \FALSE } 106 | 107 | \ExplSyntaxOff 108 | 109 | \ENDTEST 110 | 111 | \END 112 | 113 | \end{document} 114 | -------------------------------------------------------------------------------- /testfiles/test007.tlg: 
-------------------------------------------------------------------------------- 1 | This is a generated file for the l3build validation system. 2 | Don't change this file in any respect. 3 | ============================================================ 4 | TEST 1: parsing Unicode: from JSON Unicode 5 | ============================================================ 6 | TRUE 7 | TRUE 8 | TRUE 9 | TRUE 10 | TRUE 11 | TRUE 12 | TRUE 13 | TRUE 14 | ============================================================ 15 | -------------------------------------------------------------------------------- /testfiles/test008.lvt: -------------------------------------------------------------------------------- 1 | \documentclass{article} 2 | \usepackage[T1]{fontenc} 3 | \input{regression-test} 4 | \usepackage{jsonparse} 5 | 6 | \begin{document} 7 | 8 | \START 9 | 10 | \JSONParseSet{separator={array left={<<}, array right={>>}}} 11 | 12 | \JSONParse{\myJSONdataV}{ { "a" : [ 1, 2, 3 ] } } 13 | \JSONParseSetValue{\myJSONvalueV}{\myJSONdataV}{a<<0>>} 14 | 15 | \ExplSyntaxOn 16 | 17 | \TEST { parsing , ~ custom ~ array ~ separators } { 18 | \str_if_eq:enTF { \myJSONvalueV } { 1 } { 19 | \TRUE 20 | } { 21 | \FALSE 22 | } 23 | } 24 | 25 | \ExplSyntaxOff 26 | 27 | \JSONParseSet{separator={child={++}}} 28 | 29 | \JSONParse{\myJSONdataW}{ { "a" : { "b" : 1 } } } 30 | \JSONParseSetValue{\myJSONvalueW}{\myJSONdataW}{a++b} 31 | 32 | \ExplSyntaxOn 33 | 34 | \TEST { parsing , ~ custom ~ child ~ separator } { 35 | \str_if_eq:enTF { \myJSONvalueW } { 1 } { 36 | \TRUE 37 | } { 38 | \FALSE 39 | } 40 | } 41 | 42 | \ExplSyntaxOff 43 | 44 | \JSONParseSet{keyword={true={YES}, false={NO}, null={ZERO}}} 45 | 46 | \JSONParse{\myJSONdataX}{ { "a" : true , "b" : false , "c" : null } } 47 | 48 | \ExplSyntaxOn 49 | 50 | \TEST { parsing , ~ custom ~ replacements } { 51 | \str_if_eq:enTF { \JSONParseExpandableValue{\myJSONdataX}{a} } { YES } { 52 | \TRUE 53 | } { 54 | \FALSE 55 | } 56 | 57 | \str_if_eq:enTF { \JSONParseExpandableValue{\myJSONdataX}{b} } { NO } { 58 | \TRUE 59 | } { 60 | \FALSE 61 | } 62 | 63 | \str_if_eq:enTF { \JSONParseExpandableValue{\myJSONdataX}{c} } { ZERO } { 64 | \TRUE 65 | } { 66 | \FALSE 67 | } 68 | } 69 | 70 | \ExplSyntaxOff 71 | 72 | \END 73 | 74 | \end{document} 75 | -------------------------------------------------------------------------------- /testfiles/test008.tlg: -------------------------------------------------------------------------------- 1 | This is a generated file for the l3build validation system. 2 | Don't change this file in any respect. 
3 | ============================================================ 4 | TEST 1: parsing, custom array separators 5 | ============================================================ 6 | TRUE 7 | ============================================================ 8 | ============================================================ 9 | TEST 2: parsing, custom child separator 10 | ============================================================ 11 | TRUE 12 | ============================================================ 13 | ============================================================ 14 | TEST 3: parsing, custom replacements 15 | ============================================================ 16 | TRUE 17 | TRUE 18 | TRUE 19 | ============================================================ 20 | -------------------------------------------------------------------------------- /testfiles/test009.lvt: -------------------------------------------------------------------------------- 1 | \documentclass{article} 2 | \usepackage[T1]{fontenc} 3 | \input{regression-test} 4 | \usepackage{jsonparse} 5 | 6 | \begin{document} 7 | 8 | \JSONParse[externalize]{\myJSONdataE}{ 9 | { "a" : 1 } 10 | } 11 | 12 | \START 13 | 14 | \ExplSyntaxOn 15 | 16 | \TEST { externalize: ~ create ~ file } { 17 | \file_if_exist:nTF { test009_myJSONdataE.jsonparse } { 18 | \TRUE 19 | } { 20 | \FALSE 21 | } 22 | } 23 | 24 | \str_new:N \l_jsonparse_test_a_str 25 | \str_new:N \l_jsonparse_test_b_str 26 | 27 | \TEST { externalize: ~ check ~ file } { 28 | \file_get:nnNTF { test009_myJSONdataE.jsonparse } { \cctab_select:N \c_str_cctab } \l_jsonparse_test_a_str { 29 | \TRUE 30 | } { 31 | \FALSE 32 | } 33 | 34 | \str_set:Nn \l_jsonparse_test_b_str { 35 | \JSONParsePut ~ {\myJSONdataE ~ }{a}{1} 36 | \JSONParsePut ~ {\myJSONdataE ~ }{.}{{ ~ "a" ~ : ~ 1 ~ }} 37 | } 38 | 39 | \tl_if_eq:NNTF \l_jsonparse_test_a_str \l_jsonparse_test_b_str { 40 | \TRUE 41 | } { 42 | \FALSE 43 | } 44 | } 45 | 46 | \JSONParsePut{\myJSONdataE}{b}{foo} 47 | 48 | \TEST { externalize: ~ put } { 49 | 50 | \str_if_eq:enTF { \JSONParseExpandableValue{\myJSONdataE}{b} } { foo } { 51 | \TRUE 52 | } { 53 | \FALSE 54 | } 55 | } 56 | 57 | \ExplSyntaxOff 58 | 59 | \END 60 | 61 | \end{document} 62 | -------------------------------------------------------------------------------- /testfiles/test009.tlg: -------------------------------------------------------------------------------- 1 | This is a generated file for the l3build validation system. 2 | Don't change this file in any respect. 
3 | ============================================================ 4 | TEST 1: externalize: create file 5 | ============================================================ 6 | TRUE 7 | ============================================================ 8 | ============================================================ 9 | TEST 2: externalize: check file 10 | ============================================================ 11 | (test009_myJSONdataE.jsonparse) 12 | TRUE 13 | TRUE 14 | ============================================================ 15 | ============================================================ 16 | TEST 3: externalize: put 17 | ============================================================ 18 | TRUE 19 | ============================================================ 20 | -------------------------------------------------------------------------------- /testfiles/test010.lve: -------------------------------------------------------------------------------- 1 | \documentclass{article} 2 | \usepackage[T1]{fontenc} 3 | \input{regression-test} 4 | \usepackage{jsonparse} 5 | 6 | \begin{document} 7 | 8 | \ExplSyntaxOn 9 | \box_new:N \l_jsonparse_test_box 10 | \ExplSyntaxOff 11 | 12 | \START 13 | 14 | \BEGINTEST{keys} 15 | 16 | \ExplSyntaxOn 17 | 18 | \hbox_set:Nn \l_jsonparse_test_box { [ "[0]" , "[1]" , "[2]" , "[3]" ] } 19 | \box_show:N \l_jsonparse_test_box 20 | 21 | \str_if_eq:enTF { 22 | x 23 | } { 24 | x 25 | } { \TRUE } { \FALSE } 26 | 27 | \ExplSyntaxOff 28 | 29 | \ENDTEST 30 | 31 | 32 | \BEGINTEST{filter} 33 | 34 | \ExplSyntaxOn 35 | 36 | \str_if_eq:enTF { 37 | 1 38 | } { 39 | 1 40 | } { \TRUE } { \FALSE } 41 | 42 | \ExplSyntaxOff 43 | 44 | \ENDTEST 45 | 46 | \END 47 | 48 | \end{document} -------------------------------------------------------------------------------- /testfiles/test010.lvt: -------------------------------------------------------------------------------- 1 | \documentclass{article} 2 | \usepackage[T1]{fontenc} 3 | \input{regression-test} 4 | \usepackage{jsonparse} 5 | 6 | \begin{document} 7 | 8 | \ExplSyntaxOn 9 | \box_new:N \l_jsonparse_test_box 10 | \ExplSyntaxOff 11 | 12 | \JSONParse{\JSONdataD}{ 13 | { 14 | "data" : [ 15 | { "x" : 1 , "y" : 2 } , 16 | { "x" : 2 , "y" : 4 } , 17 | { "x" : 3 , "y" : 5 } , 18 | { "x" : 4 , "y" : 8 } 19 | ] 20 | } 21 | } 22 | 23 | \START 24 | 25 | \BEGINTEST{keys} 26 | 27 | \ExplSyntaxOn 28 | 29 | \hbox_set:Nn \l_jsonparse_test_box { \JSONParseKeys{\JSONdataD}{data} } 30 | \box_show:N \l_jsonparse_test_box 31 | 32 | \JSONParseSetKeys{\JSONdataKeys}{\JSONdataD}{data[0]} 33 | 34 | \str_if_eq:enTF { 35 | \JSONParseExpandableValue{\JSONdataKeys}{[0]} 36 | } { 37 | x 38 | } { \TRUE } { \FALSE } 39 | 40 | \ExplSyntaxOff 41 | 42 | \ENDTEST 43 | 44 | 45 | \BEGINTEST{filter} 46 | 47 | \ExplSyntaxOn 48 | 49 | \JSONParseFilter{\JSONdataE}{\JSONdataD}{data[0]} 50 | 51 | \str_if_eq:enTF { 52 | \JSONParseExpandableValue{\JSONdataE}{x} 53 | } { 54 | 1 55 | } { \TRUE } { \FALSE } 56 | 57 | \ExplSyntaxOff 58 | 59 | \ENDTEST 60 | 61 | \END 62 | 63 | \end{document} -------------------------------------------------------------------------------- /testfiles/test011.lve: -------------------------------------------------------------------------------- 1 | \documentclass{article} 2 | \usepackage[T1]{fontenc} 3 | \input{regression-test} 4 | \usepackage{jsonparse} 5 | 6 | \begin{document} 7 | 8 | \ExplSyntaxOn 9 | \box_new:N \l_jsonparse_test_box 10 | \ExplSyntaxOff 11 | 12 | \START 13 | 14 | \ExplSyntaxOn 15 | 16 | \TEST { parsing , ~ escape ~ sequences } { 17 | 18 | 
\hbox_set:Nn \l_jsonparse_test_box { 19 | abc\#def , abc\$def , abc\%def , abc\&def , 20 | abc\^{}def , abc\_{}def , abc\~{}def 21 | } 22 | \box_show:N \l_jsonparse_test_box 23 | 24 | } 25 | 26 | \ExplSyntaxOff 27 | 28 | \END 29 | 30 | \end{document} 31 | -------------------------------------------------------------------------------- /testfiles/test011.lvt: -------------------------------------------------------------------------------- 1 | \documentclass{article} 2 | \usepackage[T1]{fontenc} 3 | \input{regression-test} 4 | \usepackage{jsonparse} 5 | 6 | \begin{document} 7 | 8 | \ExplSyntaxOn 9 | \box_new:N \l_jsonparse_test_box 10 | \ExplSyntaxOff 11 | 12 | \START 13 | 14 | \JSONParseSet{ 15 | escape=all 16 | } 17 | 18 | \JSONParse{\myJSONdataX}{ 19 | { 20 | "a" : [ "abc#def" , "abc$def" , "abc%def" , "abc&def" , 21 | "abc^def" , "abc_def" , "abc~def" ] 22 | } 23 | } 24 | 25 | \ExplSyntaxOn 26 | 27 | \TEST { parsing , ~ escape ~ sequences } { 28 | 29 | \hbox_set:Nn \l_jsonparse_test_box { \JSONParseArrayUse{\myJSONdataX}{a}{,} } 30 | \box_show:N \l_jsonparse_test_box 31 | 32 | } 33 | 34 | \ExplSyntaxOff 35 | 36 | \END 37 | 38 | \end{document} 39 | -------------------------------------------------------------------------------- /testfiles/test012.lvt: -------------------------------------------------------------------------------- 1 | \documentclass{article} 2 | \usepackage[T1]{fontenc} 3 | \input{regression-test} 4 | \usepackage{jsonparse} 5 | 6 | \begin{document} 7 | 8 | \START 9 | 10 | \ExplSyntaxOn 11 | 12 | \BEGINTEST { empty ~ objects ~ and ~ arrays } 13 | 14 | \JSONParse{\myJSONdataA}{ 15 | { } 16 | } 17 | 18 | \JSONParse{\myJSONdataB}{ 19 | { "a" : { } } 20 | } 21 | 22 | \JSONParse{\myJSONdataC}{ 23 | [ ] 24 | } 25 | 26 | \JSONParse{\myJSONdataD}{ 27 | [ [ ] ] 28 | } 29 | 30 | \JSONParse{\myJSONdataE}{ 31 | { "a" : [ ] } 32 | } 33 | 34 | \ENDTEST 35 | 36 | \BEGINTEST { error: ~ misplaced ~ comma } 37 | 38 | \JSONParse{\myJSONdataF}{ 39 | [ , ] 40 | } 41 | 42 | \SEPARATOR 43 | 44 | \JSONParse{\myJSONdataG}{ 45 | [ "a" , ] 46 | } 47 | 48 | \ENDTEST 49 | 50 | \BEGINTEST { error: ~ misplaced ~ colon } 51 | 52 | \JSONParse{\myJSONdataH}{ 53 | [ "a" : "b" ] 54 | } 55 | 56 | \ENDTEST 57 | 58 | \BEGINTEST { error: ~ missing ~ key ~ in ~ object } 59 | 60 | \JSONParse{\myJSONdataI}{ 61 | { , } 62 | } 63 | 64 | \SEPARATOR 65 | 66 | \JSONParse{\myJSONdataJ}{ 67 | { "a" : "b" , } 68 | } 69 | 70 | \SEPARATOR 71 | 72 | \JSONParse{\myJSONdataK}{ 73 | { "a" } 74 | } 75 | 76 | \SEPARATOR 77 | 78 | \JSONParse{\myJSONdataL}{ 79 | { "a" : "b" , "c" } 80 | } 81 | 82 | \SEPARATOR 83 | 84 | \JSONParse{\myJSONdataM}{ 85 | { "a" , "b" : "c" } 86 | } 87 | 88 | \ENDTEST 89 | 90 | \end{document} -------------------------------------------------------------------------------- /testfiles/test012.tlg: -------------------------------------------------------------------------------- 1 | This is a generated file for the l3build validation system. 2 | Don't change this file in any respect. 3 | ============================================================ 4 | TEST 1: empty objects and arrays 5 | ============================================================ 6 | ============================================================ 7 | ============================================================ 8 | TEST 2: error: misplaced comma 9 | ============================================================ 10 | ! Package jsonparse Error: Package jsonparse Error 11 | (jsonparse) Could not parse JSON. 12 | (jsonparse) Misplaced comma. 
13 | Type to continue. 14 | ... 15 | l. ...} 16 | ^^M 17 | LaTeX does not know anything more about this error, sorry. 18 | Try typing to proceed. 19 | If that doesn't work, type X to quit. 20 | ============================================================ 21 | ! Package jsonparse Error: Package jsonparse Error 22 | (jsonparse) Could not parse JSON. 23 | (jsonparse) Misplaced comma. 24 | Type to continue. 25 | ... 26 | l. ...} 27 | ^^M 28 | LaTeX does not know anything more about this error, sorry. 29 | Try typing to proceed. 30 | If that doesn't work, type X to quit. 31 | ============================================================ 32 | ============================================================ 33 | TEST 3: error: misplaced colon 34 | ============================================================ 35 | ! Package jsonparse Error: Package jsonparse Error 36 | (jsonparse) Could not parse JSON. 37 | (jsonparse) Misplaced colon at key [0]. 38 | Type to continue. 39 | ... 40 | l. ...} 41 | ^^M 42 | LaTeX does not know anything more about this error, sorry. 43 | Try typing to proceed. 44 | If that doesn't work, type X to quit. 45 | ============================================================ 46 | ============================================================ 47 | TEST 4: error: missing key in object 48 | ============================================================ 49 | ! Package jsonparse Error: Package jsonparse Error 50 | (jsonparse) Could not parse JSON. 51 | (jsonparse) Missing key in object. 52 | Type to continue. 53 | ... 54 | l. ...} 55 | ^^M 56 | LaTeX does not know anything more about this error, sorry. 57 | Try typing to proceed. 58 | If that doesn't work, type X to quit. 59 | ============================================================ 60 | ! Package jsonparse Error: Package jsonparse Error 61 | (jsonparse) Could not parse JSON. 62 | (jsonparse) Missing key in object. 63 | Type to continue. 64 | ... 65 | l. ...} 66 | ^^M 67 | LaTeX does not know anything more about this error, sorry. 68 | Try typing to proceed. 69 | If that doesn't work, type X to quit. 70 | ============================================================ 71 | ! Package jsonparse Error: Package jsonparse Error 72 | (jsonparse) Could not parse JSON. 73 | (jsonparse) Missing key in object. 74 | Type to continue. 75 | ... 76 | l. ...} 77 | ^^M 78 | LaTeX does not know anything more about this error, sorry. 79 | Try typing to proceed. 80 | If that doesn't work, type X to quit. 81 | ============================================================ 82 | ! Package jsonparse Error: Package jsonparse Error 83 | (jsonparse) Could not parse JSON. 84 | (jsonparse) Missing key in object. 85 | Type to continue. 86 | ... 87 | l. ...} 88 | ^^M 89 | LaTeX does not know anything more about this error, sorry. 90 | Try typing to proceed. 91 | If that doesn't work, type X to quit. 92 | ============================================================ 93 | ! Package jsonparse Error: Package jsonparse Error 94 | (jsonparse) Could not parse JSON. 95 | (jsonparse) Missing key in object. 96 | Type to continue. 97 | ... 98 | l. ...} 99 | ^^M 100 | LaTeX does not know anything more about this error, sorry. 101 | Try typing to proceed. 102 | If that doesn't work, type X to quit. 
103 | ============================================================ 104 | (test012.aux) 105 | -------------------------------------------------------------------------------- /testfiles/test100.lvt: -------------------------------------------------------------------------------- 1 | \documentclass{article} 2 | \usepackage[T1]{fontenc} 3 | \input{regression-test} 4 | \usepackage{jsonparse} 5 | 6 | \begin{document} 7 | \ExplSyntaxOn 8 | 9 | \START 10 | 11 | \OMIT 12 | 13 | \cs_new:Npn \jsonparse_check_if_num:n #1 { 14 | \jsonparse_if_num:nTF { #1 } { 15 | \TRUE 16 | } { 17 | \FALSE 18 | } 19 | } 20 | 21 | \TIMO 22 | 23 | \TEST { jsonparse_if_num , ~ check ~ values } { 24 | \clist_map_function:nN { 25 | 0 , 02 , 1 , 23 , 252636 , 123 , 26 | -0 , -1 , -23 , -2367 , 27 | 0.12 , .12 , 1.12 , -1.27 , -.12 , .-12 , 28 | 1e1 , 0e1 , -1e12 , 3e-78 , 2e+2 , 29 | 1.3e+23 , -0.2e121 , 3.1e23 , -1e-23 , 30 | +13 , 1.3e2.3 , abc 31 | } \jsonparse_check_if_num:n 32 | } 33 | 34 | \ExplSyntaxOff 35 | 36 | \END 37 | \end{document} 38 | -------------------------------------------------------------------------------- /testfiles/test100.tlg: -------------------------------------------------------------------------------- 1 | This is a generated file for the l3build validation system. 2 | Don't change this file in any respect. 3 | ============================================================ 4 | TEST 1: jsonparse_if_num, check values 5 | ============================================================ 6 | TRUE 7 | FALSE 8 | TRUE 9 | TRUE 10 | TRUE 11 | TRUE 12 | TRUE 13 | TRUE 14 | TRUE 15 | TRUE 16 | TRUE 17 | FALSE 18 | TRUE 19 | TRUE 20 | FALSE 21 | FALSE 22 | TRUE 23 | TRUE 24 | TRUE 25 | TRUE 26 | TRUE 27 | TRUE 28 | TRUE 29 | TRUE 30 | TRUE 31 | FALSE 32 | FALSE 33 | FALSE 34 | ============================================================ 35 | -------------------------------------------------------------------------------- /testfiles/test200.lvt: -------------------------------------------------------------------------------- 1 | \documentclass{article} 2 | \usepackage[T1]{fontenc} 3 | \input{regression-test} 4 | \usepackage{jsonparse} 5 | 6 | \begin{document} 7 | \ExplSyntaxOn 8 | 9 | \START 10 | 11 | \OMIT 12 | 13 | \cs_new:Npn \jsonparse_check_if_high_surrogate:n #1 { 14 | \jsonparse_unicode_if_high_surrogate:nTF { #1 } { 15 | \TRUE 16 | } { 17 | \FALSE 18 | } 19 | } 20 | 21 | \cs_new:Npn \jsonparse_check_if_low_surrogate:n #1 { 22 | \jsonparse_unicode_if_low_surrogate:nTF { #1 } { 23 | \TRUE 24 | } { 25 | \FALSE 26 | } 27 | } 28 | 29 | \TIMO 30 | 31 | \TEST { jsonparse_if_high_surrogate , ~ check ~ values } { 32 | \clist_map_function:nN { 33 | "0010 , "D7FF , "D800 , "DBFF , "DC00 , "DFFF , "E000 , "F000 34 | } \jsonparse_check_if_high_surrogate:n 35 | } 36 | 37 | \TEST { jsonparse_if_low_surrogate , ~ check ~ values } { 38 | \clist_map_function:nN { 39 | "0010 , "D7FF , "D800 , "DBFF , "DC00 , "DFFF , "E000 , "F000 40 | } \jsonparse_check_if_low_surrogate:n 41 | } 42 | 43 | \TEST { jsonparse_unicode_convert_surrogate_pair } { 44 | \int_compare:nNnTF { 45 | \jsonparse_unicode_convert_surrogate_pair:nn { "D83D } { "DE00 } 46 | } = { "1F600 } { 47 | \TRUE 48 | } { 49 | \FALSE 50 | } 51 | } 52 | 53 | \ExplSyntaxOff 54 | 55 | \END 56 | 57 | \end{document} 58 | -------------------------------------------------------------------------------- /testfiles/test200.tlg: -------------------------------------------------------------------------------- 1 | This is a generated file for the l3build validation system. 
2 | Don't change this file in any respect. 3 | ============================================================ 4 | TEST 1: jsonparse_if_high_surrogate, check values 5 | ============================================================ 6 | FALSE 7 | FALSE 8 | TRUE 9 | TRUE 10 | FALSE 11 | FALSE 12 | FALSE 13 | FALSE 14 | ============================================================ 15 | ============================================================ 16 | TEST 2: jsonparse_if_low_surrogate, check values 17 | ============================================================ 18 | FALSE 19 | FALSE 20 | FALSE 21 | FALSE 22 | TRUE 23 | TRUE 24 | FALSE 25 | FALSE 26 | ============================================================ 27 | ============================================================ 28 | TEST 3: jsonparse_unicode_convert_surrogate_pair 29 | ============================================================ 30 | TRUE 31 | ============================================================ 32 | --------------------------------------------------------------------------------