├── .githooks ├── post-checkout ├── post-commit ├── post-merge └── pre-push ├── .github └── workflows │ └── ci.yml ├── .gitignore ├── LICENSE ├── README.md ├── build.gradle ├── demo ├── build.gradle └── src │ └── main │ └── java │ └── io │ └── jafar │ └── demo │ ├── JVMInfoEvent.java │ ├── Main.java │ └── Printer.java ├── get_resources.sh ├── gradle.properties ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── jafar-gradle-plugin ├── build.gradle ├── gradle.properties ├── gradle │ └── wrapper │ │ ├── gradle-wrapper.jar │ │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── settings.gradle └── src │ └── main │ └── groovy │ └── io │ └── jafar │ └── gradle │ └── TypeGeneratorPlugin.groovy ├── parser ├── build.gradle └── src │ ├── main │ └── java │ │ └── io │ │ └── jafar │ │ ├── parser │ │ ├── AbstractEvent.java │ │ ├── JafarParserImpl.java │ │ ├── MutableConstantPool.java │ │ ├── MutableConstantPools.java │ │ ├── MutableMetadataLookup.java │ │ ├── ParsingUtils.java │ │ ├── TypeFilter.java │ │ ├── ValueLoader.java │ │ ├── api │ │ │ ├── Control.java │ │ │ ├── HandlerRegistration.java │ │ │ ├── JFRHandler.java │ │ │ ├── JafarParser.java │ │ │ ├── JfrField.java │ │ │ ├── JfrIgnore.java │ │ │ ├── JfrType.java │ │ │ └── ParsingContext.java │ │ └── internal_api │ │ │ ├── CheckpointEvent.java │ │ │ ├── ChunkHeader.java │ │ │ ├── ChunkParserListener.java │ │ │ ├── CodeGenerator.java │ │ │ ├── ConstantPool.java │ │ │ ├── ConstantPools.java │ │ │ ├── Deserializer.java │ │ │ ├── MetadataLookup.java │ │ │ ├── ParsingContextImpl.java │ │ │ ├── RecordingParserContext.java │ │ │ ├── RecordingStream.java │ │ │ ├── RecordingStreamReader.java │ │ │ ├── StreamingChunkParser.java │ │ │ ├── TypeSkipper.java │ │ │ └── metadata │ │ │ ├── AbstractMetadataElement.java │ │ │ ├── MetadataAnnotation.java │ │ │ ├── MetadataClass.java │ │ │ ├── MetadataElement.java │ │ │ ├── MetadataElementKind.java │ │ │ ├── MetadataEvent.java │ │ │ ├── MetadataField.java │ │ │ ├── MetadataRegion.java │ │ │ ├── MetadataRoot.java │ │ │ ├── MetadataSetting.java │ │ │ └── MetadataVisitor.java │ │ └── utils │ │ ├── BytePacking.java │ │ ├── CachedStringParser.java │ │ ├── CustomByteBuffer.java │ │ ├── SplicedMappedByteBuffer.java │ │ └── TypeGenerator.java │ └── test │ └── java │ └── io │ └── jafar │ ├── TestJfrRecorder.java │ ├── parser │ ├── ExecutionSampleEvent.java │ ├── JafarParserTest.java │ ├── ParserEvent.java │ ├── ParserEvent1.java │ └── ThreadEndEvent.java │ └── utils │ ├── BytePackingTest.java │ └── SplicedMappedByteBufferTest.java ├── rebuild_plugin.sh └── settings.gradle /.githooks/post-checkout: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | command -v git-lfs >/dev/null 2>&1 || { echo >&2 "\nThis repository is configured for Git LFS but 'git-lfs' was not found on your path. If you no longer wish to use Git LFS, remove this hook by deleting the 'post-checkout' file in the hooks directory (set by 'core.hookspath'; usually '.git/hooks').\n"; exit 2; } 3 | git lfs post-checkout "$@" 4 | -------------------------------------------------------------------------------- /.githooks/post-commit: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | command -v git-lfs >/dev/null 2>&1 || { echo >&2 "\nThis repository is configured for Git LFS but 'git-lfs' was not found on your path. 
If you no longer wish to use Git LFS, remove this hook by deleting the 'post-commit' file in the hooks directory (set by 'core.hookspath'; usually '.git/hooks').\n"; exit 2; } 3 | git lfs post-commit "$@" 4 | -------------------------------------------------------------------------------- /.githooks/post-merge: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | command -v git-lfs >/dev/null 2>&1 || { echo >&2 "\nThis repository is configured for Git LFS but 'git-lfs' was not found on your path. If you no longer wish to use Git LFS, remove this hook by deleting the 'post-merge' file in the hooks directory (set by 'core.hookspath'; usually '.git/hooks').\n"; exit 2; } 3 | git lfs post-merge "$@" 4 | -------------------------------------------------------------------------------- /.githooks/pre-push: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | command -v git-lfs >/dev/null 2>&1 || { echo >&2 "\nThis repository is configured for Git LFS but 'git-lfs' was not found on your path. If you no longer wish to use Git LFS, remove this hook by deleting the 'pre-push' file in the hooks directory (set by 'core.hookspath'; usually '.git/hooks').\n"; exit 2; } 3 | git lfs pre-push "$@" 4 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Publish Snapshot 2 | 3 | on: 4 | push: 5 | branches: 6 | - 'main' 7 | workflow_dispatch: 8 | 9 | permissions: 10 | contents: read 11 | pull-requests: read 12 | actions: read 13 | 14 | jobs: 15 | publish-snapshot: 16 | runs-on: ubuntu-latest 17 | steps: 18 | - uses: actions/checkout@v3 19 | - name: Prepare test JDK 20 | uses: actions/setup-java@v3 21 | with: 22 | distribution: 'temurin' 23 | java-version: "21.0.3" 24 | - name: Publish 25 | run: | 26 | ./gradlew publish 27 | cd jafar-gradle-plugin 28 | ./gradlew publish 29 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Compiled class file 2 | *.class 3 | 4 | # Log file 5 | *.log 6 | 7 | # BlueJ files 8 | *.ctxt 9 | 10 | # Mobile Tools for Java (J2ME) 11 | .mtj.tmp/ 12 | 13 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 14 | hs_err_pid* 15 | replay_pid* 16 | 17 | build/ 18 | .gradle 19 | .idea 20 | 21 | *.jfr 22 | 23 | harness* 24 | launcher* 25 | 26 | /lib/ 27 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. 
For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# jafar
Experimental, incomplete JFR parser

Very much a work in progress.
The goal is to be able to parse JFR files and extract the event data in a programmatic way with the least effort possible.

## Requirements
Java 21 (mostly just because I wanted to try the pattern matching)

Git LFS is used to store the JFR recordings, so you will need to have it installed to clone the repository.
Install it by following the instructions at https://docs.github.com/en/repositories/working-with-files/managing-large-files/installing-git-large-file-storage

## TL;DR
Jafar allows quickly wiring JFR data to interface-based handlers using bytecode generation.
I was nerd-sniped by [@nitsanw](https://github.com/nitsanw) and quickly threw together this more-or-less proof of concept.

The parser is pretty fast, actually. You can try the demo app, which extracts all the `jdk.ExecutionSample` events,
counts the number of samples and calculates the sum of the associated thread ids (useful, right?). On a Mac M1 with a ~600 MiB
JFR file this takes around 1 second, compared to about 7 seconds using the JMC parser. The JDK `jfr` tool will run out of memory,
but, to be fair, it is trying to print the full content of each event.

### Building
First, retrieve the binary resources via `./get_resources.sh`.
Then, build the project with `./gradlew shadowJar`.

Now you can run the demo app with:
```shell
# The Jafar parser
java -jar demo/build/libs/demo-all.jar [jafar|jmc|jfr|jfr-stream] path_to_jfr.jfr
```

## Usage
The main idea is to define a handling interface which corresponds to a JFR event type. The linking is done via the `@JfrType`
annotation. For convenience, there is a `JfrEvent` interface which can be extended to define the event handling interface.

The interface methods should correspond to the fields of the JFR event, with the method names matching the field names.
If the field name is not a valid Java identifier, the method can be linked to the field via the `@JfrField` annotation.
Methods can also be excluded from linking to the JFR type by annotating them with `@JfrIgnore`.
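For illustration, a hypothetical handler interface using both annotations could look like the sketch below. The event type, the field names, and the exact annotation attributes are made up for this example; check the `io.jafar.parser.api` sources for the authoritative signatures.

```java
@JfrType("custom.MyEvent")
public interface MyEvent extends JfrEvent {
    // linked by name: the JFR event is expected to have a field called 'myfield'
    String myfield();

    // assumed usage of @JfrField: link this accessor to a JFR field whose name
    // ('my.field') is not a valid Java method name
    @JfrField("my.field")
    String myDottedField();

    // convenience method that must not be linked to any JFR field
    @JfrIgnore
    default String pretty() {
        return "myfield=" + myfield();
    }
}
```

A complete usage example: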

```java

@JfrType("custom.MyEvent")
public interface MyEvent extends JfrEvent {
    String myfield();
}

try (JafarParser parser = JafarParser.open("path_to_jfr.jfr")) {
    // registering a handler will return a cookie which can be used to deregister the same handler
    var cookie = parser.handle(MyEvent.class, event -> {
        System.out.println(event.startTime());
        System.out.println(event.eventThread().javaName());
        System.out.println(event.myfield());
    });
    parser.handle(MyEvent.class, event -> {
        // do something else
    });
    parser.run();

    cookie.destroy(parser);
    // this time only the second handler will be called
    parser.run();
}

```

This short program will parse the recording and invoke the registered handlers for each `custom.MyEvent` event.
The number of handlers per type is not limited; they will all be executed sequentially.
Because the handlers are known beforehand, the parser can safely skip all unreachable events and types, which massively reduces the parsing time.

As an optimization for batch-processing applications where the JFR files share the same type structure,
it is possible to use a global parsing context and reuse the generated handler code:

```java
@JfrType("custom.MyEvent")
public interface MyEvent extends JfrEvent {
    String myfield();
}

ParsingContext parsingContext = ParsingContext.create();

String path;
while ((path = getNextPath()) != null) {
    try (JafarParser parser = JafarParser.open(path, parsingContext)) {
        // registering a handler will return a cookie which can be used to deregister the same handler
        var cookie = parser.handle(MyEvent.class, event -> {
            System.out.println(event.startTime());
            System.out.println(event.eventThread().javaName());
            System.out.println(event.myfield());
        });
        parser.handle(MyEvent.class, event -> {
            // do something else
        });
        parser.run();

        cookie.destroy(parser);
        // this time only the second handler will be called
        parser.run();
    }
}

```

### Generate Jafar Type Interfaces during the build
There is an in-progress Gradle plugin for generating the Jafar type interfaces based on either the JVM runtime JFR metadata
or the metadata extracted from a JFR file.

```gradle
plugins {
    id 'io.btrace.jafar-gradle-plugin' version '0.0.1-SNAPSHOT'
}

repositories {
    mavenCentral()
    mavenLocal()
    // use sonatype snapshots - that's where the plugin artifact lives for now
    maven {
        url "https://oss.sonatype.org/content/repositories/snapshots/"
    }
}

// This will be called implicitly before project compilation.
121 | // Can also be invoked by hand to bootstrap the development 122 | generateJafarTypes { 123 | inputFile = file('/tmp/my.jfr') // extract the metadata from this JFR file; otherwise use the JVM runtime metadata 124 | outputDir = project.file('src/main/java') // generate the files under this particular source directory; if not specified 'buld/generated/sources/jafar/src/main' will be used 125 | overwrite = false // specify whether to overwrite the existing files 126 | eventTypeFilter { // process only certain event types (and transitive closure of types they depend on) 127 | it == 'jdk.ExecutionSample' 128 | } 129 | targetPackage = 'io.jafar.demo.types' // generate the types in this package 130 | 131 | } 132 | ``` -------------------------------------------------------------------------------- /build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id "com.diffplug.spotless" version "6.11.0" 3 | id 'io.github.gradle-nexus.publish-plugin' version '2.0.0' 4 | } 5 | 6 | project.version="0.0.1-SNAPSHOT" 7 | 8 | repositories { 9 | mavenLocal() 10 | mavenCentral() 11 | maven { 12 | url "https://oss.sonatype.org/content/repositories/snapshots/" 13 | } 14 | } 15 | 16 | nexusPublishing { 17 | repositories { 18 | sonatype { 19 | username = project.hasProperty("sonatype.user") ? project.property("sonatype.user") : System.getenv("SONATYPE_USERNAME") 20 | password = project.hasProperty("sonatype.password") ? project.property("sonatype.password") : System.getenv("SONATYPE_PASSWORD") 21 | } 22 | } 23 | } 24 | 25 | 26 | -------------------------------------------------------------------------------- /demo/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'java' 3 | id 'application' 4 | id "io.github.goooler.shadow" version "8.1.8" 5 | id 'io.btrace.jafar-gradle-plugin' version '0.0.1-SNAPSHOT' 6 | } 7 | 8 | repositories { 9 | mavenCentral() 10 | mavenLocal() 11 | maven { 12 | url "https://oss.sonatype.org/content/repositories/snapshots/" 13 | } 14 | } 15 | 16 | java { 17 | toolchain { 18 | languageVersion = JavaLanguageVersion.of(21) 19 | } 20 | } 21 | 22 | application { 23 | mainClass = 'io.jafar.demo.Main' 24 | } 25 | 26 | task explodedDist(type: Copy) { 27 | dependsOn installDist 28 | from("$buildDir/install/${project.name}") // Location where the application plugin installs the distribution 29 | into("$buildDir/exploded/${project.name}") // Target directory for the exploded distribution 30 | } 31 | 32 | tasks.build { 33 | dependsOn explodedDist 34 | } 35 | 36 | dependencies { 37 | implementation project(':parser') 38 | implementation 'org.jctools:jctools-core:4.0.1' 39 | implementation 'org.openjdk.jmc:flightrecorder:8.3.1' 40 | } 41 | 42 | test { 43 | useJUnitPlatform() 44 | } 45 | 46 | generateJafarTypes { 47 | overwrite = false 48 | eventTypeFilter { 49 | it == 'jdk.ExecutionSample' 50 | } 51 | targetPackage = 'io.jafar.demo.types' 52 | } -------------------------------------------------------------------------------- /demo/src/main/java/io/jafar/demo/JVMInfoEvent.java: -------------------------------------------------------------------------------- 1 | package io.jafar.demo; 2 | 3 | import io.jafar.parser.api.JfrType; 4 | 5 | @JfrType("jdk.JVMInformation") 6 | public interface JVMInfoEvent { 7 | String jvmName(); 8 | String jvmVersion(); 9 | } 10 | -------------------------------------------------------------------------------- /demo/src/main/java/io/jafar/demo/Main.java: 
-------------------------------------------------------------------------------- 1 | package io.jafar.demo; 2 | 3 | import io.jafar.demo.types.JFRExecutionSample; 4 | import io.jafar.parser.api.HandlerRegistration; 5 | import io.jafar.parser.api.JafarParser; 6 | import io.jafar.parser.api.ParsingContext; 7 | import io.jafar.parser.internal_api.ParsingContextImpl; 8 | import jdk.jfr.consumer.EventStream; 9 | import jdk.jfr.consumer.RecordedEvent; 10 | import jdk.jfr.consumer.RecordingFile; 11 | import org.openjdk.jmc.common.item.IItem; 12 | import org.openjdk.jmc.common.item.IItemCollection; 13 | import org.openjdk.jmc.common.item.IItemIterable; 14 | import org.openjdk.jmc.common.item.ItemFilters; 15 | import org.openjdk.jmc.flightrecorder.CouldNotLoadRecordingException; 16 | import org.openjdk.jmc.flightrecorder.JfrAttributes; 17 | import org.openjdk.jmc.flightrecorder.JfrLoaderToolkit; 18 | import org.openjdk.jmc.flightrecorder.jdk.JdkAttributes; 19 | 20 | import java.io.File; 21 | import java.io.IOException; 22 | import java.nio.file.Paths; 23 | import java.util.concurrent.atomic.AtomicInteger; 24 | import java.util.concurrent.atomic.LongAccumulator; 25 | 26 | public class Main { 27 | public static void main(String[] args) throws Exception { 28 | if (args.length < 2) { 29 | throw new IllegalArgumentException("Usage: "); 30 | } 31 | 32 | AtomicInteger cnt = new AtomicInteger(); 33 | LongAccumulator sum = new LongAccumulator(Long::sum, 0); 34 | File file = new File(args[1]).getAbsoluteFile(); 35 | if ("jafar".equalsIgnoreCase(args[0])) { 36 | runWithJafar(file, sum, cnt); 37 | } else if ("jmc".equalsIgnoreCase(args[0])) { 38 | runWithJmc(file, sum, cnt); 39 | } else if ("jfr".equalsIgnoreCase(args[0])) { 40 | runWithJfr(file, sum, cnt); 41 | } else if ("jfr-stream".equalsIgnoreCase(args[0])) { 42 | runWithJfrStream(file, sum, cnt); 43 | } else { 44 | throw new IllegalArgumentException("Unknown parser: " + args[0]); 45 | } 46 | System.out.println("Total events: " + cnt.get()); 47 | System.out.println("Sum of thread ids: " + sum.get()); 48 | } 49 | 50 | private static void runWithJmc(File file, LongAccumulator sum, AtomicInteger cnt) throws IOException, CouldNotLoadRecordingException { 51 | IItemCollection events = JfrLoaderToolkit.loadEvents(file); 52 | events = events.apply(ItemFilters.type("jdk.ExecutionSample")); 53 | for (IItemIterable lane : events) { 54 | var threadIdAccessor = JdkAttributes.EVENT_THREAD_ID.getAccessor(lane.getType()); 55 | var stackAccessor = JfrAttributes.EVENT_STACKTRACE.getAccessor(lane.getType()); 56 | for (IItem event : lane) { 57 | long threadId = threadIdAccessor.getMember(event).longValue(); 58 | sum.accumulate(threadId); 59 | sum.accumulate(stackAccessor.getMember(event).getFrames().size()); 60 | cnt.incrementAndGet(); 61 | } 62 | } 63 | } 64 | 65 | private static void runWithJafar(File file, LongAccumulator sum, AtomicInteger cnt) throws Exception { 66 | ParsingContext parsingContext = ParsingContext.create(); 67 | try (JafarParser p = JafarParser.open(file.getPath(), parsingContext)) { 68 | HandlerRegistration h1 = p.handle(JFRExecutionSample.class, (event, ctl) -> { 69 | if (event.sampledThread() == null) { 70 | throw new RuntimeException(); 71 | } 72 | 73 | sum.accumulate(event.sampledThread().javaThreadId()); 74 | sum.accumulate(event.stackTrace().frames().length); 75 | cnt.incrementAndGet(); 76 | }); 77 | 78 | p.run(); 79 | } 80 | // If we used the same parsing context for a different recording 81 | // the generated bytecode would be reused. 
If processing the same kind of recordings 82 | // in a loop, this is a nice optimization. 83 | // try (JafarParser p = JafarParser.open(file.getPath(), parsingContext)) { 84 | // HandlerRegistration h1 = p.handle(JFRExecutionSample.class, (event, ctl) -> { 85 | // if (event.sampledThread() == null) { 86 | // throw new RuntimeException(); 87 | // } 88 | // 89 | // sum.accumulate(event.sampledThread().javaThreadId()); 90 | // sum.accumulate(event.stackTrace().frames().length); 91 | // cnt.incrementAndGet(); 92 | // }); 93 | // 94 | // p.run(); 95 | // } 96 | } 97 | 98 | private static void runWithJfr(File file, LongAccumulator sum, AtomicInteger cnt) throws IOException, CouldNotLoadRecordingException { 99 | try (RecordingFile recording = new RecordingFile(file.toPath())) { 100 | while (recording.hasMoreEvents()) { 101 | RecordedEvent e = recording.readEvent(); 102 | if (e.getEventType().getName().equals("jdk.ExecutionSample")) { 103 | sum.accumulate(e.getThread("sampledThread").getJavaThreadId()); 104 | sum.accumulate(e.getStackTrace().getFrames().size()); 105 | cnt.incrementAndGet(); 106 | } 107 | } 108 | } 109 | } 110 | 111 | private static void runWithJfrStream(File file, LongAccumulator sum, AtomicInteger cnt) throws IOException, CouldNotLoadRecordingException { 112 | var es = EventStream.openFile(Paths.get(file.getPath())); 113 | es.setReuse(true); 114 | es.setOrdered(false); 115 | es.onEvent("jdk.ExecutionSample", e -> { 116 | sum.accumulate(e.getThread("sampledThread").getJavaThreadId()); 117 | sum.accumulate(e.getStackTrace().getFrames().size()); 118 | cnt.incrementAndGet(); 119 | }); 120 | es.start(); 121 | try { 122 | es.awaitTermination(); 123 | } catch (InterruptedException e) { 124 | Thread.currentThread().interrupt(); 125 | } 126 | } 127 | } 128 | -------------------------------------------------------------------------------- /demo/src/main/java/io/jafar/demo/Printer.java: -------------------------------------------------------------------------------- 1 | package io.jafar.demo; 2 | 3 | import io.jafar.parser.api.HandlerRegistration; 4 | import io.jafar.parser.api.JafarParser; 5 | import io.jafar.parser.api.ParsingContext; 6 | 7 | public class Printer { 8 | public static void main(String[] args) throws Exception { 9 | ParsingContext parsingContext = ParsingContext.create(); 10 | try (JafarParser p = JafarParser.open(args[0], parsingContext)) { 11 | HandlerRegistration h1 = p.handle(JVMInfoEvent.class, (event, ctl) -> { 12 | System.out.println("JVM Name: " + event.jvmName()); 13 | System.out.println("JVM Version: " + event.jvmVersion()); 14 | }); 15 | 16 | p.run(); 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /get_resources.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | HERE=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) 4 | 5 | if [ ! -f demo/src/test/resources/test-ap.jfr ]; then 6 | wget --no-check-certificate -O demo/src/test/resources/test-ap.jfr "https://www.dropbox.com/scl/fi/lp5bj8adi3l7jge9ykayr/test-ap.jfr?rlkey=28wghlmp7ge4bxnan9ccwarby&st=0kd2p1u1&dl=0" 7 | fi 8 | if [ ! -f demo/src/test/resources/test-jfr.jfr ]; then 9 | wget --no-check-certificate -O demo/src/test/resources/test-jfr.jfr "https://www.dropbox.com/scl/fi/5uhp13h9ltj38joyqmwo5/test-jfr.jfr?rlkey=p0wmznxgm7zud6xzaydled69c&st=ilfirsrg&dl=0" 10 | fi 11 | 12 | if [ ! 
-f parser/src/test/resource/test-ap.jfr ]; then 13 | ln -s ${HERE}/demo/src/test/resources/test-ap.jfr ${HERE}/parser/src/test/resources/test-ap.jfr 14 | fi 15 | -------------------------------------------------------------------------------- /gradle.properties: -------------------------------------------------------------------------------- 1 | sonatype.user=XXSEjG1P 2 | sonatype.password=P5w7F2tDqHdOO5CDCbyK66t1dV93Owtudyaibs2k30mL 3 | -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/btraceio/jafar/98667c9f1d93289f122cbf90d4976f84eb146cf1/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-8.11.1-bin.zip 4 | networkTimeout=10000 5 | validateDistributionUrl=true 6 | zipStoreBase=GRADLE_USER_HOME 7 | zipStorePath=wrapper/dists 8 | -------------------------------------------------------------------------------- /gradlew: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # 4 | # Copyright © 2015-2021 the original authors. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # https://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | # SPDX-License-Identifier: Apache-2.0 19 | # 20 | 21 | ############################################################################## 22 | # 23 | # Gradle start up script for POSIX generated by Gradle. 24 | # 25 | # Important for running: 26 | # 27 | # (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is 28 | # noncompliant, but you have some other compliant shell such as ksh or 29 | # bash, then to run this script, type that shell name before the whole 30 | # command line, like: 31 | # 32 | # ksh Gradle 33 | # 34 | # Busybox and similar reduced shells will NOT work, because this script 35 | # requires all of these POSIX shell features: 36 | # * functions; 37 | # * expansions «$var», «${var}», «${var:-default}», «${var+SET}», 38 | # «${var#prefix}», «${var%suffix}», and «$( cmd )»; 39 | # * compound commands having a testable exit status, especially «case»; 40 | # * various built-in commands including «command», «set», and «ulimit». 41 | # 42 | # Important for patching: 43 | # 44 | # (2) This script targets any POSIX shell, so it avoids extensions provided 45 | # by Bash, Ksh, etc; in particular arrays are avoided. 
46 | # 47 | # The "traditional" practice of packing multiple parameters into a 48 | # space-separated string is a well documented source of bugs and security 49 | # problems, so this is (mostly) avoided, by progressively accumulating 50 | # options in "$@", and eventually passing that to Java. 51 | # 52 | # Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, 53 | # and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; 54 | # see the in-line comments for details. 55 | # 56 | # There are tweaks for specific operating systems such as AIX, CygWin, 57 | # Darwin, MinGW, and NonStop. 58 | # 59 | # (3) This script is generated from the Groovy template 60 | # https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt 61 | # within the Gradle project. 62 | # 63 | # You can find Gradle at https://github.com/gradle/gradle/. 64 | # 65 | ############################################################################## 66 | 67 | # Attempt to set APP_HOME 68 | 69 | # Resolve links: $0 may be a link 70 | app_path=$0 71 | 72 | # Need this for daisy-chained symlinks. 73 | while 74 | APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path 75 | [ -h "$app_path" ] 76 | do 77 | ls=$( ls -ld "$app_path" ) 78 | link=${ls#*' -> '} 79 | case $link in #( 80 | /*) app_path=$link ;; #( 81 | *) app_path=$APP_HOME$link ;; 82 | esac 83 | done 84 | 85 | # This is normally unused 86 | # shellcheck disable=SC2034 87 | APP_BASE_NAME=${0##*/} 88 | # Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) 89 | APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s 90 | ' "$PWD" ) || exit 91 | 92 | # Use the maximum available, or set MAX_FD != -1 to use that value. 93 | MAX_FD=maximum 94 | 95 | warn () { 96 | echo "$*" 97 | } >&2 98 | 99 | die () { 100 | echo 101 | echo "$*" 102 | echo 103 | exit 1 104 | } >&2 105 | 106 | # OS specific support (must be 'true' or 'false'). 107 | cygwin=false 108 | msys=false 109 | darwin=false 110 | nonstop=false 111 | case "$( uname )" in #( 112 | CYGWIN* ) cygwin=true ;; #( 113 | Darwin* ) darwin=true ;; #( 114 | MSYS* | MINGW* ) msys=true ;; #( 115 | NONSTOP* ) nonstop=true ;; 116 | esac 117 | 118 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 119 | 120 | 121 | # Determine the Java command to use to start the JVM. 122 | if [ -n "$JAVA_HOME" ] ; then 123 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 124 | # IBM's JDK on AIX uses strange locations for the executables 125 | JAVACMD=$JAVA_HOME/jre/sh/java 126 | else 127 | JAVACMD=$JAVA_HOME/bin/java 128 | fi 129 | if [ ! -x "$JAVACMD" ] ; then 130 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 131 | 132 | Please set the JAVA_HOME variable in your environment to match the 133 | location of your Java installation." 134 | fi 135 | else 136 | JAVACMD=java 137 | if ! command -v java >/dev/null 2>&1 138 | then 139 | die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 140 | 141 | Please set the JAVA_HOME variable in your environment to match the 142 | location of your Java installation." 143 | fi 144 | fi 145 | 146 | # Increase the maximum file descriptors if we can. 147 | if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then 148 | case $MAX_FD in #( 149 | max*) 150 | # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. 
151 | # shellcheck disable=SC2039,SC3045 152 | MAX_FD=$( ulimit -H -n ) || 153 | warn "Could not query maximum file descriptor limit" 154 | esac 155 | case $MAX_FD in #( 156 | '' | soft) :;; #( 157 | *) 158 | # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. 159 | # shellcheck disable=SC2039,SC3045 160 | ulimit -n "$MAX_FD" || 161 | warn "Could not set maximum file descriptor limit to $MAX_FD" 162 | esac 163 | fi 164 | 165 | # Collect all arguments for the java command, stacking in reverse order: 166 | # * args from the command line 167 | # * the main class name 168 | # * -classpath 169 | # * -D...appname settings 170 | # * --module-path (only if needed) 171 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 172 | 173 | # For Cygwin or MSYS, switch paths to Windows format before running java 174 | if "$cygwin" || "$msys" ; then 175 | APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) 176 | CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) 177 | 178 | JAVACMD=$( cygpath --unix "$JAVACMD" ) 179 | 180 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 181 | for arg do 182 | if 183 | case $arg in #( 184 | -*) false ;; # don't mess with options #( 185 | /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath 186 | [ -e "$t" ] ;; #( 187 | *) false ;; 188 | esac 189 | then 190 | arg=$( cygpath --path --ignore --mixed "$arg" ) 191 | fi 192 | # Roll the args list around exactly as many times as the number of 193 | # args, so each arg winds up back in the position where it started, but 194 | # possibly modified. 195 | # 196 | # NB: a `for` loop captures its iteration list before it begins, so 197 | # changing the positional parameters here affects neither the number of 198 | # iterations, nor the values presented in `arg`. 199 | shift # remove old arg 200 | set -- "$@" "$arg" # push replacement arg 201 | done 202 | fi 203 | 204 | 205 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 206 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' 207 | 208 | # Collect all arguments for the java command: 209 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments, 210 | # and any embedded shellness will be escaped. 211 | # * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be 212 | # treated as '${Hostname}' itself on the command line. 213 | 214 | set -- \ 215 | "-Dorg.gradle.appname=$APP_BASE_NAME" \ 216 | -classpath "$CLASSPATH" \ 217 | org.gradle.wrapper.GradleWrapperMain \ 218 | "$@" 219 | 220 | # Stop when "xargs" is not available. 221 | if ! command -v xargs >/dev/null 2>&1 222 | then 223 | die "xargs is not available" 224 | fi 225 | 226 | # Use "xargs" to parse quoted args. 227 | # 228 | # With -n1 it outputs one arg per line, with the quotes and backslashes removed. 229 | # 230 | # In Bash we could simply go: 231 | # 232 | # readarray ARGS < <( xargs -n1 <<<"$var" ) && 233 | # set -- "${ARGS[@]}" "$@" 234 | # 235 | # but POSIX shell has neither arrays nor command substitution, so instead we 236 | # post-process each arg (as a line of input to sed) to backslash-escape any 237 | # character that might be a shell metacharacter, then use eval to reverse 238 | # that process (while maintaining the separation between arguments), and wrap 239 | # the whole thing up as a single "set" statement. 
240 | # 241 | # This will of course break if any of these variables contains a newline or 242 | # an unmatched quote. 243 | # 244 | 245 | eval "set -- $( 246 | printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | 247 | xargs -n1 | 248 | sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | 249 | tr '\n' ' ' 250 | )" '"$@"' 251 | 252 | exec "$JAVACMD" "$@" 253 | -------------------------------------------------------------------------------- /gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | @rem SPDX-License-Identifier: Apache-2.0 17 | @rem 18 | 19 | @if "%DEBUG%"=="" @echo off 20 | @rem ########################################################################## 21 | @rem 22 | @rem Gradle startup script for Windows 23 | @rem 24 | @rem ########################################################################## 25 | 26 | @rem Set local scope for the variables with windows NT shell 27 | if "%OS%"=="Windows_NT" setlocal 28 | 29 | set DIRNAME=%~dp0 30 | if "%DIRNAME%"=="" set DIRNAME=. 31 | @rem This is normally unused 32 | set APP_BASE_NAME=%~n0 33 | set APP_HOME=%DIRNAME% 34 | 35 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 36 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 37 | 38 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 39 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 40 | 41 | @rem Find java.exe 42 | if defined JAVA_HOME goto findJavaFromJavaHome 43 | 44 | set JAVA_EXE=java.exe 45 | %JAVA_EXE% -version >NUL 2>&1 46 | if %ERRORLEVEL% equ 0 goto execute 47 | 48 | echo. 1>&2 49 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2 50 | echo. 1>&2 51 | echo Please set the JAVA_HOME variable in your environment to match the 1>&2 52 | echo location of your Java installation. 1>&2 53 | 54 | goto fail 55 | 56 | :findJavaFromJavaHome 57 | set JAVA_HOME=%JAVA_HOME:"=% 58 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 59 | 60 | if exist "%JAVA_EXE%" goto execute 61 | 62 | echo. 1>&2 63 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2 64 | echo. 1>&2 65 | echo Please set the JAVA_HOME variable in your environment to match the 1>&2 66 | echo location of your Java installation. 
1>&2 67 | 68 | goto fail 69 | 70 | :execute 71 | @rem Setup the command line 72 | 73 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 74 | 75 | 76 | @rem Execute Gradle 77 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 78 | 79 | :end 80 | @rem End local scope for the variables with windows NT shell 81 | if %ERRORLEVEL% equ 0 goto mainEnd 82 | 83 | :fail 84 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 85 | rem the _cmd.exe /c_ return code! 86 | set EXIT_CODE=%ERRORLEVEL% 87 | if %EXIT_CODE% equ 0 set EXIT_CODE=1 88 | if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% 89 | exit /b %EXIT_CODE% 90 | 91 | :mainEnd 92 | if "%OS%"=="Windows_NT" endlocal 93 | 94 | :omega 95 | -------------------------------------------------------------------------------- /jafar-gradle-plugin/build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id 'java-gradle-plugin' 3 | id 'groovy' 4 | id 'maven-publish' 5 | id 'io.github.gradle-nexus.publish-plugin' version '2.0.0' 6 | } 7 | 8 | group = "io.btrace" 9 | version = "0.0.1-SNAPSHOT" 10 | 11 | repositories { 12 | mavenLocal() 13 | mavenCentral() 14 | maven { 15 | url "https://oss.sonatype.org/content/repositories/snapshots/" 16 | } 17 | maven { 18 | url "https://oss.sonatype.org/content/repositories/snapshots/" 19 | } 20 | } 21 | 22 | java { 23 | toolchain { 24 | languageVersion = JavaLanguageVersion.of(21) 25 | } 26 | } 27 | 28 | dependencies { 29 | implementation gradleApi() // Provides the Gradle API, including Plugin 30 | implementation localGroovy() 31 | 32 | implementation 'io.btrace:jafar-parser:0.0.1-SNAPSHOT' 33 | implementation 'org.jctools:jctools-core:4.0.1' 34 | implementation 'com.github.ben-manes.caffeine:caffeine:3.1.5' 35 | implementation 'org.openjdk.jmc:flightrecorder:8.3.1' 36 | } 37 | 38 | gradlePlugin { 39 | plugins { 40 | create("jafarGradlePlugin") { 41 | id = "io.btrace.jafar-gradle-plugin" 42 | implementationClass = "io.jafar.gradle.TypeGeneratorPlugin" 43 | } 44 | } 45 | } 46 | 47 | test { 48 | useJUnitPlatform() 49 | } 50 | 51 | tasks.register('sourcesJar', Jar) { 52 | from sourceSets.main.allJava 53 | archiveBaseName = libraryName 54 | archiveClassifier = "sources" 55 | archiveVersion = component_version 56 | } 57 | 58 | tasks.register('javadocJar', Jar) { 59 | dependsOn javadoc 60 | archiveBaseName = libraryName 61 | archiveClassifier = 'javadoc' 62 | archiveVersion = component_version 63 | from javadoc.destinationDir 64 | } 65 | 66 | publishing { 67 | publications { 68 | pluginMaven(MavenPublication) { 69 | artifactId = "jafar-gradle-plugin" // Set your custom artifact ID 70 | } 71 | } 72 | } 73 | 74 | nexusPublishing { 75 | repositories { 76 | sonatype { 77 | username = project.hasProperty("sonatype.user") ? project.property("sonatype.user") : System.getenv("SONATYPE_USERNAME") 78 | password = project.hasProperty("sonatype.password") ? 
project.property("sonatype.password") : System.getenv("SONATYPE_PASSWORD") 79 | } 80 | } 81 | } 82 | 83 | -------------------------------------------------------------------------------- /jafar-gradle-plugin/gradle.properties: -------------------------------------------------------------------------------- 1 | sonatype.user=XXSEjG1P 2 | sonatype.password=P5w7F2tDqHdOO5CDCbyK66t1dV93Owtudyaibs2k30mL 3 | -------------------------------------------------------------------------------- /jafar-gradle-plugin/gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/btraceio/jafar/98667c9f1d93289f122cbf90d4976f84eb146cf1/jafar-gradle-plugin/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /jafar-gradle-plugin/gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-8.11.1-bin.zip 4 | networkTimeout=10000 5 | validateDistributionUrl=true 6 | zipStoreBase=GRADLE_USER_HOME 7 | zipStorePath=wrapper/dists 8 | -------------------------------------------------------------------------------- /jafar-gradle-plugin/gradlew: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # 4 | # Copyright © 2015-2021 the original authors. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # https://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | # SPDX-License-Identifier: Apache-2.0 19 | # 20 | 21 | ############################################################################## 22 | # 23 | # Gradle start up script for POSIX generated by Gradle. 24 | # 25 | # Important for running: 26 | # 27 | # (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is 28 | # noncompliant, but you have some other compliant shell such as ksh or 29 | # bash, then to run this script, type that shell name before the whole 30 | # command line, like: 31 | # 32 | # ksh Gradle 33 | # 34 | # Busybox and similar reduced shells will NOT work, because this script 35 | # requires all of these POSIX shell features: 36 | # * functions; 37 | # * expansions «$var», «${var}», «${var:-default}», «${var+SET}», 38 | # «${var#prefix}», «${var%suffix}», and «$( cmd )»; 39 | # * compound commands having a testable exit status, especially «case»; 40 | # * various built-in commands including «command», «set», and «ulimit». 41 | # 42 | # Important for patching: 43 | # 44 | # (2) This script targets any POSIX shell, so it avoids extensions provided 45 | # by Bash, Ksh, etc; in particular arrays are avoided. 
46 | # 47 | # The "traditional" practice of packing multiple parameters into a 48 | # space-separated string is a well documented source of bugs and security 49 | # problems, so this is (mostly) avoided, by progressively accumulating 50 | # options in "$@", and eventually passing that to Java. 51 | # 52 | # Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, 53 | # and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; 54 | # see the in-line comments for details. 55 | # 56 | # There are tweaks for specific operating systems such as AIX, CygWin, 57 | # Darwin, MinGW, and NonStop. 58 | # 59 | # (3) This script is generated from the Groovy template 60 | # https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt 61 | # within the Gradle project. 62 | # 63 | # You can find Gradle at https://github.com/gradle/gradle/. 64 | # 65 | ############################################################################## 66 | 67 | # Attempt to set APP_HOME 68 | 69 | # Resolve links: $0 may be a link 70 | app_path=$0 71 | 72 | # Need this for daisy-chained symlinks. 73 | while 74 | APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path 75 | [ -h "$app_path" ] 76 | do 77 | ls=$( ls -ld "$app_path" ) 78 | link=${ls#*' -> '} 79 | case $link in #( 80 | /*) app_path=$link ;; #( 81 | *) app_path=$APP_HOME$link ;; 82 | esac 83 | done 84 | 85 | # This is normally unused 86 | # shellcheck disable=SC2034 87 | APP_BASE_NAME=${0##*/} 88 | # Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) 89 | APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s 90 | ' "$PWD" ) || exit 91 | 92 | # Use the maximum available, or set MAX_FD != -1 to use that value. 93 | MAX_FD=maximum 94 | 95 | warn () { 96 | echo "$*" 97 | } >&2 98 | 99 | die () { 100 | echo 101 | echo "$*" 102 | echo 103 | exit 1 104 | } >&2 105 | 106 | # OS specific support (must be 'true' or 'false'). 107 | cygwin=false 108 | msys=false 109 | darwin=false 110 | nonstop=false 111 | case "$( uname )" in #( 112 | CYGWIN* ) cygwin=true ;; #( 113 | Darwin* ) darwin=true ;; #( 114 | MSYS* | MINGW* ) msys=true ;; #( 115 | NONSTOP* ) nonstop=true ;; 116 | esac 117 | 118 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 119 | 120 | 121 | # Determine the Java command to use to start the JVM. 122 | if [ -n "$JAVA_HOME" ] ; then 123 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 124 | # IBM's JDK on AIX uses strange locations for the executables 125 | JAVACMD=$JAVA_HOME/jre/sh/java 126 | else 127 | JAVACMD=$JAVA_HOME/bin/java 128 | fi 129 | if [ ! -x "$JAVACMD" ] ; then 130 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 131 | 132 | Please set the JAVA_HOME variable in your environment to match the 133 | location of your Java installation." 134 | fi 135 | else 136 | JAVACMD=java 137 | if ! command -v java >/dev/null 2>&1 138 | then 139 | die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 140 | 141 | Please set the JAVA_HOME variable in your environment to match the 142 | location of your Java installation." 143 | fi 144 | fi 145 | 146 | # Increase the maximum file descriptors if we can. 147 | if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then 148 | case $MAX_FD in #( 149 | max*) 150 | # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. 
151 | # shellcheck disable=SC2039,SC3045 152 | MAX_FD=$( ulimit -H -n ) || 153 | warn "Could not query maximum file descriptor limit" 154 | esac 155 | case $MAX_FD in #( 156 | '' | soft) :;; #( 157 | *) 158 | # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. 159 | # shellcheck disable=SC2039,SC3045 160 | ulimit -n "$MAX_FD" || 161 | warn "Could not set maximum file descriptor limit to $MAX_FD" 162 | esac 163 | fi 164 | 165 | # Collect all arguments for the java command, stacking in reverse order: 166 | # * args from the command line 167 | # * the main class name 168 | # * -classpath 169 | # * -D...appname settings 170 | # * --module-path (only if needed) 171 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 172 | 173 | # For Cygwin or MSYS, switch paths to Windows format before running java 174 | if "$cygwin" || "$msys" ; then 175 | APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) 176 | CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) 177 | 178 | JAVACMD=$( cygpath --unix "$JAVACMD" ) 179 | 180 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 181 | for arg do 182 | if 183 | case $arg in #( 184 | -*) false ;; # don't mess with options #( 185 | /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath 186 | [ -e "$t" ] ;; #( 187 | *) false ;; 188 | esac 189 | then 190 | arg=$( cygpath --path --ignore --mixed "$arg" ) 191 | fi 192 | # Roll the args list around exactly as many times as the number of 193 | # args, so each arg winds up back in the position where it started, but 194 | # possibly modified. 195 | # 196 | # NB: a `for` loop captures its iteration list before it begins, so 197 | # changing the positional parameters here affects neither the number of 198 | # iterations, nor the values presented in `arg`. 199 | shift # remove old arg 200 | set -- "$@" "$arg" # push replacement arg 201 | done 202 | fi 203 | 204 | 205 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 206 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' 207 | 208 | # Collect all arguments for the java command: 209 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments, 210 | # and any embedded shellness will be escaped. 211 | # * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be 212 | # treated as '${Hostname}' itself on the command line. 213 | 214 | set -- \ 215 | "-Dorg.gradle.appname=$APP_BASE_NAME" \ 216 | -classpath "$CLASSPATH" \ 217 | org.gradle.wrapper.GradleWrapperMain \ 218 | "$@" 219 | 220 | # Stop when "xargs" is not available. 221 | if ! command -v xargs >/dev/null 2>&1 222 | then 223 | die "xargs is not available" 224 | fi 225 | 226 | # Use "xargs" to parse quoted args. 227 | # 228 | # With -n1 it outputs one arg per line, with the quotes and backslashes removed. 229 | # 230 | # In Bash we could simply go: 231 | # 232 | # readarray ARGS < <( xargs -n1 <<<"$var" ) && 233 | # set -- "${ARGS[@]}" "$@" 234 | # 235 | # but POSIX shell has neither arrays nor command substitution, so instead we 236 | # post-process each arg (as a line of input to sed) to backslash-escape any 237 | # character that might be a shell metacharacter, then use eval to reverse 238 | # that process (while maintaining the separation between arguments), and wrap 239 | # the whole thing up as a single "set" statement. 
240 | # 241 | # This will of course break if any of these variables contains a newline or 242 | # an unmatched quote. 243 | # 244 | 245 | eval "set -- $( 246 | printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | 247 | xargs -n1 | 248 | sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | 249 | tr '\n' ' ' 250 | )" '"$@"' 251 | 252 | exec "$JAVACMD" "$@" 253 | -------------------------------------------------------------------------------- /jafar-gradle-plugin/gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | @rem SPDX-License-Identifier: Apache-2.0 17 | @rem 18 | 19 | @if "%DEBUG%"=="" @echo off 20 | @rem ########################################################################## 21 | @rem 22 | @rem Gradle startup script for Windows 23 | @rem 24 | @rem ########################################################################## 25 | 26 | @rem Set local scope for the variables with windows NT shell 27 | if "%OS%"=="Windows_NT" setlocal 28 | 29 | set DIRNAME=%~dp0 30 | if "%DIRNAME%"=="" set DIRNAME=. 31 | @rem This is normally unused 32 | set APP_BASE_NAME=%~n0 33 | set APP_HOME=%DIRNAME% 34 | 35 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 36 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 37 | 38 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 39 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 40 | 41 | @rem Find java.exe 42 | if defined JAVA_HOME goto findJavaFromJavaHome 43 | 44 | set JAVA_EXE=java.exe 45 | %JAVA_EXE% -version >NUL 2>&1 46 | if %ERRORLEVEL% equ 0 goto execute 47 | 48 | echo. 1>&2 49 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2 50 | echo. 1>&2 51 | echo Please set the JAVA_HOME variable in your environment to match the 1>&2 52 | echo location of your Java installation. 1>&2 53 | 54 | goto fail 55 | 56 | :findJavaFromJavaHome 57 | set JAVA_HOME=%JAVA_HOME:"=% 58 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 59 | 60 | if exist "%JAVA_EXE%" goto execute 61 | 62 | echo. 1>&2 63 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2 64 | echo. 1>&2 65 | echo Please set the JAVA_HOME variable in your environment to match the 1>&2 66 | echo location of your Java installation. 
1>&2 67 | 68 | goto fail 69 | 70 | :execute 71 | @rem Setup the command line 72 | 73 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 74 | 75 | 76 | @rem Execute Gradle 77 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 78 | 79 | :end 80 | @rem End local scope for the variables with windows NT shell 81 | if %ERRORLEVEL% equ 0 goto mainEnd 82 | 83 | :fail 84 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 85 | rem the _cmd.exe /c_ return code! 86 | set EXIT_CODE=%ERRORLEVEL% 87 | if %EXIT_CODE% equ 0 set EXIT_CODE=1 88 | if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% 89 | exit /b %EXIT_CODE% 90 | 91 | :mainEnd 92 | if "%OS%"=="Windows_NT" endlocal 93 | 94 | :omega 95 | -------------------------------------------------------------------------------- /jafar-gradle-plugin/settings.gradle: -------------------------------------------------------------------------------- 1 | /* 2 | * This file was generated by the Gradle 'init' task. 3 | * 4 | * The settings file is used to specify which projects to include in your build. 5 | * 6 | * Detailed information about configuring a multi-project build in Gradle can be found 7 | * in the user manual at https://docs.gradle.org/8.0/userguide/multi_project_builds.html 8 | */ 9 | 10 | pluginManagement { 11 | repositories { 12 | mavenLocal() // Look for plugins in Maven Local 13 | maven { 14 | url "https://oss.sonatype.org/content/repositories/snapshots/" 15 | } 16 | gradlePluginPortal() // Fall back to Gradle Plugin Portal 17 | } 18 | } 19 | 20 | rootProject.name = 'gradle-plugin' -------------------------------------------------------------------------------- /jafar-gradle-plugin/src/main/groovy/io/jafar/gradle/TypeGeneratorPlugin.groovy: -------------------------------------------------------------------------------- 1 | package io.jafar.gradle 2 | 3 | import io.jafar.utils.TypeGenerator 4 | import org.gradle.api.Plugin 5 | import org.gradle.api.Project 6 | import org.gradle.api.provider.Property 7 | import org.gradle.api.file.DirectoryProperty 8 | 9 | import java.util.function.Predicate 10 | 11 | class TypeGeneratorPlugin implements Plugin { 12 | @Override 13 | void apply(Project project) { 14 | def extension = project.extensions.create('generateJafarTypes', GenerateJafarTypesExtension, project) 15 | 16 | project.afterEvaluate { 17 | if (project.hasProperty('jafar.input')) { 18 | extension.inputFile.set(project.file(project.property('jafar.input'))) 19 | } 20 | } 21 | 22 | // Define the directory for generated sources 23 | def generatedSourcesDir = project.file("${project.buildDir}/generated/sources/jafar/src/main") 24 | 25 | // Register a task for generating sources 26 | project.tasks.register('generateJafarTypes') { 27 | group = "build" 28 | description = "Generate Jafar type sources into the 'generated/sources' folder" 29 | 30 | // Define inputs and outputs for task up-to-date checks 31 | inputs.file(extension.inputFile).optional(true) 32 | outputs.dir(extension.outputDir.orElse(project.layout.buildDirectory.dir("generated/sources/jafar/src/main"))) 33 | 34 | doLast { 35 | if (!extension.inputFile.isPresent()) { 36 | println "No input file provided. Using runtime provided JFR type definitions." 
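// In this branch `input` below stays null, so TypeGenerator receives a null recording path and is presumably expected to fall back to the JFR type metadata of the running JVM.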
37 | } else { 38 | println "Running TypeGenerator with input file ${extension.inputFile.get()} to generate sources into ${generatedSourcesDir}" 39 | } 40 | 41 | def input = extension.inputFile.isPresent() ? extension.inputFile.get() : null 42 | def output = extension.outputDir.orElse(project.layout.buildDirectory.dir("generated/sources/jafar/src/main")).get().asFile 43 | def overwrite = extension.overwrite.getOrElse(false) 44 | def targetPackage = extension.targetPackage.getOrElse("io.jafar.parser.api.types") 45 | 46 | 47 | // Ensure output directory exists 48 | output.mkdirs() 49 | 50 | Predicate predicate = null 51 | if (extension.eventTypeFilter.isPresent()) { 52 | def filterClosure = extension.eventTypeFilter.get() 53 | predicate = filterClosure as Predicate // Convert Closure to Predicate 54 | } 55 | 56 | // Instantiate and execute TypeGenerator 57 | def generator = new TypeGenerator(input?.toPath(), output.toPath(), targetPackage, overwrite, predicate) 58 | generator.generate() 59 | } 60 | } 61 | 62 | // Add generated sources to the main source set 63 | project.afterEvaluate { 64 | project.sourceSets.main.java.srcDir(generatedSourcesDir) 65 | project.tasks.named('compileJava').configure { 66 | dependsOn 'generateJafarTypes' 67 | } 68 | } 69 | } 70 | 71 | // Extension to configure the input file 72 | static class GenerateJafarTypesExtension { 73 | final Property inputFile 74 | final DirectoryProperty outputDir 75 | final Property targetPackage 76 | final Property overwrite 77 | final Property> eventTypeFilter 78 | 79 | GenerateJafarTypesExtension(Project project) { 80 | inputFile = project.objects.property(File) 81 | outputDir = project.objects.directoryProperty() 82 | targetPackage = project.objects.property(String) 83 | overwrite = project.objects.property(Boolean) 84 | eventTypeFilter = project.objects.property(Closure) 85 | } 86 | 87 | void eventTypeFilter(Closure eventTypeFilterClosure) { 88 | this.eventTypeFilter.set(eventTypeFilterClosure) 89 | } 90 | } 91 | } -------------------------------------------------------------------------------- /parser/build.gradle: -------------------------------------------------------------------------------- 1 | import jdk.jfr.FlightRecorder 2 | 3 | plugins { 4 | id("io.github.goooler.shadow") version "8.1.8" 5 | id 'java' 6 | id 'maven-publish' 7 | } 8 | 9 | def libraryName="jafar-parser" 10 | def component_version = project.hasProperty("jafar_version") ? 
project.jafar_version : rootProject.version 11 | 12 | repositories { 13 | mavenLocal() 14 | mavenCentral() 15 | maven { 16 | url "https://oss.sonatype.org/content/repositories/snapshots/" 17 | } 18 | } 19 | 20 | java { 21 | toolchain { 22 | languageVersion = JavaLanguageVersion.of(21) 23 | } 24 | } 25 | 26 | dependencies { 27 | implementation 'org.slf4j:slf4j-api:2.0.5' 28 | implementation 'org.slf4j:slf4j-simple:2.0.5' 29 | implementation 'it.unimi.dsi:fastutil:8.5.12' 30 | implementation 'org.ow2.asm:asm:9.7.1' 31 | 32 | testImplementation 'org.openjdk.jmc:flightrecorder.writer:8.2.1' 33 | testImplementation 'org.mockito:mockito-core:5.12.0' 34 | testImplementation 'org.mockito:mockito-junit-jupiter:5.12.0' 35 | testImplementation 'org.junit.jupiter:junit-jupiter-api:5.9.2' 36 | testImplementation 'org.junit.jupiter:junit-jupiter-engine:5.9.2' 37 | testImplementation 'org.junit.jupiter:junit-jupiter-params:5.9.2' 38 | } 39 | 40 | test { 41 | // jvmArgs '-Xmx8g', '-Xms1g', '-XX:+HeapDumpOnOutOfMemoryError', '-XX:HeapDumpPath=build/heapdump.hprof' 42 | jvmArgs '-Xmx8g', '-Xms1g' 43 | useJUnitPlatform() 44 | } 45 | 46 | tasks.register('sourcesJar', Jar) { 47 | from sourceSets.main.allJava 48 | archiveBaseName = libraryName 49 | archiveClassifier = "sources" 50 | archiveVersion = component_version 51 | } 52 | 53 | tasks.register('javadocJar', Jar) { 54 | dependsOn javadoc 55 | archiveBaseName = libraryName 56 | archiveClassifier = 'javadoc' 57 | archiveVersion = component_version 58 | from javadoc.destinationDir 59 | } 60 | 61 | publishing { 62 | publications { 63 | assembled(MavenPublication) { publication -> 64 | publication.groupId = 'io.btrace' 65 | publication.artifactId = 'jafar-parser' 66 | publication.version = component_version 67 | 68 | publication.artifact shadowJar 69 | publication.artifact sourcesJar 70 | publication.artifact javadocJar 71 | } 72 | } 73 | } 74 | 75 | shadowJar { 76 | group 'Build' 77 | archiveBaseName = libraryName 78 | archiveVersion = component_version 79 | archiveClassifier.set('') 80 | 81 | relocate 'it.unimi.dsi.fastutil', 'io.jafar.libs.it.unimi.dsi.fastutil' 82 | relocate 'org.objectweb.asm', 'io.jafar.libs.org.objectweb.asm' 83 | relocate 'org.slf4j', 'io.jafar.libs.org.slf4j' 84 | } -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/AbstractEvent.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser; 2 | 3 | import io.jafar.parser.internal_api.RecordingParserContext; 4 | import io.jafar.parser.internal_api.RecordingStream; 5 | 6 | public abstract class AbstractEvent { 7 | private final RecordingParserContext context; 8 | 9 | protected AbstractEvent(RecordingStream stream) { 10 | this.context = stream.getContext(); 11 | } 12 | 13 | public final RecordingParserContext getContext() { 14 | return context; 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/JafarParserImpl.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser; 2 | 3 | import io.jafar.parser.api.HandlerRegistration; 4 | import io.jafar.parser.api.JafarParser; 5 | import io.jafar.parser.api.JfrIgnore; 6 | import io.jafar.parser.api.JfrType; 7 | import io.jafar.parser.api.JFRHandler; 8 | import io.jafar.parser.api.ParsingContext; 9 | import io.jafar.parser.internal_api.ParsingContextImpl; 10 | import 
io.jafar.parser.internal_api.CheckpointEvent; 11 | import io.jafar.parser.internal_api.ChunkHeader; 12 | import io.jafar.parser.internal_api.ChunkParserListener; 13 | import io.jafar.parser.internal_api.RecordingParserContext; 14 | import io.jafar.parser.internal_api.RecordingStream; 15 | import io.jafar.parser.internal_api.StreamingChunkParser; 16 | import io.jafar.parser.internal_api.metadata.MetadataClass; 17 | import io.jafar.parser.internal_api.metadata.MetadataEvent; 18 | import io.jafar.utils.CustomByteBuffer; 19 | import it.unimi.dsi.fastutil.ints.Int2ObjectMap; 20 | import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; 21 | import it.unimi.dsi.fastutil.longs.Long2ObjectMap; 22 | import it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap; 23 | 24 | import java.io.IOException; 25 | import java.lang.ref.WeakReference; 26 | import java.lang.reflect.Method; 27 | import java.nio.file.Path; 28 | import java.util.ArrayList; 29 | import java.util.HashMap; 30 | import java.util.List; 31 | import java.util.Map; 32 | 33 | public final class JafarParserImpl implements JafarParser { 34 | // private record Handlers(MethodHandle ctr, MethodHandle skip) {} 35 | 36 | private final class HandlerRegistrationImpl implements HandlerRegistration { 37 | private final WeakReference> clzRef; 38 | private final WeakReference cookieRef; 39 | HandlerRegistrationImpl(Class clz, JafarParser cookie) { 40 | this.clzRef = new WeakReference<>(clz); 41 | this.cookieRef = new WeakReference<>(cookie); 42 | } 43 | 44 | @Override 45 | public void destroy(JafarParser cookie) { 46 | if (cookie != null && cookie.equals(cookieRef.get())) { 47 | Class clz = clzRef.get(); 48 | if (clz != null) { 49 | handlerMap.remove(clz); 50 | 51 | handlerMap.keySet().forEach(JafarParserImpl.this::addDeserializer); 52 | } 53 | } 54 | } 55 | } 56 | private final StreamingChunkParser parser; 57 | private final Path recording; 58 | 59 | private final Map, List>> handlerMap = new HashMap<>(); 60 | private final Int2ObjectMap>> chunkTypeClassMap = new Int2ObjectOpenHashMap<>(); 61 | 62 | private final Map> globalHandlerMap = new HashMap<>(); 63 | 64 | private boolean closed = false; 65 | 66 | public JafarParserImpl(Path recording, ParsingContextImpl parsingContext) { 67 | this.parser = new StreamingChunkParser(parsingContext.newRecordingParserContext()); 68 | this.recording = recording; 69 | } 70 | 71 | @Override 72 | public HandlerRegistration handle(Class clz, JFRHandler handler) { 73 | addDeserializer(clz); 74 | handlerMap.computeIfAbsent(clz, k -> new ArrayList<>()).add(new JFRHandler.Impl<>(clz, handler)); 75 | 76 | return new HandlerRegistrationImpl<>(clz, this); 77 | } 78 | 79 | private void addDeserializer(Class clz) { 80 | if (clz.isArray()) { 81 | clz = clz.getComponentType(); 82 | } 83 | boolean isPrimitive = clz.isPrimitive() || clz.isAssignableFrom(String.class); 84 | 85 | if (!isPrimitive && !clz.isInterface()) { 86 | throw new RuntimeException("JFR type handler must be an interface: " + clz.getName()); 87 | } 88 | String typeName = clz.getName(); 89 | if (!isPrimitive) { 90 | JfrType typeAnnotation = clz.getAnnotation(JfrType.class); 91 | if (typeAnnotation == null) { 92 | throw new RuntimeException("JFR type annotation missing on class: " + clz.getName()); 93 | } 94 | typeName = typeAnnotation.value(); 95 | } 96 | 97 | if (globalHandlerMap.containsKey(typeName)) { 98 | return; 99 | } 100 | globalHandlerMap.put(typeName, clz); 101 | if (!isPrimitive) { 102 | Class superClass = clz.getSuperclass(); 103 | if (superClass != null && 
superClass.isInterface()) { 104 | addDeserializer(superClass); 105 | } 106 | for (Method m : clz.getMethods()) { 107 | if (m.getAnnotation(JfrIgnore.class) == null) { 108 | addDeserializer(m.getReturnType()); 109 | } 110 | } 111 | } 112 | } 113 | 114 | @Override 115 | public void run() throws IOException { 116 | if (closed) { 117 | throw new IOException("Parser is closed"); 118 | } 119 | // parse JFR and run handlers 120 | parser.parse(recording, new ChunkParserListener() { 121 | @Override 122 | public void onRecordingStart(RecordingParserContext context) { 123 | if (!globalHandlerMap.isEmpty()) { 124 | context.setTypeFilter(t -> t!= null && globalHandlerMap.containsKey(t.getName())); 125 | } 126 | } 127 | 128 | @Override 129 | public boolean onChunkStart(int chunkIndex, ChunkHeader header, RecordingParserContext context) { 130 | if (!globalHandlerMap.isEmpty()) { 131 | synchronized (this) { 132 | context.setClassTypeMap(chunkTypeClassMap.computeIfAbsent(chunkIndex, k -> new Long2ObjectOpenHashMap<>())); 133 | context.addTargetTypeMap(globalHandlerMap); 134 | } 135 | return true; 136 | } 137 | return false; 138 | } 139 | 140 | @Override 141 | public boolean onChunkEnd(int chunkIndex, boolean skipped) { 142 | return true; 143 | } 144 | 145 | @Override 146 | public boolean onMetadata(MetadataEvent metadata) { 147 | Long2ObjectMap> typeClassMap = metadata.getContext().getClassTypeMap(); 148 | 149 | RecordingParserContext context = metadata.getContext(); 150 | // typeClassMap must be fully initialized before trying to resolve/generate the handlers 151 | for (MetadataClass clz : metadata.getClasses()) { 152 | Class targetClass = context.getClassTargetType(clz.getName()); 153 | if (targetClass != null) { 154 | typeClassMap.putIfAbsent(clz.getId(), targetClass); 155 | } 156 | } 157 | 158 | return true; 159 | } 160 | 161 | @Override 162 | public boolean onCheckpoint(CheckpointEvent checkpoint) { 163 | return ChunkParserListener.super.onCheckpoint(checkpoint); 164 | } 165 | 166 | @Override 167 | public boolean onEvent(long typeId, RecordingStream stream, long payloadSize) { 168 | Long2ObjectMap> typeClassMap = stream.getContext().getClassTypeMap(); 169 | Class typeClz = typeClassMap.get(typeId); 170 | if (typeClz != null) { 171 | if (handlerMap.containsKey(typeClz)) { 172 | MetadataClass clz = stream.getContext().getMetadataLookup().getClass(typeId); 173 | Object deserialized = clz.read(stream); 174 | for (JFRHandler.Impl handler : handlerMap.get(typeClz)) { 175 | handler.handle(deserialized, null); 176 | } 177 | } 178 | } 179 | return true; 180 | }; 181 | }); 182 | } 183 | 184 | @Override 185 | public void close() throws Exception { 186 | if (!closed) { 187 | closed = true; 188 | 189 | parser.close(); 190 | chunkTypeClassMap.clear(); 191 | handlerMap.clear(); 192 | globalHandlerMap.clear(); 193 | } 194 | } 195 | 196 | private static CustomByteBuffer openJfrStream(Path jfrFile) { 197 | try { 198 | return CustomByteBuffer.map(jfrFile, Integer.MAX_VALUE); 199 | } catch (IOException e) { 200 | throw new RuntimeException(e); 201 | } 202 | } 203 | } 204 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/MutableConstantPool.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser; 2 | 3 | import io.jafar.parser.internal_api.ConstantPool; 4 | import io.jafar.parser.internal_api.RecordingStream; 5 | import io.jafar.parser.internal_api.metadata.MetadataClass; 6 | import 
it.unimi.dsi.fastutil.longs.Long2LongMap; 7 | import it.unimi.dsi.fastutil.longs.Long2LongOpenHashMap; 8 | import it.unimi.dsi.fastutil.longs.Long2ObjectMap; 9 | import it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap; 10 | 11 | public final class MutableConstantPool implements ConstantPool { 12 | private final Long2LongMap offsets; 13 | private final Long2ObjectMap entries; 14 | 15 | private final RecordingStream stream; 16 | private final MetadataClass clazz; 17 | 18 | public MutableConstantPool(RecordingStream chunkStream, long typeId, int count) { 19 | this.offsets = new Long2LongOpenHashMap(count); 20 | this.entries = new Long2ObjectOpenHashMap<>(count); 21 | this.stream = chunkStream; 22 | var context = chunkStream.getContext(); 23 | clazz = context.getMetadataLookup().getClass(typeId); 24 | } 25 | 26 | public Object get(long id) { 27 | long offset = offsets.get(id); 28 | if (offset > 0) { 29 | Object o = entries.get(id); 30 | if (o == null) { 31 | long pos = stream.position(); 32 | try { 33 | stream.position(offsets.get(id)); 34 | o = clazz.read(stream); 35 | entries.put(id, o); 36 | } finally { 37 | stream.position(pos); 38 | } 39 | } 40 | return o; 41 | } 42 | return null; 43 | } 44 | 45 | public boolean containsKey(long key) { 46 | return offsets.containsKey(key); 47 | } 48 | 49 | public void addOffset(long id, long offset) { 50 | offsets.put(id, offset); 51 | } 52 | 53 | @Override 54 | public int size() { 55 | return entries.size(); 56 | } 57 | 58 | @Override 59 | public boolean isEmpty() { 60 | return entries.isEmpty(); 61 | } 62 | 63 | @Override 64 | public MetadataClass getType() { 65 | return clazz; 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/MutableConstantPools.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser; 2 | 3 | import io.jafar.parser.internal_api.ConstantPool; 4 | import io.jafar.parser.internal_api.ConstantPools; 5 | import io.jafar.parser.internal_api.MetadataLookup; 6 | import io.jafar.parser.internal_api.RecordingStream; 7 | import it.unimi.dsi.fastutil.longs.Long2ObjectMap; 8 | import it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap; 9 | 10 | import java.util.stream.Stream; 11 | 12 | public final class MutableConstantPools implements ConstantPools { 13 | private final Long2ObjectMap poolMap = new Long2ObjectOpenHashMap<>(); 14 | 15 | private final MetadataLookup metadata; 16 | private boolean ready = false; 17 | 18 | public MutableConstantPools(MetadataLookup metadata) { 19 | this.metadata = metadata; 20 | } 21 | 22 | @Override 23 | public MutableConstantPool getConstantPool(long typeId) { 24 | return poolMap.get(typeId); 25 | } 26 | 27 | public MutableConstantPool addOrGetConstantPool(RecordingStream chunkStream, long typeId, int count) { 28 | MutableConstantPool p = poolMap.get(typeId); 29 | if (p == null) { 30 | p = new MutableConstantPool(chunkStream, typeId, count); 31 | poolMap.put(typeId, p); 32 | } 33 | return p; 34 | } 35 | 36 | @Override 37 | public boolean hasConstantPool(long typeId) { 38 | return poolMap.containsKey(typeId); 39 | } 40 | 41 | @Override 42 | public boolean isReady() { 43 | return ready; 44 | } 45 | 46 | public void setReady() { 47 | ready = true; 48 | } 49 | 50 | @Override 51 | public Stream pools() { 52 | return poolMap.values().stream(); 53 | } 54 | } 55 | -------------------------------------------------------------------------------- 
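Illustrative sketch (not a file from this repository; variable names such as metadataLookup, stream, typeId, entryCount and entryId are placeholders): how the two mutable constant-pool classes above are typically driven. During checkpoint parsing (see CheckpointEvent.readConstantPools further below) only entry offsets are registered, and each value is deserialized lazily on its first get() and then cached.

    MutableConstantPools pools = new MutableConstantPools(metadataLookup);
    // while scanning a checkpoint event for a given JFR type:
    MutableConstantPool pool = pools.addOrGetConstantPool(stream, typeId, entryCount);
    pool.addOffset(entryId, stream.position());                 // remember where the entry payload starts
    // later, when an event references the constant:
    Object value = pools.getConstantPool(typeId).get(entryId);  // reads at the stored offset and caches the result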
/parser/src/main/java/io/jafar/parser/MutableMetadataLookup.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser; 2 | 3 | import io.jafar.parser.internal_api.MetadataLookup; 4 | import io.jafar.parser.internal_api.metadata.MetadataClass; 5 | import it.unimi.dsi.fastutil.longs.Long2ObjectMap; 6 | import it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap; 7 | 8 | import java.util.Arrays; 9 | 10 | public final class MutableMetadataLookup implements MetadataLookup { 11 | private String[] strings; 12 | private final Long2ObjectMap classes = new Long2ObjectOpenHashMap<>(); 13 | 14 | @Override 15 | public String getString(int idx) { 16 | return strings[idx]; 17 | } 18 | 19 | @Override 20 | public MetadataClass getClass(long id) { 21 | return classes.get(id); 22 | } 23 | 24 | public MetadataClass addClass(long id, MetadataClass clazz) { 25 | MetadataClass rslt = classes.get(id); 26 | if (rslt == null) { 27 | rslt = clazz; 28 | classes.put(id, clazz); 29 | } 30 | return rslt; 31 | } 32 | 33 | public void setStringtable(String[] stringTable) { 34 | this.strings = Arrays.copyOf(stringTable, stringTable.length); 35 | } 36 | 37 | public void bindDeserializers() { 38 | for (MetadataClass clazz : classes.values()) { 39 | clazz.bindDeserializer(); 40 | } 41 | } 42 | 43 | public void clear() { 44 | strings = null; 45 | classes.clear(); 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/ParsingUtils.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser; 2 | 3 | import io.jafar.parser.internal_api.RecordingStream; 4 | 5 | import java.io.IOException; 6 | import java.nio.charset.StandardCharsets; 7 | 8 | public final class ParsingUtils { 9 | public static String bytesToString(byte[] array, int offset, int len) { 10 | StringBuilder sb = new StringBuilder("["); 11 | boolean comma = false; 12 | for (int i = 0; i < len; i++) { 13 | if (comma) { 14 | sb.append(", "); 15 | } else { 16 | comma = true; 17 | } 18 | sb.append(array[i + offset]); 19 | } 20 | sb.append(']'); 21 | return sb.toString(); 22 | } 23 | 24 | public static String readUTF8(RecordingStream stream) throws IOException { 25 | byte id = stream.read(); 26 | if (id == 0) { 27 | return null; 28 | } else if (id == 1) { 29 | return ""; 30 | } else if (id == 2) { 31 | // string constant 32 | int ptr = (int)stream.readVarint(); 33 | return stream.getContext().getMetadataLookup().getString(ptr); 34 | } else if (id == 3) { 35 | // UTF8 36 | int size = (int) stream.readVarint(); 37 | if (size == 0) { 38 | return ""; 39 | } 40 | byte[] content = size <= stream.getContext().byteBuffer.length ? stream.getContext().byteBuffer : new byte[size]; 41 | stream.read(content, 0, size); 42 | return stream.getContext().utf8Parser.parse(content, size, StandardCharsets.UTF_8); 43 | } else if (id == 4) { 44 | int size = (int) stream.readVarint(); 45 | if (size == 0) { 46 | return ""; 47 | } 48 | char[] chars = size <= stream.getContext().charBuffer.length ? stream.getContext().charBuffer : new char[size]; 49 | for (int i = 0; i < size; i++) { 50 | chars[i] = (char) stream.readVarint(); 51 | } 52 | return stream.getContext().charParser.parse(chars, size); 53 | } else if (id == 5) { 54 | // LATIN1 55 | int size = (int) stream.readVarint(); 56 | if (size == 0) { 57 | return ""; 58 | } 59 | byte[] content = size <= stream.getContext().byteBuffer.length ? 
stream.getContext().byteBuffer : new byte[size]; 60 | stream.read(content, 0, size); 61 | return stream.getContext().utf8Parser.parse(content, size, StandardCharsets.ISO_8859_1); 62 | } else { 63 | throw new IOException("Unexpected string constant id: " + id); 64 | } 65 | } 66 | 67 | public static void skipUTF8(RecordingStream stream) throws IOException { 68 | byte id = stream.read(); 69 | switch (id) { 70 | case 3, 5 -> { 71 | int size = (int) stream.readVarint(); 72 | stream.skip(size); 73 | } 74 | case 4 -> { 75 | int size = (int) stream.readVarint(); 76 | for (int i = 0; i < size; i++) { 77 | stream.readVarint(); 78 | } 79 | } 80 | case 2 -> { 81 | stream.readVarint(); 82 | } 83 | } 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/TypeFilter.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser; 2 | 3 | import io.jafar.parser.internal_api.metadata.MetadataClass; 4 | 5 | import java.util.function.Predicate; 6 | 7 | @FunctionalInterface 8 | public interface TypeFilter extends Predicate { 9 | } 10 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/ValueLoader.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser; 2 | 3 | import io.jafar.parser.internal_api.RecordingStream; 4 | import io.jafar.parser.internal_api.metadata.MetadataClass; 5 | 6 | import java.io.IOException; 7 | 8 | public final class ValueLoader { 9 | public static void skip(RecordingStream stream, MetadataClass typeDescriptor, boolean isArray, boolean hasConstantPool) throws IOException { 10 | int len = isArray ? 
(int) stream.readVarint() : 1; 11 | if (hasConstantPool) { 12 | for (int i = 0; i < len; i++) { 13 | stream.readVarint(); 14 | } 15 | } else { 16 | for (int i = 0; i < len; i++) { 17 | typeDescriptor.skip(stream); 18 | } 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/api/Control.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.api; 2 | 3 | public final class Control { 4 | } 5 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/api/HandlerRegistration.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.api; 2 | 3 | public interface HandlerRegistration<T> { 4 | void destroy(JafarParser cookie); 5 | } 6 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/api/JFRHandler.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.api; 2 | 3 | @FunctionalInterface 4 | public interface JFRHandler<T> { 5 | class Impl<T> { 6 | private final Class<T> clazz; 7 | private final JFRHandler<T> handler; 8 | 9 | public Impl(Class<T> clazz, JFRHandler<T> handler) { 10 | this.clazz = clazz; 11 | this.handler = handler; 12 | } 13 | 14 | public void handle(Object event, Control ctl) { 15 | handler.handle(clazz.cast(event), ctl); 16 | } 17 | } 18 | 19 | void handle(T event, Control ctl); 20 | } -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/api/JafarParser.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.api; 2 | import io.jafar.parser.JafarParserImpl; 3 | import io.jafar.parser.internal_api.ParsingContextImpl; 4 | 5 | import java.io.IOException; 6 | import java.nio.file.Paths; 7 | 8 | public interface JafarParser extends AutoCloseable { 9 | /** 10 | * Start a new parsing session. 11 | * @param path the recording path 12 | * @return the parser instance 13 | */ 14 | static JafarParser open(String path) { 15 | return open(path, ParsingContextImpl.EMPTY); 16 | } 17 | 18 | /** 19 | * Start a new parsing session with a shared context. 20 | * @param path the recording path 21 | * @param context the shared context 22 | * If another recording is opened with the same context, some resources, 23 | * like generated handler bytecode, might be reused. 24 | * @return the parser instance 25 | */ 26 | static JafarParser open(String path, ParsingContext context) { 27 | if (!(context instanceof ParsingContextImpl)) { 28 | throw new IllegalArgumentException("parsingContext must be an instance of ParsingContextImpl"); 29 | } 30 | return new JafarParserImpl(Paths.get(path), (ParsingContextImpl) context); 31 | } 32 | 33 | <T> HandlerRegistration<T> handle(Class<T> clz, JFRHandler<T> handler); 34 | 35 | void run() throws IOException; 36 | } 37 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/api/JfrField.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.api; 2 | 3 | import java.lang.annotation.ElementType; 4 | import java.lang.annotation.Retention; 5 | import java.lang.annotation.RetentionPolicy; 6 | import java.lang.annotation.Target; 7 | 8 | @Target(ElementType.METHOD) 9 |
@Retention(RetentionPolicy.RUNTIME) 10 | public @interface JfrField { 11 | String value(); 12 | } 13 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/api/JfrIgnore.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.api; 2 | 3 | import java.lang.annotation.ElementType; 4 | import java.lang.annotation.Retention; 5 | import java.lang.annotation.RetentionPolicy; 6 | import java.lang.annotation.Target; 7 | 8 | @Target(ElementType.METHOD) 9 | @Retention(RetentionPolicy.RUNTIME) 10 | public @interface JfrIgnore { 11 | } 12 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/api/JfrType.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.api; 2 | 3 | import java.lang.annotation.ElementType; 4 | import java.lang.annotation.Retention; 5 | import java.lang.annotation.RetentionPolicy; 6 | import java.lang.annotation.Target; 7 | 8 | @Target(ElementType.TYPE) 9 | @Retention(RetentionPolicy.RUNTIME) 10 | public @interface JfrType { 11 | String value(); 12 | } 13 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/api/ParsingContext.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.api; 2 | 3 | import io.jafar.parser.internal_api.ParsingContextImpl; 4 | 5 | /** 6 | * Cross-recording context. 7 | * Implementation specific, but allows sharing computationally intensive resources 8 | * between parsing sessions, 9 | */ 10 | public interface ParsingContext { 11 | static ParsingContext create() { 12 | return new ParsingContextImpl(); 13 | } 14 | } 15 | 16 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/internal_api/CheckpointEvent.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.internal_api; 2 | 3 | import io.jafar.parser.AbstractEvent; 4 | import io.jafar.parser.MutableConstantPool; 5 | import io.jafar.parser.MutableConstantPools; 6 | import io.jafar.parser.TypeFilter; 7 | import io.jafar.parser.internal_api.metadata.MetadataClass; 8 | 9 | import java.io.IOException; 10 | 11 | public final class CheckpointEvent extends AbstractEvent { 12 | public final long startTime; 13 | public final long duration; 14 | public final int nextOffsetDelta; 15 | 16 | public final boolean isFlush; 17 | 18 | private final RecordingStream stream; 19 | 20 | CheckpointEvent(RecordingStream stream) throws IOException { 21 | super(stream); 22 | this.stream = stream; 23 | int size = (int) stream.readVarint(); 24 | if (size == 0) { 25 | throw new IOException("Unexpected event size. 
Should be > 0"); 26 | } 27 | long typeId = stream.readVarint(); 28 | if (typeId != 1) { 29 | throw new IOException("Unexpected event type: " + typeId + " (should be 1)"); 30 | } 31 | this.startTime = stream.readVarint(); 32 | this.duration = stream.readVarint(); 33 | this.nextOffsetDelta = (int)stream.readVarint(); 34 | this.isFlush = stream.read() != 0; 35 | } 36 | 37 | void readConstantPools() throws IOException { 38 | RecordingParserContext context = stream.getContext(); 39 | TypeFilter typeFilter = context.getTypeFilter(); 40 | 41 | boolean skipAll = context.getConstantPools().isReady(); 42 | 43 | long cpCount = stream.readVarint(); 44 | for (long i = 0; i < cpCount; i++) { 45 | long typeId = 0; 46 | while ((typeId = stream.readVarint()) == 0) ; // workaround for a bug in JMC JFR writer 47 | try { 48 | MetadataClass clz = context.getMetadataLookup().getClass(typeId); 49 | if (clz == null) { 50 | continue; 51 | } 52 | int count = (int) stream.readVarint(); 53 | boolean skip = skipAll || (typeFilter != null && !typeFilter.test(clz)); 54 | MutableConstantPool constantPool = skip ? null : ((MutableConstantPools) context.getConstantPools()).addOrGetConstantPool(stream, typeId, count); 55 | for (int j = 0; j < count; j++) { 56 | long id = stream.readVarint(); 57 | if (!skip && !constantPool.containsKey(id)) { 58 | constantPool.addOffset(id, stream.position()); 59 | } 60 | clz.skip(stream); 61 | } 62 | } catch (IOException e) { 63 | throw e; 64 | } 65 | } 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/internal_api/ChunkHeader.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.internal_api; 2 | 3 | import io.jafar.utils.BytePacking; 4 | 5 | import java.io.IOException; 6 | import java.nio.ByteOrder; 7 | 8 | /** A chunk header data object */ 9 | public final class ChunkHeader { 10 | public static final int MAGIC_BE = BytePacking.pack(ByteOrder.BIG_ENDIAN, 'F', 'L', 'R', '\0'); 11 | 12 | public final int order; 13 | public final long offset; 14 | public final short major; 15 | public final short minor; 16 | public final int size; 17 | public final int cpOffset; 18 | public final int metaOffset; 19 | public final long startNanos; 20 | public final long duration; 21 | public final long startTicks; 22 | public final long frequency; 23 | public final boolean compressed; 24 | 25 | ChunkHeader(RecordingStream recording, int index) throws IOException { 26 | order = index; 27 | offset = recording.position(); 28 | int magic = recording.readInt(); 29 | if (magic != MAGIC_BE) { 30 | throw new IOException("Invalid JFR Magic Number: " + Integer.toHexString(magic)); 31 | } 32 | major = recording.readShort(); 33 | minor = recording.readShort(); 34 | size = (int) recording.readLong(); 35 | cpOffset = (int) recording.readLong(); 36 | metaOffset = (int) recording.readLong(); 37 | startNanos = recording.readLong(); 38 | duration = recording.readLong(); 39 | startTicks = recording.readLong(); 40 | frequency = recording.readLong(); 41 | compressed = recording.readInt() != 0; 42 | } 43 | 44 | @Override 45 | public String toString() { 46 | return "ChunkHeader{" 47 | + "major=" 48 | + major 49 | + ", minor=" 50 | + minor 51 | + ", size=" 52 | + size 53 | + ", offset=" 54 | + offset 55 | + ", cpOffset=" 56 | + cpOffset 57 | + ", metaOffset=" 58 | + metaOffset 59 | + ", startNanos=" 60 | + startNanos 61 | + ", duration=" 62 | + duration 63 | + ", startTicks=" 64 | + 
startTicks 65 | + ", frequency=" 66 | + frequency 67 | + ", compressed=" 68 | + compressed 69 | + '}'; 70 | } 71 | 72 | } 73 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/internal_api/ChunkParserListener.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.internal_api; 2 | 3 | import io.jafar.parser.internal_api.metadata.MetadataEvent; 4 | 5 | /** 6 | * A callback to be provided to {@linkplain StreamingChunkParser#parse(java.nio.file.Path, ChunkParserListener)} 7 | */ 8 | public interface ChunkParserListener { 9 | /** Called when the recording starts to be processed */ 10 | default void onRecordingStart(RecordingParserContext context) {} 11 | 12 | /** 13 | * Called for each discovered chunk 14 | * 15 | * @param chunkIndex the chunk index (1-based) 16 | * @param header the parsed chunk header 17 | * @return {@literal false} if the chunk should be skipped 18 | */ 19 | default boolean onChunkStart(int chunkIndex, ChunkHeader header, RecordingParserContext context) { 20 | return true; 21 | } 22 | 23 | /** 24 | * Called for the chunk metadata event 25 | * 26 | * @param metadata the chunk metadata event 27 | * @return {@literal false} if the remainder of the chunk should be skipped 28 | */ 29 | default boolean onMetadata(MetadataEvent metadata) { 30 | return true; 31 | } 32 | 33 | default boolean onCheckpoint(CheckpointEvent checkpoint) { return true; } 34 | 35 | /** 36 | * Called for each parsed event 37 | * 38 | * @param typeId event type id 39 | * @param stream {@linkplain RecordingStream} positioned at the event payload start 40 | * @param payloadSize the size of the payload in bytes 41 | * @return {@literal false} if the remainder of the chunk should be skipped 42 | */ 43 | default boolean onEvent(long typeId, RecordingStream stream, long payloadSize) { 44 | return true; 45 | } 46 | 47 | /** 48 | * Called when a chunk is fully processed or skipped 49 | * 50 | * @param chunkIndex the chunk index (1-based) 51 | * @param skipped {@literal true} if the chunk was skipped 52 | * @return {@literal false} if the remaining chunks in the recording should be skipped 53 | */ 54 | default boolean onChunkEnd(int chunkIndex, boolean skipped) { 55 | return true; 56 | } 57 | 58 | /** Called when the recording was fully processed */ 59 | default void onRecordingEnd(RecordingParserContext context) {} 60 | } 61 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/internal_api/ConstantPool.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.internal_api; 2 | 3 | import io.jafar.parser.internal_api.metadata.MetadataClass; 4 | 5 | public interface ConstantPool { 6 | Object get(long id); 7 | int size(); 8 | 9 | boolean isEmpty(); 10 | MetadataClass getType(); 11 | } 12 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/internal_api/ConstantPools.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.internal_api; 2 | 3 | import java.util.stream.Stream; 4 | 5 | public interface ConstantPools { 6 | ConstantPool getConstantPool(long typeId); 7 | boolean hasConstantPool(long typeId); 8 | boolean isReady(); 9 | void setReady(); 10 | 11 | Stream pools(); 12 | } 13 | 
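// Usage sketch for the io.jafar.parser.api types shown earlier (JafarParser, JfrType, JFRHandler).
// Illustrative only: the event interface and its accessor below are hypothetical, not taken from this repository.
//
//   @JfrType("jdk.ThreadEnd")
//   interface ThreadEnd { long startTime(); }
//
//   try (JafarParser parser = JafarParser.open("/path/to/recording.jfr")) {
//       parser.handle(ThreadEnd.class, (event, ctl) -> System.out.println(event.startTime()));
//       parser.run();
//   }
//
// JafarParser.close() is declared to throw Exception, so real code has to catch or propagate it.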
-------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/internal_api/Deserializer.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.internal_api; 2 | 3 | import io.jafar.parser.ParsingUtils; 4 | import io.jafar.parser.internal_api.metadata.MetadataClass; 5 | 6 | import java.lang.invoke.MethodHandle; 7 | import java.util.Map; 8 | 9 | public abstract class Deserializer { 10 | private static final Deserializer UTF8_STRING = new Deserializer<>() { 11 | @Override 12 | public void skip(RecordingStream stream) throws Exception { 13 | ParsingUtils.skipUTF8(stream); 14 | } 15 | 16 | @Override 17 | public String deserialize(RecordingStream stream) throws Exception { 18 | return ParsingUtils.readUTF8(stream); 19 | } 20 | }; 21 | private static final Deserializer VARINT = new Deserializer<>() { 22 | @Override 23 | public void skip(RecordingStream stream) throws Exception { 24 | stream.readVarint(); 25 | } 26 | 27 | @Override 28 | public Object deserialize(RecordingStream stream) throws Exception { 29 | throw new UnsupportedOperationException(); 30 | } 31 | }; 32 | private static final Deserializer FLOAT = new Deserializer<>() { 33 | @Override 34 | public void skip(RecordingStream stream) throws Exception { 35 | stream.readFloat(); 36 | } 37 | 38 | @Override 39 | public Object deserialize(RecordingStream stream) throws Exception { 40 | throw new UnsupportedOperationException(); 41 | } 42 | }; 43 | private static final Deserializer DOUBLE = new Deserializer<>() { 44 | @Override 45 | public void skip(RecordingStream stream) throws Exception { 46 | stream.readDouble(); 47 | } 48 | 49 | @Override 50 | public Object deserialize(RecordingStream stream) throws Exception { 51 | throw new UnsupportedOperationException(); 52 | } 53 | }; 54 | private static final Deserializer BYTE = new Deserializer<>() { 55 | @Override 56 | public void skip(RecordingStream stream) throws Exception { 57 | stream.read(); 58 | } 59 | 60 | @Override 61 | public Object deserialize(RecordingStream stream) throws Exception { 62 | throw new UnsupportedOperationException(); 63 | } 64 | }; 65 | private static final Map> DESERIALIZERS = Map.of( 66 | "java.lang.String", UTF8_STRING, 67 | "short", VARINT, 68 | "char", VARINT, 69 | "int", VARINT, 70 | "long", VARINT, 71 | "double", DOUBLE, 72 | "float", FLOAT, 73 | "byte", BYTE, 74 | "boolean", BYTE 75 | ); 76 | 77 | public static final class Generated extends Deserializer { 78 | private final MethodHandle skipHandler; 79 | private final MethodHandle deserializeHandler; 80 | private final TypeSkipper typeSkipper; 81 | 82 | public Generated(MethodHandle deserializeHandler, MethodHandle skipHandler, TypeSkipper skipper) { 83 | this.deserializeHandler = deserializeHandler; 84 | this.skipHandler = skipHandler; 85 | this.typeSkipper = skipper; 86 | } 87 | 88 | @Override 89 | public void skip(RecordingStream stream) throws Exception { 90 | if (typeSkipper != null) { 91 | typeSkipper.skip(stream); 92 | } else if (skipHandler != null) { 93 | try { 94 | skipHandler.invokeExact(stream); 95 | } catch (Throwable t) { 96 | throw new RuntimeException(t); 97 | } 98 | } else { 99 | throw new RuntimeException("Unsupported skip handler type"); 100 | } 101 | } 102 | 103 | @SuppressWarnings("unchecked") 104 | @Override 105 | public T deserialize(RecordingStream stream) throws Exception { 106 | try { 107 | if (deserializeHandler == null) { 108 | // no deserialize method, skip 109 | 
skip(stream); 110 | // no value to return 111 | return null; 112 | } 113 | return (T)deserializeHandler.invoke(stream); 114 | } catch (Throwable t) { 115 | throw new RuntimeException(t); 116 | } 117 | } 118 | } 119 | 120 | public static Deserializer forType(MetadataClass clazz) { 121 | if (clazz.isPrimitive()) { 122 | return DESERIALIZERS.get(clazz.getName()); 123 | } 124 | try { 125 | return CodeGenerator.generateDeserializer(clazz); 126 | } catch (Exception e) { 127 | throw new RuntimeException(e); 128 | } 129 | } 130 | 131 | public abstract void skip(RecordingStream stream) throws Exception; 132 | public abstract T deserialize(RecordingStream stream) throws Exception; 133 | } 134 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/internal_api/MetadataLookup.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.internal_api; 2 | 3 | import io.jafar.parser.internal_api.metadata.MetadataClass; 4 | 5 | public interface MetadataLookup { 6 | 7 | String getString(int idx); 8 | 9 | MetadataClass getClass(long id); 10 | } 11 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/internal_api/ParsingContextImpl.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.internal_api; 2 | 3 | import io.jafar.parser.api.ParsingContext; 4 | 5 | import java.util.concurrent.ConcurrentHashMap; 6 | import java.util.concurrent.ConcurrentMap; 7 | 8 | public final class ParsingContextImpl implements ParsingContext { 9 | public static final ParsingContext EMPTY = new ParsingContextImpl(); 10 | 11 | private ConcurrentMap> deserializerCache = new ConcurrentHashMap<>(); 12 | 13 | public RecordingParserContext newRecordingParserContext() { 14 | return new RecordingParserContext(deserializerCache); 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/internal_api/RecordingParserContext.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.internal_api; 2 | 3 | import io.jafar.parser.MutableConstantPools; 4 | import io.jafar.parser.MutableMetadataLookup; 5 | import io.jafar.parser.TypeFilter; 6 | import io.jafar.parser.internal_api.metadata.MetadataClass; 7 | import io.jafar.parser.internal_api.metadata.MetadataField; 8 | import io.jafar.utils.CachedStringParser; 9 | import it.unimi.dsi.fastutil.longs.Long2ObjectMap; 10 | 11 | import java.lang.ref.WeakReference; 12 | import java.util.ArrayList; 13 | import java.util.List; 14 | import java.util.Map; 15 | import java.util.Objects; 16 | import java.util.concurrent.ConcurrentHashMap; 17 | import java.util.concurrent.ConcurrentMap; 18 | 19 | public final class RecordingParserContext { 20 | private final MutableMetadataLookup metadataLookup; 21 | private final MutableConstantPools constantPools; 22 | 23 | private final int chunkIndex; 24 | private volatile TypeFilter typeFilter; 25 | 26 | private final Map> classTargetTypeMap = new ConcurrentHashMap<>(); 27 | private final ConcurrentMap> bag = new ConcurrentHashMap<>(); 28 | 29 | private Long2ObjectMap> classTypeMap = null; 30 | 31 | public static class DeserializerKey { 32 | private final long id; 33 | private final String name; 34 | private final String superType; 35 | private final List fieldNames; 36 | 37 | public 
DeserializerKey(MetadataClass clz) { 38 | this.id = clz.getId(); 39 | this.name = clz.getName(); 40 | this.superType = clz.getSuperType(); 41 | this.fieldNames = new ArrayList<>(clz.getFields().size()); 42 | for (MetadataField field : clz.getFields()) { 43 | this.fieldNames.add(field.getType().getName() + ":" + field.getName()); 44 | } 45 | } 46 | 47 | @Override 48 | public boolean equals(Object o) { 49 | if (this == o) return true; 50 | if (o == null || getClass() != o.getClass()) return false; 51 | DeserializerKey that = (DeserializerKey) o; 52 | return id == that.id && Objects.equals(name, that.name) && Objects.equals(superType, that.superType) && Objects.equals(fieldNames, that.fieldNames); 53 | } 54 | 55 | @Override 56 | public int hashCode() { 57 | return Objects.hash(id, name, superType, fieldNames); 58 | } 59 | 60 | @Override 61 | public String toString() { 62 | return "DeserializerKey{" + 63 | "id=" + id + 64 | ", name='" + name + '\'' + 65 | ", superType='" + superType + '\'' + 66 | ", fieldNames=" + fieldNames + 67 | '}'; 68 | } 69 | } 70 | 71 | private final ConcurrentMap> globalDeserializerCache; 72 | 73 | public final CachedStringParser.ByteArrayParser utf8Parser = CachedStringParser.byteParser(); 74 | public final CachedStringParser.CharArrayParser charParser = CachedStringParser.charParser(); 75 | public final byte[] byteBuffer = new byte[4096]; 76 | public final char[] charBuffer = new char[4096]; 77 | 78 | public RecordingParserContext() { 79 | this(new ConcurrentHashMap<>()); 80 | } 81 | 82 | public RecordingParserContext(ConcurrentMap> deserializerCache) { 83 | this.metadataLookup = new MutableMetadataLookup(); 84 | this.constantPools = new MutableConstantPools(metadataLookup); 85 | this.globalDeserializerCache = deserializerCache != null ? 
deserializerCache : new ConcurrentHashMap<>(); 86 | 87 | this.typeFilter = null; 88 | this.chunkIndex = 0; 89 | } 90 | 91 | public RecordingParserContext(TypeFilter typeFilter, int chunkIndex, MutableMetadataLookup metadataLookup, MutableConstantPools constantPools, ConcurrentMap> deserializerCache) { 92 | this.metadataLookup = metadataLookup; 93 | this.constantPools = constantPools; 94 | this.globalDeserializerCache = deserializerCache; 95 | 96 | this.typeFilter = typeFilter; 97 | this.chunkIndex = chunkIndex; 98 | } 99 | 100 | public void clear() { 101 | classTargetTypeMap.clear(); 102 | bag.clear(); 103 | } 104 | 105 | public MetadataLookup getMetadataLookup() { 106 | return metadataLookup; 107 | } 108 | 109 | public ConstantPools getConstantPools() { 110 | return constantPools; 111 | } 112 | 113 | public TypeFilter getTypeFilter() { 114 | return typeFilter; 115 | } 116 | 117 | public void setTypeFilter(TypeFilter typeFilter) { 118 | this.typeFilter = typeFilter; 119 | } 120 | 121 | public int getChunkIndex() { 122 | return chunkIndex; 123 | } 124 | 125 | public void put(String key, Class clz, T value) { 126 | bag.put(key, new WeakReference<>(value)); 127 | } 128 | 129 | public T get(String key, Class clz) { 130 | return clz.cast(bag.get(key).get()); 131 | } 132 | 133 | public void addTargetTypeMap(Map> map) { 134 | this.classTargetTypeMap.putAll(map); 135 | } 136 | 137 | public Class getClassTargetType(String name) { 138 | return classTargetTypeMap.get(name); 139 | } 140 | 141 | public void bindDeserializers() { 142 | metadataLookup.bindDeserializers(); 143 | } 144 | 145 | public void setClassTypeMap(Long2ObjectMap> map) { 146 | classTypeMap = map; 147 | } 148 | 149 | public Long2ObjectMap> getClassTypeMap() { 150 | return classTypeMap; 151 | } 152 | 153 | public ConcurrentMap> getDeserializerCache() { 154 | return globalDeserializerCache; 155 | } 156 | } 157 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/internal_api/RecordingStream.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.internal_api; 2 | 3 | import java.io.IOException; 4 | import java.nio.file.Path; 5 | 6 | public final class RecordingStream implements AutoCloseable { 7 | private final RecordingStreamReader reader; 8 | 9 | private final RecordingParserContext context; 10 | private long mark = -1; 11 | 12 | 13 | RecordingStream(Path path, RecordingParserContext context) throws IOException { 14 | this(RecordingStreamReader.mapped(path), context); 15 | } 16 | 17 | public RecordingStream slice(long pos, long len, RecordingParserContext context) { 18 | return new RecordingStream(reader.slice(pos, len), context); 19 | } 20 | 21 | public RecordingStream(RecordingStreamReader reader, RecordingParserContext context) { 22 | this.reader = reader; 23 | this.context = context; 24 | } 25 | 26 | public RecordingParserContext getContext() { 27 | return context; 28 | } 29 | 30 | public void position(long position) { 31 | reader.position(position); 32 | } 33 | 34 | public long position() { 35 | return reader.position(); 36 | } 37 | 38 | public void read(byte[] buffer, int offset, int length) { 39 | if (available() < length) { 40 | throw new RuntimeException("unexpected EOF"); 41 | } 42 | reader.read(buffer, offset, length); 43 | } 44 | 45 | public byte read() { 46 | return reader.read(); 47 | } 48 | 49 | public short readShort() { 50 | return reader.readShort(); 51 | } 52 | 53 | public int readInt() { 54 
| return reader.readInt(); 55 | } 56 | 57 | public long readLong() { 58 | return reader.readLong(); 59 | } 60 | 61 | public float readFloat() { 62 | return reader.readFloat(); 63 | } 64 | 65 | public double readDouble() { 66 | return reader.readDouble(); 67 | } 68 | 69 | public long readVarint() { 70 | return reader.readVarint(); 71 | } 72 | 73 | public boolean readBoolean() { 74 | return reader.readBoolean(); 75 | } 76 | 77 | public long available() { 78 | return reader.remaining(); 79 | } 80 | 81 | public void skip(int bytes) { 82 | reader.skip(bytes); 83 | } 84 | 85 | public void mark() { 86 | mark = reader.position(); 87 | } 88 | 89 | public void reset() { 90 | if (mark > -1) { 91 | position(mark); 92 | mark = -1; 93 | } 94 | } 95 | 96 | @Override 97 | public void close() { 98 | try { 99 | reader.close(); 100 | } catch (IOException ignored) {} 101 | } 102 | } 103 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/internal_api/RecordingStreamReader.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.internal_api; 2 | 3 | import io.jafar.utils.CustomByteBuffer; 4 | 5 | import java.io.IOException; 6 | import java.nio.file.Files; 7 | import java.nio.file.Path; 8 | 9 | public abstract class RecordingStreamReader { 10 | public static final class MappedRecordingStreamReader extends RecordingStreamReader { 11 | private final CustomByteBuffer buffer; 12 | private final long length; 13 | private final boolean nativeOrder; 14 | private final int alignementOffset; 15 | 16 | private long remaining; 17 | 18 | public MappedRecordingStreamReader(Path path) throws IOException { 19 | this(CustomByteBuffer.map(path, Integer.MAX_VALUE), Files.size(path), 0); 20 | } 21 | 22 | private MappedRecordingStreamReader(CustomByteBuffer buffer, long length, int alignementOffset) { 23 | this.buffer = buffer; 24 | this.length = length; 25 | this.nativeOrder = buffer.isNativeOrder(); 26 | this.alignementOffset = alignementOffset; 27 | this.remaining = length; 28 | } 29 | 30 | @Override 31 | public RecordingStreamReader slice() { 32 | long sliceLength = buffer.remaining(); 33 | return new MappedRecordingStreamReader(buffer.slice(), sliceLength, (int)(alignementOffset + buffer.position()) % 8); 34 | } 35 | 36 | @Override 37 | public RecordingStreamReader slice(long pos, long size) { 38 | return new MappedRecordingStreamReader(buffer.slice(pos, size), size, (int)(alignementOffset + pos) % 8); 39 | } 40 | 41 | @Override 42 | public long length() { 43 | return length; 44 | } 45 | 46 | @Override 47 | public long remaining() { 48 | return remaining; 49 | } 50 | 51 | @Override 52 | public long position() { 53 | return buffer.position(); 54 | } 55 | 56 | @Override 57 | public void position(long newPosition) { 58 | remaining = length - newPosition; 59 | buffer.position(newPosition); 60 | } 61 | 62 | @Override 63 | public void skip(long n) { 64 | remaining -= n; 65 | buffer.position(buffer.position() + n); 66 | } 67 | 68 | @Override 69 | public byte read() { 70 | remaining--; 71 | return buffer.get(); 72 | } 73 | 74 | @Override 75 | public void read(byte[] b, int off, int len) { 76 | remaining -= len; 77 | buffer.get(b, off, len); 78 | } 79 | 80 | @Override 81 | public boolean readBoolean() { 82 | remaining--; 83 | return buffer.get() != 0; 84 | } 85 | 86 | @Override 87 | public short readShort() { 88 | remaining -= 2; 89 | short s = buffer.getShort(); 90 | return nativeOrder ? 
s : Short.reverseBytes(s); 91 | } 92 | 93 | @Override 94 | public int readInt() { 95 | remaining -= 4; 96 | int i = buffer.getInt(); 97 | return nativeOrder ? i : Integer.reverseBytes(i); 98 | } 99 | 100 | @Override 101 | public long readLong() { 102 | remaining -= 8; 103 | long l = buffer.getLong(); 104 | return nativeOrder ? l : Long.reverseBytes(l); 105 | } 106 | 107 | private static float reverseBytes(float f) { 108 | int i = Float.floatToRawIntBits(f); 109 | return Float.intBitsToFloat(Integer.reverseBytes(i)); 110 | } 111 | 112 | private static double reverseBytes(double d) { 113 | long l = Double.doubleToRawLongBits(d); 114 | return Double.longBitsToDouble(Long.reverseBytes(l)); 115 | } 116 | 117 | @Override 118 | public float readFloat() { 119 | remaining -= 4; 120 | float f = buffer.getFloat(); 121 | return nativeOrder ? f : reverseBytes(f); 122 | } 123 | 124 | @Override 125 | public double readDouble() { 126 | remaining -= 8; 127 | double d = buffer.getDouble(); 128 | return nativeOrder ? d : reverseBytes(d); 129 | } 130 | 131 | private static int findFirstUnset8thBit(long value) { 132 | // Step 1: Mask out the 8th bits of each byte 133 | long mask = 0x8080808080808080L; 134 | long eighthBits = value & mask; 135 | 136 | // Step 2: Identify which bytes have the 8th bit unset 137 | long unsetBits = (~eighthBits) & mask; 138 | 139 | // Step 3: Collapse each byte to a single bit 140 | long collapsed = unsetBits * 0x0101010101010101L; 141 | 142 | // Step 4: Find the first unset byte 143 | return Long.numberOfTrailingZeros(collapsed) / 8; 144 | } 145 | 146 | private static final boolean VARINT_FROM_LONG = Boolean.getBoolean("io.jafar.parser.varint_from_long"); 147 | 148 | @Override 149 | public long readVarint() { 150 | if (VARINT_FROM_LONG) { 151 | // TODO: Experimental - tries optimizing varint decoding by loading 8 bytes at once 152 | // So far it looks this is actually slowing down the decoding, but I will leave the code 153 | // here so it can be revisited later 154 | // The guard flag is false, unless a system property is provided so the condition will 155 | // be elided 156 | long pos = checkVarintFromLongPos(); 157 | if (pos > -1) { 158 | return readVarintFromLong(pos); 159 | } 160 | } 161 | return readVarintSeq(); 162 | } 163 | 164 | private long checkVarintFromLongPos() { 165 | long pos = buffer.position(); 166 | if (((pos + alignementOffset) & 7) == 0) { 167 | if (remaining >= 8) { 168 | return pos; 169 | } 170 | } 171 | return -1; 172 | } 173 | 174 | private long readVarintFromLong(long pos) { 175 | long value = buffer.getLong(); 176 | 177 | int parts = findFirstUnset8thBit(value) + 1; 178 | long l = value; 179 | if (parts < 8) { 180 | long mask = (0XFFFFFFFFFFFFFFFFL >>> (8 - parts) * 8); 181 | l = l & mask; 182 | } 183 | 184 | long extracted = l & 0x7F7F7F7F7F7F7F7FL; // Extract lower 7 bits 185 | long result = 186 | ((extracted & 0x000000000000007FL)) | 187 | ((extracted & 0x0000000000007F00L) >> 1) | 188 | ((extracted & 0x00000000007F0000L) >> 2) | 189 | ((extracted & 0x000000007F000000L) >> 3) | 190 | ((extracted & 0x0000007F00000000L) >> 4) | 191 | ((extracted & 0x00007F0000000000L) >> 5) | 192 | ((extracted & 0x007F000000000000L) >> 6) | 193 | ((extracted & 0x7F00000000000000L) >> 7); 194 | 195 | if (parts == 9) { 196 | byte b = buffer.get(); 197 | result |= (b & 0x7FL) << 56; 198 | } else { 199 | position(pos + parts); 200 | } 201 | remaining -= parts; 202 | return result; 203 | } 204 | 205 | private long readVarintSeq() { 206 | byte b0 = buffer.get(); 207 | 
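// LEB128-style decoding: each byte contributes its low 7 bits and uses the high bit (0x80)
// as a continuation flag; the 9th byte, when reached, is taken whole and shifted by 56 bits.
// Worked example: the bytes 0xAC 0x02 decode to (0xAC & 0x7F) | (0x02 << 7) = 44 + 256 = 300.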
remaining--; 208 | long ret = (b0 & 0x7FL); 209 | if (b0 >= 0) { 210 | return ret; 211 | } 212 | int b1 = buffer.get(); 213 | remaining--; 214 | ret += (b1 & 0x7FL) << 7; 215 | if (b1 >= 0) { 216 | return ret; 217 | } 218 | int b2 = buffer.get(); 219 | remaining--; 220 | ret += (b2 & 0x7FL) << 14; 221 | if (b2 >= 0) { 222 | return ret; 223 | } 224 | int b3 = buffer.get(); 225 | remaining--; 226 | ret += (b3 & 0x7FL) << 21; 227 | if (b3 >= 0) { 228 | return ret; 229 | } 230 | int b4 = buffer.get(); 231 | remaining--; 232 | ret += (b4 & 0x7FL) << 28; 233 | if (b4 >= 0) { 234 | return ret; 235 | } 236 | int b5 = buffer.get(); 237 | remaining--; 238 | ret += (b5 & 0x7FL) << 35; 239 | if (b5 >= 0) { 240 | return ret; 241 | } 242 | int b6 = buffer.get(); 243 | remaining--; 244 | ret += (b6 & 0x7FL) << 42; 245 | if (b6 >= 0) { 246 | return ret; 247 | } 248 | int b7 = buffer.get(); 249 | remaining--; 250 | ret += (b7 & 0x7FL) << 49; 251 | if (b7 >= 0) { 252 | return ret; 253 | } 254 | int b8 = buffer.get();// read last byte raw 255 | remaining--; 256 | return ret + (((long) (b8 & 0XFF)) << 56); 257 | } 258 | 259 | @Override 260 | public void close() throws IOException { 261 | 262 | } 263 | } 264 | 265 | public abstract RecordingStreamReader slice(); 266 | public abstract RecordingStreamReader slice(long pos, long size); 267 | public abstract long length(); 268 | public abstract long remaining(); 269 | public abstract long position(); 270 | public abstract void position(long newPosition); 271 | public abstract void skip(long n); 272 | public abstract byte read(); 273 | public abstract void read(byte[] b, int off, int len); 274 | public abstract boolean readBoolean(); 275 | public abstract short readShort(); 276 | public abstract int readInt(); 277 | public abstract long readLong(); 278 | public abstract float readFloat(); 279 | public abstract double readDouble(); 280 | public abstract long readVarint(); 281 | public abstract void close() throws IOException; 282 | 283 | public static RecordingStreamReader mapped(Path path) throws IOException { 284 | return new MappedRecordingStreamReader(path); 285 | } 286 | } 287 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/internal_api/StreamingChunkParser.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.internal_api; 2 | 3 | import io.jafar.parser.MutableConstantPools; 4 | import io.jafar.parser.MutableMetadataLookup; 5 | import io.jafar.parser.internal_api.metadata.MetadataEvent; 6 | import it.unimi.dsi.fastutil.ints.Int2ObjectMap; 7 | import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; 8 | import org.slf4j.Logger; 9 | import org.slf4j.LoggerFactory; 10 | 11 | import java.io.EOFException; 12 | import java.io.IOException; 13 | import java.nio.file.Path; 14 | import java.util.ArrayList; 15 | import java.util.List; 16 | import java.util.concurrent.ExecutorService; 17 | import java.util.concurrent.Executors; 18 | import java.util.concurrent.Future; 19 | 20 | /** 21 | * Streaming, almost zero-allocation, JFR chunk parser implementation.
22 | * This is an MVP of a chunk parser that allows streaming the JFR events efficiently. The parser 23 | * notifies its listeners as the data becomes available. Because of this it is possible for the 24 | * metadata events to arrive 'out-of-band' (although not very probable) and it is up to the caller to 25 | * deal with that eventuality.
26 | */ 27 | public final class StreamingChunkParser implements AutoCloseable { 28 | private static final Logger log = LoggerFactory.getLogger(StreamingChunkParser.class); 29 | 30 | private final Int2ObjectMap chunkMetadataLookup = new Int2ObjectOpenHashMap<>(); 31 | private final Int2ObjectMap chunkConstantPools = new Int2ObjectOpenHashMap<>(); 32 | 33 | private final ExecutorService executor = Executors.newFixedThreadPool( 34 | Math.max(Runtime.getRuntime().availableProcessors() - 2, 1), 35 | r -> { 36 | Thread t = new Thread(r); 37 | t.setDaemon(true); 38 | return t; 39 | }); 40 | 41 | private boolean closed = false; 42 | private final RecordingParserContext context; 43 | 44 | public StreamingChunkParser() { 45 | this(new RecordingParserContext()); 46 | } 47 | 48 | public StreamingChunkParser(RecordingParserContext context) { 49 | this.context = context; 50 | } 51 | 52 | 53 | /** 54 | * Parse the given JFR recording stream.
55 | * The parser will process the recording stream and call the provided listener in this order
56 | * (an illustrative usage sketch follows the list):
57 | *   1. listener.onRecordingStart()
58 | *   2. listener.onChunkStart()
59 | *   3. listener.onMetadata()
60 | *   4. listener.onCheckpoint()
61 | *   5. listener.onEvent()
62 | *   6. listener.onChunkEnd()
63 | *   7. listener.onRecordingEnd()
64 | *
65 | *
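 * An illustrative usage sketch (not part of this file): it assumes the remaining ChunkParserListener
 * callbacks keep default implementations and that onEvent is declared with the parameter types used
 * at its call site in this class (long type id, RecordingStream, long remaining payload size).
 * <pre>{@code
 * try (StreamingChunkParser parser = new StreamingChunkParser()) {
 *     parser.parse(Path.of("recording.jfr"), new ChunkParserListener() {
 *         @Override
 *         public boolean onEvent(long typeId, RecordingStream stream, long payloadSize) {
 *             // inspect or skip the event payload; returning false abandons the rest of the chunk
 *             return true;
 *         }
 *     });
 * }
 * }</pre>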
66 | * 67 | * @param path the JFR recording path 68 | * @param listener the parser listener 69 | * @throws IOException 70 | */ 71 | public void parse(Path path, ChunkParserListener listener) throws IOException { 72 | if (closed) { 73 | throw new IllegalStateException("Parser is closed"); 74 | } 75 | try (RecordingStream stream = new RecordingStream(path, context)) { 76 | parse(stream, listener); 77 | } 78 | } 79 | 80 | @Override 81 | public void close() throws Exception { 82 | if (!closed) { 83 | closed = true; 84 | executor.shutdown(); 85 | chunkConstantPools.clear(); 86 | chunkMetadataLookup.clear(); 87 | } 88 | } 89 | 90 | private Future submitParsingTask(ChunkHeader chunkHeader, RecordingStream chunkStream, ChunkParserListener listener, long remainder) { 91 | return executor.submit(() -> { 92 | int chunkCounter = chunkHeader.order; 93 | try { 94 | if (!listener.onChunkStart(chunkCounter, chunkHeader, chunkStream.getContext())) { 95 | log.debug( 96 | "'onChunkStart' returned false. Skipping metadata and events for chunk {}", 97 | chunkCounter); 98 | listener.onChunkEnd(chunkCounter, true); 99 | return true; 100 | } 101 | // read metadata 102 | if (!readMetadata(chunkStream, chunkHeader, listener, false)) { 103 | log.debug( 104 | "'onMetadata' returned false. Skipping events for chunk {}", chunkCounter); 105 | listener.onChunkEnd(chunkCounter, true); 106 | return false; 107 | } 108 | if (!readConstantPool(chunkStream, chunkHeader, listener)) { 109 | log.debug( 110 | "'onCheckpoint' returned false. Skipping the rest of the chunk {}", chunkCounter); 111 | listener.onChunkEnd(chunkCounter, true); 112 | return false; 113 | } 114 | chunkStream.position(remainder); 115 | while (chunkStream.position() < chunkHeader.size) { 116 | long eventStartPos = chunkStream.position(); 117 | chunkStream.mark(); // max 2 varints ahead 118 | int eventSize = (int) chunkStream.readVarint(); 119 | if (eventSize > 0) { 120 | long eventType = chunkStream.readVarint(); 121 | if (eventType > 1) { // skip metadata and checkpoint events 122 | long currentPos = chunkStream.position(); 123 | if (!listener.onEvent(eventType, chunkStream, eventSize - (currentPos - eventStartPos))) { 124 | log.debug( 125 | "'onEvent({}, stream, {})' returned false. 
Skipping the rest of the chunk {}", 126 | eventType, 127 | eventSize - (currentPos - eventStartPos), 128 | chunkCounter); 129 | listener.onChunkEnd(chunkCounter, true); 130 | return false; 131 | } 132 | } 133 | // always skip any unconsumed event data to get the stream into consistent state 134 | chunkStream.position(eventStartPos + eventSize); 135 | } 136 | } 137 | return listener.onChunkEnd(chunkCounter, false); 138 | } catch (IOException e) { 139 | throw new RuntimeException(e); 140 | } 141 | }); 142 | } 143 | 144 | private void parse(RecordingStream stream, ChunkParserListener listener) throws IOException { 145 | if (stream.available() == 0) { 146 | return; 147 | } 148 | try { 149 | List> results = new ArrayList<>(); 150 | listener.onRecordingStart(stream.getContext()); 151 | int chunkCounter = 1; 152 | while (stream.available() > 0) { 153 | ChunkHeader header = new ChunkHeader(stream, chunkCounter); 154 | long remainder = (stream.position() - header.offset); 155 | MutableMetadataLookup metadataLookup = chunkMetadataLookup.computeIfAbsent(chunkCounter, k -> new MutableMetadataLookup()); 156 | MutableConstantPools constantPools = chunkConstantPools.computeIfAbsent(chunkCounter, k -> new MutableConstantPools(metadataLookup)); 157 | 158 | RecordingStream chunkStream = stream.slice(header.offset, header.size, new RecordingParserContext(stream.getContext().getTypeFilter(), chunkCounter, metadataLookup, constantPools, stream.getContext().getDeserializerCache())); 159 | stream.position(header.offset + header.size); 160 | 161 | results.add(submitParsingTask(header, chunkStream, listener, remainder)); 162 | chunkCounter++; 163 | } 164 | results.forEach(f -> { 165 | try { 166 | f.get(); 167 | } catch (Throwable t) { 168 | throw new RuntimeException(t); 169 | } 170 | }); 171 | } catch(EOFException e) { 172 | throw new IOException("Invalid buffer", e); 173 | } catch (Throwable t) { 174 | throw new IOException("Error parsing recording", t); 175 | } finally { 176 | listener.onRecordingEnd(stream.getContext()); 177 | } 178 | } 179 | 180 | private boolean readMetadata(RecordingStream stream, ChunkHeader header, ChunkParserListener listener, boolean forceConstantPools) throws IOException { 181 | stream.mark(); 182 | stream.position(header.metaOffset); 183 | MetadataEvent m = new MetadataEvent(stream, forceConstantPools); 184 | if (!listener.onMetadata(m)) { 185 | return false; 186 | } 187 | stream.getContext().bindDeserializers(); 188 | stream.reset(); 189 | return true; 190 | } 191 | 192 | private boolean readConstantPool(RecordingStream stream, ChunkHeader header, ChunkParserListener listener) throws IOException { 193 | return readConstantPool(stream, header.cpOffset, listener); 194 | } 195 | 196 | private boolean readConstantPool(RecordingStream stream, int position, ChunkParserListener listener) throws IOException { 197 | while (true) { 198 | stream.position(position); 199 | CheckpointEvent event = new CheckpointEvent(stream); 200 | event.readConstantPools(); 201 | if (!listener.onCheckpoint(event)) { 202 | return false; 203 | } 204 | int delta = event.nextOffsetDelta; 205 | if (delta != 0) { 206 | position += delta; 207 | } else { 208 | break; 209 | } 210 | } 211 | stream.getContext().getConstantPools().setReady(); 212 | return true; 213 | } 214 | } 215 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/internal_api/TypeSkipper.java: -------------------------------------------------------------------------------- 1 | package 
io.jafar.parser.internal_api; 2 | 3 | import io.jafar.parser.ParsingUtils; 4 | 5 | import java.io.IOException; 6 | 7 | public final class TypeSkipper { 8 | public static final class Instructions { 9 | public static final int ARRAY = 1; 10 | public static final int BYTE = 2; 11 | public static final int FLOAT = 3; 12 | public static final int DOUBLE = 4; 13 | public static final int STRING = 5; 14 | public static final int VARINT = 6; 15 | public static final int CP_ENTRY = 7; 16 | } 17 | 18 | private final int[] instructions; 19 | 20 | public TypeSkipper(int[] instructions) { 21 | this.instructions = instructions; 22 | } 23 | 24 | public void skip(RecordingStream stream) throws IOException { 25 | for (int i = 0; i < instructions.length; i++) { 26 | int instruction = instructions[i]; 27 | if (instruction == Instructions.ARRAY) { 28 | int endIndex = (++i) + instructions[i++]; // next instruction for array is encoding the number of instructions per array item 29 | int cnt = (int)stream.readVarint(); 30 | if (cnt == 0) { 31 | i = endIndex; 32 | continue; 33 | } 34 | int savedIndex = i; 35 | for (int j = 0; j < cnt; ) { 36 | skip(instructions[i], stream); 37 | if (endIndex == i++) { 38 | i = savedIndex; 39 | j++; 40 | } 41 | } 42 | i = endIndex; 43 | continue; 44 | } 45 | skip(instruction, stream); 46 | } 47 | } 48 | 49 | private static void skip(int instruction, RecordingStream stream) throws IOException { 50 | switch (instruction) { 51 | case Instructions.VARINT: 52 | case Instructions.CP_ENTRY: stream.readVarint(); break; 53 | case Instructions.BYTE: stream.skip(1); break; 54 | case Instructions.FLOAT: stream.skip(4); break; 55 | case Instructions.DOUBLE: stream.skip(8); break; 56 | case Instructions.STRING: ParsingUtils.skipUTF8(stream); break; 57 | } 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/internal_api/metadata/AbstractMetadataElement.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.internal_api.metadata; 2 | 3 | import io.jafar.parser.MutableMetadataLookup; 4 | import io.jafar.parser.internal_api.RecordingParserContext; 5 | import io.jafar.parser.internal_api.RecordingStream; 6 | 7 | import java.io.IOException; 8 | 9 | public abstract class AbstractMetadataElement { 10 | private final RecordingStream stream; 11 | 12 | final MutableMetadataLookup metadataLookup; 13 | 14 | private String id = "-1"; 15 | private String name = null; 16 | private String simpleName = null; 17 | private final MetadataElementKind kind; 18 | 19 | AbstractMetadataElement(RecordingStream stream, MetadataElementKind kind) throws IOException { 20 | this.stream = stream; 21 | this.kind = kind; 22 | this.metadataLookup = (MutableMetadataLookup) stream.getContext().getMetadataLookup(); 23 | processAttributes(); 24 | } 25 | 26 | protected final void readSubelements(MetadataEvent event) throws IOException { 27 | // now inspect all the enclosed elements 28 | int elemCount = (int) stream.readVarint(); 29 | for (int i = 0; i < elemCount; i++) { 30 | onSubelement(elemCount, event.readElement(stream)); 31 | } 32 | } 33 | 34 | protected void onSubelement(int count, AbstractMetadataElement element) {} 35 | 36 | abstract public void accept(MetadataVisitor visitor); 37 | 38 | protected void onAttribute(String key, String value) {} 39 | 40 | protected final void processAttributes() throws IOException { 41 | int attrCount = (int) stream.readVarint(); 42 | for (int i = 0; i 
< attrCount; i++) { 43 | int kv = (int) stream.readVarint(); 44 | String key = metadataLookup.getString(kv); 45 | int vv = (int) stream.readVarint(); 46 | String value = metadataLookup.getString(vv); 47 | if ("id".equals(key)) { 48 | id = value; 49 | } 50 | if ("name".equals(key)) { 51 | name = value; 52 | } 53 | onAttribute(key, value); 54 | } 55 | } 56 | 57 | public long getId() { 58 | return Long.parseLong(id); 59 | } 60 | 61 | public String getName() { 62 | return name; 63 | } 64 | 65 | public String getSimpleName() { 66 | if (simpleName == null) { 67 | int idx = name.lastIndexOf('.'); 68 | simpleName = idx == -1 ? name : name.substring(idx + 1); 69 | } 70 | return simpleName; 71 | } 72 | 73 | public MetadataElementKind getKind() { 74 | return kind; 75 | } 76 | 77 | public RecordingParserContext getContext() { 78 | return stream.getContext(); 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/internal_api/metadata/MetadataAnnotation.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.internal_api.metadata; 2 | 3 | import io.jafar.parser.internal_api.RecordingStream; 4 | 5 | import java.io.IOException; 6 | import java.util.ArrayList; 7 | import java.util.List; 8 | import java.util.Objects; 9 | 10 | public final class MetadataAnnotation extends AbstractMetadataElement { 11 | private boolean hasHashCode = false; 12 | private int hashCode; 13 | 14 | private List annotations = null; 15 | 16 | private Long classId = null; 17 | private String classIdVal = null; 18 | public String value; 19 | MetadataAnnotation(RecordingStream stream, MetadataEvent event) throws IOException { 20 | super(stream, MetadataElementKind.ANNOTATION); 21 | readSubelements(event); 22 | } 23 | 24 | @Override 25 | protected void onAttribute(String key, String value) { 26 | switch (key) { 27 | case "class": 28 | classIdVal = value; 29 | break; 30 | case "value": 31 | this.value = value; 32 | break; 33 | } 34 | } 35 | 36 | public MetadataClass getType() { 37 | return metadataLookup.getClass(classId); 38 | } 39 | 40 | public long getClassId() { 41 | if (classId == null) { 42 | classId = Long.parseLong(classIdVal); 43 | } 44 | return classId; 45 | } 46 | 47 | public String getValue() { 48 | return value; 49 | } 50 | 51 | @Override 52 | protected void onSubelement(int count, AbstractMetadataElement element) { 53 | if (annotations == null) { 54 | annotations = new ArrayList<>(count); 55 | } 56 | if (element.getKind() == MetadataElementKind.ANNOTATION) { 57 | annotations.add((MetadataAnnotation) element); 58 | } else { 59 | throw new IllegalStateException("Unexpected subelement: " + element.getKind()); 60 | } 61 | } 62 | 63 | @Override 64 | public void accept(MetadataVisitor visitor) { 65 | visitor.visitAnnotation(this); 66 | if (annotations != null) { 67 | annotations.forEach(a -> a.accept(visitor)); 68 | } 69 | visitor.visitEnd(this); 70 | } 71 | 72 | @Override 73 | public String toString() { 74 | return "MetadataAnnotation{" + 75 | "type='" + (getType() != null ? 
getType().getName() : getClassId()) + '\'' + 76 | ", value='" + getValue() + '\'' + 77 | '}'; 78 | } 79 | 80 | @Override 81 | public boolean equals(Object o) { 82 | if (this == o) return true; 83 | if (o == null || getClass() != o.getClass()) return false; 84 | MetadataAnnotation that = (MetadataAnnotation) o; 85 | return getClassId() == that.getClassId() && Objects.equals(annotations, that.annotations) && Objects.equals(getValue(), that.getValue()); 86 | } 87 | 88 | @Override 89 | public int hashCode() { 90 | if (!hasHashCode) { 91 | long mixed = getClassId() * 0x9E3779B97F4A7C15L + 92 | Objects.hashCode(annotations) * 0xC6BC279692B5C323L + 93 | Objects.hashCode(getValue()) * 0xD8163841FDE6A8F9L; 94 | hashCode = Long.hashCode(mixed); 95 | hasHashCode = true; 96 | } 97 | return hashCode; 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/internal_api/metadata/MetadataClass.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.internal_api.metadata; 2 | 3 | import io.jafar.parser.internal_api.Deserializer; 4 | import io.jafar.parser.internal_api.RecordingParserContext; 5 | import io.jafar.parser.internal_api.RecordingStream; 6 | 7 | import java.io.IOException; 8 | import java.util.ArrayList; 9 | import java.util.Collections; 10 | import java.util.HashMap; 11 | import java.util.List; 12 | import java.util.Map; 13 | import java.util.Objects; 14 | import java.util.Set; 15 | import java.util.concurrent.atomic.AtomicReferenceFieldUpdater; 16 | 17 | public final class MetadataClass extends AbstractMetadataElement { 18 | private boolean hasHashCode = false; 19 | private int hashCode; 20 | 21 | private static final Set primitiveTypeNames = Set.of("byte", "char", "short", "int", "long", "float", "double", "boolean", "java.lang.String"); 22 | 23 | private Map settings = null; 24 | private List annotations = null; 25 | private List fields = null; 26 | 27 | private String superType; 28 | private Boolean isPrimitive; 29 | private Boolean isSimpleType; 30 | private String simpleTypeVal; 31 | 32 | private final int associatedChunk; 33 | 34 | @SuppressWarnings("rawtypes") 35 | private static final AtomicReferenceFieldUpdater DESERIALIZER_UPDATER = AtomicReferenceFieldUpdater.newUpdater(MetadataClass.class, Deserializer.class, "deserializer"); 36 | private volatile Deserializer deserializer; 37 | 38 | MetadataClass(RecordingStream stream, MetadataEvent eventr) throws IOException { 39 | super(stream, MetadataElementKind.CLASS); 40 | this.associatedChunk = stream.getContext().getChunkIndex(); 41 | readSubelements(eventr); 42 | metadataLookup.addClass(getId(), this); 43 | } 44 | 45 | @Override 46 | protected void onAttribute(String key, String value) { 47 | if (key.equals("superType")) { 48 | superType = value; 49 | } else if (key.equals("simpleType")) { 50 | simpleTypeVal = value; 51 | } 52 | } 53 | 54 | public void bindDeserializer() { 55 | DESERIALIZER_UPDATER.updateAndGet(this, v -> (v == null) ? getContext().getDeserializerCache().computeIfAbsent(new RecordingParserContext.DeserializerKey(MetadataClass.this), k -> Deserializer.forType(MetadataClass.this)) : v); 56 | } 57 | 58 | /** 59 | * Get the associated deserializer. 60 | * Used in the generated handler classes. 
61 | * @return the associated deserializer 62 | */ 63 | public Deserializer getDeserializer() { 64 | return deserializer; 65 | } 66 | 67 | public String getSuperType() { 68 | return superType; 69 | } 70 | 71 | public boolean isPrimitive() { 72 | if (isPrimitive == null) { 73 | isPrimitive = primitiveTypeNames.contains(getName()); 74 | } 75 | return isPrimitive; 76 | } 77 | 78 | public boolean isSimpleType() { 79 | if (isSimpleType == null) { 80 | isSimpleType = Boolean.parseBoolean(simpleTypeVal); 81 | } 82 | return isSimpleType; 83 | } 84 | 85 | protected void onSubelement(int count, AbstractMetadataElement element) { 86 | if (element.getKind() == MetadataElementKind.SETTING) { 87 | if (settings == null) { 88 | settings = new HashMap<>(count * 2, 0.5f); 89 | } 90 | MetadataSetting setting = (MetadataSetting) element; 91 | settings.put(setting.getName(), setting); 92 | } else if (element.getKind() == MetadataElementKind.ANNOTATION) { 93 | if (annotations == null) { 94 | annotations = new ArrayList<>(count); 95 | } 96 | annotations.add((MetadataAnnotation) element); 97 | } else if (element.getKind() == MetadataElementKind.FIELD) { 98 | if (fields == null) { 99 | fields = new ArrayList<>(count); 100 | } 101 | MetadataField field = (MetadataField) element; 102 | fields.add(field); 103 | } else { 104 | throw new IllegalStateException("Unexpected subelement: " + element.getKind()); 105 | } 106 | } 107 | 108 | @Override 109 | public void accept(MetadataVisitor visitor) { 110 | visitor.visitClass(this); 111 | if (settings != null) { 112 | settings.values().forEach(s -> s.accept(visitor)); 113 | } 114 | if (annotations != null) { 115 | annotations.forEach(a -> a.accept(visitor)); 116 | } 117 | if (fields != null) { 118 | fields.forEach(f -> f.accept(visitor)); 119 | } 120 | visitor.visitEnd(this); 121 | } 122 | 123 | public List getFields() { 124 | return Collections.unmodifiableList(fields == null ? 
Collections.emptyList() : fields); 125 | } 126 | 127 | public void skip(RecordingStream stream) throws IOException { 128 | if (deserializer == null) { 129 | return; 130 | } 131 | try { 132 | deserializer.skip(stream); 133 | } catch (Exception e) { 134 | throw new IOException(e); 135 | } 136 | } 137 | 138 | @SuppressWarnings("unchecked") 139 | public T read(RecordingStream stream) { 140 | if (deserializer == null) { 141 | return null; 142 | } 143 | try { 144 | return (T) deserializer.deserialize(stream); 145 | } catch (Exception e) { 146 | throw new RuntimeException(e); 147 | } 148 | } 149 | 150 | private static boolean isPrimitive(String typeName) { 151 | return typeName.equals("byte") || 152 | typeName.equals("short") || 153 | typeName.equals("char") || 154 | typeName.equals("int") || 155 | typeName.equals("long") || 156 | typeName.equals("float") || 157 | typeName.equals("double") || 158 | typeName.equals("boolean") || 159 | typeName.equals("java.lang.String"); 160 | } 161 | 162 | @Override 163 | public String toString() { 164 | return "MetadataClass{" + 165 | "id='" + getId() + '\'' + 166 | ", chunk=" + associatedChunk + 167 | ", name='" + getName() + "'" + 168 | ", superType='" + superType + '\'' + 169 | '}'; 170 | } 171 | 172 | @Override 173 | public boolean equals(Object o) { 174 | if (this == o) return true; 175 | if (o == null || getClass() != o.getClass()) return false; 176 | MetadataClass that = (MetadataClass) o; 177 | return getId() == that.getId() && Objects.equals(getName(), that.getName()) && Objects.equals(superType, that.superType) && Objects.equals(fields, that.fields); 178 | } 179 | 180 | @Override 181 | public int hashCode() { 182 | if (!hasHashCode) { 183 | long mixed = getId() * 0x9E3779B97F4A7C15L 184 | + getName().hashCode() * 0xC6BC279692B5C323L 185 | + Objects.hashCode(superType) * 0xD8163841FDE6A8F9L 186 | + Objects.hashCode(fields) * 0xA3B195354A39B70DL; 187 | hashCode = Long.hashCode(mixed); 188 | hasHashCode = true; 189 | } 190 | return hashCode; 191 | } 192 | } 193 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/internal_api/metadata/MetadataElement.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.internal_api.metadata; 2 | 3 | import io.jafar.parser.internal_api.RecordingStream; 4 | 5 | import java.io.IOException; 6 | import java.util.ArrayList; 7 | import java.util.List; 8 | import java.util.Objects; 9 | 10 | public final class MetadataElement extends AbstractMetadataElement { 11 | private boolean hasHashCode = false; 12 | private int hashCode; 13 | 14 | private List classes = null; 15 | 16 | MetadataElement(RecordingStream stream, MetadataEvent event) throws IOException { 17 | super(stream, MetadataElementKind.META); 18 | readSubelements(event); 19 | } 20 | 21 | @Override 22 | protected void onSubelement(int count, AbstractMetadataElement element) { 23 | if (element.getKind() == MetadataElementKind.CLASS) { 24 | if (classes == null) { 25 | classes = new ArrayList<>(count); 26 | } 27 | MetadataClass clz = (MetadataClass) element; 28 | classes.add(clz); 29 | } else { 30 | throw new IllegalStateException("Unexpected subelement: " + element.getKind()); 31 | } 32 | } 33 | 34 | @Override 35 | public void accept(MetadataVisitor visitor) { 36 | visitor.visitMetadata(this); 37 | if (classes != null) { 38 | classes.forEach(c -> c.accept(visitor)); 39 | } 40 | } 41 | 42 | @Override 43 | public String toString() { 44 | return 
"MetadataElement"; 45 | } 46 | 47 | @Override 48 | public boolean equals(Object o) { 49 | if (this == o) return true; 50 | if (o == null || getClass() != o.getClass()) return false; 51 | MetadataElement that = (MetadataElement) o; 52 | return Objects.equals(classes, that.classes); 53 | } 54 | 55 | @Override 56 | public int hashCode() { 57 | if (!hasHashCode) { 58 | hashCode = Objects.hash(classes); 59 | hasHashCode = true; 60 | } 61 | return hashCode; 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/internal_api/metadata/MetadataElementKind.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.internal_api.metadata; 2 | 3 | public enum MetadataElementKind { 4 | ROOT, CLASS, ANNOTATION, FIELD, REGION, SETTING, META 5 | } 6 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/internal_api/metadata/MetadataEvent.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.internal_api.metadata; 2 | 3 | import io.jafar.parser.AbstractEvent; 4 | import io.jafar.parser.ParsingUtils; 5 | import io.jafar.parser.MutableMetadataLookup; 6 | import io.jafar.parser.internal_api.RecordingStream; 7 | 8 | import java.io.IOException; 9 | import java.util.ArrayList; 10 | import java.util.Collection; 11 | import java.util.Collections; 12 | import java.util.List; 13 | import java.util.Objects; 14 | 15 | /** 16 | * JFR Chunk metadata 17 | * 18 | *

It contains the chunk specific type specifications 19 | */ 20 | public final class MetadataEvent extends AbstractEvent { 21 | private boolean hasHashCode = false; 22 | private int hashCode; 23 | 24 | public final int size; 25 | public final long startTime; 26 | public final long duration; 27 | public final long metadataId; 28 | private final MetadataRoot root; 29 | private final List classes = new ArrayList<>(200); 30 | 31 | private final boolean forceConstantPools; 32 | 33 | public MetadataEvent(RecordingStream stream, boolean forceConstantPools) throws IOException { 34 | super(stream); 35 | size = (int) stream.readVarint(); 36 | if (size == 0) { 37 | throw new IOException("Unexpected event size. Should be > 0"); 38 | } 39 | long typeId = stream.readVarint(); 40 | if (typeId != 0) { 41 | throw new IOException("Unexpected event type: " + typeId + " (should be 0)"); 42 | } 43 | startTime = stream.readVarint(); 44 | duration = stream.readVarint(); 45 | metadataId = stream.readVarint(); 46 | this.forceConstantPools = forceConstantPools; 47 | 48 | readStringTable(stream); 49 | root = (MetadataRoot) readElement(stream); 50 | } 51 | 52 | private void readStringTable(RecordingStream stream) throws IOException { 53 | int stringCnt = (int) stream.readVarint(); 54 | String[] stringConstants = new String[stringCnt]; 55 | for (int stringIdx = 0; stringIdx < stringCnt; stringIdx++) { 56 | stringConstants[stringIdx] = ParsingUtils.readUTF8(stream); 57 | } 58 | ((MutableMetadataLookup)stream.getContext().getMetadataLookup()).setStringtable(stringConstants); 59 | } 60 | 61 | AbstractMetadataElement readElement(RecordingStream stream) throws IOException { 62 | try { 63 | // get the element name 64 | int stringPtr = (int) stream.readVarint(); 65 | String typeId = stream.getContext().getMetadataLookup().getString(stringPtr); 66 | AbstractMetadataElement element = null; 67 | switch (typeId) { 68 | case "class": { 69 | MetadataClass clz = new MetadataClass(stream, this); 70 | classes.add(clz); 71 | element = clz; 72 | break; 73 | } 74 | case "field": { 75 | element = new MetadataField(stream, this, forceConstantPools); 76 | break; 77 | } 78 | case "annotation": { 79 | element = new MetadataAnnotation(stream, this); 80 | break; 81 | } 82 | case "root": { 83 | element = new MetadataRoot(stream, this); 84 | break; 85 | } 86 | case "metadata": { 87 | element = new MetadataElement(stream, this); 88 | break; 89 | } 90 | case "region": { 91 | element = new MetadataRegion(stream, this); 92 | break; 93 | } 94 | case "setting": { 95 | element = new MetadataSetting(stream, this); 96 | break; 97 | } 98 | default: { 99 | throw new IOException("Unsupported metadata type: " + typeId); 100 | } 101 | }; 102 | 103 | return element; 104 | } catch (Throwable t) { 105 | t.printStackTrace(); 106 | throw t; 107 | } 108 | } 109 | 110 | public MetadataRoot getRoot() { 111 | return root; 112 | } 113 | 114 | public Collection getClasses() { 115 | return Collections.unmodifiableCollection(classes); 116 | } 117 | 118 | @Override 119 | public String toString() { 120 | return "Metadata{" 121 | + "size=" 122 | + size 123 | + ", startTime=" 124 | + startTime 125 | + ", duration=" 126 | + duration 127 | + ", metadataId=" 128 | + metadataId 129 | + '}'; 130 | } 131 | 132 | @Override 133 | public boolean equals(Object o) { 134 | if (this == o) return true; 135 | if (o == null || getClass() != o.getClass()) return false; 136 | MetadataEvent that = (MetadataEvent) o; 137 | return size == that.size && startTime == that.startTime && duration == 
that.duration && metadataId == that.metadataId && forceConstantPools == that.forceConstantPools && Objects.equals(root, that.root) && Objects.equals(classes, that.classes); 138 | } 139 | 140 | @Override 141 | public int hashCode() { 142 | if (!hasHashCode) { 143 | hashCode = Objects.hash(size, startTime, duration, metadataId, root, classes, forceConstantPools); 144 | hasHashCode = true; 145 | } 146 | return hashCode; 147 | } 148 | } 149 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/internal_api/metadata/MetadataField.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.internal_api.metadata; 2 | 3 | import io.jafar.parser.internal_api.RecordingStream; 4 | 5 | import java.io.IOException; 6 | import java.util.ArrayList; 7 | import java.util.List; 8 | 9 | public final class MetadataField extends AbstractMetadataElement { 10 | private boolean hasHashCode = false; 11 | private int hashCode; 12 | 13 | private List annotations = null; 14 | private Long classId; 15 | private String classIdVal; 16 | private Boolean hasConstantPool; 17 | private String hasConstantPoolVal; 18 | private Integer dimension; 19 | private String dimensionVal; 20 | 21 | private MetadataClass type = null; 22 | 23 | MetadataField(RecordingStream stream, MetadataEvent event, boolean forceConstantPools) throws IOException { 24 | super(stream, MetadataElementKind.FIELD); 25 | readSubelements(event); 26 | } 27 | 28 | @Override 29 | protected void onAttribute(String key, String value) { 30 | switch (key) { 31 | case "class": 32 | classIdVal = value; 33 | break; 34 | case "constantPool": 35 | hasConstantPoolVal = value; 36 | break; 37 | case "dimension": 38 | dimensionVal = value; 39 | break; 40 | } 41 | } 42 | 43 | public MetadataClass getType() { 44 | // all events from a single chunk, referencing a particular type will be procesed in a single thread 45 | // therefore, we are not risiking data race here 46 | if (type == null) { 47 | type = metadataLookup.getClass(getTypeId()); 48 | } 49 | return type; 50 | } 51 | 52 | public long getTypeId() { 53 | if (classId == null) { 54 | classId = Long.parseLong(classIdVal); 55 | } 56 | return classId; 57 | } 58 | 59 | public boolean hasConstantPool() { 60 | if (hasConstantPool == null) { 61 | hasConstantPool = Boolean.parseBoolean(hasConstantPoolVal); 62 | } 63 | return hasConstantPool; 64 | } 65 | 66 | public int getDimension() { 67 | if (dimension == null) { 68 | dimension = dimensionVal != null ? Integer.parseInt(dimensionVal) : -1; 69 | } 70 | return dimension; 71 | } 72 | 73 | @Override 74 | protected void onSubelement(int count, AbstractMetadataElement element) { 75 | if (element.getKind() == MetadataElementKind.ANNOTATION) { 76 | if (annotations == null) { 77 | annotations = new ArrayList<>(count); 78 | } 79 | annotations.add((MetadataAnnotation) element); 80 | } else { 81 | throw new IllegalStateException("Unexpected subelement: " + element.getKind()); 82 | } 83 | } 84 | 85 | @Override 86 | public void accept(MetadataVisitor visitor) { 87 | visitor.visitField(this); 88 | if (annotations != null) { 89 | annotations.forEach(a -> a.accept(visitor)); 90 | } 91 | visitor.visitEnd(this); 92 | } 93 | 94 | @Override 95 | public String toString() { 96 | return "MetadataField{" + 97 | "type='" + (getType() != null ? 
getType().getName() : getTypeId()) + '\'' + 98 | ", name='" + getName() + "'" + 99 | ", hasConstantPool=" + hasConstantPool + 100 | ", dimension=" + dimension + 101 | '}'; 102 | } 103 | 104 | @Override 105 | public boolean equals(Object o) { 106 | if (this == o) return true; 107 | if (o == null || getClass() != o.getClass()) return false; 108 | MetadataField that = (MetadataField) o; 109 | return getTypeId() == that.getTypeId() && hasConstantPool() == that.hasConstantPool() && getDimension() == that.getDimension(); 110 | } 111 | 112 | @Override 113 | public int hashCode() { 114 | if (!hasHashCode) { 115 | long mixed = getTypeId() * 0x9E3779B97F4A7C15L + (hasConstantPool() ? 1 : 0) * 0xC6BC279692B5C323L + getDimension() * 0xD8163841FDE6A8F9L; 116 | hashCode = Long.hashCode(mixed); 117 | hasHashCode = true; 118 | } 119 | return hashCode; 120 | } 121 | } 122 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/internal_api/metadata/MetadataRegion.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.internal_api.metadata; 2 | 3 | import io.jafar.parser.internal_api.RecordingStream; 4 | 5 | import java.io.IOException; 6 | import java.util.Objects; 7 | 8 | public final class MetadataRegion extends AbstractMetadataElement { 9 | private boolean hasHashCode = false; 10 | private int hashCode; 11 | 12 | private long dst; 13 | private long gmtOffset; 14 | private String locale; 15 | 16 | MetadataRegion(RecordingStream stream, MetadataEvent event) throws IOException { 17 | super(stream, MetadataElementKind.REGION); 18 | readSubelements(event); 19 | } 20 | 21 | @Override 22 | protected void onAttribute(String key, String value) { 23 | switch (key) { 24 | case "dst": 25 | dst = value != null ? Long.parseLong(value) : 0L; 26 | break; 27 | case "gmtOffset": 28 | gmtOffset = value!= null ? Long.parseLong(value) : 0L; 29 | break; 30 | case "locale": 31 | locale = value != null ? 
value : "en_US"; 32 | break; 33 | } 34 | } 35 | 36 | public long getDst() { 37 | return dst; 38 | } 39 | 40 | public long getGmtOffset() { 41 | return gmtOffset; 42 | } 43 | 44 | public String getLocale() { 45 | return locale; 46 | } 47 | 48 | @Override 49 | protected void onSubelement(int count, AbstractMetadataElement element) { 50 | throw new IllegalStateException("Unexpected subelement: " + element.getKind()); 51 | } 52 | 53 | @Override 54 | public void accept(MetadataVisitor visitor) { 55 | visitor.visitRegion(this); 56 | visitor.visitEnd(this); 57 | } 58 | 59 | @Override 60 | public String toString() { 61 | return "MetadataRegion{" + 62 | "dst=" + dst + 63 | ", gmtOffset=" + gmtOffset + 64 | ", locale='" + locale + '\'' + 65 | '}'; 66 | } 67 | 68 | @Override 69 | public boolean equals(Object o) { 70 | if (this == o) return true; 71 | if (o == null || getClass() != o.getClass()) return false; 72 | MetadataRegion that = (MetadataRegion) o; 73 | return dst == that.dst && gmtOffset == that.gmtOffset && Objects.equals(locale, that.locale); 74 | } 75 | 76 | @Override 77 | public int hashCode() { 78 | if (!hasHashCode) { 79 | long mixed = dst * 0x9E3779B97F4A7C15L + gmtOffset * 0xC6BC279692B5C323L + Objects.hashCode(locale) * 0xD8163841FDE6A8F9L; 80 | hashCode = Long.hashCode(mixed); 81 | hasHashCode = true; 82 | } 83 | return hashCode; 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/internal_api/metadata/MetadataRoot.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.internal_api.metadata; 2 | 3 | import io.jafar.parser.internal_api.RecordingStream; 4 | 5 | import java.io.IOException; 6 | import java.util.Objects; 7 | 8 | public final class MetadataRoot extends AbstractMetadataElement { 9 | private boolean hasHashHascode = false; 10 | private int hashCode; 11 | 12 | private MetadataElement metadata; 13 | private MetadataRegion region; 14 | 15 | MetadataRoot(RecordingStream stream, MetadataEvent event) throws IOException { 16 | super(stream, MetadataElementKind.ROOT); 17 | readSubelements(event); 18 | } 19 | 20 | @Override 21 | protected void onSubelement(int count, AbstractMetadataElement element) { 22 | if (element.getKind() == MetadataElementKind.META) { 23 | metadata = (MetadataElement) element; 24 | } else if (element.getKind() == MetadataElementKind.REGION) { 25 | region = (MetadataRegion) element; 26 | } else { 27 | throw new IllegalStateException("Unexpected subelement: " + element.getKind()); 28 | } 29 | } 30 | 31 | @Override 32 | public void accept(MetadataVisitor visitor) { 33 | visitor.visitRoot(this); 34 | metadata.accept(visitor); 35 | region.accept(visitor); 36 | visitor.visitEnd(this); 37 | } 38 | 39 | @Override 40 | public String toString() { 41 | return "MetadataRoot"; 42 | } 43 | 44 | @Override 45 | public boolean equals(Object o) { 46 | if (this == o) return true; 47 | if (o == null || getClass() != o.getClass()) return false; 48 | MetadataRoot that = (MetadataRoot) o; 49 | return Objects.equals(metadata, that.metadata) && Objects.equals(region, that.region); 50 | } 51 | 52 | @Override 53 | public int hashCode() { 54 | if (!hasHashHascode) { 55 | hashCode = Objects.hash(metadata, region); 56 | hasHashHascode = true; 57 | } 58 | return hashCode; 59 | } 60 | } 61 | -------------------------------------------------------------------------------- 
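All of the metadata elements above expose accept(MetadataVisitor), so the type metadata of a chunk can be walked with a small visitor. The following is a minimal sketch, not part of the repository, using only the methods visible in these files (MetadataEvent.getRoot(), the visitor callbacks, and the element getters):

import io.jafar.parser.internal_api.metadata.MetadataClass;
import io.jafar.parser.internal_api.metadata.MetadataEvent;
import io.jafar.parser.internal_api.metadata.MetadataField;
import io.jafar.parser.internal_api.metadata.MetadataVisitor;

// Prints every class and its fields found in a chunk's metadata event.
final class MetadataPrinter implements MetadataVisitor {
    @Override
    public void visitClass(MetadataClass clz) {
        System.out.println(clz.getName() + " (id=" + clz.getId() + ")");
    }

    @Override
    public void visitField(MetadataField field) {
        System.out.println("  " + field.getName() + " : type id " + field.getTypeId());
    }

    static void print(MetadataEvent metadata) {
        // MetadataRoot.accept() descends into the contained MetadataElement, its classes,
        // and their settings/annotations/fields, calling back into this visitor.
        metadata.getRoot().accept(new MetadataPrinter());
    }
}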
/parser/src/main/java/io/jafar/parser/internal_api/metadata/MetadataSetting.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.internal_api.metadata; 2 | 3 | import io.jafar.parser.internal_api.RecordingStream; 4 | 5 | import java.io.IOException; 6 | import java.util.Objects; 7 | 8 | final class MetadataSetting extends AbstractMetadataElement { 9 | private boolean hasHashCode = false; 10 | private int hashCode; 11 | 12 | private String value; 13 | private long typeId; 14 | 15 | public MetadataSetting(RecordingStream stream, MetadataEvent event) throws IOException { 16 | super(stream, MetadataElementKind.SETTING); 17 | readSubelements(event); 18 | } 19 | 20 | @Override 21 | protected void onAttribute(String key, String value) { 22 | switch (key) { 23 | case "defaultValue": 24 | this.value = value; 25 | break; 26 | case "class": 27 | typeId = Long.parseLong(value); 28 | break; 29 | } 30 | } 31 | 32 | public String getValue() { 33 | return value; 34 | } 35 | 36 | public MetadataClass getType() { 37 | return metadataLookup.getClass(typeId); 38 | } 39 | 40 | @Override 41 | protected void onSubelement(int count, AbstractMetadataElement element) { 42 | throw new IllegalStateException("Unexpected subelement: " + element.getKind()); 43 | } 44 | 45 | @Override 46 | public void accept(MetadataVisitor visitor) { 47 | visitor.visitSetting(this); 48 | visitor.visitEnd(this); 49 | } 50 | 51 | @Override 52 | public String toString() { 53 | return "MetadataSetting{" + 54 | "type='" + (getType() != null ? getType().getName() : typeId) + "'" + 55 | ", name='" + getName() + "'" + 56 | ", value='" + value + '\'' + 57 | '}'; 58 | } 59 | 60 | @Override 61 | public boolean equals(Object o) { 62 | if (this == o) return true; 63 | if (o == null || getClass() != o.getClass()) return false; 64 | MetadataSetting that = (MetadataSetting) o; 65 | return typeId == that.typeId && Objects.equals(value, that.value); 66 | } 67 | 68 | @Override 69 | public int hashCode() { 70 | if (!hasHashCode) { 71 | long mixed = typeId * 0x9E3779B97F4A7C15L + Objects.hashCode(value) * 0xC6BC279692B5C323L; 72 | hashCode = Long.hashCode(mixed); 73 | hasHashCode = true; 74 | } 75 | return hashCode; 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/parser/internal_api/metadata/MetadataVisitor.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser.internal_api.metadata; 2 | 3 | public interface MetadataVisitor { 4 | default void visitRoot(MetadataRoot root) {}; 5 | default void visitEnd(MetadataRoot root) {}; 6 | default void visitMetadata(MetadataElement metadata) {}; 7 | default void visitEnd(MetadataElement metadata) {}; 8 | default void visitClass(MetadataClass clz) {}; 9 | default void visitEnd(MetadataClass clz) {}; 10 | default void visitSetting(MetadataSetting setting) {}; 11 | default void visitEnd(MetadataSetting setting) {}; 12 | default void visitAnnotation(MetadataAnnotation annotation) {}; 13 | default void visitEnd(MetadataAnnotation annotation) {}; 14 | default void visitField(MetadataField field) {} 15 | default void visitEnd(MetadataField field) {} 16 | default void visitRegion(MetadataRegion region) {} 17 | default void visitEnd(MetadataRegion region) {} 18 | } 19 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/utils/BytePacking.java: 
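BytePacking (next file) packs four ASCII characters into a single int, presumably so that a four-byte tag can be compared against one int read from a buffer. A quick worked example, not from the source:

int tag = BytePacking.pack(ByteOrder.LITTLE_ENDIAN, 'A', 'B', 'C', 'D'); // == 0x44434241, 'A' in the lowest byte
// with ByteOrder.BIG_ENDIAN the same call returns the byte-reversed value 0x41424344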
-------------------------------------------------------------------------------- 1 | package io.jafar.utils; 2 | 3 | import java.nio.ByteOrder; 4 | 5 | public class BytePacking { 6 | public static int pack(ByteOrder order, char a, char b, char c, char d) { 7 | assert ((a | b | c | d) & 0xFF00) == 0 : "not ASCII"; 8 | int packed = (d << 24) | (c << 16) | (b << 8) | a; 9 | return order == ByteOrder.BIG_ENDIAN ? Integer.reverseBytes(packed) : packed; 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/utils/CachedStringParser.java: -------------------------------------------------------------------------------- 1 | package io.jafar.utils; 2 | 3 | import java.nio.charset.Charset; 4 | import java.util.Arrays; 5 | 6 | @SuppressWarnings("UnstableApiUsage") 7 | public class CachedStringParser { 8 | public static final class ByteArrayParser { 9 | private byte[] previousData = new byte[4096]; 10 | private int previousLen = 0; 11 | private String lastString = null; 12 | 13 | public String parse(byte[] data, int len, Charset charset) { 14 | if (lastString != null && previousLen == len && Arrays.equals(data, 0, len, previousData, 0, len)) { 15 | return lastString; 16 | } 17 | if (len > previousData.length) { 18 | previousData = Arrays.copyOf(data, len); 19 | } else { 20 | System.arraycopy(data, 0, previousData, 0, len); 21 | } 22 | previousLen = len; 23 | lastString = new String(data, 0, len, charset); 24 | return lastString; 25 | } 26 | } 27 | 28 | public static final class CharArrayParser { 29 | private char[] previousData = new char[4096]; 30 | private int previousLen = 0; 31 | private String lastString = null; 32 | 33 | public String parse(char[] data, int len) { 34 | if (lastString != null && previousLen == len && Arrays.equals(data, 0, len, previousData, 0, len)) { 35 | return lastString; 36 | } 37 | if (len > previousData.length) { 38 | previousData = Arrays.copyOf(data, len); 39 | } else { 40 | System.arraycopy(data, 0, previousData, 0, len); 41 | } 42 | previousLen = len; 43 | lastString = new String(data, 0, len); 44 | return lastString; 45 | } 46 | } 47 | 48 | public static ByteArrayParser byteParser() { 49 | return new ByteArrayParser(); 50 | } 51 | 52 | public static CharArrayParser charParser() { 53 | return new CharArrayParser(); 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/utils/CustomByteBuffer.java: -------------------------------------------------------------------------------- 1 | package io.jafar.utils; 2 | 3 | import java.io.IOException; 4 | import java.io.RandomAccessFile; 5 | import java.nio.ByteOrder; 6 | import java.nio.MappedByteBuffer; 7 | import java.nio.channels.FileChannel; 8 | import java.nio.file.Files; 9 | import java.nio.file.Path; 10 | 11 | public interface CustomByteBuffer { 12 | static CustomByteBuffer map(Path channel) throws IOException { 13 | return map(channel, Integer.MAX_VALUE); 14 | } 15 | 16 | static CustomByteBuffer map(Path path, int spliceSize) throws IOException { 17 | long size = Files.size(path); 18 | if (size > spliceSize) { 19 | return new SplicedMappedByteBuffer(path, spliceSize); 20 | } else { 21 | try (RandomAccessFile raf = new RandomAccessFile(path.toFile(), "r"); FileChannel channel = raf.getChannel()) { 22 | return new ByteBufferWrapper(channel.map(FileChannel.MapMode.READ_ONLY, 0, size)); 23 | } 24 | } 25 | } 26 | 27 | CustomByteBuffer slice(); 28 | CustomByteBuffer slice(long 
pos, long len); 29 | 30 | CustomByteBuffer order(ByteOrder bigEndian); 31 | ByteOrder order(); 32 | 33 | boolean isNativeOrder(); 34 | 35 | void position(long position); 36 | 37 | long position(); 38 | 39 | long remaining(); 40 | 41 | void get(byte[] buffer, int offset, int length); 42 | 43 | byte get(); 44 | 45 | short getShort(); 46 | 47 | int getInt(); 48 | 49 | float getFloat(); 50 | 51 | double getDouble(); 52 | 53 | void mark(); 54 | 55 | void reset(); 56 | 57 | long getLong(); 58 | 59 | class ByteBufferWrapper implements CustomByteBuffer { 60 | private final MappedByteBuffer delegate; 61 | private final boolean nativeOrder; 62 | 63 | public ByteBufferWrapper(MappedByteBuffer delegate) { 64 | this.delegate = delegate; 65 | this.nativeOrder = delegate.order() == ByteOrder.nativeOrder(); 66 | delegate.order(ByteOrder.nativeOrder()); 67 | } 68 | 69 | @Override 70 | public boolean isNativeOrder() { 71 | return nativeOrder; 72 | } 73 | 74 | @Override 75 | public CustomByteBuffer slice(long pos, long len) { 76 | return new ByteBufferWrapper(delegate.slice((int)pos, (int)len)); 77 | } 78 | 79 | @Override 80 | public CustomByteBuffer slice() { 81 | return new ByteBufferWrapper(delegate.slice()); 82 | } 83 | 84 | @Override 85 | public CustomByteBuffer order(ByteOrder order) { 86 | delegate.order(order); 87 | return this; 88 | } 89 | 90 | @Override 91 | public ByteOrder order() { 92 | return delegate.order(); 93 | } 94 | 95 | @Override 96 | public void position(long position) { 97 | delegate.position((int)position); 98 | // this.position = (int) position; 99 | } 100 | 101 | @Override 102 | public long position() { 103 | return delegate.position(); //position; 104 | } 105 | 106 | @Override 107 | public long remaining() { 108 | return delegate.remaining(); //length - position; 109 | } 110 | 111 | @Override 112 | public void get(byte[] buffer, int offset, int length) { 113 | delegate.get(buffer, offset, length); 114 | // delegate.get(position, buffer, offset, length); 115 | // position += length; 116 | } 117 | 118 | @Override 119 | public byte get() { 120 | return delegate.get(); 121 | // return delegate.get(position++); 122 | } 123 | 124 | @Override 125 | public short getShort() { 126 | return delegate.getShort(); 127 | // short s = delegate.getShort(position); 128 | // position += 2; 129 | // return s; 130 | } 131 | 132 | @Override 133 | public int getInt() { 134 | return delegate.getInt(); 135 | // int i = delegate.getInt(position); 136 | // position += 4; 137 | // return i; 138 | } 139 | 140 | @Override 141 | public float getFloat() { 142 | return delegate.getFloat(); 143 | // float f = delegate.getFloat(position); 144 | // position += 4; 145 | // return f; 146 | } 147 | 148 | @Override 149 | public double getDouble() { 150 | return delegate.getDouble(); 151 | // double d = delegate.getDouble(position); 152 | // position += 8; 153 | // return d; 154 | } 155 | 156 | @Override 157 | public long getLong() { 158 | return delegate.getLong(); 159 | // long l = delegate.getLong(position); 160 | // position += 8; 161 | // return l; 162 | } 163 | 164 | @Override 165 | public void mark() { 166 | delegate.mark(); 167 | // mark = position; 168 | } 169 | 170 | @Override 171 | public void reset() { 172 | delegate.reset(); 173 | // if (mark > -1) { 174 | // position = mark; 175 | // } 176 | } 177 | } 178 | } 179 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/utils/SplicedMappedByteBuffer.java: 
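SplicedMappedByteBuffer (next file) covers files too large for a single MappedByteBuffer by mapping them as an array of fixed-size splices. A logical position is translated as index = (position + sliceBase) / spliceSize and offset = (position + sliceBase) % spliceSize; multi-byte reads that would cross a splice boundary are assembled byte-by-byte via a small scratch array. A worked example, assuming spliceSize = 1024 and sliceBase = 0: position 2600 resolves to splice index 2, offset 552.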
-------------------------------------------------------------------------------- 1 | package io.jafar.utils; 2 | 3 | import java.io.IOException; 4 | import java.io.RandomAccessFile; 5 | import java.nio.BufferOverflowException; 6 | import java.nio.ByteBuffer; 7 | import java.nio.ByteOrder; 8 | import java.nio.MappedByteBuffer; 9 | import java.nio.channels.FileChannel; 10 | import java.nio.file.Files; 11 | import java.nio.file.Path; 12 | 13 | public final class SplicedMappedByteBuffer implements CustomByteBuffer { 14 | private final int spliceSize; 15 | private int index = 0; 16 | private int offset = 0; 17 | private long position = 0; 18 | private long mark = 0; 19 | private final long limit; 20 | private final long sliceBase; 21 | private final boolean nativeOrder; 22 | 23 | private final MappedByteBuffer[] splices; 24 | 25 | SplicedMappedByteBuffer(MappedByteBuffer[] splices, int spliceSize, int sliceOffset, int sliceIndex, long limit, boolean nativeOrder) { 26 | this.splices = splices; 27 | this.index = sliceIndex; 28 | this.offset = sliceOffset; 29 | this.spliceSize = spliceSize; 30 | this.limit = limit; 31 | this.sliceBase = (long)index * spliceSize + offset; 32 | this.nativeOrder = nativeOrder; 33 | } 34 | 35 | SplicedMappedByteBuffer(Path file, int spliceSize) throws IOException { 36 | this.sliceBase = 0; 37 | this.spliceSize = spliceSize; 38 | limit = Files.size(file); 39 | int count = (int)(((long)spliceSize + limit - 1) / spliceSize); 40 | splices = new MappedByteBuffer[count]; 41 | boolean inOrder = true; 42 | try (RandomAccessFile raf = new RandomAccessFile(file.toFile(), "r"); 43 | FileChannel channel = raf.getChannel()) { 44 | long remaining = limit; 45 | for (int i = 0; i < count; i++) { 46 | splices[i] = channel.map(FileChannel.MapMode.READ_ONLY, (long)i * spliceSize, (long)Math.min(spliceSize, remaining)); 47 | inOrder &= splices[i].order() == ByteOrder.nativeOrder(); 48 | splices[i].order(ByteOrder.nativeOrder()); // force native order 49 | remaining -= spliceSize; 50 | } 51 | this.nativeOrder = inOrder; 52 | } catch (IOException e) { 53 | throw new RuntimeException(e); 54 | } 55 | } 56 | 57 | @Override 58 | public boolean isNativeOrder() { 59 | return nativeOrder; 60 | } 61 | 62 | @Override 63 | public CustomByteBuffer slice() { 64 | return new SplicedMappedByteBuffer(splices, spliceSize, offset, index, remaining(), nativeOrder); 65 | } 66 | 67 | @Override 68 | public CustomByteBuffer slice(long pos, long len) { 69 | if (pos + len > limit) { 70 | throw new BufferOverflowException(); 71 | } 72 | int realIndex = (int)((sliceBase + pos) / spliceSize); 73 | int realOffset = (int)((sliceBase + pos) % spliceSize); 74 | return new SplicedMappedByteBuffer(splices, spliceSize, realOffset, realIndex, len, nativeOrder); 75 | } 76 | 77 | @Override 78 | public CustomByteBuffer order(ByteOrder order) { 79 | for (int i = 0; i < splices.length; i++) { 80 | splices[i] = (MappedByteBuffer) splices[i].order(order); 81 | } 82 | return this; 83 | } 84 | 85 | @Override 86 | public ByteOrder order() { 87 | return splices[0].order(); 88 | } 89 | 90 | @Override 91 | public void position(long position) { 92 | if (position > limit) { 93 | throw new BufferOverflowException(); 94 | } 95 | index = (int)((position + sliceBase) / spliceSize); 96 | offset = (int)((position + sliceBase) % spliceSize); 97 | this.position = position; 98 | } 99 | 100 | @Override 101 | public long position() { 102 | return position; 103 | } 104 | 105 | @Override 106 | public long remaining() { 107 | return limit - position; 
108 | } 109 | 110 | private void checkSpliceOffset() { 111 | if (offset == spliceSize) { 112 | if (++index == splices.length) { 113 | throw new BufferOverflowException(); 114 | } 115 | offset = 0; 116 | splices[index].position(offset); 117 | } 118 | } 119 | 120 | @Override 121 | public void get(byte[] buffer, int offset, int length) { 122 | int loaded = 0; 123 | do { 124 | checkSpliceOffset(); 125 | int toLoad = (int)Math.min(spliceSize - this.offset, length - loaded); 126 | splices[index].get(this.offset, buffer, offset + loaded, toLoad); 127 | loaded += toLoad; 128 | this.offset += toLoad; 129 | } while (loaded < length); 130 | position += length; 131 | } 132 | 133 | @Override 134 | public byte get() { 135 | checkSpliceOffset(); 136 | position++; 137 | return splices[index].get(offset++); 138 | } 139 | 140 | private final byte[] numArray = new byte[8]; 141 | 142 | @Override 143 | public short getShort() { 144 | checkSpliceOffset(); 145 | if (spliceSize - offset >= 2) { 146 | position += 2; 147 | short ret = splices[index].getShort(offset); 148 | offset += 2; 149 | return ret; 150 | } else { 151 | numArray[0] = get(); 152 | numArray[1] = get(); 153 | return ByteBuffer.wrap(numArray).order(splices[0].order()).getShort(); 154 | } 155 | } 156 | 157 | @Override 158 | public int getInt() { 159 | checkSpliceOffset(); 160 | if (spliceSize - offset >= 4) { 161 | position += 4; 162 | int ret = splices[index].getInt(offset); 163 | offset += 4; 164 | return ret; 165 | } else { 166 | int splitPoint = spliceSize - offset; 167 | get(numArray, 0, splitPoint); 168 | get(numArray, splitPoint, 4 - splitPoint); 169 | return ByteBuffer.wrap(numArray).order(splices[0].order()).getInt(); 170 | } 171 | } 172 | 173 | @Override 174 | public float getFloat() { 175 | checkSpliceOffset(); 176 | if (spliceSize - offset >= 4) { 177 | position += 4; 178 | float ret = splices[index].getFloat(offset); 179 | offset += 4; 180 | return ret; 181 | } else { 182 | int splitPoint = spliceSize - offset; 183 | get(numArray, 0, splitPoint); 184 | get(numArray, splitPoint, 4 - splitPoint); 185 | return ByteBuffer.wrap(numArray).order(splices[0].order()).getFloat(); 186 | } 187 | } 188 | 189 | @Override 190 | public double getDouble() { 191 | checkSpliceOffset(); 192 | if (spliceSize - offset >= 8) { 193 | position += 8; 194 | double ret = splices[index].getDouble(offset); 195 | offset += 8; 196 | return ret; 197 | } else { 198 | int splitPoint = spliceSize - offset; 199 | get(numArray, 0, splitPoint); 200 | get(numArray, splitPoint, 8 - splitPoint); 201 | return ByteBuffer.wrap(numArray).order(splices[0].order()).getDouble(); 202 | } 203 | } 204 | 205 | @Override 206 | public long getLong() { 207 | checkSpliceOffset(); 208 | if (spliceSize - offset >= 8) { 209 | position += 8; 210 | long ret = splices[index].getLong(offset); 211 | offset += 8; 212 | return ret; 213 | } else { 214 | int splitPoint = spliceSize - offset; 215 | get(numArray, 0, splitPoint); 216 | get(numArray, splitPoint, 8 - splitPoint); 217 | return ByteBuffer.wrap(numArray).order(splices[0].order()).getLong(); 218 | } 219 | } 220 | 221 | @Override 222 | public void mark() { 223 | mark = position; 224 | } 225 | 226 | @Override 227 | public void reset() { 228 | position = mark; 229 | index = (int)((position + sliceBase) / spliceSize); 230 | offset = (int)((position + sliceBase) % spliceSize); 231 | } 232 | } 233 | -------------------------------------------------------------------------------- /parser/src/main/java/io/jafar/utils/TypeGenerator.java: 
--------------------------------------------------------------------------------
  1 | package io.jafar.utils;
  2 | 
  3 | import io.jafar.parser.internal_api.ChunkParserListener;
  4 | import io.jafar.parser.internal_api.StreamingChunkParser;
  5 | import io.jafar.parser.internal_api.metadata.MetadataClass;
  6 | import io.jafar.parser.internal_api.metadata.MetadataEvent;
  7 | import io.jafar.parser.internal_api.metadata.MetadataField;
  8 | import jdk.jfr.EventType;
  9 | import jdk.jfr.FlightRecorder;
 10 | import jdk.jfr.ValueDescriptor;
 11 | 
 12 | import java.io.IOException;
 13 | import java.nio.file.Files;
 14 | import java.nio.file.Path;
 15 | import java.nio.file.StandardOpenOption;
 16 | import java.util.HashSet;
 17 | import java.util.Set;
 18 | import java.util.function.Predicate;
 19 | 
 20 | public final class TypeGenerator {
 21 |     private final Path jfr;
 22 |     private final Path output;
 23 |     private final String pkg;
 24 |     private final boolean overwrite;
 25 |     private final Predicate<String> eventTypeFilter;
 26 | 
 27 |     public TypeGenerator(Path jfr, Path output, String targetPackage, boolean overwrite, Predicate<String> eventTypeFilter) throws IOException {
 28 |         if (!Files.isDirectory(output) || !Files.exists(output)) {
 29 |             throw new IllegalArgumentException("Output directory does not exist: " + output);
 30 |         }
 31 |         this.jfr = jfr;
 32 |         this.pkg = targetPackage;
 33 |         this.output = output.resolve(targetPackage.replace('.', '/'));
 34 |         this.overwrite = overwrite;
 35 |         this.eventTypeFilter = eventTypeFilter;
 36 |         Files.createDirectories(this.output);
 37 |     }
 38 | 
 39 |     public void generate() throws Exception {
 40 |         if (jfr == null) {
 41 |             generateFromRuntime();
 42 |         } else {
 43 |             generateFromFile();
 44 |         }
 45 |     }
 46 | 
 47 |     private void generateFromRuntime() throws Exception {
 48 |         Set<String> generated = new HashSet<>();
 49 |         FlightRecorder.getFlightRecorder().getEventTypes().forEach(et -> {
 50 |             if (eventTypeFilter == null || eventTypeFilter.test(et.getName())) {
 51 |                 try {
 52 |                     Path target = output.resolve("JFR" + getSimpleName(et.getName()) + ".java");
 53 |                     if (overwrite || !Files.exists(target)) {
 54 |                         Files.writeString(target, generateTypeFromEvent(et, generated), StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING);
 55 |                     }
 56 |                 } catch (IOException e) {
 57 |                     throw new RuntimeException(e);
 58 | 
 59 |                 }
 60 |             }
 61 |         });
 62 |     }
 63 | 
 64 |     private String generateTypeFromEvent(EventType et, Set<String> generatedTypes) {
 65 |         StringBuilder sb = new StringBuilder();
 66 |         sb.append("package ").append(pkg).append(";\n");
 67 |         sb.append("\n");
 68 |         sb.append("import io.jafar.parser.api.*;\n");
 69 |         sb.append("@JfrType(\"").append(et.getName()).append("\")\n\n");
 70 |         sb.append("public interface JFR").append(getSimpleName(et.getName())).append(" {\n");
 71 |         et.getFields().forEach(field -> {
 72 |             try {
 73 |                 writeTypeFromField(field, generatedTypes);
 74 |             } catch (Exception e) {
 75 |                 throw new RuntimeException(e);
 76 |             }
 77 |             String fldName = sanitizeFieldName(field.getName());
 78 |             sb.append('\t');
 79 |             if (!fldName.equals(field.getName())) {
 80 |                 sb.append("@JfrField(\"").append(field.getName()).append("\") ");
 81 |             }
 82 |             sb.append(isPrimitiveName(field.getTypeName()) ? "" : "JFR").append(getSimpleName(field.getTypeName()));
 83 |             if (field.isArray()) {
 84 |                 sb.append("[]");
 85 |             }
 86 |             sb.append(" ");
 87 |             sb.append(fldName).append("();\n");
 88 |         });
 89 |         sb.append("}\n");
 90 |         return sb.toString();
 91 |     }
 92 | 
 93 |     private void writeTypeFromField(ValueDescriptor f, Set<String> generatedTypes) throws Exception {
 94 |         String data = getTypeFromField(f, generatedTypes);
 95 | 
 96 |         if (data != null) {
 97 |             String typeName = f.getTypeName();
 98 |             String targetName = isPrimitiveName(typeName) ? typeName : "JFR" + getSimpleName(typeName);
 99 |             Path target = output.resolve(targetName + ".java");
100 |             if (overwrite || !Files.exists(target)) {
101 |                 Files.writeString(target, data, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING);
102 |             }
103 |         }
104 |     }
105 | 
106 |     private String getTypeFromField(ValueDescriptor field, Set<String> generatedTypes) {
107 |         String typeName = field.getTypeName();
108 |         if (isPrimitiveName(typeName)) {
109 |             return null;
110 |         }
111 | 
112 |         if (generatedTypes.add(typeName)) {
113 |             StringBuilder sb = new StringBuilder();
114 |             sb.append("package ").append(pkg).append(";\n");
115 |             sb.append("\n");
116 |             sb.append("import io.jafar.parser.api.*;\n");
117 |             sb.append("@JfrType(\"").append(typeName).append("\")\n\n");
118 |             sb.append("public interface JFR").append(getSimpleName(typeName)).append(" {\n");
119 |             field.getFields().forEach(subfield -> {
120 |                 try {
121 |                     writeTypeFromField(subfield, generatedTypes);
122 |                 } catch (Exception e) {
123 |                     throw new RuntimeException(e);
124 |                 }
125 |                 String fldName = sanitizeFieldName(subfield.getName());
126 |                 sb.append('\t');
127 |                 if (!fldName.equals(subfield.getName())) {
128 |                     sb.append("@JfrField(\"").append(subfield.getName()).append("\") ");
129 |                 }
130 |                 sb.append(isPrimitiveName(subfield.getTypeName()) ? "" : "JFR").append(getSimpleName(subfield.getTypeName()));
131 |                 if (subfield.isArray()) {
132 |                     sb.append("[]");
133 |                 }
134 |                 sb.append(" ");
135 |                 sb.append(fldName).append("();\n");
136 |             });
137 |             sb.append("}\n");
138 |             return sb.toString();
139 |         }
140 |         return null;
141 |     }
142 | 
143 |     private void generateFromFile() throws Exception {
144 |         try (StreamingChunkParser parser = new StreamingChunkParser()) {
145 |             parser.parse(jfr, new ChunkParserListener() {
146 |                 @Override
147 |                 public boolean onMetadata(MetadataEvent metadata) {
148 |                     metadata.getClasses().forEach(TypeGenerator.this::writeClass);
149 |                     // stop processing
150 |                     return false;
151 |                 }
152 |             });
153 |         }
154 |     }
155 | 
156 |     private void writeClass(MetadataClass metadataClass) {
157 |         if (metadataClass.isPrimitive()) {
158 |             return;
159 |         }
160 |         if (isAnnotation(metadataClass) || isSettingControl(metadataClass)) {
161 |             return;
162 |         }
163 |         try {
164 |             Path classFile = output.resolve(getClassName(metadataClass) + ".java");
165 |             if (overwrite || !Files.exists(classFile)) {
166 |                 Files.writeString(classFile, generateClass(metadataClass), StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING);
167 |             }
168 |         } catch (IOException e) {
169 |             throw new RuntimeException(e);
170 |         }
171 |     }
172 | 
173 |     private String generateClass(MetadataClass clazz) {
174 |         StringBuilder sb = new StringBuilder();
175 |         sb.append("package ").append(pkg).append(";\n");
176 |         sb.append("\n");
177 |         sb.append("import io.jafar.parser.api.*;\n");
178 |         sb.append("@JfrType(\"").append(clazz.getName()).append("\")\n\n");
179 |         sb.append("public interface ").append(getClassName(clazz));
180 |         sb.append(" {\n");
181 |         for (MetadataField field : clazz.getFields()) {
182 |             String fldName = sanitizeFieldName(field.getName());
183 |             sb.append('\t');
184 |             if (!fldName.equals(field.getName())) {
185 |                 sb.append("@JfrField(\"").append(field.getName()).append("\") ");
186 |             }
187 |             MetadataClass fldType = field.getType();
188 |             while (fldType.isSimpleType()) {
189 |                 fldType = fldType.getFields().getFirst().getType();
190 |             }
191 |             sb.append(getClassName(fldType));
192 |             sb.append("[]".repeat(Math.max(0, field.getDimension())));
193 |             sb.append(" ");
194 |             sb.append(fldName).append("();\n");
195 | 
196 |         }
197 |         sb.append("}\n");
198 |         return sb.toString();
199 |     }
200 | 
201 |     private String getClassName(MetadataClass clazz) {
202 |         return (!clazz.isPrimitive() ?
"JFR" : "") + clazz.getSimpleName(); 203 | } 204 | 205 | private String sanitizeFieldName(String fieldName) { 206 | switch (fieldName) { 207 | case "class": return "clz"; 208 | case "package": return "pkg"; 209 | default: return fieldName; 210 | } 211 | } 212 | 213 | private static boolean isEvent(MetadataClass clazz) { 214 | String superType = clazz.getSuperType(); 215 | if (superType == null) { 216 | return false; 217 | } 218 | if ("jdk.jfr.Event".equals(superType)) { 219 | return true; 220 | } 221 | /* 222 | TODO: this is not technically true as a type may have JFR event upper in hierarchy but 223 | let's ignore it for now 224 | */ 225 | return false; 226 | } 227 | 228 | private static boolean isAnnotation(MetadataClass clazz) { 229 | String superType = clazz.getSuperType(); 230 | if (superType == null) { 231 | return false; 232 | } 233 | if ("java.lang.annotation.Annotation".equals(superType)) { 234 | return true; 235 | } 236 | /* 237 | TODO: this is not technically true as a type may have JFR event upper in hierarchy but 238 | let's ignore it for now 239 | */ 240 | return false; 241 | } 242 | 243 | private static boolean isSettingControl(MetadataClass clazz) { 244 | String superType = clazz.getSuperType(); 245 | if (superType == null) { 246 | return false; 247 | } 248 | if ("jdk.jfr.SettingControl".equals(superType)) { 249 | return true; 250 | } 251 | /* 252 | TODO: this is not technically true as a type may have JFR event upper in hierarchy but 253 | let's ignore it for now 254 | */ 255 | return false; 256 | } 257 | 258 | private static String getSimpleName(String name) { 259 | int idx = name.lastIndexOf('.'); 260 | return idx == -1 ? name : name.substring(idx + 1); 261 | } 262 | 263 | private static boolean isPrimitiveName(String name) { 264 | return name.lastIndexOf('.') == -1 || "java.lang.String".equals(name); 265 | } 266 | } 267 | -------------------------------------------------------------------------------- /parser/src/test/java/io/jafar/parser/ExecutionSampleEvent.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser; 2 | 3 | import io.jafar.parser.api.JfrField; 4 | import io.jafar.parser.api.JfrType; 5 | import io.jafar.parser.api.types.JFREvent; 6 | import io.jafar.parser.api.types.JFRThread; 7 | 8 | @JfrType("jdk.ExecutionSample") 9 | public interface ExecutionSampleEvent extends JFREvent { 10 | long startTime(); 11 | @JfrField("sampledThread") JFRThread eventThread(); 12 | } 13 | -------------------------------------------------------------------------------- /parser/src/test/java/io/jafar/parser/JafarParserTest.java: -------------------------------------------------------------------------------- 1 | package io.jafar.parser; 2 | 3 | import io.jafar.TestJfrRecorder; 4 | import io.jafar.parser.api.HandlerRegistration; 5 | import io.jafar.parser.api.JafarParser; 6 | import io.jafar.parser.api.types.JFRStackFrame; 7 | import io.jafar.parser.api.types.JFRStackTrace; 8 | import org.junit.jupiter.api.Test; 9 | import org.openjdk.jmc.flightrecorder.writer.api.Recording; 10 | import org.openjdk.jmc.flightrecorder.writer.api.Recordings; 11 | 12 | import java.io.ByteArrayOutputStream; 13 | import java.io.File; 14 | import java.net.URI; 15 | import java.nio.file.Files; 16 | import java.nio.file.Path; 17 | import java.nio.file.Paths; 18 | import java.util.concurrent.atomic.AtomicInteger; 19 | import java.util.concurrent.atomic.AtomicLong; 20 | 21 | import static org.junit.jupiter.api.Assertions.assertEquals; 22 | import 
static org.junit.jupiter.api.Assertions.assertFalse;
23 | import static org.junit.jupiter.api.Assertions.assertNotEquals;
24 | import static org.junit.jupiter.api.Assertions.assertNotNull;
25 | import static org.junit.jupiter.api.Assertions.assertNull;
26 | import static org.junit.jupiter.api.Assertions.assertTrue;
27 | 
28 | public class JafarParserTest {
29 |     @Test
30 |     void testEventParsing() throws Exception {
31 | 
32 |         ByteArrayOutputStream recordingStream = new ByteArrayOutputStream();
33 |         long eventTypeId = -1;
34 |         try (Recording recording = Recordings.newRecording(recordingStream)) {
35 |             TestJfrRecorder rec = new TestJfrRecorder(recording);
36 |             eventTypeId = rec.registerEventType(ParserEvent.class).getId();
37 |             rec.writeEvent(new ParserEvent(10));
38 |         }
39 | 
40 |         assertNotEquals(-1, eventTypeId);
41 | 
42 |         Path tmpFile = Files.createTempFile("recording", ".jfr");
43 |         tmpFile.toFile().deleteOnExit();
44 | 
45 |         Files.write(tmpFile, recordingStream.toByteArray());
46 | 
47 |         JafarParser parser = JafarParser.open(tmpFile.toString());
48 | 
49 |         AtomicInteger eventCount = new AtomicInteger(0);
50 |         parser.handle(ParserEvent1.class, (event, ctl) -> {
51 |             eventCount.incrementAndGet();
52 |             assertEquals(10, event.value());
53 |         });
54 | 
55 |         parser.run();
56 | 
57 |         assertEquals(1, eventCount.get());
58 |     }
59 | 
60 |     @Test
61 |     void testRealFile() throws Exception {
62 |         URI uri = JafarParserTest.class.getClassLoader().getResource("test-ap.jfr").toURI();
63 | 
64 |         try (JafarParser p = JafarParser.open(new File(uri).getAbsolutePath())) {
65 |             AtomicLong eventCount = new AtomicLong(0);
66 |             HandlerRegistration<ExecutionSampleEvent> h1 = p.handle(ExecutionSampleEvent.class, (event, ctl) -> {
67 |                 assertNotNull(event.eventThread());
68 |                 assertNotNull(event.stackTrace());
69 |                 assertNotNull(event.eventThread());
70 |                 assertTrue(event.stackTrace().frames().length > 0);
71 |                 eventCount.incrementAndGet();
72 |             });
73 | 
74 |             // run the parser so the registered handler is actually invoked
75 |             p.run();
76 |             // the test recording is expected to contain at least one execution sample
77 |             assertTrue(eventCount.get() > 0);
78 |         }
79 |     }
80 | }
81 | 
--------------------------------------------------------------------------------
/parser/src/test/java/io/jafar/parser/ParserEvent.java:
--------------------------------------------------------------------------------
 1 | package io.jafar.parser;
 2 | 
 3 | import jdk.jfr.Category;
 4 | import jdk.jfr.Event;
 5 | import jdk.jfr.Label;
 6 | import jdk.jfr.Name;
 7 | 
 8 | @Label("Parser Event")
 9 | @Name("datadog.ParserEvent")
10 | @Category({"datadog", "test"})
11 | public class ParserEvent extends Event {
12 |     @Label("value")
13 |     private final int value;
14 | 
15 |     public ParserEvent(int value) {
16 |         this.value = value;
17 |     }
18 | }
19 | 
--------------------------------------------------------------------------------
/parser/src/test/java/io/jafar/parser/ParserEvent1.java:
--------------------------------------------------------------------------------
 1 | package io.jafar.parser;
 2 | 
 3 | import io.jafar.parser.api.JfrType;
 4 | import io.jafar.parser.api.types.JFREvent;
 5 | 
 6 | @JfrType("datadog.ParserEvent")
 7 | public interface ParserEvent1 extends JFREvent {
 8 |     int value();
 9 | }
10 | 
--------------------------------------------------------------------------------
/parser/src/test/java/io/jafar/parser/ThreadEndEvent.java:
--------------------------------------------------------------------------------
 1 | package io.jafar.parser;
 2 | 
 3 | import io.jafar.parser.api.JfrType;
 4 | import io.jafar.parser.api.types.JFREvent;
 5 | import io.jafar.parser.api.types.JFRThread;
 6 | 
 7 | @JfrType("jdk.ThreadEnd")
 8 | public interface ThreadEndEvent {
 9 |     JFRThread thread();
10 | }
11 | 
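The annotated interfaces above (ParserEvent1, ThreadEndEvent, ExecutionSampleEvent) are the whole user-facing contract: an interface marked with @JfrType whose accessor methods are bound to the fields of the corresponding JFR event, optionally renamed via @JfrField. A minimal sketch of consuming one of them outside of a test, using only the JafarParser API exercised in JafarParserTest; the class below is not a file in this repository, it reuses the test package purely to avoid extra imports, and the recording path is a placeholder taken from the command line:

    package io.jafar.parser;

    import io.jafar.parser.api.JafarParser;

    import java.util.concurrent.atomic.AtomicLong;

    public class ExecutionSampleCounter {
        public static void main(String[] args) throws Exception {
            // args[0] is assumed to be the path to a .jfr recording
            try (JafarParser parser = JafarParser.open(args[0])) {
                AtomicLong samples = new AtomicLong();
                // one callback per jdk.ExecutionSample event in the recording
                parser.handle(ExecutionSampleEvent.class, (event, ctl) -> samples.incrementAndGet());
                // parse the recording and dispatch events to the registered handlers
                parser.run();
                System.out.println("jdk.ExecutionSample events: " + samples.get());
            }
        }
    }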
--------------------------------------------------------------------------------
/parser/src/test/java/io/jafar/utils/BytePackingTest.java:
--------------------------------------------------------------------------------
 1 | package io.jafar.utils;
 2 | 
 3 | import org.junit.jupiter.params.ParameterizedTest;
 4 | import org.junit.jupiter.params.provider.Arguments;
 5 | import org.junit.jupiter.params.provider.MethodSource;
 6 | 
 7 | import java.nio.ByteBuffer;
 8 | import java.nio.ByteOrder;
 9 | import java.util.stream.Stream;
10 | 
11 | import static java.nio.ByteOrder.BIG_ENDIAN;
12 | import static java.nio.ByteOrder.LITTLE_ENDIAN;
13 | import static org.junit.jupiter.api.Assertions.assertArrayEquals;
14 | 
15 | public class BytePackingTest {
16 | 
17 |     public static Stream<Arguments> byteOrders() {
18 |         return Stream.of(Arguments.of(BIG_ENDIAN), Arguments.of(LITTLE_ENDIAN));
19 |     }
20 | 
21 |     @ParameterizedTest
22 |     @MethodSource("byteOrders")
23 |     public void testPackedMagic(ByteOrder order) {
24 |         int packed = BytePacking.pack(order, 'F', 'L', 'R', '\0');
25 |         ByteBuffer buffer = ByteBuffer.allocate(4).order(order);
26 |         buffer.putInt(0, packed);
27 |         // no matter what endianness we read/write, we should get the same magic
28 |         byte[] expected = new byte[] {'F', 'L', 'R', '\0'};
29 |         byte[] actual = new byte[4];
30 |         buffer.get(actual);
31 |         assertArrayEquals(expected, actual);
32 |     }
33 | }
34 | 
--------------------------------------------------------------------------------
/parser/src/test/java/io/jafar/utils/SplicedMappedByteBufferTest.java:
--------------------------------------------------------------------------------
 1 | package io.jafar.utils;
 2 | 
 3 | import org.junit.jupiter.api.BeforeAll;
 4 | import org.junit.jupiter.api.BeforeEach;
 5 | import org.junit.jupiter.api.Test;
 6 | 
 7 | import java.io.IOException;
 8 | import java.nio.BufferOverflowException;
 9 | import java.nio.ByteBuffer;
10 | import java.nio.ByteOrder;
11 | import java.nio.MappedByteBuffer;
12 | import java.nio.file.Files;
13 | import java.nio.file.Path;
14 | import java.util.Arrays;
15 | 
16 | import static org.junit.jupiter.api.Assertions.assertArrayEquals;
17 | import static org.junit.jupiter.api.Assertions.assertEquals;
18 | import static org.junit.jupiter.api.Assertions.assertThrows;
19 | 
20 | public class SplicedMappedByteBufferTest {
21 |     private static final int FILE_SIZE = 2048;
22 |     private static final int SLICE_SIZE = 71;
23 |     private static Path mapFile;
24 | 
25 |     private SplicedMappedByteBuffer instance;
26 | 
27 |     @BeforeAll
28 |     static void setupAll() throws IOException {
29 |         mapFile = Files.createTempFile("jafar-", ".tmp");
30 |         mapFile.toFile().deleteOnExit();
31 |         byte[] data = new byte[FILE_SIZE];
32 |         ByteBuffer bb = MappedByteBuffer.wrap(data).order(ByteOrder.nativeOrder());
33 |         bb.put((byte)1);
34 |         bb.putShort((short)2);
35 |         bb.putInt((int)3);
36 |         bb.putFloat((float)4.1);
37 |         bb.putDouble((double)5.2);
38 |         bb.putLong((long)6);
39 |         bb.put(new byte[]{10, 20, 30});
40 | 
41 |         bb.position(SLICE_SIZE - 1);
42 |         bb.putShort((short)1);
43 |         bb.position(SLICE_SIZE * 2 - 2);
44 |         bb.putInt((int)2);
45 |         bb.position(SLICE_SIZE * 3 - 3);
46 |         bb.putFloat((float)3.1);
47 |         bb.position(SLICE_SIZE * 4 - 1);
48 |         bb.putDouble((double)4.2);
49 |         bb.position(SLICE_SIZE * 5 - 1);
50 |         bb.putLong((long)5);
51 |         bb.position(SLICE_SIZE * 6);
52 |         byte[] subdata = new byte[2 * SLICE_SIZE + 17];
53 |         Arrays.fill(subdata, (byte)8);
54 |         bb.put(subdata);
55 |         Files.write(mapFile, data);
56 |     }
57 | 
58 |     @BeforeEach
59 |     void setup() throws
IOException { 60 | instance = new SplicedMappedByteBuffer(mapFile, SLICE_SIZE); 61 | } 62 | 63 | @Test 64 | void testPosition() { 65 | assertEquals(0, instance.position()); 66 | instance.position(SLICE_SIZE + 1); 67 | assertEquals(SLICE_SIZE + 1, instance.position()); 68 | 69 | assertThrows(BufferOverflowException.class, () -> { 70 | instance.position(FILE_SIZE + 1); 71 | }); 72 | } 73 | 74 | @Test 75 | void testLimit() { 76 | assertEquals(FILE_SIZE, instance.remaining()); 77 | instance.position(SLICE_SIZE + 1); 78 | assertEquals(FILE_SIZE - SLICE_SIZE - 1, instance.remaining()); 79 | } 80 | 81 | @Test 82 | void testSlice() { 83 | CustomByteBuffer sliced1 = instance.slice(); 84 | assertEquals(0, sliced1.position()); 85 | assertEquals(FILE_SIZE, sliced1.remaining()); 86 | 87 | sliced1.position(5); 88 | // make sure the slice does not affect the master copy 89 | assertEquals(0, instance.position()); 90 | 91 | instance.position(SLICE_SIZE + 3); 92 | CustomByteBuffer sliced2 = instance.slice(SLICE_SIZE + 3, 2 * SLICE_SIZE); 93 | assertEquals(0, sliced2.position()); 94 | assertEquals(2 * SLICE_SIZE, sliced2.remaining()); 95 | 96 | assertThrows(BufferOverflowException.class, () -> { 97 | instance.slice(3 * SLICE_SIZE, FILE_SIZE); 98 | }); 99 | } 100 | 101 | @Test 102 | void testSubSlice() { 103 | CustomByteBuffer sliced1 = instance.slice(SLICE_SIZE - 1, 2* SLICE_SIZE); 104 | assertEquals(1, sliced1.getShort()); 105 | CustomByteBuffer sliced2 = sliced1.slice(SLICE_SIZE - 1, SLICE_SIZE); 106 | assertEquals(2, sliced2.getInt()); 107 | } 108 | 109 | @Test 110 | void getSimple() { 111 | assertEquals(1, instance.get()); 112 | assertEquals(2, instance.getShort()); 113 | assertEquals(3, instance.getInt()); 114 | assertEquals(4.1f, instance.getFloat()); 115 | assertEquals(5.2d, instance.getDouble()); 116 | assertEquals(6, instance.getLong()); 117 | 118 | byte[] dataBuffer = new byte[3]; 119 | byte[] expected = new byte[] {10, 20, 30}; 120 | 121 | instance.get(dataBuffer, 0, 3); 122 | assertArrayEquals(expected, dataBuffer); 123 | } 124 | 125 | @Test 126 | void getAcrossSplices() { 127 | instance.position(SLICE_SIZE - 1); 128 | assertEquals(1, instance.getShort()); 129 | 130 | instance.position(SLICE_SIZE * 2 - 2); 131 | assertEquals(2, instance.getInt()); 132 | 133 | instance.position(SLICE_SIZE * 3 - 3); 134 | assertEquals(3.1f, instance.getFloat()); 135 | 136 | instance.position(SLICE_SIZE * 4 - 1); 137 | assertEquals(4.2d, instance.getDouble()); 138 | 139 | instance.position(SLICE_SIZE * 5 - 1); 140 | assertEquals(5, instance.getLong()); 141 | 142 | byte[] expected = new byte[2 * SLICE_SIZE + 17]; 143 | Arrays.fill(expected, (byte)8); 144 | 145 | instance.position(SLICE_SIZE * 6); 146 | byte[] data = new byte[expected.length]; 147 | instance.get(data, 0, data.length); 148 | assertArrayEquals(expected, data); 149 | } 150 | } 151 | -------------------------------------------------------------------------------- /rebuild_plugin.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | ./gradlew :parser:publishToMavenLocal 6 | 7 | ( 8 | cd jafar-gradle-plugin || exit 9 | ./gradlew clean publishToMavenLocal 10 | ) -------------------------------------------------------------------------------- /settings.gradle: -------------------------------------------------------------------------------- 1 | /* 2 | * This file was generated by the Gradle 'init' task. 
3 | * 4 | * The settings file is used to specify which projects to include in your build. 5 | * 6 | * Detailed information about configuring a multi-project build in Gradle can be found 7 | * in the user manual at https://docs.gradle.org/8.0/userguide/multi_project_builds.html 8 | */ 9 | 10 | pluginManagement { 11 | repositories { 12 | mavenLocal() // Look for plugins in Maven Local 13 | maven { 14 | url "https://oss.sonatype.org/content/repositories/snapshots/" 15 | } 16 | gradlePluginPortal() // Fall back to Gradle Plugin Portal 17 | } 18 | } 19 | 20 | rootProject.name = 'jafar' 21 | include ':parser' 22 | include ':demo' 23 | --------------------------------------------------------------------------------
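TypeGenerator above works in two modes: with a null recording path it introspects the live FlightRecorder, otherwise it derives the interfaces from the recording's metadata; in both cases it writes JFR*-prefixed interface sources into the target package directory. Below is a minimal sketch of driving it directly, not part of the repository; the paths and package name are placeholders, and the jafar-gradle-plugin is presumably the intended way to wire this into a build:

    import io.jafar.utils.TypeGenerator;

    import java.nio.file.Files;
    import java.nio.file.Path;

    public class GenerateJfrTypes {
        public static void main(String[] args) throws Exception {
            // placeholder recording; passing null instead would introspect the live FlightRecorder
            Path recording = Path.of("recording.jfr");
            // the output directory must exist before the generator is constructed
            Path outputDir = Path.of("build/generated/jafar");
            Files.createDirectories(outputDir);
            TypeGenerator generator = new TypeGenerator(
                    recording,
                    outputDir,
                    "com.example.jfr.types",            // target package for the generated JFR* interfaces
                    true,                               // overwrite previously generated sources
                    name -> name.startsWith("jdk."));   // event-type filter, consulted only in live-recorder mode
            generator.generate();                       // writes JFR*.java sources under outputDir/com/example/jfr/types
        }
    }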