├── .DS_Store ├── .babelrc ├── .gitignore ├── LICENSE.txt ├── README.md ├── audio ├── .DS_Store ├── drum_samples │ ├── 249716__daandraait__slow-heartbeat.wav │ ├── 422442__vacuumfan7072__jbrake-sim-96k.wav │ ├── AX_bro.wav │ ├── kick_802_2.wav │ ├── muffled_kick.wav │ ├── snare_FPC.wav │ ├── tom_16_inch_tom_4.wav │ └── tom_909.wav └── output │ └── agent-self-regulation-1.wav ├── build.css ├── bundler ├── webpack.common.js ├── webpack.dev.js └── webpack.prod.js ├── colab └── agent_self_regulation.ipynb ├── netlify.toml ├── package-lock.json ├── package.json ├── postcss.config.js ├── src ├── assets │ ├── audio │ │ ├── r2d2_talk.mp3 │ │ ├── synth_melody.mp3 │ │ └── theremin_tone.mp3 │ ├── images │ │ ├── noisy_background.jpg │ │ └── vol_synth.png │ └── models │ │ ├── DragonAttenuation.glb │ │ └── Duck.glb ├── coi-serviceworker.js ├── index.html ├── main.js ├── playground │ ├── audio.js │ ├── constants.js │ ├── domElements │ ├── eventHandlers.js │ ├── globals.js │ ├── gui.js │ ├── index.html │ ├── main.js │ ├── melspectrogram-processor.js │ ├── objects.js │ ├── scene.js │ ├── sceneObject.js │ ├── shaders.js │ └── utils.js ├── style.css ├── utils │ ├── AudioWorkletFunctions.js │ ├── CappedTubeGeometry.js │ ├── GenerateSDFMaterial.js │ ├── RayCastSDFMaterial.js │ ├── RayMarchSDFMaterial.js │ └── ringbuf.js │ │ └── index.js ├── volume-minimal │ ├── index.html │ └── main.js └── volume │ ├── index.html │ ├── main.js │ └── melspectrogram-processor.js ├── tailwind.config.js └── webpack.config.js /.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/a-sumo/hyperstep/8f44400a74d3433d91ab6ab3740f2c273eed5c74/.DS_Store -------------------------------------------------------------------------------- /.babelrc: -------------------------------------------------------------------------------- 1 | { 2 | "presets": [ 3 | "@babel/preset-env", 4 | "@babel/preset-react" 5 | ] 6 | } 7 | 
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | #Ignore Node modules 2 | 3 | node_modules/ 4 | dist/ 5 | src/.DS_Store 6 | src/assets/.DS_Store 7 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | GNU GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | Preamble 9 | 10 | The GNU General Public License is a free, copyleft license for 11 | software and other kinds of works. 12 | 13 | The licenses for most software and other practical works are designed 14 | to take away your freedom to share and change the works. By contrast, 15 | the GNU General Public License is intended to guarantee your freedom to 16 | share and change all versions of a program--to make sure it remains free 17 | software for all its users. We, the Free Software Foundation, use the 18 | GNU General Public License for most of our software; it applies also to 19 | any other work released this way by its authors. You can apply it to 20 | your programs, too. 21 | 22 | When we speak of free software, we are referring to freedom, not 23 | price. Our General Public Licenses are designed to make sure that you 24 | have the freedom to distribute copies of free software (and charge for 25 | them if you wish), that you receive source code or can get it if you 26 | want it, that you can change the software or use pieces of it in new 27 | free programs, and that you know you can do these things. 28 | 29 | To protect your rights, we need to prevent others from denying you 30 | these rights or asking you to surrender the rights. 
Therefore, you have 31 | certain responsibilities if you distribute copies of the software, or if 32 | you modify it: responsibilities to respect the freedom of others. 33 | 34 | For example, if you distribute copies of such a program, whether 35 | gratis or for a fee, you must pass on to the recipients the same 36 | freedoms that you received. You must make sure that they, too, receive 37 | or can get the source code. And you must show them these terms so they 38 | know their rights. 39 | 40 | Developers that use the GNU GPL protect your rights with two steps: 41 | (1) assert copyright on the software, and (2) offer you this License 42 | giving you legal permission to copy, distribute and/or modify it. 43 | 44 | For the developers' and authors' protection, the GPL clearly explains 45 | that there is no warranty for this free software. For both users' and 46 | authors' sake, the GPL requires that modified versions be marked as 47 | changed, so that their problems will not be attributed erroneously to 48 | authors of previous versions. 49 | 50 | Some devices are designed to deny users access to install or run 51 | modified versions of the software inside them, although the manufacturer 52 | can do so. This is fundamentally incompatible with the aim of 53 | protecting users' freedom to change the software. The systematic 54 | pattern of such abuse occurs in the area of products for individuals to 55 | use, which is precisely where it is most unacceptable. Therefore, we 56 | have designed this version of the GPL to prohibit the practice for those 57 | products. If such problems arise substantially in other domains, we 58 | stand ready to extend this provision to those domains in future versions 59 | of the GPL, as needed to protect the freedom of users. 60 | 61 | Finally, every program is threatened constantly by software patents. 
62 | States should not allow patents to restrict development and use of 63 | software on general-purpose computers, but in those that do, we wish to 64 | avoid the special danger that patents applied to a free program could 65 | make it effectively proprietary. To prevent this, the GPL assures that 66 | patents cannot be used to render the program non-free. 67 | 68 | The precise terms and conditions for copying, distribution and 69 | modification follow. 70 | 71 | TERMS AND CONDITIONS 72 | 73 | 0. Definitions. 74 | 75 | "This License" refers to version 3 of the GNU General Public License. 76 | 77 | "Copyright" also means copyright-like laws that apply to other kinds of 78 | works, such as semiconductor masks. 79 | 80 | "The Program" refers to any copyrightable work licensed under this 81 | License. Each licensee is addressed as "you". "Licensees" and 82 | "recipients" may be individuals or organizations. 83 | 84 | To "modify" a work means to copy from or adapt all or part of the work 85 | in a fashion requiring copyright permission, other than the making of an 86 | exact copy. The resulting work is called a "modified version" of the 87 | earlier work or a work "based on" the earlier work. 88 | 89 | A "covered work" means either the unmodified Program or a work based 90 | on the Program. 91 | 92 | To "propagate" a work means to do anything with it that, without 93 | permission, would make you directly or secondarily liable for 94 | infringement under applicable copyright law, except executing it on a 95 | computer or modifying a private copy. Propagation includes copying, 96 | distribution (with or without modification), making available to the 97 | public, and in some countries other activities as well. 98 | 99 | To "convey" a work means any kind of propagation that enables other 100 | parties to make or receive copies. Mere interaction with a user through 101 | a computer network, with no transfer of a copy, is not conveying. 
102 | 103 | An interactive user interface displays "Appropriate Legal Notices" 104 | to the extent that it includes a convenient and prominently visible 105 | feature that (1) displays an appropriate copyright notice, and (2) 106 | tells the user that there is no warranty for the work (except to the 107 | extent that warranties are provided), that licensees may convey the 108 | work under this License, and how to view a copy of this License. If 109 | the interface presents a list of user commands or options, such as a 110 | menu, a prominent item in the list meets this criterion. 111 | 112 | 1. Source Code. 113 | 114 | The "source code" for a work means the preferred form of the work 115 | for making modifications to it. "Object code" means any non-source 116 | form of a work. 117 | 118 | A "Standard Interface" means an interface that either is an official 119 | standard defined by a recognized standards body, or, in the case of 120 | interfaces specified for a particular programming language, one that 121 | is widely used among developers working in that language. 122 | 123 | The "System Libraries" of an executable work include anything, other 124 | than the work as a whole, that (a) is included in the normal form of 125 | packaging a Major Component, but which is not part of that Major 126 | Component, and (b) serves only to enable use of the work with that 127 | Major Component, or to implement a Standard Interface for which an 128 | implementation is available to the public in source code form. A 129 | "Major Component", in this context, means a major essential component 130 | (kernel, window system, and so on) of the specific operating system 131 | (if any) on which the executable work runs, or a compiler used to 132 | produce the work, or an object code interpreter used to run it. 
133 | 134 | The "Corresponding Source" for a work in object code form means all 135 | the source code needed to generate, install, and (for an executable 136 | work) run the object code and to modify the work, including scripts to 137 | control those activities. However, it does not include the work's 138 | System Libraries, or general-purpose tools or generally available free 139 | programs which are used unmodified in performing those activities but 140 | which are not part of the work. For example, Corresponding Source 141 | includes interface definition files associated with source files for 142 | the work, and the source code for shared libraries and dynamically 143 | linked subprograms that the work is specifically designed to require, 144 | such as by intimate data communication or control flow between those 145 | subprograms and other parts of the work. 146 | 147 | The Corresponding Source need not include anything that users 148 | can regenerate automatically from other parts of the Corresponding 149 | Source. 150 | 151 | The Corresponding Source for a work in source code form is that 152 | same work. 153 | 154 | 2. Basic Permissions. 155 | 156 | All rights granted under this License are granted for the term of 157 | copyright on the Program, and are irrevocable provided the stated 158 | conditions are met. This License explicitly affirms your unlimited 159 | permission to run the unmodified Program. The output from running a 160 | covered work is covered by this License only if the output, given its 161 | content, constitutes a covered work. This License acknowledges your 162 | rights of fair use or other equivalent, as provided by copyright law. 163 | 164 | You may make, run and propagate covered works that you do not 165 | convey, without conditions so long as your license otherwise remains 166 | in force. 
You may convey covered works to others for the sole purpose 167 | of having them make modifications exclusively for you, or provide you 168 | with facilities for running those works, provided that you comply with 169 | the terms of this License in conveying all material for which you do 170 | not control copyright. Those thus making or running the covered works 171 | for you must do so exclusively on your behalf, under your direction 172 | and control, on terms that prohibit them from making any copies of 173 | your copyrighted material outside their relationship with you. 174 | 175 | Conveying under any other circumstances is permitted solely under 176 | the conditions stated below. Sublicensing is not allowed; section 10 177 | makes it unnecessary. 178 | 179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 180 | 181 | No covered work shall be deemed part of an effective technological 182 | measure under any applicable law fulfilling obligations under article 183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 184 | similar laws prohibiting or restricting circumvention of such 185 | measures. 186 | 187 | When you convey a covered work, you waive any legal power to forbid 188 | circumvention of technological measures to the extent such circumvention 189 | is effected by exercising rights under this License with respect to 190 | the covered work, and you disclaim any intention to limit operation or 191 | modification of the work as a means of enforcing, against the work's 192 | users, your or third parties' legal rights to forbid circumvention of 193 | technological measures. 194 | 195 | 4. Conveying Verbatim Copies. 
196 | 197 | You may convey verbatim copies of the Program's source code as you 198 | receive it, in any medium, provided that you conspicuously and 199 | appropriately publish on each copy an appropriate copyright notice; 200 | keep intact all notices stating that this License and any 201 | non-permissive terms added in accord with section 7 apply to the code; 202 | keep intact all notices of the absence of any warranty; and give all 203 | recipients a copy of this License along with the Program. 204 | 205 | You may charge any price or no price for each copy that you convey, 206 | and you may offer support or warranty protection for a fee. 207 | 208 | 5. Conveying Modified Source Versions. 209 | 210 | You may convey a work based on the Program, or the modifications to 211 | produce it from the Program, in the form of source code under the 212 | terms of section 4, provided that you also meet all of these conditions: 213 | 214 | a) The work must carry prominent notices stating that you modified 215 | it, and giving a relevant date. 216 | 217 | b) The work must carry prominent notices stating that it is 218 | released under this License and any conditions added under section 219 | 7. This requirement modifies the requirement in section 4 to 220 | "keep intact all notices". 221 | 222 | c) You must license the entire work, as a whole, under this 223 | License to anyone who comes into possession of a copy. This 224 | License will therefore apply, along with any applicable section 7 225 | additional terms, to the whole of the work, and all its parts, 226 | regardless of how they are packaged. This License gives no 227 | permission to license the work in any other way, but it does not 228 | invalidate such permission if you have separately received it. 
229 | 230 | d) If the work has interactive user interfaces, each must display 231 | Appropriate Legal Notices; however, if the Program has interactive 232 | interfaces that do not display Appropriate Legal Notices, your 233 | work need not make them do so. 234 | 235 | A compilation of a covered work with other separate and independent 236 | works, which are not by their nature extensions of the covered work, 237 | and which are not combined with it such as to form a larger program, 238 | in or on a volume of a storage or distribution medium, is called an 239 | "aggregate" if the compilation and its resulting copyright are not 240 | used to limit the access or legal rights of the compilation's users 241 | beyond what the individual works permit. Inclusion of a covered work 242 | in an aggregate does not cause this License to apply to the other 243 | parts of the aggregate. 244 | 245 | 6. Conveying Non-Source Forms. 246 | 247 | You may convey a covered work in object code form under the terms 248 | of sections 4 and 5, provided that you also convey the 249 | machine-readable Corresponding Source under the terms of this License, 250 | in one of these ways: 251 | 252 | a) Convey the object code in, or embodied in, a physical product 253 | (including a physical distribution medium), accompanied by the 254 | Corresponding Source fixed on a durable physical medium 255 | customarily used for software interchange. 
256 | 257 | b) Convey the object code in, or embodied in, a physical product 258 | (including a physical distribution medium), accompanied by a 259 | written offer, valid for at least three years and valid for as 260 | long as you offer spare parts or customer support for that product 261 | model, to give anyone who possesses the object code either (1) a 262 | copy of the Corresponding Source for all the software in the 263 | product that is covered by this License, on a durable physical 264 | medium customarily used for software interchange, for a price no 265 | more than your reasonable cost of physically performing this 266 | conveying of source, or (2) access to copy the 267 | Corresponding Source from a network server at no charge. 268 | 269 | c) Convey individual copies of the object code with a copy of the 270 | written offer to provide the Corresponding Source. This 271 | alternative is allowed only occasionally and noncommercially, and 272 | only if you received the object code with such an offer, in accord 273 | with subsection 6b. 274 | 275 | d) Convey the object code by offering access from a designated 276 | place (gratis or for a charge), and offer equivalent access to the 277 | Corresponding Source in the same way through the same place at no 278 | further charge. You need not require recipients to copy the 279 | Corresponding Source along with the object code. If the place to 280 | copy the object code is a network server, the Corresponding Source 281 | may be on a different server (operated by you or a third party) 282 | that supports equivalent copying facilities, provided you maintain 283 | clear directions next to the object code saying where to find the 284 | Corresponding Source. Regardless of what server hosts the 285 | Corresponding Source, you remain obligated to ensure that it is 286 | available for as long as needed to satisfy these requirements. 
287 | 288 | e) Convey the object code using peer-to-peer transmission, provided 289 | you inform other peers where the object code and Corresponding 290 | Source of the work are being offered to the general public at no 291 | charge under subsection 6d. 292 | 293 | A separable portion of the object code, whose source code is excluded 294 | from the Corresponding Source as a System Library, need not be 295 | included in conveying the object code work. 296 | 297 | A "User Product" is either (1) a "consumer product", which means any 298 | tangible personal property which is normally used for personal, family, 299 | or household purposes, or (2) anything designed or sold for incorporation 300 | into a dwelling. In determining whether a product is a consumer product, 301 | doubtful cases shall be resolved in favor of coverage. For a particular 302 | product received by a particular user, "normally used" refers to a 303 | typical or common use of that class of product, regardless of the status 304 | of the particular user or of the way in which the particular user 305 | actually uses, or expects or is expected to use, the product. A product 306 | is a consumer product regardless of whether the product has substantial 307 | commercial, industrial or non-consumer uses, unless such uses represent 308 | the only significant mode of use of the product. 309 | 310 | "Installation Information" for a User Product means any methods, 311 | procedures, authorization keys, or other information required to install 312 | and execute modified versions of a covered work in that User Product from 313 | a modified version of its Corresponding Source. The information must 314 | suffice to ensure that the continued functioning of the modified object 315 | code is in no case prevented or interfered with solely because 316 | modification has been made. 
317 | 318 | If you convey an object code work under this section in, or with, or 319 | specifically for use in, a User Product, and the conveying occurs as 320 | part of a transaction in which the right of possession and use of the 321 | User Product is transferred to the recipient in perpetuity or for a 322 | fixed term (regardless of how the transaction is characterized), the 323 | Corresponding Source conveyed under this section must be accompanied 324 | by the Installation Information. But this requirement does not apply 325 | if neither you nor any third party retains the ability to install 326 | modified object code on the User Product (for example, the work has 327 | been installed in ROM). 328 | 329 | The requirement to provide Installation Information does not include a 330 | requirement to continue to provide support service, warranty, or updates 331 | for a work that has been modified or installed by the recipient, or for 332 | the User Product in which it has been modified or installed. Access to a 333 | network may be denied when the modification itself materially and 334 | adversely affects the operation of the network or violates the rules and 335 | protocols for communication across the network. 336 | 337 | Corresponding Source conveyed, and Installation Information provided, 338 | in accord with this section must be in a format that is publicly 339 | documented (and with an implementation available to the public in 340 | source code form), and must require no special password or key for 341 | unpacking, reading or copying. 342 | 343 | 7. Additional Terms. 344 | 345 | "Additional permissions" are terms that supplement the terms of this 346 | License by making exceptions from one or more of its conditions. 347 | Additional permissions that are applicable to the entire Program shall 348 | be treated as though they were included in this License, to the extent 349 | that they are valid under applicable law. 
If additional permissions 350 | apply only to part of the Program, that part may be used separately 351 | under those permissions, but the entire Program remains governed by 352 | this License without regard to the additional permissions. 353 | 354 | When you convey a copy of a covered work, you may at your option 355 | remove any additional permissions from that copy, or from any part of 356 | it. (Additional permissions may be written to require their own 357 | removal in certain cases when you modify the work.) You may place 358 | additional permissions on material, added by you to a covered work, 359 | for which you have or can give appropriate copyright permission. 360 | 361 | Notwithstanding any other provision of this License, for material you 362 | add to a covered work, you may (if authorized by the copyright holders of 363 | that material) supplement the terms of this License with terms: 364 | 365 | a) Disclaiming warranty or limiting liability differently from the 366 | terms of sections 15 and 16 of this License; or 367 | 368 | b) Requiring preservation of specified reasonable legal notices or 369 | author attributions in that material or in the Appropriate Legal 370 | Notices displayed by works containing it; or 371 | 372 | c) Prohibiting misrepresentation of the origin of that material, or 373 | requiring that modified versions of such material be marked in 374 | reasonable ways as different from the original version; or 375 | 376 | d) Limiting the use for publicity purposes of names of licensors or 377 | authors of the material; or 378 | 379 | e) Declining to grant rights under trademark law for use of some 380 | trade names, trademarks, or service marks; or 381 | 382 | f) Requiring indemnification of licensors and authors of that 383 | material by anyone who conveys the material (or modified versions of 384 | it) with contractual assumptions of liability to the recipient, for 385 | any liability that these contractual assumptions directly impose on 
386 | those licensors and authors. 387 | 388 | All other non-permissive additional terms are considered "further 389 | restrictions" within the meaning of section 10. If the Program as you 390 | received it, or any part of it, contains a notice stating that it is 391 | governed by this License along with a term that is a further 392 | restriction, you may remove that term. If a license document contains 393 | a further restriction but permits relicensing or conveying under this 394 | License, you may add to a covered work material governed by the terms 395 | of that license document, provided that the further restriction does 396 | not survive such relicensing or conveying. 397 | 398 | If you add terms to a covered work in accord with this section, you 399 | must place, in the relevant source files, a statement of the 400 | additional terms that apply to those files, or a notice indicating 401 | where to find the applicable terms. 402 | 403 | Additional terms, permissive or non-permissive, may be stated in the 404 | form of a separately written license, or stated as exceptions; 405 | the above requirements apply either way. 406 | 407 | 8. Termination. 408 | 409 | You may not propagate or modify a covered work except as expressly 410 | provided under this License. Any attempt otherwise to propagate or 411 | modify it is void, and will automatically terminate your rights under 412 | this License (including any patent licenses granted under the third 413 | paragraph of section 11). 414 | 415 | However, if you cease all violation of this License, then your 416 | license from a particular copyright holder is reinstated (a) 417 | provisionally, unless and until the copyright holder explicitly and 418 | finally terminates your license, and (b) permanently, if the copyright 419 | holder fails to notify you of the violation by some reasonable means 420 | prior to 60 days after the cessation. 
421 | 422 | Moreover, your license from a particular copyright holder is 423 | reinstated permanently if the copyright holder notifies you of the 424 | violation by some reasonable means, this is the first time you have 425 | received notice of violation of this License (for any work) from that 426 | copyright holder, and you cure the violation prior to 30 days after 427 | your receipt of the notice. 428 | 429 | Termination of your rights under this section does not terminate the 430 | licenses of parties who have received copies or rights from you under 431 | this License. If your rights have been terminated and not permanently 432 | reinstated, you do not qualify to receive new licenses for the same 433 | material under section 10. 434 | 435 | 9. Acceptance Not Required for Having Copies. 436 | 437 | You are not required to accept this License in order to receive or 438 | run a copy of the Program. Ancillary propagation of a covered work 439 | occurring solely as a consequence of using peer-to-peer transmission 440 | to receive a copy likewise does not require acceptance. However, 441 | nothing other than this License grants you permission to propagate or 442 | modify any covered work. These actions infringe copyright if you do 443 | not accept this License. Therefore, by modifying or propagating a 444 | covered work, you indicate your acceptance of this License to do so. 445 | 446 | 10. Automatic Licensing of Downstream Recipients. 447 | 448 | Each time you convey a covered work, the recipient automatically 449 | receives a license from the original licensors, to run, modify and 450 | propagate that work, subject to this License. You are not responsible 451 | for enforcing compliance by third parties with this License. 452 | 453 | An "entity transaction" is a transaction transferring control of an 454 | organization, or substantially all assets of one, or subdividing an 455 | organization, or merging organizations. 
If propagation of a covered 456 | work results from an entity transaction, each party to that 457 | transaction who receives a copy of the work also receives whatever 458 | licenses to the work the party's predecessor in interest had or could 459 | give under the previous paragraph, plus a right to possession of the 460 | Corresponding Source of the work from the predecessor in interest, if 461 | the predecessor has it or can get it with reasonable efforts. 462 | 463 | You may not impose any further restrictions on the exercise of the 464 | rights granted or affirmed under this License. For example, you may 465 | not impose a license fee, royalty, or other charge for exercise of 466 | rights granted under this License, and you may not initiate litigation 467 | (including a cross-claim or counterclaim in a lawsuit) alleging that 468 | any patent claim is infringed by making, using, selling, offering for 469 | sale, or importing the Program or any portion of it. 470 | 471 | 11. Patents. 472 | 473 | A "contributor" is a copyright holder who authorizes use under this 474 | License of the Program or a work on which the Program is based. The 475 | work thus licensed is called the contributor's "contributor version". 476 | 477 | A contributor's "essential patent claims" are all patent claims 478 | owned or controlled by the contributor, whether already acquired or 479 | hereafter acquired, that would be infringed by some manner, permitted 480 | by this License, of making, using, or selling its contributor version, 481 | but do not include claims that would be infringed only as a 482 | consequence of further modification of the contributor version. For 483 | purposes of this definition, "control" includes the right to grant 484 | patent sublicenses in a manner consistent with the requirements of 485 | this License. 
486 | 487 | Each contributor grants you a non-exclusive, worldwide, royalty-free 488 | patent license under the contributor's essential patent claims, to 489 | make, use, sell, offer for sale, import and otherwise run, modify and 490 | propagate the contents of its contributor version. 491 | 492 | In the following three paragraphs, a "patent license" is any express 493 | agreement or commitment, however denominated, not to enforce a patent 494 | (such as an express permission to practice a patent or covenant not to 495 | sue for patent infringement). To "grant" such a patent license to a 496 | party means to make such an agreement or commitment not to enforce a 497 | patent against the party. 498 | 499 | If you convey a covered work, knowingly relying on a patent license, 500 | and the Corresponding Source of the work is not available for anyone 501 | to copy, free of charge and under the terms of this License, through a 502 | publicly available network server or other readily accessible means, 503 | then you must either (1) cause the Corresponding Source to be so 504 | available, or (2) arrange to deprive yourself of the benefit of the 505 | patent license for this particular work, or (3) arrange, in a manner 506 | consistent with the requirements of this License, to extend the patent 507 | license to downstream recipients. "Knowingly relying" means you have 508 | actual knowledge that, but for the patent license, your conveying the 509 | covered work in a country, or your recipient's use of the covered work 510 | in a country, would infringe one or more identifiable patents in that 511 | country that you have reason to believe are valid. 
512 | 513 | If, pursuant to or in connection with a single transaction or 514 | arrangement, you convey, or propagate by procuring conveyance of, a 515 | covered work, and grant a patent license to some of the parties 516 | receiving the covered work authorizing them to use, propagate, modify 517 | or convey a specific copy of the covered work, then the patent license 518 | you grant is automatically extended to all recipients of the covered 519 | work and works based on it. 520 | 521 | A patent license is "discriminatory" if it does not include within 522 | the scope of its coverage, prohibits the exercise of, or is 523 | conditioned on the non-exercise of one or more of the rights that are 524 | specifically granted under this License. You may not convey a covered 525 | work if you are a party to an arrangement with a third party that is 526 | in the business of distributing software, under which you make payment 527 | to the third party based on the extent of your activity of conveying 528 | the work, and under which the third party grants, to any of the 529 | parties who would receive the covered work from you, a discriminatory 530 | patent license (a) in connection with copies of the covered work 531 | conveyed by you (or copies made from those copies), or (b) primarily 532 | for and in connection with specific products or compilations that 533 | contain the covered work, unless you entered into that arrangement, 534 | or that patent license was granted, prior to 28 March 2007. 535 | 536 | Nothing in this License shall be construed as excluding or limiting 537 | any implied license or other defenses to infringement that may 538 | otherwise be available to you under applicable patent law. 539 | 540 | 12. No Surrender of Others' Freedom. 541 | 542 | If conditions are imposed on you (whether by court order, agreement or 543 | otherwise) that contradict the conditions of this License, they do not 544 | excuse you from the conditions of this License. 
If you cannot convey a 545 | covered work so as to satisfy simultaneously your obligations under this 546 | License and any other pertinent obligations, then as a consequence you may 547 | not convey it at all. For example, if you agree to terms that obligate you 548 | to collect a royalty for further conveying from those to whom you convey 549 | the Program, the only way you could satisfy both those terms and this 550 | License would be to refrain entirely from conveying the Program. 551 | 552 | 13. Use with the GNU Affero General Public License. 553 | 554 | Notwithstanding any other provision of this License, you have 555 | permission to link or combine any covered work with a work licensed 556 | under version 3 of the GNU Affero General Public License into a single 557 | combined work, and to convey the resulting work. The terms of this 558 | License will continue to apply to the part which is the covered work, 559 | but the special requirements of the GNU Affero General Public License, 560 | section 13, concerning interaction through a network will apply to the 561 | combination as such. 562 | 563 | 14. Revised Versions of this License. 564 | 565 | The Free Software Foundation may publish revised and/or new versions of 566 | the GNU General Public License from time to time. Such new versions will 567 | be similar in spirit to the present version, but may differ in detail to 568 | address new problems or concerns. 569 | 570 | Each version is given a distinguishing version number. If the 571 | Program specifies that a certain numbered version of the GNU General 572 | Public License "or any later version" applies to it, you have the 573 | option of following the terms and conditions either of that numbered 574 | version or of any later version published by the Free Software 575 | Foundation. If the Program does not specify a version number of the 576 | GNU General Public License, you may choose any version ever published 577 | by the Free Software Foundation. 
578 | 579 | If the Program specifies that a proxy can decide which future 580 | versions of the GNU General Public License can be used, that proxy's 581 | public statement of acceptance of a version permanently authorizes you 582 | to choose that version for the Program. 583 | 584 | Later license versions may give you additional or different 585 | permissions. However, no additional obligations are imposed on any 586 | author or copyright holder as a result of your choosing to follow a 587 | later version. 588 | 589 | 15. Disclaimer of Warranty. 590 | 591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 599 | 600 | 16. Limitation of Liability. 601 | 602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 610 | SUCH DAMAGES. 611 | 612 | 17. Interpretation of Sections 15 and 16. 
613 | 614 | If the disclaimer of warranty and limitation of liability provided 615 | above cannot be given local legal effect according to their terms, 616 | reviewing courts shall apply local law that most closely approximates 617 | an absolute waiver of all civil liability in connection with the 618 | Program, unless a warranty or assumption of liability accompanies a 619 | copy of the Program in return for a fee. 620 | 621 | END OF TERMS AND CONDITIONS 622 | 623 | How to Apply These Terms to Your New Programs 624 | 625 | If you develop a new program, and you want it to be of the greatest 626 | possible use to the public, the best way to achieve this is to make it 627 | free software which everyone can redistribute and change under these terms. 628 | 629 | To do so, attach the following notices to the program. It is safest 630 | to attach them to the start of each source file to most effectively 631 | state the exclusion of warranty; and each file should have at least 632 | the "copyright" line and a pointer to where the full notice is found. 633 | 634 | 635 | Copyright (C) 636 | 637 | This program is free software: you can redistribute it and/or modify 638 | it under the terms of the GNU General Public License as published by 639 | the Free Software Foundation, either version 3 of the License, or 640 | (at your option) any later version. 641 | 642 | This program is distributed in the hope that it will be useful, 643 | but WITHOUT ANY WARRANTY; without even the implied warranty of 644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 645 | GNU General Public License for more details. 646 | 647 | You should have received a copy of the GNU General Public License 648 | along with this program. If not, see . 649 | 650 | Also add information on how to contact you by electronic and paper mail. 
651 | 652 | If the program does terminal interaction, make it output a short 653 | notice like this when it starts in an interactive mode: 654 | 655 | Copyright (C) 656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 657 | This is free software, and you are welcome to redistribute it 658 | under certain conditions; type `show c' for details. 659 | 660 | The hypothetical commands `show w' and `show c' should show the appropriate 661 | parts of the General Public License. Of course, your program's commands 662 | might be different; for a GUI interface, you would use an "about box". 663 | 664 | You should also get your employer (if you work as a programmer) or school, 665 | if any, to sign a "copyright disclaimer" for the program, if necessary. 666 | For more information on this, and how to apply and follow the GNU GPL, see 667 | . 668 | 669 | The GNU General Public License does not permit incorporating your program 670 | into proprietary programs. If your program is a subroutine library, you 671 | may consider it more useful to permit linking proprietary applications with 672 | the library. If this is what you want to do, use the GNU Lesser General 673 | Public License instead of this License. But first, please read 674 | . -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # hyperstep 2 | 3 | ## [Volumetric Audio](https://a-sumo.github.io/hyperstep/) 4 | Quickstart: 5 | 6 | - Clone the GitHub repository. 7 | - If needed, install [Node.js and npm](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm). 8 | - Run locally with: 9 | ```npm run dev``` 10 | 11 | The project uses [SharedArrayBuffers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer) which require a secure context. When prompted, open the project on localhost. 
The deployed GitHub page currently works on Chrome and Firefox. 12 | 13 |
14 | Examples: 15 | 16 | Spectacles Lens in Lens Studio 5: 17 | 18 | https://github.com/user-attachments/assets/6a6a3ab7-cfad-42f5-96af-802fe82c500d 19 | 20 | https://user-images.githubusercontent.com/75185852/199851952-30525228-27ca-4f32-9f7f-a04768d41703.mp4 21 | 22 | https://user-images.githubusercontent.com/75185852/199347450-c1074afa-6426-4ecd-a25b-dc19c0291554.mp4 23 | 24 |
25 | 26 | 27 | 28 | -------------------------------------------------------------------------------- /audio/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/a-sumo/hyperstep/8f44400a74d3433d91ab6ab3740f2c273eed5c74/audio/.DS_Store -------------------------------------------------------------------------------- /audio/drum_samples/249716__daandraait__slow-heartbeat.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/a-sumo/hyperstep/8f44400a74d3433d91ab6ab3740f2c273eed5c74/audio/drum_samples/249716__daandraait__slow-heartbeat.wav -------------------------------------------------------------------------------- /audio/drum_samples/422442__vacuumfan7072__jbrake-sim-96k.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/a-sumo/hyperstep/8f44400a74d3433d91ab6ab3740f2c273eed5c74/audio/drum_samples/422442__vacuumfan7072__jbrake-sim-96k.wav -------------------------------------------------------------------------------- /audio/drum_samples/AX_bro.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/a-sumo/hyperstep/8f44400a74d3433d91ab6ab3740f2c273eed5c74/audio/drum_samples/AX_bro.wav -------------------------------------------------------------------------------- /audio/drum_samples/kick_802_2.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/a-sumo/hyperstep/8f44400a74d3433d91ab6ab3740f2c273eed5c74/audio/drum_samples/kick_802_2.wav -------------------------------------------------------------------------------- /audio/drum_samples/muffled_kick.wav: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/a-sumo/hyperstep/8f44400a74d3433d91ab6ab3740f2c273eed5c74/audio/drum_samples/muffled_kick.wav -------------------------------------------------------------------------------- /audio/drum_samples/snare_FPC.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/a-sumo/hyperstep/8f44400a74d3433d91ab6ab3740f2c273eed5c74/audio/drum_samples/snare_FPC.wav -------------------------------------------------------------------------------- /audio/drum_samples/tom_16_inch_tom_4.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/a-sumo/hyperstep/8f44400a74d3433d91ab6ab3740f2c273eed5c74/audio/drum_samples/tom_16_inch_tom_4.wav -------------------------------------------------------------------------------- /audio/drum_samples/tom_909.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/a-sumo/hyperstep/8f44400a74d3433d91ab6ab3740f2c273eed5c74/audio/drum_samples/tom_909.wav -------------------------------------------------------------------------------- /audio/output/agent-self-regulation-1.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/a-sumo/hyperstep/8f44400a74d3433d91ab6ab3740f2c273eed5c74/audio/output/agent-self-regulation-1.wav -------------------------------------------------------------------------------- /build.css: -------------------------------------------------------------------------------- 1 | /*! 
tailwindcss v3.2.4 | MIT License | https://tailwindcss.com*/*,:after,:before{box-sizing:border-box;border:0 solid #e5e7eb}:after,:before{--tw-content:""}html{line-height:1.5;-webkit-text-size-adjust:100%;-moz-tab-size:4;-o-tab-size:4;tab-size:4;font-family:ui-sans-serif,system-ui,-apple-system,BlinkMacSystemFont,Segoe UI,Roboto,Helvetica Neue,Arial,Noto Sans,sans-serif,Apple Color Emoji,Segoe UI Emoji,Segoe UI Symbol,Noto Color Emoji;font-feature-settings:normal}body{margin:0;line-height:inherit}hr{height:0;color:inherit;border-top-width:1px}abbr:where([title]){-webkit-text-decoration:underline dotted;text-decoration:underline dotted}h1,h2,h3,h4,h5,h6{font-size:inherit;font-weight:inherit}a{color:inherit;text-decoration:inherit}b,strong{font-weight:bolder}code,kbd,pre,samp{font-family:ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,monospace;font-size:1em}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:initial}sub{bottom:-.25em}sup{top:-.5em}table{text-indent:0;border-color:inherit;border-collapse:collapse}button,input,optgroup,select,textarea{font-family:inherit;font-size:100%;font-weight:inherit;line-height:inherit;color:inherit;margin:0;padding:0}button,select{text-transform:none}[type=button],[type=reset],[type=submit],button{-webkit-appearance:button;background-color:initial;background-image:none}:-moz-focusring{outline:auto}:-moz-ui-invalid{box-shadow:none}progress{vertical-align:initial}::-webkit-inner-spin-button,::-webkit-outer-spin-button{height:auto}[type=search]{-webkit-appearance:textfield;outline-offset:-2px}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{-webkit-appearance:button;font:inherit}summary{display:list-item}blockquote,dd,dl,figure,h1,h2,h3,h4,h5,h6,hr,p,pre{margin:0}fieldset{margin:0}fieldset,legend{padding:0}menu,ol,ul{list-style:none;margin:0;padding:0}textarea{resize:vertical}input::-moz-placeholder,textarea::-moz-placeholder{opacity:1
;color:#9ca3af}input::placeholder,textarea::placeholder{opacity:1;color:#9ca3af}[role=button],button{cursor:pointer}:disabled{cursor:default}audio,canvas,embed,iframe,img,object,svg,video{display:block;vertical-align:middle}img,video{max-width:100%;height:auto}[hidden]{display:none}*,::backdrop,:after,:before{--tw-border-spacing-x:0;--tw-border-spacing-y:0;--tw-translate-x:0;--tw-translate-y:0;--tw-rotate:0;--tw-skew-x:0;--tw-skew-y:0;--tw-scale-x:1;--tw-scale-y:1;--tw-pan-x: ;--tw-pan-y: ;--tw-pinch-zoom: ;--tw-scroll-snap-strictness:proximity;--tw-ordinal: ;--tw-slashed-zero: ;--tw-numeric-figure: ;--tw-numeric-spacing: ;--tw-numeric-fraction: ;--tw-ring-inset: ;--tw-ring-offset-width:0px;--tw-ring-offset-color:#fff;--tw-ring-color:#3b82f680;--tw-ring-offset-shadow:0 0 #0000;--tw-ring-shadow:0 0 #0000;--tw-shadow:0 0 #0000;--tw-shadow-colored:0 0 #0000;--tw-blur: ;--tw-brightness: ;--tw-contrast: ;--tw-grayscale: ;--tw-hue-rotate: ;--tw-invert: ;--tw-saturate: ;--tw-sepia: ;--tw-drop-shadow: ;--tw-backdrop-blur: ;--tw-backdrop-brightness: ;--tw-backdrop-contrast: ;--tw-backdrop-grayscale: ;--tw-backdrop-hue-rotate: ;--tw-backdrop-invert: ;--tw-backdrop-opacity: ;--tw-backdrop-saturate: ;--tw-backdrop-sepia: 
}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);white-space:nowrap;border-width:0}.visible{visibility:visible}.static{position:static}.absolute{position:absolute}.relative{position:relative}.m-8{margin:2rem}.mx-auto{margin-left:auto;margin-right:auto}.mt-2{margin-top:.5rem}.mt-0\.5{margin-top:.125rem}.mt-12{margin-top:3rem}.mt-0{margin-top:0}.block{display:block}.inline-block{display:inline-block}.flex{display:flex}.inline-flex{display:inline-flex}.grid{display:grid}.hidden{display:none}.h-20{height:5rem}.h-full{height:100%}.h-4{height:1rem}.w-56{width:14rem}.w-20{width:5rem}.w-40{width:10rem}.w-4{width:1rem}.max-w-5xl{max-width:64rem}.resize-none{resize:none}.resize{resize:both}.items-end{align-items:flex-end}.items-center{align-items:center}.justify-center{justify-content:center}.gap-6{gap:1.5rem}.rounded-3xl{border-radius:1.5rem}.rounded-md{border-radius:.375rem}.rounded{border-radius:.25rem}.rounded-t{border-top-left-radius:.25rem;border-top-right-radius:.25rem}.rounded-b{border-bottom-right-radius:.25rem;border-bottom-left-radius:.25rem}.border-4{border-width:4px}.border-black{--tw-border-opacity:1;border-color:rgb(0 0 0/var(--tw-border-opacity))}.bg-white{--tw-bg-opacity:1;background-color:rgb(255 255 255/var(--tw-bg-opacity))}.bg-gray-300{--tw-bg-opacity:1;background-color:rgb(209 213 219/var(--tw-bg-opacity))}.bg-gray-200{--tw-bg-opacity:1;background-color:rgb(229 231 235/var(--tw-bg-opacity))}.bg-emerald-200{--tw-bg-opacity:1;background-color:rgb(167 243 
208/var(--tw-bg-opacity))}.from-teal-200{--tw-gradient-from:#99f6e4;--tw-gradient-to:#99f6e400;--tw-gradient-stops:var(--tw-gradient-from),var(--tw-gradient-to)}.to-lime-200{--tw-gradient-to:#d9f99d}.fill-black{fill:#000}.fill-current{fill:currentColor}.p-8{padding:2rem}.py-16{padding-top:4rem;padding-bottom:4rem}.py-2{padding-top:.5rem;padding-bottom:.5rem}.px-4{padding-left:1rem;padding-right:1rem}.pt-1{padding-top:.25rem}.text-center{text-align:center}.text-2xl{font-size:1.5rem;line-height:2rem}.text-lg{font-size:1.125rem;line-height:1.75rem}.font-bold{font-weight:700}.font-semibold{font-weight:600}.text-gray-900{--tw-text-opacity:1;color:rgb(17 24 39/var(--tw-text-opacity))}.text-gray-700{--tw-text-opacity:1;color:rgb(55 65 81/var(--tw-text-opacity))}.ring{--tw-ring-offset-shadow:var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color);--tw-ring-shadow:var(--tw-ring-inset) 0 0 0 calc(3px + var(--tw-ring-offset-width)) var(--tw-ring-color);box-shadow:var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow,0 0 #0000)}.transition{transition-property:color,background-color,border-color,text-decoration-color,fill,stroke,opacity,box-shadow,transform,filter,-webkit-backdrop-filter;transition-property:color,background-color,border-color,text-decoration-color,fill,stroke,opacity,box-shadow,transform,filter,backdrop-filter;transition-property:color,background-color,border-color,text-decoration-color,fill,stroke,opacity,box-shadow,transform,filter,backdrop-filter,-webkit-backdrop-filter;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.15s}.hover\:-translate-x-2:hover{--tw-translate-x:-0.5rem}.hover\:-translate-x-2:hover,.hover\:-translate-y-2:hover{transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skewX(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) 
scaleY(var(--tw-scale-y))}.hover\:-translate-y-2:hover{--tw-translate-y:-0.5rem}.hover\:bg-emerald-400:hover{--tw-bg-opacity:1;background-color:rgb(52 211 153/var(--tw-bg-opacity))}.hover\:bg-emerald-200:hover{--tw-bg-opacity:1;background-color:rgb(167 243 208/var(--tw-bg-opacity))}.hover\:bg-gradient-to-r:hover{background-image:linear-gradient(to right,var(--tw-gradient-stops))}.hover\:fill-emerald-400:hover{fill:#34d399}.hover\:text-gray-700\/75:hover{color:#374151bf}.hover\:shadow-\[8px_8px_0_0_\#000\]:hover{--tw-shadow:8px 8px 0 0 #000;--tw-shadow-colored:8px 8px 0 0 var(--tw-shadow-color)}.hover\:shadow-\[2px_2px_0_0_\#000\]:hover,.hover\:shadow-\[8px_8px_0_0_\#000\]:hover{box-shadow:var(--tw-ring-offset-shadow,0 0 #0000),var(--tw-ring-shadow,0 0 #0000),var(--tw-shadow)}.hover\:shadow-\[2px_2px_0_0_\#000\]:hover{--tw-shadow:2px 2px 0 0 #000;--tw-shadow-colored:2px 2px 0 0 var(--tw-shadow-color)}.group:hover .group-hover\:block{display:block}@media (min-width:640px){.sm\:px-6{padding-left:1.5rem;padding-right:1.5rem}}@media (min-width:768px){.md\:gap-8{gap:2rem}}@media (min-width:1024px){.lg\:px-8{padding-left:2rem;padding-right:2rem}.lg\:hover\:relative:hover{position:relative}.lg\:hover\:opacity-100:hover{opacity:1}} -------------------------------------------------------------------------------- /bundler/webpack.common.js: -------------------------------------------------------------------------------- 1 | const HtmlWebpackPlugin = require('html-webpack-plugin'); 2 | const MiniCssExtractPlugin = require('mini-css-extract-plugin'); 3 | const path = require('path'); 4 | 5 | const pages = ["home", "volume", "volume-minimal", "playground"]; 6 | 7 | module.exports = (env, argv) => { 8 | return { 9 | // entry: path.resolve(__dirname, '../src/main.js'), 10 | entry: pages.reduce((config, page) => { 11 | if (page == 'home'){ 12 | config[page] = `./src/main.js`; 13 | } 14 | else{ 15 | config[page] = `./src/${page}/main.js`; 16 | } 17 | return config; 18 | }, {}), 19 | 
output: 20 | { 21 | filename: 'bundle.[contenthash].js', 22 | path: path.resolve(__dirname, '../dist'), 23 | publicPath: argv.mode === 'production' ? '/hyperstep/' : '/' 24 | }, 25 | optimization: { 26 | splitChunks: { 27 | chunks: "all", 28 | }, 29 | }, 30 | devServer: { 31 | headers: { 32 | 'Cross-Origin-Embedder-Policy': 'require-corp', 33 | 'Cross-Origin-Opener-Policy': 'same-origin', 34 | }, 35 | }, 36 | devtool: 'source-map', 37 | plugins: [new MiniCssExtractPlugin()].concat( 38 | pages.map( 39 | (page) => 40 | { 41 | if (page == 'home'){ 42 | return new HtmlWebpackPlugin({ 43 | inject: true, 44 | template: `./src/index.html`, 45 | filename: `index.html`, 46 | chunks: [page], 47 | }) 48 | } 49 | else{ 50 | return new HtmlWebpackPlugin({ 51 | inject: true, 52 | template: `./src/${page}/index.html`, 53 | filename: `${page}/index.html`, 54 | chunks: [page], 55 | }) 56 | } 57 | } 58 | ) 59 | ), 60 | module: 61 | { 62 | rules: 63 | [ 64 | // HTML 65 | { 66 | test: /\.(html)$/, 67 | use: ['html-loader'] 68 | }, 69 | 70 | // JS 71 | { 72 | test: /\.js$/, 73 | resourceQuery: { not: [/raw/] }, 74 | exclude: /node_modules/, 75 | use: 76 | [ 77 | 'babel-loader' 78 | ] 79 | }, 80 | 81 | // TS 82 | { 83 | test: /\.tsx?$/, 84 | use: 'ts-loader', 85 | exclude: /node_modules/, 86 | }, 87 | 88 | // CSS 89 | { 90 | test: /\.css$/, 91 | use: 92 | [ 93 | MiniCssExtractPlugin.loader, 94 | 'css-loader', 95 | 'postcss-loader' 96 | ] 97 | }, 98 | 99 | // Images 100 | { 101 | test: /\.(jpg|png|gif|svg|gltf|bin|ico)$/, 102 | type: 'asset/resource', 103 | }, 104 | 105 | // Shaders 106 | { 107 | test: /\.(glsl|vs|fs|vert|frag)$/, 108 | exclude: /node_modules/, 109 | use: 110 | [ 111 | 'raw-loader' 112 | ] 113 | }, 114 | // Audio 115 | { 116 | test: /\.(ogg|mp3|wav|mpe?g)$/i, 117 | type: 'asset/resource', 118 | }, 119 | // Fonts 120 | { 121 | test: /\.(ttf|eot|woff|woff2)$/, 122 | use: 123 | [ 124 | { 125 | loader: 'file-loader', 126 | options: 127 | { 128 | outputPath: 'assets/fonts/' 
129 | } 130 | } 131 | ] 132 | } 133 | ] 134 | } 135 | } 136 | } 137 | -------------------------------------------------------------------------------- /bundler/webpack.dev.js: -------------------------------------------------------------------------------- 1 | const { merge } = require('webpack-merge') 2 | const commonConfiguration = require('./webpack.common.js') 3 | const portFinderSync = require('portfinder-sync') 4 | const path = require('path'); 5 | const infoColor = (_message) => { 6 | return `\u001b[1m\u001b[34m${_message}\u001b[39m\u001b[22m` 7 | } 8 | 9 | module.exports = (env, argv) => { 10 | return merge( 11 | commonConfiguration(env, argv), 12 | { 13 | mode: 'development', 14 | devServer: { 15 | static: { 16 | directory: path.join(__dirname, 'dist'), 17 | }, 18 | compress: true, 19 | port: 9000, 20 | }, 21 | } 22 | ) 23 | } 24 | -------------------------------------------------------------------------------- /bundler/webpack.prod.js: -------------------------------------------------------------------------------- 1 | const { merge } = require('webpack-merge') 2 | const commonConfiguration = require('./webpack.common.js') 3 | const { CleanWebpackPlugin } = require('clean-webpack-plugin') 4 | 5 | module.exports = (env, argv) => { 6 | return merge( 7 | commonConfiguration(env, argv), 8 | { 9 | mode: 'production', 10 | plugins: 11 | [ 12 | new CleanWebpackPlugin() 13 | ] 14 | } 15 | ) 16 | } 17 | -------------------------------------------------------------------------------- /netlify.toml: -------------------------------------------------------------------------------- 1 | [[headers]] 2 | for = "/*" 3 | [headers.values] 4 | Cross-Origin-Opener-Policy = "same-origin" 5 | Cross-Origin-Embedder-Policy = "require-corp" 6 | [build] 7 | base = "/hyperstep" 8 | # build command for myproject1 directory 9 | command = "webpack" -------------------------------------------------------------------------------- /package.json:
-------------------------------------------------------------------------------- 1 | { 2 | "homepage": ".", 3 | "scripts": { 4 | "build": "webpack --mode=production --config ./bundler/webpack.prod.js", 5 | "dev": "webpack serve --mode=development --config ./bundler/webpack.dev.js", 6 | "predeploy": "npm run build", 7 | "deploy": "gh-pages -d dist" 8 | }, 9 | "dependencies": { 10 | "babel-loader": "^8.2.2", 11 | "buffer": "^6.0.3", 12 | "cannon": "^0.6.2", 13 | "chart.js": "^3.9.1", 14 | "chartjs-adapter-luxon": "^1.3.0", 15 | "chartjs-plugin-streaming": "^2.0.0", 16 | "clean-webpack-plugin": "^3.0.0", 17 | "coi-serviceworker": "^0.1.6", 18 | "copy-webpack-plugin": "^7.0.0", 19 | "css-loader": "^5.0.1", 20 | "essentia.js": "^0.1.3", 21 | "html-loader": "^1.3.2", 22 | "html-webpack-plugin": "^5.0.0-alpha.7", 23 | "luxon": "^3.1.0", 24 | "mini-css-extract-plugin": "^1.3.4", 25 | "portfinder-sync": "^0.0.2", 26 | "react": "^18.2.0", 27 | "react-dom": "^18.2.0", 28 | "stats-js": "^1.0.1", 29 | "style-loader": "^2.0.0", 30 | "three": "^0.142.0", 31 | "three-mesh-bvh": "^0.5.18", 32 | "tone": "^14.7.77", 33 | "webpack-cli": "^4.3.1", 34 | "webpack-dev-server": "^4.11.1", 35 | "webpack-merge": "^5.7.3", 36 | "yuka": "^0.7.8" 37 | }, 38 | "devDependencies": { 39 | "@babel/cli": "^7.21.0", 40 | "@babel/core": "^7.21.4", 41 | "@babel/preset-env": "^7.21.4", 42 | "@babel/preset-react": "^7.18.6", 43 | "@babel/preset-typescript": "^7.21.4", 44 | "@types/three": "^0.144.0", 45 | "@typescript-eslint/eslint-plugin": "^5.40.0", 46 | "@typescript-eslint/parser": "^5.40.0", 47 | "dat.gui": "^0.7.9", 48 | "eslint": "^8.18.0", 49 | "eslint-config-standard": "^17.0.0", 50 | "eslint-plugin-import": "^2.26.0", 51 | "eslint-plugin-n": "^15.2.3", 52 | "eslint-plugin-promise": "^6.0.0", 53 | "file-loader": "^6.2.0", 54 | "gh-pages": "^4.0.0", 55 | "postcss": "^8.4.19", 56 | "postcss-loader": "^7.0.1", 57 | "postcss-preset-env": "^7.8.3", 58 | "raw-loader": "^4.0.2", 59 | "tailwindcss": 
"^3.2.4", 60 | "webpack": "^5.89.0" 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /postcss.config.js: -------------------------------------------------------------------------------- 1 | const tailwindcss = require('tailwindcss'); 2 | module.exports = { 3 | plugins: [ 4 | 'postcss-preset-env', 5 | tailwindcss 6 | ], 7 | }; -------------------------------------------------------------------------------- /src/assets/audio/r2d2_talk.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/a-sumo/hyperstep/8f44400a74d3433d91ab6ab3740f2c273eed5c74/src/assets/audio/r2d2_talk.mp3 -------------------------------------------------------------------------------- /src/assets/audio/synth_melody.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/a-sumo/hyperstep/8f44400a74d3433d91ab6ab3740f2c273eed5c74/src/assets/audio/synth_melody.mp3 -------------------------------------------------------------------------------- /src/assets/audio/theremin_tone.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/a-sumo/hyperstep/8f44400a74d3433d91ab6ab3740f2c273eed5c74/src/assets/audio/theremin_tone.mp3 -------------------------------------------------------------------------------- /src/assets/images/noisy_background.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/a-sumo/hyperstep/8f44400a74d3433d91ab6ab3740f2c273eed5c74/src/assets/images/noisy_background.jpg -------------------------------------------------------------------------------- /src/assets/images/vol_synth.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/a-sumo/hyperstep/8f44400a74d3433d91ab6ab3740f2c273eed5c74/src/assets/images/vol_synth.png -------------------------------------------------------------------------------- /src/assets/models/DragonAttenuation.glb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/a-sumo/hyperstep/8f44400a74d3433d91ab6ab3740f2c273eed5c74/src/assets/models/DragonAttenuation.glb -------------------------------------------------------------------------------- /src/assets/models/Duck.glb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/a-sumo/hyperstep/8f44400a74d3433d91ab6ab3740f2c273eed5c74/src/assets/models/Duck.glb -------------------------------------------------------------------------------- /src/coi-serviceworker.js: -------------------------------------------------------------------------------- 1 | /*! coi-serviceworker v0.1.6 - Guido Zuidhof, licensed under MIT */ 2 | let coepCredentialless = false; 3 | if (typeof window === 'undefined') { 4 | self.addEventListener("install", () => self.skipWaiting()); 5 | self.addEventListener("activate", (event) => event.waitUntil(self.clients.claim())); 6 | 7 | self.addEventListener("message", (ev) => { 8 | if (!ev.data) { 9 | return; 10 | } else if (ev.data.type === "deregister") { 11 | self.registration 12 | .unregister() 13 | .then(() => { 14 | return self.clients.matchAll(); 15 | }) 16 | .then(clients => { 17 | clients.forEach((client) => client.navigate(client.url)); 18 | }); 19 | } else if (ev.data.type === "coepCredentialless") { 20 | coepCredentialless = ev.data.value; 21 | } 22 | }); 23 | 24 | self.addEventListener("fetch", function (event) { 25 | const r = event.request; 26 | if (r.cache === "only-if-cached" && r.mode !== "same-origin") { 27 | return; 28 | } 29 | 30 | const request = (coepCredentialless && r.mode === "no-cors") 31 | ? 
new Request(r, { 32 | credentials: "omit", 33 | }) 34 | : r; 35 | event.respondWith( 36 | fetch(request) 37 | .then((response) => { 38 | if (response.status === 0) { 39 | return response; 40 | } 41 | 42 | const newHeaders = new Headers(response.headers); 43 | newHeaders.set("Cross-Origin-Embedder-Policy", 44 | coepCredentialless ? "credentialless" : "require-corp" 45 | ); 46 | newHeaders.set("Cross-Origin-Opener-Policy", "same-origin"); 47 | 48 | return new Response(response.body, { 49 | status: response.status, 50 | statusText: response.statusText, 51 | headers: newHeaders, 52 | }); 53 | }) 54 | .catch((e) => console.error(e)) 55 | ); 56 | }); 57 | 58 | } else { 59 | (() => { 60 | // You can customize the behavior of this script through a global `coi` variable. 61 | const coi = { 62 | shouldRegister: () => true, 63 | shouldDeregister: () => false, 64 | coepCredentialless: () => false, 65 | doReload: () => window.location.reload(), 66 | quiet: false, 67 | ...window.coi 68 | }; 69 | 70 | const n = navigator; 71 | 72 | if (n.serviceWorker && n.serviceWorker.controller) { 73 | n.serviceWorker.controller.postMessage({ 74 | type: "coepCredentialless", 75 | value: coi.coepCredentialless(), 76 | }); 77 | 78 | if (coi.shouldDeregister()) { 79 | n.serviceWorker.controller.postMessage({ type: "deregister" }); 80 | } 81 | } 82 | 83 | // If we're already coi: do nothing. Perhaps it's due to this script doing its job, or COOP/COEP are 84 | // already set from the origin server. Also if the browser has no notion of crossOriginIsolated, just give up here. 85 | if (window.crossOriginIsolated !== false || !coi.shouldRegister()) return; 86 | 87 | if (!window.isSecureContext) { 88 | !coi.quiet && console.log("COOP/COEP Service Worker not registered, a secure context is required."); 89 | return; 90 | } 91 | 92 | // In some environments (e.g. 
Chrome incognito mode) this won't be available 93 | if (n.serviceWorker) { 94 | n.serviceWorker.register(window.document.currentScript.src).then( 95 | (registration) => { 96 | !coi.quiet && console.log("COOP/COEP Service Worker registered", registration.scope); 97 | 98 | registration.addEventListener("updatefound", () => { 99 | !coi.quiet && console.log("Reloading page to make use of updated COOP/COEP Service Worker."); 100 | coi.doReload(); 101 | }); 102 | 103 | // If the registration is active, but it's not controlling the page 104 | if (registration.active && !n.serviceWorker.controller) { 105 | !coi.quiet && console.log("Reloading page to make use of COOP/COEP Service Worker."); 106 | coi.doReload(); 107 | } 108 | }, 109 | (err) => { 110 | !coi.quiet && console.error("COOP/COEP Service Worker failed to register:", err); 111 | } 112 | ); 113 | } 114 | })(); 115 | } -------------------------------------------------------------------------------- /src/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | Hyperstep 8 | 9 | 10 | 11 | 63 | 64 | 65 | 82 | 83 | 110 | 111 | 112 | 113 | -------------------------------------------------------------------------------- /src/main.js: -------------------------------------------------------------------------------- 1 | import './style.css'; 2 | import './coi-serviceworker.js'; -------------------------------------------------------------------------------- /src/playground/audio.js: -------------------------------------------------------------------------------- 1 | import audioFile1 from "../assets/audio/r2d2_talk.mp3"; 2 | import audioFile2 from "../assets/audio/synth_melody.mp3"; 3 | import audioFile3 from "../assets/audio/theremin_tone.mp3"; 4 | 5 | // Audio management and processing related codes 6 | export function initAudio() { 7 | // Audio initialization code 8 | 9 | } 10 | 11 | export function setupAudioEvents() { 12 | // Audio event listeners 13 | } 14 
| 15 | // record native microphone input and do further audio processing on each audio buffer using the given callback functions 16 | function startMicRecordStream() { 17 | if (navigator.mediaDevices.getUserMedia) { 18 | console.log("Initializing audio..."); 19 | navigator.mediaDevices.getUserMedia({ audio: true, video: false }) 20 | .then(startAudioProcessingStream) 21 | .catch(function (message) { 22 | throw "Could not access microphone - " + message; 23 | }); 24 | } else { 25 | throw "Could not access microphone - getUserMedia not available"; 26 | } 27 | } 28 | 29 | function startAudioProcessingStream(stream) { 30 | gumStream = stream; 31 | if (gumStream.active) { 32 | if (audioCtx.state == "closed") { 33 | audioCtx = new AudioContext(); 34 | } 35 | else if (audioCtx.state == "suspended") { 36 | audioCtx.resume(); 37 | } 38 | 39 | mic = audioCtx.createMediaStreamSource(gumStream); 40 | gain = audioCtx.createGain(); 41 | gain.gain.setValueAtTime(0, audioCtx.currentTime); 42 | 43 | let codeForProcessorModule = ["https://cdn.jsdelivr.net/npm/essentia.js@0.1.3/dist/essentia-wasm.umd.js", 44 | "https://cdn.jsdelivr.net/npm/essentia.js@0.1.3/dist/essentia.js-extractor.umd.js", 45 | "https://raw.githack.com/MTG/essentia.js/master/examples/demos/melspectrogram-rt/melspectrogram-processor.js", 46 | "https://unpkg.com/ringbuf.js@0.1.0/dist/index.js"]; 47 | 48 | // inject Essentia.js code into AudioWorkletGlobalScope context, then setup audio graph and start animation 49 | URLFromFiles(codeForProcessorModule) 50 | .then((concatenatedCode) => { 51 | audioCtx.audioWorklet.addModule(concatenatedCode) 52 | .then(setupAudioGraphStream) 53 | .catch(function moduleLoadRejected(msg) { 54 | console.log(`There was a problem loading the AudioWorklet module code: \n ${msg}`); 55 | }); 56 | }) 57 | .catch((msg) => { 58 | console.log(`There was a problem retrieving the AudioWorklet module code: \n ${msg}`); 59 | }) 60 | // // set button to stop 61 | 
recordButton.classList.add("recording"); 62 | recordButton.innerHTML = "Stop"; 63 | recordButton.classList.add("bg-emerald-200"); 64 | recordButton.disabled = false; 65 | } else { 66 | throw "Mic stream not active"; 67 | } 68 | } 69 | function startAudioProcessingMediaElt() { 70 | if (audioCtx.state == "closed") { 71 | audioCtx = new AudioContext(); 72 | } 73 | else if (audioCtx.state == "suspended") { 74 | audioCtx.resume(); 75 | } 76 | 77 | source = audioCtx.createMediaElementSource(player); 78 | gain = audioCtx.createGain(); 79 | gain.gain.setValueAtTime(0, audioCtx.currentTime); 80 | let codeForProcessorModule = ["https://cdn.jsdelivr.net/npm/essentia.js@0.1.3/dist/essentia-wasm.umd.js", 81 | "https://cdn.jsdelivr.net/npm/essentia.js@0.1.3/dist/essentia.js-extractor.umd.js", 82 | "https://raw.githack.com/MTG/essentia.js/master/examples/demos/melspectrogram-rt/melspectrogram-processor.js", 83 | "https://unpkg.com/ringbuf.js@0.1.0/dist/index.js"]; 84 | 85 | // inject Essentia.js code into AudioWorkletGlobalScope context, then setup audio graph and start animation 86 | URLFromFiles(codeForProcessorModule) 87 | .then((concatenatedCode) => { 88 | audioCtx.audioWorklet.addModule(concatenatedCode) 89 | .then(setupAudioGraphMediaElt) 90 | .catch(function moduleLoadRejected(msg) { 91 | console.log(`There was a problem loading the AudioWorklet module code: \n ${msg}`); 92 | }); 93 | }) 94 | .catch((msg) => { 95 | console.log(`There was a problem retrieving the AudioWorklet module code: \n ${msg}`); 96 | }) 97 | } 98 | function stopMicRecordStream() { 99 | // stop mic stream 100 | gumStream.getAudioTracks().forEach(function (track) { 101 | track.stop(); 102 | gumStream.removeTrack(track); 103 | }); 104 | 105 | audioCtx.close().then(function () { 106 | // manage button state 107 | recordButton.classList.remove("recording"); 108 | recordButton.innerHTML = 'Record'; 109 | 110 | // disconnect nodes 111 | mic.disconnect(); 112 | melspectrogramNode.disconnect(); 113 | 
gain.disconnect(); 114 | mic = undefined; 115 | melspectrogramNode = undefined; 116 | gain = undefined; 117 | 118 | console.log("Stopped recording ..."); 119 | }); 120 | } 121 | function stopAudioProcessingMediaElt() { 122 | player.pause(); 123 | audioCtx.close().then(function () { 124 | // disconnect nodes 125 | source.disconnect(); 126 | melspectrogramNode.disconnect(); 127 | source = undefined; 128 | melspectrogramNode = undefined; 129 | }); 130 | } 131 | function setupAudioGraphStream() { 132 | // increase buffer size in case of glitches 133 | let sab = exports.RingBuffer.getStorageForCapacity(melNumBands * 42, Float32Array); 134 | let rb = new exports.RingBuffer(sab, Float32Array); 135 | audioReader = new exports.AudioReader(rb); 136 | 137 | melspectrogramNode = new AudioWorkletNode(audioCtx, 'melspectrogram-processor', { 138 | processorOptions: { 139 | bufferSize: bufferSize, 140 | hopSize: hopSize, 141 | melNumBands: melNumBands, 142 | sampleRate: audioCtx.sampleRate, 143 | } 144 | }); 145 | 146 | try { 147 | melspectrogramNode.port.postMessage({ 148 | sab: sab, 149 | }); 150 | } catch (_) { 151 | alert("No SharedArrayBuffer transfer support, try another browser."); 152 | recordButton.disabled = true; 153 | return; 154 | } 155 | recording = recordButton.classList.contains("recording"); 156 | mic.connect(melspectrogramNode); 157 | melspectrogramNode.connect(gain); 158 | gain.connect(audioCtx.destination); 159 | 160 | } 161 | 162 | function setupAudioGraphMediaElt() { 163 | // increase buffer size in case of glitches 164 | let sab = exports.RingBuffer.getStorageForCapacity(melNumBands * 18, Float32Array); 165 | let rb = new exports.RingBuffer(sab, Float32Array); 166 | audioReader = new exports.AudioReader(rb); 167 | melspectrogramNode = new AudioWorkletNode(audioCtx, 'melspectrogram-processor', { 168 | processorOptions: { 169 | bufferSize: 1024, 170 | hopSize: 512, 171 | melNumBands: melNumBands, 172 | sampleRate: audioCtx.sampleRate, 173 | } 174 | }); 175 | 
// The AudioWorklet node causes cracking noises during playback so we 176 | // connect it with a gain node to avoid this. 177 | try { 178 | melspectrogramNode.port.postMessage({ 179 | sab: sab, 180 | }); 181 | } catch (_) { 182 | alert("No SharedArrayBuffer transfer support, try another browser."); 183 | return; 184 | } 185 | // connect source to destination for playback 186 | source.connect(audioCtx.destination); 187 | // connect source to AudioWorklet node for feature extraction 188 | source.connect(melspectrogramNode); 189 | melspectrogramNode.connect(gain); 190 | gain.connect(audioCtx.destination); 191 | } 192 | -------------------------------------------------------------------------------- /src/playground/constants.js: -------------------------------------------------------------------------------- 1 | // Constant values used in the project 2 | 3 | let audioCtx; 4 | let bufferSize = 1024; 5 | let hopSize = 512; 6 | let melNumBands = 96; 7 | let numFrames = 1; 8 | 9 | const params = { 10 | df_type: 0, dist_func_tube: 1.0, dist_func_box: 1.0, dist_func_plane: 1.0, df_sphere_tube: 0.0, 11 | df_sphere_box: 0.0, df_sphere_plane: 0.0, df_tube_box: 0.0, df_tube_plane: 0.0, df_plane_box: 0.0, 12 | scale_x: 1, scale_y: 1, scale_z: 1, 13 | global_scale: 0.03, min_dist: 0, max_dist: 1, 14 | rot_x: 0, rot_y: 0, rot_z: 0, 15 | translation_x: 0, translation_y: 0, translation_z: 0, 16 | playback_rate: 1.0, 17 | color_mode: 0, color_preset_type: 0, color_space: 0, uni_color: "#9838ff", 18 | color_1: "#000000", color_2: "#ffffff", 19 | mel_spec_bins: melNumBands, 20 | num_frames: numFrames, 21 | fft_size: bufferSize, 22 | dt_scale: 0.1, 23 | max_steps: 100, 24 | }; 25 | 26 | -------------------------------------------------------------------------------- /src/playground/domElements: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/a-sumo/hyperstep/8f44400a74d3433d91ab6ab3740f2c273eed5c74/src/playground/domElements -------------------------------------------------------------------------------- /src/playground/eventHandlers.js: -------------------------------------------------------------------------------- 1 | // Functions for handling events like window resize, pointer move 2 | -------------------------------------------------------------------------------- /src/playground/globals.js: -------------------------------------------------------------------------------- 1 | import THREE from 'three'; 2 | 3 | -------------------------------------------------------------------------------- /src/playground/gui.js: -------------------------------------------------------------------------------- 1 | import * as THREE from 'three'; 2 | import { GUI } from 'dat.gui/build/dat.gui.min.js'; 3 | // import constants 4 | import { audioCtx, bufferSize, hopSize, melNumBands, numFrames } from './constants'; 5 | 6 | const gui = new GUI( {width: 200 } ); 7 | 8 | export function setupGUI() { 9 | gui.add( params, 'playback_rate').step(0.001).name( 'playback_rate' ).onChange( function ( value ) { 10 | (volumeMesh.material).uniforms['playback_rate']['value'] = 1.0 / value; 11 | player.playbackRate = value; 12 | } ); 13 | gui.add( params, 'num_frames').step(1).name( 'num_frames' ).onChange( function ( value ) { 14 | specTexture = createDataTexture(value, melNumBands); 15 | updateMeshTexture(); 16 | numFrames = value; 17 | } ); 18 | // Distance Function 19 | const df_folder = gui.addFolder('distance function') ; 20 | df_folder.add( params, 'min_dist').step(0.01).name( 'min_dist' ).onChange( function ( value ) { 21 | (volumeMesh.material).uniforms['min_dist']['value'] = value; 22 | } ); 23 | df_folder.add( params, 'max_dist').step(0.01).name( 'max_dist' ).onChange( function ( value ) { 24 | (volumeMesh.material).uniforms['max_dist']['value'] = value; 25 | } ); 26 | df_folder.add( params, 
'df_type', { 27 | 'Sphere - Tube': 0,'Sphere - Box': 1,'Sphere - Plane': 2, 28 | 'Tube - Box': 3, 'Tube - Plane': 4,'Plane - Box': 5}).name( 'sphere/tube' ).onChange( function ( value ) { 29 | (volumeMesh.material).uniforms['df_type']['value'] = value; 30 | } ); 31 | df_folder.add( params, 'df_sphere_tube', 0, 1).step(0.01).name( 'sphere/tube' ).onChange( function ( value ) { 32 | (volumeMesh.material).uniforms['df_sphere_tube']['value'] = value; 33 | } ); 34 | df_folder.add( params, 'df_sphere_box', 0, 1).step(0.01).name( 'sphere/box' ).onChange( function ( value ) { 35 | (volumeMesh.material).uniforms['df_sphere_box']['value'] = value; 36 | } ); 37 | df_folder.add( params, 'df_sphere_plane', 0, 1).step(0.01).name( 'sphere/plane' ).onChange( function ( value ) { 38 | (volumeMesh.material).uniforms['df_sphere_plane']['value'] = value; 39 | } ); 40 | df_folder.add( params, 'df_tube_box', 0, 1).step(0.01).name( 'tube/box' ).onChange( function ( value ) { 41 | (volumeMesh.material).uniforms['df_tube_box']['value'] = value; 42 | } ); 43 | df_folder.add( params, 'df_tube_plane', 0, 1).step(0.01).name( 'tube/plane' ).onChange( function ( value ) { 44 | (volumeMesh.material).uniforms['df_tube_plane']['value'] = value; 45 | } ); 46 | df_folder.add( params, 'df_plane_box', 0, 1).step(0.01).name( 'plane/box' ).onChange( function ( value ) { 47 | (volumeMesh.material).uniforms['df_plane_box']['value'] = value; 48 | } ); 49 | df_folder.add( params, 'global_scale').step(0.0001).name( 'global_scale' ).onChange( function ( value ) { 50 | (volumeMesh.material).uniforms['global_scale']['value'] = value; 51 | } ); 52 | const transforms = gui.addFolder('transforms') ; 53 | transforms.add( params, 'scale_x', 0, 1).step(0.00001).name( 'scale_x' ).onChange( function ( value ) { 54 | (volumeMesh.material).uniforms['df_scale']['value'] = new THREE.Vector3(value, params.scale_y, params.scale_z); 55 | } ); 56 | transforms .add( params, 'scale_y', 0, 1).step(0.00001).name( 'scale_y' 
).onChange( function ( value ) { 57 | (volumeMesh.material).uniforms['df_scale']['value'] = new THREE.Vector3(params.scale_x, value, params.scale_z); 58 | } ); 59 | transforms.add( params, 'scale_z', 0, 1).step(0.00001).name( 'scale_z' ).onChange( function ( value ) { 60 | (volumeMesh.material).uniforms['df_scale']['value'] = new THREE.Vector3(params.scale_x, params.scale_y, value); 61 | } ); 62 | transforms.add( params, 'rot_x', -360, 360).step(0.1).name( 'rotate_x' ).onChange( function ( value ) { 63 | (volumeMesh.material).uniforms['df_rot']['value'] = new THREE.Vector3(value, params.rot_y, params.rot_z); 64 | } ); 65 | transforms.add( params, 'rot_y', -360, 360).step(0.1).name( 'rotate_y' ).onChange( function ( value ) { 66 | (volumeMesh.material).uniforms['df_rot']['value'] = new THREE.Vector3(params.rot_x, value, params.rot_z); 67 | } ); 68 | transforms.add( params, 'rot_z', -360, 360).step(0.1).name( 'rotate_z' ).onChange( function ( value ) { 69 | (volumeMesh.material).uniforms['df_rot']['value'] = new THREE.Vector3(params.rot_x, params.rot_y, value); 70 | } ); 71 | transforms.add( params, 'translation_x').step(0.01).name( 'translate_x' ).onChange( function ( value ) { 72 | (volumeMesh.material).uniforms['df_translation']['value'] = new THREE.Vector3(value, params.translation_y, params.translation_z); 73 | } ); 74 | transforms.add( params, 'translation_y').step(0.01).name( 'translate_y' ).onChange( function ( value ) { 75 | (volumeMesh.material).uniforms['df_translation']['value'] = new THREE.Vector3(params.translation_x, value, params.translation_z); 76 | } ); 77 | transforms.add( params, 'translation_z').step(0.01).name( 'translate_z' ).onChange( function ( value ) { 78 | (volumeMesh.material).uniforms['df_translation']['value'] = new THREE.Vector3(params.translation_x, params.translation_y, value); 79 | } ); 80 | // Color 81 | const color_folder = gui.addFolder('color') ; 82 | color_folder.add( params, 'color_mode', {'Presets': 0, 'Gradient': 1, 
'Unicolor': 2}).name( 'color_mode' ).onChange( function ( value ) { 83 | (volumeMesh.material).uniforms['color_mode']['value'] = value; 84 | } ); 85 | color_folder.add( params, 'color_preset_type', 0, 4).step(1).name( 'color_preset' ).onChange( function ( value ) { 86 | (volumeMesh.material).uniforms['color_preset_type']['value'] = value; 87 | } ); 88 | color_folder.add( params, 'color_space', {'RBG': 0, 'HSV': 1}).name( 'color_space' ).onChange( function ( value ) { 89 | (volumeMesh.material).uniforms['color_space']['value'] = value ; 90 | } ); 91 | color_folder.addColor( params, 'uni_color').name( 'unicolor' ).onChange( function ( value ) { 92 | (volumeMesh.material).uniforms['uni_color']['value'] = new THREE.Color(value) ; 93 | } ); 94 | color_folder.addColor( params, 'color_1').name( 'color_1' ).onChange( function ( value ) { 95 | (volumeMesh.material).uniforms['color_1']['value'] = new THREE.Color(value) ; 96 | } ); 97 | color_folder.addColor( params, 'color_2').name( 'color_2' ).onChange( function ( value ) { 98 | (volumeMesh.material).uniforms['color_2']['value'] = new THREE.Color(value) ; 99 | } ); 100 | // Spectrogram 101 | const spectrogram_folder = gui.addFolder('spectrogram') ; 102 | spectrogram_folder.add( params, 'mel_spec_bins', 10, 96).step(1).name( 'mel_spec_bins' ).onChange( function ( value ) { 103 | melNumBands = value ; 104 | } ); 105 | // Raycasting 106 | const raycasting_folder = gui.addFolder('raycasting') ; 107 | raycasting_folder.add( params, 'dt_scale', 0.005,).step(0.001).name( 'dt_scale' ).onChange( function ( value ) { 108 | (volumeMesh.material).uniforms['dt_scale']['value'] = value; 109 | } ); 110 | raycasting_folder.add( params, 'max_steps', 1,).step(1).name( 'max_steps' ).onChange( function ( value ) { 111 | (volumeMesh.material).uniforms['max_steps']['value'] = value; 112 | } ); 113 | } -------------------------------------------------------------------------------- /src/playground/index.html: 
-------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | Volume 8 | 9 | 10 | 11 |
12 | 13 |
14 |
15 |
16 | 21 | 53 |
54 |
55 | 56 | 57 | 58 | 59 | -------------------------------------------------------------------------------- /src/playground/main.js: -------------------------------------------------------------------------------- 1 | import '../style.css'; 2 | import * as THREE from 'three'; 3 | import { OrbitControls } from 'three/examples/jsm/controls/OrbitControls.js'; 4 | import Stats from 'three/examples/jsm/libs/stats.module'; 5 | import { GUI } from 'dat.gui/build/dat.gui.min.js'; 6 | import audioFile1 from "../assets/audio/r2d2_talk.mp3"; 7 | import audioFile2 from "../assets/audio/synth_melody.mp3"; 8 | import audioFile3 from "../assets/audio/theremin_tone.mp3"; 9 | import noisyBackground from "../assets/images/noisy_background.jpg"; 10 | import { raycastVertexShader, raycastFragmentShader} from './shaders.js'; 11 | 12 | const audioFiles = { 13 | "audio-1" : audioFile1, 14 | "audio-2" : audioFile2, 15 | "audio-3" : audioFile3, 16 | } 17 | 18 | let camera, 19 | scene, 20 | renderer, 21 | controls, 22 | stats, 23 | raycaster 24 | 25 | let planeMesh, 26 | debugPlaneMesh, 27 | volumeMesh, 28 | pointer, 29 | specTexture, 30 | clock, 31 | curveMesh, 32 | curve_data 33 | 34 | let AudioContext; 35 | // global var for web audio API AudioContext 36 | let audioCtx; 37 | let bufferSize = 1024; 38 | let hopSize = 512; 39 | let melNumBands = 96; 40 | let numFrames = 1; 41 | let exports = {}; 42 | exports = require('../utils/ringbuf.js/index.js') 43 | let scaledMelspectrum = []; 44 | let recording,running 45 | 46 | const gui = new GUI( {width: 200 } ); 47 | // gui parameters 48 | 49 | const params = { 50 | df_type: 0,dist_func_tube: 1.0, dist_func_box: 1.0, dist_func_plane: 1.0, df_sphere_tube : 0.0, 51 | df_sphere_box: 0.0, df_sphere_plane: 0.0, df_tube_box: 0.0, df_tube_plane: 0.0, df_plane_box: 0.0, 52 | scale_x: 1, scale_y: 1, scale_z: 1, 53 | global_scale: 0.03, min_dist:0, max_dist:1, 54 | rot_x: 0, rot_y: 0, rot_z: 0, 55 | translation_x: 0, translation_y: 0, translation_z: 0, 56 | 
playback_rate: 1.0, 57 | color_mode: 0, color_preset_type: 0, color_space: 0, uni_color: "#9838ff", 58 | color_1: "#000000", color_2: "#ffffff", 59 | mel_spec_bins: melNumBands, 60 | num_frames: numFrames, 61 | fft_size: bufferSize, 62 | dt_scale: 0.1, 63 | max_steps: 100, 64 | }; 65 | 66 | // From a series of URL to js files, get an object URL that can be loaded in an 67 | // AudioWorklet. This is useful to be able to use multiple files (utils, data 68 | // structure, main DSP, etc.) without either using static imports, eval, manual 69 | // concatenation with or without a build step, etc. 70 | function URLFromFiles(files) { 71 | const promises = files 72 | .map((file) => fetch(file) 73 | .then((response) => response.text())); 74 | return Promise 75 | .all(promises) 76 | .then((texts) => { 77 | texts.unshift("var exports = {};"); // hack to make injected umd modules work 78 | const text = texts.join(''); 79 | const blob = new Blob([text], { type: "text/javascript" }); 80 | 81 | return URL.createObjectURL(blob); 82 | }); 83 | } 84 | 85 | try { 86 | AudioContext = window.AudioContext || window.webkitAudioContext; 87 | audioCtx = new AudioContext(); 88 | } catch (e) { 89 | throw "Could not instantiate AudioContext: " + e.message; 90 | } 91 | // global var getUserMedia mic stream 92 | let gumStream; 93 | // global audio node variables 94 | let source, mic; 95 | 96 | let gain; 97 | let melspectrogramNode; 98 | 99 | // Shared data with AudioWorkletGlobalScope 100 | let audioReader; 101 | 102 | // Curve constants 103 | const NUM_CURVE_POINTS = 5; 104 | 105 | // Volume constants 106 | const x_dim = 4; 107 | const y_dim = 4; 108 | const z_dim = 4; 109 | const x_scale = 1; 110 | const y_scale = 1; 111 | const z_scale = 1; 112 | 113 | // Setup audio 114 | 115 | // Some browsers partially implement mediaDevices. We can't assign an object 116 | // with getUserMedia as it would overwrite existing properties. 117 | // Add the getUserMedia property if it's missing. 
let navigatorCopy = navigator;
if (navigatorCopy.mediaDevices === undefined) {
    navigatorCopy.mediaDevices = {};
}

// Set up UI Elements
const fileInput = document.getElementById('loadFileInput');
const recordButton = document.getElementById('recordButton');
const player = document.getElementById("audioPlayer");
player.src = audioFiles['audio-1'];
player.load();

// URL shim for older WebKit browsers.
const blob = window.URL || window.webkitURL;
const buttonGroup = document.getElementById("button-group");

// Point the <audio> element at a user-selected local file.
function onLoadFile(inputElement) {
    player.src = blob.createObjectURL(inputElement.files[0]);
    player.load();
}


// Main body
init();
animate();

// Build the scene: renderer, camera, controls, GUI, spectrogram textures,
// the raymarched volume mesh, and all event listeners.
function init() {
    scene = new THREE.Scene();

    // Renderer
    renderer = new THREE.WebGLRenderer({ antialias: true });
    renderer.setPixelRatio(window.devicePixelRatio);
    renderer.setSize(window.innerWidth, window.innerHeight);
    document.body.appendChild(renderer.domElement);

    // Camera (perspective)
    const aspect = window.innerWidth / window.innerHeight;
    camera = new THREE.PerspectiveCamera(45, aspect, 0.01, 1000);
    camera.position.set(-2, 1, 2);
    scene.add(camera);

    // Controls
    controls = new OrbitControls(camera, renderer.domElement);
    controls.addEventListener('change', render);
    controls.minZoom = 0.1;
    controls.maxZoom = 10;
    controls.enablePan = false;
    controls.update();

    // GUI
    addGUI();

    // Clock
    clock = new THREE.Clock();

    // Debug spectrogram texture (kept out of the scene by default)
    let planeGeo1 = new THREE.PlaneGeometry(2, 2);
    let planeMat1 = new THREE.MeshBasicMaterial({ map: createDataTexture(x_dim, y_dim), side: THREE.DoubleSide });
    debugPlaneMesh = new THREE.Mesh(planeGeo1, planeMat1);
    debugPlaneMesh.position.set(-2, 0, -1);
    // scene.add(debugPlaneMesh);

    specTexture = createDataTexture(numFrames, melNumBands);

    // Curve
    const curve = initCurveData(NUM_CURVE_POINTS);
    const points = curve.getPoints(5);
    const geometry = new THREE.BufferGeometry().setFromPoints(points);
    const material = new THREE.LineBasicMaterial({ color: 0xff0000 });

    // Create curveMesh to add to the scene
    curveMesh = new THREE.Line(geometry, material);
    curveMesh.matrixAutoUpdate = false;
    //scene.add(curveMesh);

    // Volume
    const volumeGeometry = new THREE.BoxGeometry(x_scale, y_scale, z_scale);

    const volumeUniforms = {
        'volume_scale': { value: new THREE.Vector3(x_scale, y_scale, z_scale) },
        'volume': { value: create3dDataTexture(x_dim, y_dim, z_dim) },
        'volume_dims': { value: new THREE.Vector3(x_dim, y_dim, z_dim) },
        'min_dist': { value: params.min_dist },
        'max_dist': { value: params.max_dist },
        'color_mode': { value: params.color_mode },
        'color_preset_type': { value: params.color_preset_type },
        'color_space': { value: params.color_space },
        'uni_color': { value: new THREE.Color(params.uni_color) },
        'color_1': { value: new THREE.Color(params.color_1) },
        'color_2': { value: new THREE.Color(params.color_2) },
        'aabb_min': { value: new THREE.Vector3() },
        'aabb_max': { value: new THREE.Vector3() },
        'dt_scale': { value: params.dt_scale },
        'max_steps': { value: params.max_steps },
        'spectrum': { value: createDataTexture(x_dim, y_dim) },
        'noise_texture': { value: new THREE.TextureLoader().load(noisyBackground) },
        'curve_data': { value: createCurveDataTexture(curve_data) },
        'time': { value: clock.getElapsedTime() },
        'playback_progress': { value: 0.0 },
        'df_sphere_tube': { value: params.df_sphere_tube },
        'df_sphere_box': { value: params.df_sphere_box },
        'df_sphere_plane': { value: params.df_sphere_plane },
        'df_tube_box': { value: params.df_tube_box },
        'df_tube_plane': { value: params.df_tube_plane },
        'df_plane_box': { value: params.df_plane_box },
        'playback_rate': { value: 1.0 },
        'df_translation': { value: new THREE.Vector3(params.translation_x, params.translation_y, params.translation_z) },
        'df_rot': { value: new THREE.Vector3(params.rot_x, params.rot_y, params.rot_z) },
        'df_scale': { value: new THREE.Vector3(params.scale_x, params.scale_y, params.scale_z) },
        'global_scale': { value: params.global_scale },
        'df_type': { value: 0 }
    };

    const volumeMaterial = new THREE.ShaderMaterial({
        uniforms: volumeUniforms,
        vertexShader: raycastVertexShader,
        fragmentShader: raycastFragmentShader,
        side: THREE.DoubleSide,
        transparent: true
    });

    volumeMesh = new THREE.Mesh(volumeGeometry, volumeMaterial);
    volumeMesh.matrixAutoUpdate = true;
    volumeMesh.geometry.computeBoundingBox();

    volumeMesh.material.uniforms['aabb_min']['value'] = volumeMesh.geometry.boundingBox.min;
    volumeMesh.material.uniforms['aabb_max']['value'] = volumeMesh.geometry.boundingBox.max;

    scene.add(volumeMesh);


    pointer = new THREE.Vector2();

    // Fix: removed window.addEventListener('pointerMove', onPointerMove) —
    // DOM event names are lowercase, so 'pointerMove' never fired; the working
    // document-level 'pointermove' listener below is kept.

    const planeGeo = new THREE.PlaneGeometry(25, 25);
    const planeMat = new THREE.MeshBasicMaterial({ visible: false });
    planeMesh = new THREE.Mesh(planeGeo, planeMat);
    planeMesh.rotation.x = -0.5 * Math.PI;
    // scene.add(planeMesh);
    // planeMesh.name = 'plane';

    raycaster = new THREE.Raycaster();

    // Add helpers
    //addHelpers(scene);
    render();
    document.addEventListener('pointermove', onPointerMove);
    window.addEventListener('resize', onWindowResize);
    recordButton.addEventListener('click', onRecordClickHandler);
    player.addEventListener('play', startAudioProcessingMediaElt);
    player.addEventListener('pause', stopAudioProcessingMediaElt);

    fileInput.addEventListener('change', () => { onLoadFile(fileInput); });
    // Delegate preset-button clicks: only react to <button> elements.
    buttonGroup.addEventListener("click", (e) => {
        const isButton = e.target.nodeName === 'BUTTON';
        if (!isButton) {
            return;
        }
        player.src = audioFiles[e.target.id];
        player.load();
    });
}

// Render one frame.
function render() {
    renderer.render(scene, camera);
}

// Keep the perspective camera and renderer in sync with the window size.
function onWindowResize() {
    camera.aspect = window.innerWidth / window.innerHeight;
    camera.updateProjectionMatrix();

    renderer.setSize(window.innerWidth, window.innerHeight);

    render();
}

// Track the pointer in normalized device coordinates ([-1, 1] on both axes).
function onPointerMove(event) {
    pointer.x = (event.clientX / window.innerWidth) * 2 - 1;
    pointer.y = -(event.clientY / window.innerHeight) * 2 + 1;
}

// Optional debug helpers (axes; grid and stats kept disabled).
function addHelpers(scene) {
    // const gridHelper = new THREE.GridHelper(10, 10);
    // scene.add(gridHelper);
    // stats = Stats();
    //document.body.appendChild(stats.dom)
    const axesHelper = new THREE.AxesHelper(3);
    scene.add(axesHelper);
}

// Push per-frame values (time, curve data, playback progress) into the shader.
function updateUniforms() {
    volumeMesh.material.uniforms['time']['value'] = clock.getElapsedTime();
    volumeMesh.material.uniforms['curve_data']['value'] = updateCurveData(curveMesh, NUM_CURVE_POINTS);
    // NOTE(review): player.duration is NaN before metadata loads, making this
    // NaN on the first frames — confirm the shader tolerates that.
    volumeMesh.material.uniforms['playback_progress']['value'] = player.currentTime / player.duration;
}

// Animation loop.
function animate() {
    requestAnimationFrame(animate);
    updateMeshTexture();
    updateUniforms();
    //stats.update();
    render();
}

// Creates 3D texture with RGB gradient along the XYZ axes
function create3dDataTexture(width, height, depth) {
    const d = new Uint8Array(width * height * depth * 4);
    let stride = 0;

    for (let z = 0; z < depth; z++) {
        for (let y = 0; y < height; y++) {
            for (let x = 0; x < width; x++) {
                d[stride + 0] = (x / width) * 255;
                d[stride + 1] = (y / height) * 255;
                d[stride + 2] = (z / depth) * 255;
                d[stride + 3] = 255;
                stride += 4;
            }
        }
    }
    const texture = new THREE.Data3DTexture(d, width, height, depth);
    texture.format = THREE.RGBAFormat;
    texture.unpackAlignment = 1;
    texture.needsUpdate = true;

    return texture;
}

// Creates a 2D placeholder texture (RGBA bytes, near-black red channel).
function createDataTexture(width, height) {
    const d = new Uint8Array(width * height * 4);

    let stride = 0;
    for (let y = 0; y < height; y++) {
        for (let x = 0; x < width; x++) {
            // NOTE(review): channel values of 1 (of 255) are nearly black —
            // confirm this placeholder shade is intentional.
            d[stride + 0] = 1;
            d[stride + 1] = 0;
            d[stride + 2] = 0;
            d[stride + 3] = 1;
            stride += 4;
        }
    }
    const texture = new THREE.DataTexture(d, width, height);
    // Fix: the buffer is filled with a 4-byte RGBA stride, but the format was
    // RedFormat (1 byte/texel), which mismatched the data layout; use
    // RGBAFormat for consistency with create3dDataTexture/updateSpectrumData.
    texture.format = THREE.RGBAFormat;
    texture.unpackAlignment = 1;

    return texture;
}

// Scroll the spectrogram texture one column left and write `new_data` (one
// value per mel band) into the rightmost column, then swap it onto the meshes.
function updateSpectrumData(texture, new_data) {
    const width = numFrames;
    const height = melNumBands;
    const data = texture.image.data;
    let stride = 0;
    for (let y = 0; y < height; y++) {
        for (let x = 0; x < width; x++) {
            if (x < width - 1) {
                // shift the index by 4 to get R,G,B or A value of the subsequent column
                data[stride] = data[stride + 4];
            } else {
                // set red value of texture
                data[stride] = new_data[y];
            }
            data[stride + 1] = 0;
            data[stride + 2] = 0;
            data[stride + 3] = 1;
            stride += 4;
        }
    }
    // NOTE(review): a brand-new DataTexture is allocated (and disposed) on
    // every call over the same backing buffer; mutating the existing texture
    // and setting needsUpdate would avoid the per-frame churn.
    const new_texture = new THREE.DataTexture(data, width, height);
    new_texture.format = THREE.RGBAFormat;
    // Enable linear filtering for smoother texture interpolation
    new_texture.minFilter = THREE.LinearFilter;
    new_texture.magFilter = THREE.LinearFilter;
    // Optionally, enable anisotropic filtering for improved quality at oblique angles
    new_texture.anisotropy = renderer.capabilities.getMaxAnisotropy();

    new_texture.unpackAlignment = 1;
    new_texture.needsUpdate = true;
    setMeshTexture(new_texture);
    new_texture.dispose();
}

// Pack curve positions/tangents/normals/binormals into a numPoints x 1 x 4-row
// float texture for the raymarching shader.
function createCurveDataTexture(data) {
    const d = new Float32Array(data.numPoints * 4 * 4);
    let stride = 0;
    const pt_data = [data.positions, data.tangents, data.normals, data.binormals];
    for (let j = 0; j < 4; j++) {
        for (let k = 0; k < data.numPoints; k++) {
            d[stride + 0] = pt_data[j][k].x;
            d[stride + 1] = pt_data[j][k].y;
            d[stride + 2] = pt_data[j][k].z;
            d[stride + 3] = 1.0;
            stride += 4;
        }
    }
    const texture = new THREE.DataTexture(d, data.numPoints, 1);
    texture.type = THREE.FloatType;
    texture.format = THREE.RGBAFormat;
    texture.minFilter = THREE.NearestFilter;
    texture.magFilter = THREE.NearestFilter;
    texture.unpackAlignment = 1;
    texture.needsUpdate = true;
    return texture;
}

/**
 * Builds the initial Catmull-Rom curve (5 control points along Z in [-0.5, 0.5])
 * and fills the module-level `curve_data` record (positions / Frenet frames)
 * consumed by createCurveDataTexture.
 *
 * @param {number} num_points Number of sampled points / Frenet frames.
 * @returns {THREE.CatmullRomCurve3} The constructed curve.
 */
function initCurveData(num_points) {

  const curve = new THREE.CatmullRomCurve3([
    new THREE.Vector3(0, 0.0, -0.5),
    new THREE.Vector3(0, 0.0, -0.25),
    new THREE.Vector3(0, 0.0, 0),
    new THREE.Vector3(0, 0.0, 0.25),
    new THREE.Vector3(0, 0.0, 0.5)
  ]);
  const cPoints = curve.getSpacedPoints(num_points);
  // closed=true: Frenet frames are computed for a closed loop.
  const cObjects = curve.computeFrenetFrames(num_points, true);

  // NOTE: assigns the shared `curve_data` binding declared elsewhere in this file.
  curve_data = {
    positions: cPoints,
    tangents: cObjects.tangents,
    normals: cObjects.normals,
    binormals: cObjects.binormals,
    numPoints: num_points
  }
  return curve;
}

/**
 * Re-samples the curve from the mesh's current vertex positions, animates the
 * last point's Y by |sin(elapsed time)|, refreshes `curve_data`, and returns a
 * new data texture encoding the updated positions/frames.
 *
 * @param {THREE.Mesh} curve_mesh Mesh whose position attribute seeds the curve.
 * @param {number} num_points Number of control points to read / re-sample.
 * @returns {THREE.DataTexture} Texture produced by createCurveDataTexture.
 */
function updateCurveData(curve_mesh, num_points) {

  const geo_array = curve_mesh.geometry.attributes.position.array;

  // rebuild the curve
  const positions = Array(num_points);
  let i3 = 0; // flat xyz index into the position attribute
  for (let i = 0; i < num_points; i++) {
    if (i == num_points - 1) {
      // Last control point bobs vertically over time to animate the curve tip.
      positions[i] = new THREE.Vector3(
        geo_array[i3 + 0],
        geo_array[i3 + 1] + Math.abs(Math.sin(clock.getElapsedTime())),
        geo_array[i3 + 2]);
    }
    else {
      positions[i] = new THREE.Vector3(
        geo_array[i3 + 0],
        geo_array[i3 + 1],
        geo_array[i3 + 2]);
    }
    i3 += 3;
  }

  const curve = new THREE.CatmullRomCurve3(positions);
  const cPoints = curve.getSpacedPoints(num_points);
  const cObjects = curve.computeFrenetFrames(num_points, true);

  // update curve_data interface
  curve_data = {
    positions: cPoints,
    tangents: cObjects.tangents,
    normals: cObjects.normals,
    binormals: cObjects.binormals,
    numPoints: num_points
  }

  return createCurveDataTexture(curve_data);
}
/**
 * Routes one texture to both the debug plane's material map and the volume
 * shader's `spectrum` uniform.
 *
 * @param {THREE.Texture} texture Texture to install on both materials.
 */
function setMeshTexture(texture) {
  (debugPlaneMesh.material).map = texture;
  (volumeMesh.material).uniforms['spectrum']['value'] = texture;
  // NOTE(review): disposing a texture immediately after assigning it to live
  // materials looks suspicious — dispose() frees the GPU resource the
  // materials now reference. Confirm this is intentional before relying on it.
  texture.dispose();
}


function
onRecordClickHandler() {
  // Toggle between recording/idle based on the button's current CSS state.
  recording = recordButton.classList.contains("recording");
  if (recording) {
    recordButton.classList.remove("recording");
    recordButton.innerHTML = "Record";
    recordButton.classList.remove("bg-emerald-200");
    recordButton.disabled = false;
    stopMicRecordStream();
  } else {

    // Disable until the mic stream is up; re-enabled in startAudioProcessingStream.
    recordButton.disabled = true;
    // start microphone stream using getUserMedia and run feature extraction
    startMicRecordStream();
  }
}

// record native microphone input and do further audio processing on each audio buffer using the given callback functions
function startMicRecordStream() {
  if (navigator.mediaDevices.getUserMedia) {
    console.log("Initializing audio...");
    navigator.mediaDevices.getUserMedia({ audio: true, video: false })
      .then(startAudioProcessingStream)
      .catch(function (message) {
        // NOTE(review): throwing a string from inside a rejection handler only
        // produces an unhandled rejection; nothing upstream catches it.
        throw "Could not access microphone - " + message;
      });
  } else {
    throw "Could not access microphone - getUserMedia not available";
  }
}

/**
 * getUserMedia success callback: (re)activates the AudioContext, builds the
 * mic -> worklet -> gain graph sources, loads the worklet module code from
 * CDNs, and flips the record button into its "Stop" state.
 *
 * @param {MediaStream} stream Live microphone stream from getUserMedia.
 */
function startAudioProcessingStream(stream) {
  gumStream = stream; // keep a module-level handle so stopMicRecordStream can stop tracks
  if (gumStream.active) {
    if (audioCtx.state == "closed") {
      audioCtx = new AudioContext();
    }
    else if (audioCtx.state == "suspended") {
      audioCtx.resume();
    }

    mic = audioCtx.createMediaStreamSource(gumStream);
    gain = audioCtx.createGain();
    // Gain is held at 0 so the worklet output is not audible.
    gain.gain.setValueAtTime(0, audioCtx.currentTime);

    let codeForProcessorModule = ["https://cdn.jsdelivr.net/npm/essentia.js@0.1.3/dist/essentia-wasm.umd.js",
      "https://cdn.jsdelivr.net/npm/essentia.js@0.1.3/dist/essentia.js-extractor.umd.js",
      "https://raw.githack.com/MTG/essentia.js/master/examples/demos/melspectrogram-rt/melspectrogram-processor.js",
      "https://unpkg.com/ringbuf.js@0.1.0/dist/index.js"];

    // inject Essentia.js code into AudioWorkletGlobalScope context, then setup audio graph and start animation
    URLFromFiles(codeForProcessorModule)
      .then((concatenatedCode) => {
        audioCtx.audioWorklet.addModule(concatenatedCode)
          .then(setupAudioGraphStream)
          .catch(function moduleLoadRejected(msg) {
            console.log(`There was a problem loading the AudioWorklet module code: \n ${msg}`);
          });
      })
      .catch((msg) => {
        console.log(`There was a problem retrieving the AudioWorklet module code: \n ${msg}`);
      })
    // // set button to stop
    recordButton.classList.add("recording");
    recordButton.innerHTML = "Stop";
    recordButton.classList.add("bg-emerald-200");
    recordButton.disabled = false;
  } else {
    throw "Mic stream not active";
  }
}
/**
 * Media-element variant: wires the <audio> `player` element into the worklet
 * graph instead of the microphone. Mirrors startAudioProcessingStream.
 */
function startAudioProcessingMediaElt() {
  if (audioCtx.state == "closed") {
    audioCtx = new AudioContext();
  }
  else if (audioCtx.state == "suspended") {
    audioCtx.resume();
  }

  source = audioCtx.createMediaElementSource(player);
  gain = audioCtx.createGain();
  gain.gain.setValueAtTime(0, audioCtx.currentTime);
  let codeForProcessorModule = ["https://cdn.jsdelivr.net/npm/essentia.js@0.1.3/dist/essentia-wasm.umd.js",
    "https://cdn.jsdelivr.net/npm/essentia.js@0.1.3/dist/essentia.js-extractor.umd.js",
    "https://raw.githack.com/MTG/essentia.js/master/examples/demos/melspectrogram-rt/melspectrogram-processor.js",
    "https://unpkg.com/ringbuf.js@0.1.0/dist/index.js"];

  // inject Essentia.js code into AudioWorkletGlobalScope context, then setup audio graph and start animation
  URLFromFiles(codeForProcessorModule)
    .then((concatenatedCode) => {
      audioCtx.audioWorklet.addModule(concatenatedCode)
        .then(setupAudioGraphMediaElt)
        .catch(function moduleLoadRejected(msg) {
          console.log(`There was a problem loading the AudioWorklet module code: \n ${msg}`);
        });
    })
    .catch((msg) => {
      console.log(`There was a problem retrieving the AudioWorklet module code: \n ${msg}`);
    })
}
/**
 * Stops all mic tracks, closes the AudioContext, resets the record button,
 * and tears down/unsets the mic -> worklet -> gain nodes.
 */
function stopMicRecordStream() {
  // stop mic stream
  gumStream.getAudioTracks().forEach(function (track) {
    track.stop();
    gumStream.removeTrack(track);
  });

  audioCtx.close().then(function () {
    // manage button state
    recordButton.classList.remove("recording");
    recordButton.innerHTML = 'Record';

    // disconnect nodes
    mic.disconnect();
    melspectrogramNode.disconnect();
    gain.disconnect();
    mic = undefined;
    melspectrogramNode = undefined;
    gain = undefined;

    console.log("Stopped recording ...");
  });
}
/**
 * Media-element teardown counterpart: pauses playback, closes the context,
 * and disconnects/unsets the media-element source and worklet node.
 */
function stopAudioProcessingMediaElt() {
  player.pause();
  audioCtx.close().then(function () {
    // disconnect nodes
    source.disconnect();
    melspectrogramNode.disconnect();
    source = undefined;
    melspectrogramNode = undefined;
  });
}
/**
 * Worklet-module-loaded callback for the microphone path: allocates the
 * SharedArrayBuffer ring buffer, creates the melspectrogram AudioWorkletNode,
 * hands the SAB to the processor via its port, and connects
 * mic -> worklet -> gain -> destination (gain is 0, so output is silent).
 */
function setupAudioGraphStream() {
  // increase buffer size in case of glitches
  let sab = exports.RingBuffer.getStorageForCapacity(melNumBands * 42, Float32Array);
  let rb = new exports.RingBuffer(sab, Float32Array);
  audioReader = new exports.AudioReader(rb); // read side, consumed by updateMeshTexture

  melspectrogramNode = new AudioWorkletNode(audioCtx, 'melspectrogram-processor', {
    processorOptions: {
      bufferSize: bufferSize,
      hopSize: hopSize,
      melNumBands: melNumBands,
      sampleRate: audioCtx.sampleRate,
    }
  });

  try {
    // postMessage throws where SharedArrayBuffer transfer is unsupported.
    melspectrogramNode.port.postMessage({
      sab: sab,
    });
  } catch (_) {
    alert("No SharedArrayBuffer transfer support, try another browser.");
    recordButton.disabled = true;
    return;
  }
  recording = recordButton.classList.contains("recording");
  mic.connect(melspectrogramNode);
  melspectrogramNode.connect(gain);
  gain.connect(audioCtx.destination);

}

/**
 * Worklet-module-loaded callback for the media-element path: same SAB ring
 * buffer + worklet wiring as setupAudioGraphStream, but the source also
 * connects directly to the destination so playback stays audible.
 */
function setupAudioGraphMediaElt() {
  // increase buffer size in case of glitches
  let sab = exports.RingBuffer.getStorageForCapacity(melNumBands * 18, Float32Array);
  let rb = new exports.RingBuffer(sab, Float32Array);
  audioReader = new exports.AudioReader(rb);
  melspectrogramNode = new AudioWorkletNode(audioCtx, 'melspectrogram-processor', {
    processorOptions: {
      bufferSize: 1024,
      hopSize: 512,
      melNumBands: melNumBands,
      sampleRate: audioCtx.sampleRate,
    }
  });
  // The AudioWorklet node causes cracking noises during playback so we
  // connect it with a gain node to avoid this.
  try {
    melspectrogramNode.port.postMessage({
      sab: sab,
    });
  } catch (_) {
    alert("No SharedArrayBuffer transfer support, try another browser.");
    return;
  }
  // connect source to destination for playback
  source.connect(audioCtx.destination);
  // connect source to AudioWorklet node for feature extraction
  source.connect(melspectrogramNode);
  melspectrogramNode.connect(gain);
  gain.connect(audioCtx.destination);
}

/**
 * Per-frame poll: dequeues one mel-spectrum frame from the worklet's ring
 * buffer (when available), rescales it, and pushes it into the spectrogram
 * texture via updateSpectrumData.
 */
function updateMeshTexture() {
  let melspectrumBuffer = new Float32Array(melNumBands);
  if (audioReader !== undefined){
    if (audioReader.available_read() >= melNumBands) {
      let toread = audioReader.dequeue(melspectrumBuffer);
      if (toread !== 0) {
        // scale spectrum values to 0 - 255
        // NOTE(review): 35.5 appears to be an empirical scale factor — confirm.
        scaledMelspectrum = melspectrumBuffer.map(x => Math.round(x * 35.5))
      }
    }
  }
  updateSpectrumData(specTexture, scaledMelspectrum);
}

/**
 * Registers all lil-gui controls; each onChange writes straight through to the
 * volume material's uniforms (and related module-level state).
 */
function addGUI() {
  gui.add( params, 'playback_rate').step(0.001).name( 'playback_rate' ).onChange( function ( value ) {
    // Shader uniform stores the inverse rate; the <audio> element gets the rate itself.
    (volumeMesh.material).uniforms['playback_rate']['value'] = 1.0 / value;
    player.playbackRate = value;
  } );
  gui.add( params, 'num_frames').step(1).name( 'num_frames'
).onChange( function ( value ) { 728 | specTexture = createDataTexture(value, melNumBands); 729 | updateMeshTexture(); 730 | numFrames = value; 731 | } ); 732 | // Distance Function 733 | const df_folder = gui.addFolder('distance function') ; 734 | df_folder.add( params, 'min_dist').step(0.01).name( 'min_dist' ).onChange( function ( value ) { 735 | (volumeMesh.material).uniforms['min_dist']['value'] = value; 736 | } ); 737 | df_folder.add( params, 'max_dist').step(0.01).name( 'max_dist' ).onChange( function ( value ) { 738 | (volumeMesh.material).uniforms['max_dist']['value'] = value; 739 | } ); 740 | df_folder.add( params, 'df_type', { 741 | 'Sphere - Tube': 0,'Sphere - Box': 1,'Sphere - Plane': 2, 742 | 'Tube - Box': 3, 'Tube - Plane': 4,'Plane - Box': 5}).name( 'sphere/tube' ).onChange( function ( value ) { 743 | (volumeMesh.material).uniforms['df_type']['value'] = value; 744 | } ); 745 | df_folder.add( params, 'df_sphere_tube', 0, 1).step(0.01).name( 'sphere/tube' ).onChange( function ( value ) { 746 | (volumeMesh.material).uniforms['df_sphere_tube']['value'] = value; 747 | } ); 748 | df_folder.add( params, 'df_sphere_box', 0, 1).step(0.01).name( 'sphere/box' ).onChange( function ( value ) { 749 | (volumeMesh.material).uniforms['df_sphere_box']['value'] = value; 750 | } ); 751 | df_folder.add( params, 'df_sphere_plane', 0, 1).step(0.01).name( 'sphere/plane' ).onChange( function ( value ) { 752 | (volumeMesh.material).uniforms['df_sphere_plane']['value'] = value; 753 | } ); 754 | df_folder.add( params, 'df_tube_box', 0, 1).step(0.01).name( 'tube/box' ).onChange( function ( value ) { 755 | (volumeMesh.material).uniforms['df_tube_box']['value'] = value; 756 | } ); 757 | df_folder.add( params, 'df_tube_plane', 0, 1).step(0.01).name( 'tube/plane' ).onChange( function ( value ) { 758 | (volumeMesh.material).uniforms['df_tube_plane']['value'] = value; 759 | } ); 760 | df_folder.add( params, 'df_plane_box', 0, 1).step(0.01).name( 'plane/box' ).onChange( function ( 
value ) { 761 | (volumeMesh.material).uniforms['df_plane_box']['value'] = value; 762 | } ); 763 | df_folder.add( params, 'global_scale').step(0.0001).name( 'global_scale' ).onChange( function ( value ) { 764 | (volumeMesh.material).uniforms['global_scale']['value'] = value; 765 | } ); 766 | const transforms = gui.addFolder('transforms') ; 767 | transforms.add( params, 'scale_x', 0, 1).step(0.00001).name( 'scale_x' ).onChange( function ( value ) { 768 | (volumeMesh.material).uniforms['df_scale']['value'] = new THREE.Vector3(value, params.scale_y, params.scale_z); 769 | } ); 770 | transforms .add( params, 'scale_y', 0, 1).step(0.00001).name( 'scale_y' ).onChange( function ( value ) { 771 | (volumeMesh.material).uniforms['df_scale']['value'] = new THREE.Vector3(params.scale_x, value, params.scale_z); 772 | } ); 773 | transforms.add( params, 'scale_z', 0, 1).step(0.00001).name( 'scale_z' ).onChange( function ( value ) { 774 | (volumeMesh.material).uniforms['df_scale']['value'] = new THREE.Vector3(params.scale_x, params.scale_y, value); 775 | } ); 776 | transforms.add( params, 'rot_x', -360, 360).step(0.1).name( 'rotate_x' ).onChange( function ( value ) { 777 | (volumeMesh.material).uniforms['df_rot']['value'] = new THREE.Vector3(value, params.rot_y, params.rot_z); 778 | } ); 779 | transforms.add( params, 'rot_y', -360, 360).step(0.1).name( 'rotate_y' ).onChange( function ( value ) { 780 | (volumeMesh.material).uniforms['df_rot']['value'] = new THREE.Vector3(params.rot_x, value, params.rot_z); 781 | } ); 782 | transforms.add( params, 'rot_z', -360, 360).step(0.1).name( 'rotate_z' ).onChange( function ( value ) { 783 | (volumeMesh.material).uniforms['df_rot']['value'] = new THREE.Vector3(params.rot_x, params.rot_y, value); 784 | } ); 785 | transforms.add( params, 'translation_x').step(0.01).name( 'translate_x' ).onChange( function ( value ) { 786 | (volumeMesh.material).uniforms['df_translation']['value'] = new THREE.Vector3(value, params.translation_y, 
params.translation_z); 787 | } ); 788 | transforms.add( params, 'translation_y').step(0.01).name( 'translate_y' ).onChange( function ( value ) { 789 | (volumeMesh.material).uniforms['df_translation']['value'] = new THREE.Vector3(params.translation_x, value, params.translation_z); 790 | } ); 791 | transforms.add( params, 'translation_z').step(0.01).name( 'translate_z' ).onChange( function ( value ) { 792 | (volumeMesh.material).uniforms['df_translation']['value'] = new THREE.Vector3(params.translation_x, params.translation_y, value); 793 | } ); 794 | // Color 795 | const color_folder = gui.addFolder('color') ; 796 | color_folder.add( params, 'color_mode', {'Presets': 0, 'Gradient': 1, 'Unicolor': 2}).name( 'color_mode' ).onChange( function ( value ) { 797 | (volumeMesh.material).uniforms['color_mode']['value'] = value; 798 | } ); 799 | color_folder.add( params, 'color_preset_type', 0, 4).step(1).name( 'color_preset' ).onChange( function ( value ) { 800 | (volumeMesh.material).uniforms['color_preset_type']['value'] = value; 801 | } ); 802 | color_folder.add( params, 'color_space', {'RBG': 0, 'HSV': 1}).name( 'color_space' ).onChange( function ( value ) { 803 | (volumeMesh.material).uniforms['color_space']['value'] = value ; 804 | } ); 805 | color_folder.addColor( params, 'uni_color').name( 'unicolor' ).onChange( function ( value ) { 806 | (volumeMesh.material).uniforms['uni_color']['value'] = new THREE.Color(value) ; 807 | } ); 808 | color_folder.addColor( params, 'color_1').name( 'color_1' ).onChange( function ( value ) { 809 | (volumeMesh.material).uniforms['color_1']['value'] = new THREE.Color(value) ; 810 | } ); 811 | color_folder.addColor( params, 'color_2').name( 'color_2' ).onChange( function ( value ) { 812 | (volumeMesh.material).uniforms['color_2']['value'] = new THREE.Color(value) ; 813 | } ); 814 | // Spectrogram 815 | const spectrogram_folder = gui.addFolder('spectrogram') ; 816 | spectrogram_folder.add( params, 'mel_spec_bins', 10, 96).step(1).name( 
'mel_spec_bins' ).onChange( function ( value ) { 817 | melNumBands = value ; 818 | } ); 819 | // Raycasting 820 | const raycasting_folder = gui.addFolder('raycasting') ; 821 | raycasting_folder.add( params, 'dt_scale', 0.005,).step(0.001).name( 'dt_scale' ).onChange( function ( value ) { 822 | (volumeMesh.material).uniforms['dt_scale']['value'] = value; 823 | } ); 824 | raycasting_folder.add( params, 'max_steps', 1,).step(1).name( 'max_steps' ).onChange( function ( value ) { 825 | (volumeMesh.material).uniforms['max_steps']['value'] = value; 826 | } ); 827 | } 828 | 829 | 830 | 831 | 832 | 833 | 834 | 835 | 836 | 837 | 838 | -------------------------------------------------------------------------------- /src/playground/melspectrogram-processor.js: -------------------------------------------------------------------------------- 1 | // avoid ES Module imports: not available on workers in Firefox nor Safari 2 | let essentiaExtractor = new EssentiaExtractor(exports.EssentiaWASM); 3 | console.log('parsing as js'); 4 | function Float32Concat(first, second) 5 | { 6 | var firstLength = first.length, 7 | result = new Float32Array(firstLength + second.length); 8 | 9 | result.set(first); 10 | result.set(second, firstLength); 11 | 12 | return result; 13 | } 14 | 15 | class MelspectrogramProcessor extends AudioWorkletProcessor { 16 | constructor(options) { 17 | super(); 18 | this._bufferSize = options.processorOptions.bufferSize; 19 | this._hopSize = options.processorOptions.hopSize; 20 | this._melNumBands = options.processorOptions.melNumBands; 21 | this._sampleRate = options.processorOptions.sampleRate; 22 | this._channelCount = 1; 23 | this._extractor = essentiaExtractor; 24 | // modifying default extractor settings 25 | this._extractor.frameSize = this._bufferSize; 26 | this._extractor.hopSize = this._hopSize; 27 | // settings specific to an algorithm 28 | this._extractor.profile.MelBands.numberBands = this._melNumBands; 29 | this._extractor.profile.MelBands.type = 
'power'; 30 | 31 | // buffersize mismatch helpers 32 | this._inputRingBuffer = new ChromeLabsRingBuffer(this._bufferSize, this._channelCount); 33 | this._outputRingBuffer = new ChromeLabsRingBuffer(this._bufferSize, this._channelCount); // changed from 1024 to match block size 34 | 35 | this._accumData = [new Float32Array(this._bufferSize)]; 36 | this._spectrum; 37 | 38 | // Shared Array Buffer config 39 | this.port.onmessage = e => { 40 | this._audio_writer = new AudioWriter(new RingBuffer(e.data.sab, Float32Array)); 41 | }; 42 | } 43 | 44 | process(inputList, outputList, params) { 45 | let input = inputList[0]; 46 | let output = outputList[0]; 47 | 48 | this._inputRingBuffer.push(input); 49 | 50 | if (this._inputRingBuffer.framesAvailable >= this._bufferSize) { 51 | 52 | this._inputRingBuffer.pull(this._accumData); 53 | 54 | this._spectrum = this._extractor.melSpectrumExtractor(this._accumData[0], this._sampleRate); 55 | if (this._audio_writer.available_write() >= this._melNumBands) { 56 | this._audio_writer.enqueue(this._spectrum); 57 | } 58 | 59 | let zeros = new Float32Array(128-this._spectrum.length); 60 | let zeroPaddedSpectrum = Float32Concat(this._spectrum, zeros); 61 | 62 | this._outputRingBuffer.push([zeroPaddedSpectrum]); 63 | 64 | // reset variables 65 | this._accumData = [new Float32Array(this._bufferSize)]; 66 | this._spectrum = null; 67 | } 68 | 69 | this._outputRingBuffer.pull(output); // if ringbuffer does not have enough frames, output will be silent 70 | // console.log(output[0]); 71 | return true; 72 | } 73 | } 74 | 75 | registerProcessor("melspectrogram-processor", MelspectrogramProcessor); 76 | 77 | 78 | 79 | // helper classes from https://github.com/GoogleChromeLabs/web-audio-samples/blob/gh-pages/audio-worklet/design-pattern/lib/wasm-audio-helper.js#L170: 80 | 81 | /** 82 | * Copyright 2018 Google LLC 83 | * 84 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not 85 | * use this file except in compliance with the 
License. You may obtain a copy of 86 | * the License at 87 | * 88 | * http://www.apache.org/licenses/LICENSE-2.0 89 | * 90 | * Unless required by applicable law or agreed to in writing, software 91 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 92 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 93 | * License for the specific language governing permissions and limitations under 94 | * the License. 95 | */ 96 | 97 | // Basic byte unit of WASM heap. (16 bit = 2 bytes) 98 | const BYTES_PER_UNIT = Uint16Array.BYTES_PER_ELEMENT; 99 | 100 | // Byte per audio sample. (32 bit float) 101 | const BYTES_PER_SAMPLE = Float32Array.BYTES_PER_ELEMENT; 102 | 103 | // The max audio channel on Chrome is 32. 104 | const MAX_CHANNEL_COUNT = 32; 105 | 106 | // WebAudio's render quantum size. 107 | const RENDER_QUANTUM_FRAMES = 128; 108 | 109 | /** 110 | * A JS FIFO implementation for the AudioWorklet. 3 assumptions for the 111 | * simpler operation: 112 | * 1. the push and the pull operation are done by 128 frames. (Web Audio 113 | * API's render quantum size in the speficiation) 114 | * 2. the channel count of input/output cannot be changed dynamically. 115 | * The AudioWorkletNode should be configured with the `.channelCount = k` 116 | * (where k is the channel count you want) and 117 | * `.channelCountMode = explicit`. 118 | * 3. This is for the single-thread operation. (obviously) 119 | * 120 | * @class 121 | */ 122 | class ChromeLabsRingBuffer { 123 | /** 124 | * @constructor 125 | * @param {number} length Buffer length in frames. 126 | * @param {number} channelCount Buffer channel count. 
   */
  constructor(length, channelCount) {
    this._readIndex = 0;   // next frame to read
    this._writeIndex = 0;  // next frame to write
    this._framesAvailable = 0;

    this._channelCount = channelCount;
    this._length = length;
    this._channelData = [];
    // One Float32Array of `length` frames per channel.
    for (let i = 0; i < this._channelCount; ++i) {
      this._channelData[i] = new Float32Array(length);
    }
  }

  /**
   * Getter for Available frames in buffer.
   *
   * @return {number} Available frames in buffer.
   */
  get framesAvailable() {
    return this._framesAvailable;
  }

  /**
   * Push a sequence of Float32Arrays to buffer.
   *
   * @param {array} arraySequence A sequence of Float32Arrays.
   */
  push(arraySequence) {
    // The channel count of arraySequence and the length of each channel must
    // match with this buffer object.

    // Transfer data from the |arraySequence| storage to the internal buffer.
    let sourceLength = arraySequence[0].length;
    for (let i = 0; i < sourceLength; ++i) {
      let writeIndex = (this._writeIndex + i) % this._length;
      for (let channel = 0; channel < this._channelCount; ++channel) {
        this._channelData[channel][writeIndex] = arraySequence[channel][i];
      }
    }

    // NOTE(review): the index is reset to 0 once it reaches the end rather
    // than wrapped modulo `_length`; this is only equivalent when push sizes
    // divide the buffer length evenly (the 128-frame render-quantum
    // assumption stated in the class comment) — confirm before reusing with
    // other sizes.
    this._writeIndex += sourceLength;
    if (this._writeIndex >= this._length) {
      this._writeIndex = 0;
    }

    // For excessive frames, the buffer will be overwritten.
    this._framesAvailable += sourceLength;
    if (this._framesAvailable > this._length) {
      this._framesAvailable = this._length;
    }
  }

  /**
   * Pull data out of buffer and fill a given sequence of Float32Arrays.
   *
   * @param {array} arraySequence An array of Float32Arrays.
   */
  pull(arraySequence) {
    // The channel count of arraySequence and the length of each channel must
    // match with this buffer object.
188 | 189 | // If the FIFO is completely empty, do nothing. 190 | if (this._framesAvailable === 0) { 191 | return; 192 | } 193 | 194 | let destinationLength = arraySequence[0].length; 195 | 196 | // Transfer data from the internal buffer to the |arraySequence| storage. 197 | for (let i = 0; i < destinationLength; ++i) { 198 | let readIndex = (this._readIndex + i) % this._length; 199 | for (let channel = 0; channel < this._channelCount; ++channel) { 200 | arraySequence[channel][i] = this._channelData[channel][readIndex]; 201 | } 202 | } 203 | 204 | this._readIndex += destinationLength; 205 | if (this._readIndex >= this._length) { 206 | this._readIndex = 0; 207 | } 208 | 209 | this._framesAvailable -= destinationLength; 210 | if (this._framesAvailable < 0) { 211 | this._framesAvailable = 0; 212 | } 213 | } 214 | } // class ChromeLabsRingBuffer -------------------------------------------------------------------------------- /src/playground/objects.js: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/a-sumo/hyperstep/8f44400a74d3433d91ab6ab3740f2c273eed5c74/src/playground/objects.js -------------------------------------------------------------------------------- /src/playground/scene.js: -------------------------------------------------------------------------------- 1 | import * as THREE from 'three'; 2 | import { OrbitControls } from 'three/examples/jsm/controls/OrbitControls.js'; 3 | import { volumeMesh, debugPlaneMesh, planeMesh, createDataTexture, create3dDataTexture } from './objects'; 4 | import { camera, renderer, scene, controls, pointer, raycaster, clock } from './globals'; 5 | 6 | export function initScene() { 7 | // ... 
All the scene setup code from init function 8 | } 9 | 10 | export function animate() { 11 | requestAnimationFrame(animate); 12 | updateMeshTexture(); 13 | updateUniforms(); 14 | renderer.render(scene, camera); 15 | } 16 | 17 | // Other functions such as onWindowResize, onPointerMove, etc. 18 | function onWindowResize() { 19 | 20 | // renderer.setSize( window.innerWidth, window.innerHeight ); 21 | 22 | // const aspect = window.innerWidth / window.innerHeight; 23 | 24 | // const frustumHeight = camera.top - camera.bottom; 25 | 26 | // camera.left = - frustumHeight * aspect / 2; 27 | // camera.right = frustumHeight * aspect / 2; 28 | 29 | // camera.updateProjectionMatrix(); 30 | camera.aspect = window.innerWidth / window.innerHeight; 31 | camera.updateProjectionMatrix(); 32 | 33 | renderer.setSize(window.innerWidth, window.innerHeight); 34 | 35 | render(); 36 | 37 | } 38 | 39 | function onPointerMove(event) { 40 | 41 | pointer.x = (event.clientX / window.innerWidth) * 2 - 1; 42 | pointer.y = - (event.clientY / window.innerHeight) * 2 + 1; 43 | 44 | } -------------------------------------------------------------------------------- /src/playground/sceneObject.js: -------------------------------------------------------------------------------- 1 | import * as THREE from 'three'; 2 | 3 | // Scene, camera, renderer, and object initialization 4 | export function init() { 5 | // Initialization code for scene, camera, renderer 6 | } 7 | 8 | export function animate() { 9 | // Animation loop 10 | } 11 | -------------------------------------------------------------------------------- /src/playground/shaders.js: -------------------------------------------------------------------------------- 1 | // GLSL shader code 2 | export const raycastVertexShader = /* glsl */` 3 | uniform vec3 volume_scale; 4 | out vec3 vray_dir; 5 | flat out vec3 transformed_eye; 6 | 7 | void main(void) { 8 | vec3 volume_translation = vec3(0.5) - volume_scale * 0.5; 9 | gl_Position = projectionMatrix * 
modelViewMatrix * vec4(position, 1); 10 | transformed_eye = (cameraPosition - volume_translation) / volume_scale; 11 | vray_dir = position - transformed_eye; 12 | }`; 13 | 14 | export const raycastFragmentShader = /* glsl */` 15 | precision highp int; 16 | precision highp float; 17 | in vec3 vray_dir; 18 | flat in vec3 transformed_eye; 19 | const float Epsilon = 1e-10; 20 | // Scene 21 | uniform highp sampler3D volume; 22 | uniform highp sampler2D spectrum; 23 | uniform highp sampler2D curve_data; 24 | uniform vec3 aabb_min; 25 | uniform vec3 aabb_max; 26 | uniform highp sampler2D noise_texture; 27 | // playback 28 | uniform float time; 29 | uniform float playback_progress; 30 | uniform float playback_rate; 31 | 32 | // distance field 33 | uniform int df_type; 34 | uniform vec3 df_scale; 35 | uniform float global_scale; 36 | uniform float df_sphere_tube; 37 | uniform float df_sphere_box; 38 | uniform float df_sphere_plane; 39 | uniform float df_tube_box; 40 | uniform float df_tube_plane; 41 | uniform float df_plane_box; 42 | uniform vec3 df_rot; 43 | uniform vec3 df_translation; 44 | uniform float min_dist; 45 | uniform float max_dist; 46 | 47 | // raycasting volume 48 | uniform float dt_scale; 49 | uniform int max_steps; 50 | uniform ivec3 volume_dims; 51 | uniform vec3 volume_scale; 52 | uniform int color_space; 53 | uniform int color_mode; 54 | uniform int color_preset_type; 55 | uniform vec3 uni_color; 56 | uniform vec3 color_1; 57 | uniform vec3 color_2; 58 | 59 | 60 | // Axis-Aligned Bounding Box intersection 61 | vec2 intersect_box(vec3 aabbMin, vec3 aabbMax, vec3 orig, vec3 dir) { 62 | 63 | vec3 inv_dir = 1.0 / dir; 64 | vec3 tmin_tmp = (aabbMin - orig) * inv_dir; 65 | vec3 tmax_tmp = (aabbMax - orig) * inv_dir; 66 | vec3 tmin = min(tmin_tmp, tmax_tmp); 67 | vec3 tmax = max(tmin_tmp, tmax_tmp); 68 | float t0 = max(tmin.x, max(tmin.y, tmin.z)); 69 | float t1 = min(tmax.x, min(tmax.y, tmax.z)); 70 | return vec2(t0, t1); 71 | } 72 | // Color conversions 73 | 
// from: http://lolengine.net/blog/2013/07/27/rgb-to-hsv-in-glsl
oc * axis.x * axis.y + axis.z * s, oc * axis.y * axis.y + c, oc * axis.y * axis.z - axis.x * s, 0.0, 136 | oc * axis.z * axis.x - axis.y * s, oc * axis.y * axis.z + axis.x * s, oc * axis.z * axis.z + c, 0.0, 137 | 0.0, 0.0, 0.0, 1.0); 138 | } 139 | 140 | vec3 rotate(vec3 v, vec3 axis, float angle) { 141 | mat4 m = rotationMatrix(axis, angle); 142 | return (m * vec4(v, 1.0)).xyz; 143 | } 144 | 145 | // from http://www.java-gaming.org/index.php?topic=35123.0 146 | vec4 cubic(float v){ 147 | vec4 n = vec4(1.0, 2.0, 3.0, 4.0) - v; 148 | vec4 s = n * n * n; 149 | float x = s.x; 150 | float y = s.y - 4.0 * s.x; 151 | float z = s.z - 4.0 * s.y + 6.0 * s.x; 152 | float w = 6.0 - x - y - z; 153 | return vec4(x, y, z, w) * (1.0/6.0); 154 | } 155 | 156 | vec4 textureBicubic(sampler2D sampler, vec2 texCoords){ 157 | 158 | vec2 texSize = vec2(textureSize(sampler, 0)); 159 | vec2 invTexSize = 1.0 / texSize; 160 | 161 | texCoords = texCoords * texSize - 0.5; 162 | 163 | 164 | vec2 fxy = fract(texCoords); 165 | texCoords -= fxy; 166 | 167 | vec4 xcubic = cubic(fxy.x); 168 | vec4 ycubic = cubic(fxy.y); 169 | 170 | vec4 c = texCoords.xxyy + vec2 (-0.5, +1.5).xyxy; 171 | 172 | vec4 s = vec4(xcubic.xz + xcubic.yw, ycubic.xz + ycubic.yw); 173 | vec4 offset = c + vec4 (xcubic.yw, ycubic.yw) / s; 174 | 175 | offset *= invTexSize.xxyy; 176 | 177 | vec4 sample0 = texture(sampler, offset.xz); 178 | vec4 sample1 = texture(sampler, offset.yz); 179 | vec4 sample2 = texture(sampler, offset.xw); 180 | vec4 sample3 = texture(sampler, offset.yw); 181 | 182 | float sx = s.x / (s.x + s.y); 183 | float sy = s.z / (s.z + s.w); 184 | 185 | return mix( 186 | mix(sample3, sample2, sx), mix(sample1, sample0, sx) 187 | , sy); 188 | } 189 | 190 | 191 | void main(void) { 192 | vec3 ray_dir = normalize(vray_dir); 193 | vec2 t_hit = intersect_box(aabb_min, aabb_max, transformed_eye, ray_dir); 194 | 195 | if (t_hit.x > t_hit.y) { 196 | discard; 197 | } 198 | 199 | t_hit.x = max(t_hit.x, 0.0); 200 | vec3 dt_vec 
= 1.0 / (vec3(volume_dims) * abs(ray_dir)); 201 | 202 | float dt = dt_scale * min(dt_vec.x, min(dt_vec.y, dt_vec.z)); 203 | vec3 p = transformed_eye + (t_hit.x + dt) * ray_dir; 204 | 205 | vec4 spec_val = textureBicubic(spectrum, vec2(0.0, 0.0)); 206 | spec_val.rgba = vec4(0.0); 207 | 208 | // Frequency coordinate 209 | float u_coords = 0.0; 210 | // Time coordinate 211 | float v_coords = 0.0; 212 | 213 | float dist = 0.0; 214 | 215 | int step = 0; 216 | for (float t = t_hit.x; t < t_hit.y; t += dt) { 217 | if (step > max_steps){ 218 | break; 219 | } 220 | // Sample the noise texture to modulate the step length 221 | vec2 noiseCoords = p.xy * 0.1; // 'noiseScale' controls the noise frequency 222 | float noiseFactor = texture(noise_texture, noiseCoords).r; 223 | 224 | // Modulate 'dt' using the noise factor 225 | float noiseIntensity = 0.0; // Adjust this to control the effect intensity 226 | float modulatedDt = dt * (1.0 + noiseFactor * noiseIntensity); 227 | 228 | // position used for distance field calculation 229 | vec3 p_dist = p; 230 | vec3 p_dist_r = vec3(0.0); 231 | vec3 p_dist_t = vec3(0.0); 232 | vec3 p_dist_r_t = vec3(0.0); 233 | 234 | // rotate 235 | p_dist_r = rotate(p_dist, vec3(1.0, 0.0, 0.0) , radians( df_rot.x)); 236 | p_dist_r = rotate(p_dist_r, vec3(0.0, 1.0, 0.0) , radians( df_rot.y)); 237 | p_dist_r = rotate(p_dist_r, vec3(0.0, 0.0, 1.0) , radians( df_rot.z)); 238 | 239 | // translate 240 | p_dist_t = p_dist - df_translation; 241 | 242 | // rotate then translate 243 | p_dist_r_t = p_dist_r - df_translation; 244 | 245 | // distance function 246 | // sphere 247 | float dist_sphere = clamp(length(p_dist_r_t), 0.0, 1.0); 248 | float u_coords_sphere = playback_progress; 249 | 250 | // tube 251 | float dist_tube = length(p_dist_r_t.xy); 252 | float u_coords_tube = (p_dist_r_t.z - 0.5) / playback_rate + 1.; 253 | 254 | // plane 255 | // normal vector 256 | vec3 plane_n = vec3(0.0); 257 | plane_n = rotate(vec3(0.0,1.0,0.0), vec3(1.0, 0.0, 0.0) , 
radians( df_rot.x)); 258 | plane_n = rotate(plane_n, vec3(0.0, 1.0, 0.0) , radians( df_rot.y)); 259 | plane_n = rotate(plane_n, vec3(0.0, 0.0, 1.0) , radians( df_rot.z)); 260 | float dist_plane = dot(p_dist_t, plane_n); 261 | float u_coords_plane = playback_progress; 262 | 263 | // round box 264 | float dist_box = sdRoundBox(p_dist_r_t, df_scale * global_scale * 1.3, 0.0); 265 | float u_coords_box = u_coords_sphere; 266 | 267 | // Interpolate between distance functions 268 | if(df_type == 0){ 269 | u_coords = mix(u_coords_sphere, u_coords_tube, df_sphere_tube); 270 | dist = mix(dist_sphere, dist_tube, df_sphere_tube); 271 | } 272 | else if ( df_type == 1){ 273 | u_coords = mix(u_coords_sphere, u_coords_box, df_sphere_box); 274 | dist = mix(dist_sphere, dist_box, df_sphere_box); 275 | } 276 | else if ( df_type == 2){ 277 | u_coords = mix(u_coords_sphere, u_coords_plane, df_sphere_plane); 278 | dist = mix(dist_sphere, dist_plane, df_sphere_plane); 279 | } 280 | else if ( df_type == 3){ 281 | u_coords = mix(u_coords_tube, u_coords_box, df_tube_box); 282 | dist = mix(dist_tube, dist_box, df_tube_box); 283 | } 284 | else if (df_type == 4){ 285 | u_coords = mix(u_coords_tube, u_coords_plane, df_tube_plane); 286 | dist = mix(dist_tube, dist_plane, df_tube_plane); 287 | } 288 | else if (df_type == 5){ 289 | u_coords = mix(u_coords_plane, u_coords_box, df_plane_box); 290 | dist = mix(dist_plane, dist_box, df_plane_box); 291 | } 292 | 293 | v_coords = length(df_scale) * global_scale / max(pow(dist,2.0), Epsilon); 294 | 295 | spec_val = textureBicubic(spectrum, vec2(u_coords, v_coords)); 296 | // interpolateTricubicFast(spectrum, p / volumeSize, volumeSize); 297 | 298 | // THREE.js sets values outside texture borders(outside the [0,1] x [0,1] range) 299 | // to the values at the borders 300 | // This an undesired effect for our purposes so we set those values 301 | // to zero. 302 | if (u_coords < 0. || u_coords > 1. || 303 | v_coords < 0. 
|| v_coords > 1.){ 304 | spec_val = vec4(0.0); 305 | } 306 | // Soft Clamp values 307 | if (dist < min_dist || dist > max_dist){ 308 | spec_val *= softClamp(dist - max_dist, 0., 1.); 309 | } 310 | vec4 val_color = vec4(0.0); 311 | 312 | float mixValue = max(dist, Epsilon); 313 | vec3 mix_color = vec3(0.0); 314 | if (color_mode == 0) { 315 | // Use color presets 316 | vec4 preset_color = vec4(pow(spec_val.r,10.0) * 1./dist, 317 | pow(spec_val.r, 2.0), 318 | pow(spec_val.r, 0.0) * 1./dist, spec_val.r) ; 319 | 320 | // swizzle color components to define presets 321 | if ( color_preset_type == 0){ 322 | val_color = preset_color.xyzw; 323 | } 324 | if ( color_preset_type == 1){ 325 | val_color = preset_color.zxyw; 326 | } 327 | else if(color_preset_type == 2) { 328 | val_color = preset_color.zyxw; 329 | } 330 | else if(color_preset_type == 3) { 331 | val_color = preset_color.xzyw; 332 | } 333 | else if(color_preset_type == 4) { 334 | val_color = preset_color.yxzw; 335 | } 336 | } 337 | else if (color_mode == 1) { 338 | // Use color gradient 339 | if (color_space == 0) { 340 | // mix color in rgb space 341 | mix_color = mix(color_1, color_2, mixValue); 342 | } 343 | else if (color_space == 1) { 344 | // Mix color in hsv space 345 | vec3 hsv1 = rgb2hsv(color_1); 346 | vec3 hsv2 = rgb2hsv(color_2); 347 | float hue = (mod(mod((hsv2.x - hsv1.x), 1.) + 1.5, 1.) 
- 0.5) * mixValue + hsv1.x; 348 | vec3 hsv = vec3(hue, mix(hsv1.yz, hsv2.yz, mixValue)); 349 | mix_color = hsv2rgb(hsv); 350 | } 351 | val_color = vec4(mix_color, spec_val.r); 352 | } 353 | else if (color_mode == 2) { 354 | // Use unique color 355 | 356 | val_color = vec4( 357 | pow(spec_val.r, (1.0 - uni_color.x) * 10.0), 358 | pow(spec_val.r, (1.0 - uni_color.y) * 10.0), 359 | pow(spec_val.r, (1.0 - uni_color.z) * 10.0), 360 | spec_val.r); 361 | val_color.xyz *= 1.0 / max(dist, Epsilon); 362 | } 363 | // Opacity correction 364 | val_color.w = 1.0 - pow(1.0 - val_color.w, dt_scale); 365 | 366 | // Alpha-blending 367 | gl_FragColor.rgb += (1.0 - gl_FragColor.a) * val_color.w * val_color.xyz; 368 | gl_FragColor.a += (1.0 - gl_FragColor.a) * val_color.w; 369 | if (gl_FragColor.a > 0.99) { 370 | break; 371 | } 372 | if (val_color.w < 0.0) { 373 | discard; 374 | } 375 | // step along the ray direction 376 | p += ray_dir * modulatedDt; 377 | step++; 378 | } 379 | 380 | gl_FragColor.r = linear_to_srgb(gl_FragColor.r); 381 | gl_FragColor.g = linear_to_srgb(gl_FragColor.g); 382 | gl_FragColor.b = linear_to_srgb(gl_FragColor.b); 383 | 384 | //gl_FragColor = color; 385 | 386 | } 387 | `; -------------------------------------------------------------------------------- /src/playground/utils.js: -------------------------------------------------------------------------------- 1 | // Utility functions like URLFromFiles, createDataTexture, etc. 
2 | -------------------------------------------------------------------------------- /src/style.css: -------------------------------------------------------------------------------- 1 | @tailwind base; 2 | @tailwind components; 3 | @tailwind utilities; 4 | 5 | * { 6 | box-sizing: border-box; 7 | scroll-behavior: smooth; 8 | } 9 | 10 | /* [disabled] { 11 | opacity: 0.5; 12 | pointer-events: none; 13 | } */ 14 | 15 | section { 16 | background: #F2F2F2; 17 | padding: 14px; 18 | margin-bottom: 4px; 19 | } 20 | 21 | 22 | input[type="file"] { 23 | width: 0.1px; 24 | height: 0.1px; 25 | opacity: 0; 26 | overflow: hidden; 27 | position: absolute; 28 | z-index: -1; 29 | } 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | -------------------------------------------------------------------------------- /src/utils/AudioWorkletFunctions.js: -------------------------------------------------------------------------------- 1 | // From a series of URL to js files, get an object URL that can be loaded in an 2 | // AudioWorklet. This is useful to be able to use multiple files (utils, data 3 | // structure, main DSP, etc.) without either using static imports, eval, manual 4 | // concatenation with or without a build step, etc. 
// From a series of URLs to js files, get an object URL that can be loaded in an
// AudioWorklet. This is useful to be able to use multiple files (utils, data
// structure, main DSP, etc.) without either using static imports, eval, manual
// concatenation with or without a build step, etc.
//
// @param {string[]} files - URLs of the scripts to fetch, in load order.
// @returns {Promise<string>} an object URL for a single concatenated blob.
// @throws {Error} if any of the files fails to fetch with a non-2xx status.
export function URLFromFiles(files) {
  const promises = files
    .map((file) => fetch(file)
      .then((response) => {
        // fetch() only rejects on network failure; an HTTP error (e.g. 404)
        // would otherwise silently concatenate an error page into the blob,
        // producing a confusing parse error inside the AudioWorklet.
        if (!response.ok) {
          throw new Error(`Failed to fetch ${file}: HTTP ${response.status}`);
        }
        return response.text();
      }));
  return Promise
    .all(promises)
    .then((texts) => {
      texts.unshift("var exports = {};"); // hack to make injected umd modules work
      const text = texts.join('');
      const blob = new Blob([text], { type: "text/javascript" });

      return URL.createObjectURL(blob);
    });
}
THREE.Float32BufferAttribute( normals, 3 ) ); 49 | this.setAttribute( 'uv', new THREE.Float32BufferAttribute( uvs, 2 ) ); 50 | 51 | // functions 52 | 53 | function generateBufferData() { 54 | 55 | for ( let i = 0; i < tubularSegments; i ++ ) { 56 | 57 | generateSegment( i ); 58 | 59 | } 60 | 61 | // if the geometry is not closed, generate the last row of vertices and normals 62 | // at the regular position on the given path 63 | // 64 | // if the geometry is closed, duplicate the first row of vertices and normals (uvs will differ) 65 | 66 | generateSegment( ( closed === false ) ? tubularSegments : 0 ); 67 | 68 | // uvs are generated in a separate function. 69 | // this makes it easy compute correct values for closed geometries 70 | 71 | generateUVs(); 72 | 73 | // finally create faces 74 | 75 | generateIndices(); 76 | 77 | } 78 | 79 | function generateSegment( i ) { 80 | 81 | // we use getPointAt to sample evenly distributed points from the given path 82 | 83 | P = path.getPointAt( i / tubularSegments, P ); 84 | 85 | // retrieve corresponding normal and binormal 86 | 87 | const N = frames.normals[ i ]; 88 | const B = frames.binormals[ i ]; 89 | 90 | // generate normals and vertices for the current segment 91 | 92 | for ( let j = 0; j <= radialSegments; j ++ ) { 93 | 94 | const v = j / radialSegments * Math.PI * 2; 95 | 96 | const sin = Math.sin( v ); 97 | const cos = - Math.cos( v ); 98 | 99 | // normal 100 | 101 | normal.x = ( cos * N.x + sin * B.x ); 102 | normal.y = ( cos * N.y + sin * B.y ); 103 | normal.z = ( cos * N.z + sin * B.z ); 104 | normal.normalize(); 105 | 106 | normals.push( normal.x, normal.y, normal.z ); 107 | 108 | // vertex 109 | 110 | vertex.x = P.x + radius * normal.x; 111 | vertex.y = P.y + radius * normal.y; 112 | vertex.z = P.z + radius * normal.z; 113 | 114 | vertices.push( vertex.x, vertex.y, vertex.z ); 115 | 116 | } 117 | 118 | } 119 | 120 | function generateIndices() { 121 | 122 | for ( let j = 1; j <= tubularSegments; j ++ ) { 123 | 
124 | for ( let i = 1; i <= radialSegments; i ++ ) { 125 | 126 | const a = ( radialSegments + 1 ) * ( j - 1 ) + ( i - 1 ); 127 | const b = ( radialSegments + 1 ) * j + ( i - 1 ); 128 | const c = ( radialSegments + 1 ) * j + i; 129 | const d = ( radialSegments + 1 ) * ( j - 1 ) + i; 130 | 131 | // faces 132 | 133 | indices.push( a, b, d ); 134 | indices.push( b, c, d ); 135 | 136 | } 137 | 138 | } 139 | 140 | } 141 | 142 | function generateUVs() { 143 | 144 | for ( let i = 0; i <= tubularSegments; i ++ ) { 145 | 146 | for ( let j = 0; j <= radialSegments; j ++ ) { 147 | 148 | uv.x = i / tubularSegments; 149 | uv.y = j / radialSegments; 150 | 151 | uvs.push( uv.x, uv.y ); 152 | 153 | } 154 | 155 | } 156 | 157 | } 158 | 159 | } 160 | 161 | toJSON() { 162 | 163 | const data = super.toJSON(); 164 | 165 | data.path = this.parameters.path.toJSON(); 166 | 167 | return data; 168 | 169 | } 170 | 171 | static fromJSON( data ) { 172 | 173 | // This only works for built-in curves (e.g. CatmullRomCurve3). 174 | // User defined curves or instances of CurvePath will not be deserialized. 
175 | return new CappedTubeGeometry( 176 | new Curves[ data.path.type ]().fromJSON( data.path ), 177 | data.tubularSegments, 178 | data.radius, 179 | data.radialSegments, 180 | data.closed 181 | ); 182 | 183 | } 184 | 185 | } 186 | 187 | 188 | export { CappedTubeGeometry }; -------------------------------------------------------------------------------- /src/utils/GenerateSDFMaterial.js: -------------------------------------------------------------------------------- 1 | // https://github.com/gkjohnson/three-mesh-bvh/blob/master/example/utils/GenerateSDFMaterial.js 2 | import { ShaderMaterial, Matrix4 } from 'three'; 3 | import { shaderIntersectFunction, shaderDistanceFunction, shaderStructs} from 'three-mesh-bvh/src/gpu/shaderFunctions.js' 4 | import { MeshBVHUniformStruct } from 'three-mesh-bvh/src/gpu/MeshBVHUniformStruct.js'; 5 | 6 | export class GenerateSDFMaterial extends ShaderMaterial { 7 | 8 | constructor( params ) { 9 | 10 | super( { 11 | 12 | uniforms: { 13 | 14 | matrix: { value: new Matrix4() }, 15 | zValue: { value: 0 }, 16 | bvh: { value: new MeshBVHUniformStruct() } 17 | 18 | }, 19 | 20 | vertexShader: /* glsl */` 21 | 22 | varying vec2 vUv; 23 | 24 | void main() { 25 | 26 | vUv = uv; 27 | gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 ); 28 | 29 | } 30 | 31 | `, 32 | 33 | fragmentShader: /* glsl */` 34 | 35 | precision highp isampler2D; 36 | precision highp usampler2D; 37 | 38 | ${ shaderStructs } 39 | ${ shaderIntersectFunction } 40 | ${ shaderDistanceFunction } 41 | 42 | varying vec2 vUv; 43 | 44 | uniform BVH bvh; 45 | uniform float zValue; 46 | uniform mat4 matrix; 47 | 48 | void main() { 49 | 50 | // compute the point in space to check 51 | vec3 point = vec3( vUv, zValue ); 52 | point -= vec3( 0.5 ); 53 | point = ( matrix * vec4( point, 1.0 ) ).xyz; 54 | 55 | // retrieve the distance and other values 56 | uvec4 faceIndices; 57 | vec3 faceNormal; 58 | vec3 barycoord; 59 | float side; 60 | vec3 outPoint; 61 | float dist = 
bvhClosestPointToPoint( bvh, point.xyz, faceIndices, faceNormal, barycoord, side, outPoint ); 62 | 63 | // if the triangle side is the back then it must be on the inside and the value negative 64 | gl_FragColor = vec4( side * dist, 0, 0, 0 ); 65 | 66 | } 67 | 68 | ` 69 | 70 | } ); 71 | 72 | this.setValues( params ); 73 | 74 | } 75 | 76 | } 77 | -------------------------------------------------------------------------------- /src/utils/RayCastSDFMaterial.js: -------------------------------------------------------------------------------- 1 | import { ShaderMaterial, Matrix4, Vector3 } from 'three'; 2 | 3 | export class RayCastSDFMaterial extends ShaderMaterial { 4 | 5 | constructor( params ) { 6 | 7 | super( { 8 | 9 | defines: { 10 | 11 | MAX_STEPS: 100, 12 | SURFACE_EPSILON: 0.001, 13 | 14 | }, 15 | 16 | uniforms: { 17 | 18 | surface: { value: 0 }, 19 | sdfTex: { value: null }, 20 | dataTex: { value: null}, 21 | curveTex: { value: null }, 22 | normalStep: { value: new Vector3() }, 23 | projectionInverse: { value: new Matrix4() }, 24 | sdfTransformInverse: { value: new Matrix4() } 25 | 26 | }, 27 | 28 | vertexShader: /* glsl */` 29 | varying vec2 vUv; 30 | void main() { 31 | vUv = uv; 32 | gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 ); 33 | } 34 | `, 35 | 36 | fragmentShader: /* glsl */` 37 | precision highp sampler3D; 38 | varying vec2 vUv; 39 | uniform float surface; 40 | uniform sampler3D sdfTex; 41 | uniform sampler2D dataTex; 42 | uniform sampler2D curveTex; 43 | uniform vec3 normalStep; 44 | uniform mat4 projectionInverse; 45 | uniform mat4 sdfTransformInverse; 46 | #include 47 | // distance to box bounds 48 | vec2 rayBoxDist( vec3 boundsMin, vec3 boundsMax, vec3 rayOrigin, vec3 rayDir ) { 49 | vec3 t0 = ( boundsMin - rayOrigin ) / rayDir; 50 | vec3 t1 = ( boundsMax - rayOrigin ) / rayDir; 51 | vec3 tmin = min( t0, t1 ); 52 | vec3 tmax = max( t0, t1 ); 53 | float distA = max( max( tmin.x, tmin.y ), tmin.z ); 54 | float distB = min( 
tmax.x, min( tmax.y, tmax.z ) ); 55 | float distToBox = max( 0.0, distA ); 56 | float distInsideBox = max( 0.0, distB - distToBox ); 57 | return vec2( distToBox, distInsideBox ); 58 | } 59 | 60 | void main() { 61 | // get the inverse of the sdf box transform 62 | mat4 sdfTransform = inverse( sdfTransformInverse ); 63 | // convert the uv to clip space for ray transformation 64 | vec2 clipSpace = 2.0 * vUv - vec2( 1.0 ); 65 | // get world ray direction 66 | vec3 rayOrigin = vec3( 0.0 ); 67 | vec4 homogenousDirection = projectionInverse * vec4( clipSpace, - 1.0, 1.0 ); 68 | vec3 rayDirection = normalize( homogenousDirection.xyz / homogenousDirection.w ); 69 | // transform ray into local coordinates of sdf bounds 70 | vec3 sdfRayOrigin = ( sdfTransformInverse * vec4( rayOrigin, 1.0 ) ).xyz; 71 | vec3 sdfRayDirection = normalize( ( sdfTransformInverse * vec4( rayDirection, 0.0 ) ).xyz ); 72 | // find whether our ray hits the box bounds in the local box space 73 | vec2 boxIntersectionInfo = rayBoxDist( vec3( - 0.5 ), vec3( 0.5 ), sdfRayOrigin, sdfRayDirection ); 74 | float distToBox = boxIntersectionInfo.x; 75 | float distInsideBox = boxIntersectionInfo.y; 76 | bool intersectsBox = distInsideBox > 0.0; 77 | gl_FragColor = vec4( 0.0 ); 78 | if ( intersectsBox ) { 79 | // find the surface point in world space 80 | bool intersectsSurface = false; 81 | vec4 localPoint = vec4( sdfRayOrigin + sdfRayDirection * ( distToBox + 1e-5 ), 1.0 ); 82 | vec4 point = sdfTransform * localPoint; 83 | int step = 0; 84 | // ray march 85 | for ( int i = 0; i < MAX_STEPS; i ++ ) { 86 | // sdf box extends from - 0.5 to 0.5 87 | // transform into the local bounds space [ 0, 1 ] and check if we're inside the bounds 88 | vec3 uv = ( sdfTransformInverse * point ).xyz + vec3( 0.5 ); 89 | if ( uv.x < 0.0 || uv.x > 1.0 || uv.y < 0.0 || uv.y > 1.0 || uv.z < 0.0 || uv.z > 1.0 ) { 90 | break; 91 | } 92 | // // get the distance to surface and exit the loop if we're close to the surface 93 | // float 
distanceToSurface = texture2D( sdfTex, uv ).r - surface; 94 | // if ( distanceToSurface < SURFACE_EPSILON ) { 95 | // intersectsSurface = true; 96 | // break; 97 | // } 98 | // get the distance value 99 | float distance = abs(texture2D( sdfTex, uv ).r); 100 | // distance = clamp(length(uv-vec3(0.5)), 0.0, 1.0); 101 | // sample data texture along distance value 102 | vec2 uv2 = vec2(0., distance); 103 | float dataSample = texture(dataTex, uv2).r; 104 | vec4 baseColor = vec4(pow(dataSample,10.0) * 1./distance, 105 | pow(dataSample, 2.0), 106 | pow(dataSample, 0.0) * 1./distance, dataSample) ; 107 | 108 | // vec4 baseColor = vec4(distance,0., 0., 0.4); 109 | // baseColor.rgb = uv; 110 | // baseColor.w = 1.0; 111 | // Opacity correction 112 | baseColor.w = 1.0 - pow(1.0 - baseColor.w, 0.01); 113 | // Alpha-blending 114 | gl_FragColor.rbg += (1.0 - gl_FragColor.a) * baseColor.a * baseColor.xyz; 115 | gl_FragColor.a += (1.0 - gl_FragColor.a) * baseColor.w; 116 | // exit the loop if the accumulated alpha is close to 1 117 | // if (gl_FragColor.a > 0.9) { 118 | // break; 119 | // } 120 | // step the ray 121 | point.xyz += rayDirection * 0.01; 122 | } 123 | 124 | // // find the surface normal 125 | // if ( intersectsSurface ) { 126 | // // compute the surface normal 127 | // vec3 uv = ( sdfTransformInverse * point ).xyz + vec3( 0.5 ); 128 | // float dx = texture( sdfTex, uv + vec3( normalStep.x, 0.0, 0.0 ) ).r - texture( sdfTex, uv - vec3( normalStep.x, 0.0, 0.0 ) ).r; 129 | // float dy = texture( sdfTex, uv + vec3( 0.0, normalStep.y, 0.0 ) ).r - texture( sdfTex, uv - vec3( 0.0, normalStep.y, 0.0 ) ).r; 130 | // float dz = texture( sdfTex, uv + vec3( 0.0, 0.0, normalStep.z ) ).r - texture( sdfTex, uv - vec3( 0.0, 0.0, normalStep.z ) ).r; 131 | // vec3 normal = normalize( vec3( dx, dy, dz ) ); 132 | // // compute some basic lighting effects 133 | // vec3 lightDirection = normalize( vec3( 1.0 ) ); 134 | // float lightIntensity = 135 | // saturate( dot( normal, lightDirection 
) ) + 136 | // saturate( dot( normal, - lightDirection ) ) * 0.05 + 137 | // 0.1; 138 | // gl_FragColor.rgb = vec3( lightIntensity ); 139 | // gl_FragColor.a = 1.0; 140 | // } 141 | } 142 | //#include 143 | } 144 | ` 145 | 146 | } ); 147 | 148 | this.setValues( params ); 149 | 150 | } 151 | 152 | } -------------------------------------------------------------------------------- /src/utils/RayMarchSDFMaterial.js: -------------------------------------------------------------------------------- 1 | // https://github.com/gkjohnson/three-mesh-bvh/blob/master/example/utils/RayMarchSDFMaterial.js 2 | import { ShaderMaterial, Matrix4, Vector3 } from 'three'; 3 | 4 | export class RayMarchSDFMaterial extends ShaderMaterial { 5 | 6 | constructor( params ) { 7 | 8 | super( { 9 | 10 | defines: { 11 | 12 | MAX_STEPS: 500, 13 | SURFACE_EPSILON: 0.001, 14 | 15 | }, 16 | 17 | uniforms: { 18 | 19 | surface: { value: 0 }, 20 | sdfTex: { value: null }, 21 | normalStep: { value: new Vector3() }, 22 | projectionInverse: { value: new Matrix4() }, 23 | sdfTransformInverse: { value: new Matrix4() } 24 | 25 | }, 26 | 27 | vertexShader: /* glsl */` 28 | 29 | varying vec2 vUv; 30 | 31 | void main() { 32 | 33 | vUv = uv; 34 | gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 ); 35 | 36 | } 37 | 38 | `, 39 | 40 | fragmentShader: /* glsl */` 41 | precision highp sampler3D; 42 | 43 | varying vec2 vUv; 44 | 45 | uniform float surface; 46 | uniform sampler3D sdfTex; 47 | uniform vec3 normalStep; 48 | uniform mat4 projectionInverse; 49 | uniform mat4 sdfTransformInverse; 50 | 51 | #include 52 | 53 | // distance to box bounds 54 | vec2 rayBoxDist( vec3 boundsMin, vec3 boundsMax, vec3 rayOrigin, vec3 rayDir ) { 55 | 56 | vec3 t0 = ( boundsMin - rayOrigin ) / rayDir; 57 | vec3 t1 = ( boundsMax - rayOrigin ) / rayDir; 58 | vec3 tmin = min( t0, t1 ); 59 | vec3 tmax = max( t0, t1 ); 60 | 61 | float distA = max( max( tmin.x, tmin.y ), tmin.z ); 62 | float distB = min( tmax.x, min( 
tmax.y, tmax.z ) ); 63 | 64 | float distToBox = max( 0.0, distA ); 65 | float distInsideBox = max( 0.0, distB - distToBox ); 66 | return vec2( distToBox, distInsideBox ); 67 | 68 | } 69 | 70 | void main() { 71 | 72 | // get the inverse of the sdf box transform 73 | mat4 sdfTransform = inverse( sdfTransformInverse ); 74 | 75 | // convert the uv to clip space for ray transformation 76 | vec2 clipSpace = 2.0 * vUv - vec2( 1.0 ); 77 | 78 | // get world ray direction 79 | vec3 rayOrigin = vec3( 0.0 ); 80 | vec4 homogenousDirection = projectionInverse * vec4( clipSpace, - 1.0, 1.0 ); 81 | vec3 rayDirection = normalize( homogenousDirection.xyz / homogenousDirection.w ); 82 | 83 | // transform ray into local coordinates of sdf bounds 84 | vec3 sdfRayOrigin = ( sdfTransformInverse * vec4( rayOrigin, 1.0 ) ).xyz; 85 | vec3 sdfRayDirection = normalize( ( sdfTransformInverse * vec4( rayDirection, 0.0 ) ).xyz ); 86 | 87 | // find whether our ray hits the box bounds in the local box space 88 | vec2 boxIntersectionInfo = rayBoxDist( vec3( - 0.5 ), vec3( 0.5 ), sdfRayOrigin, sdfRayDirection ); 89 | float distToBox = boxIntersectionInfo.x; 90 | float distInsideBox = boxIntersectionInfo.y; 91 | bool intersectsBox = distInsideBox > 0.0; 92 | 93 | gl_FragColor = vec4( 0.0 ); 94 | if ( intersectsBox ) { 95 | 96 | // find the surface point in world space 97 | bool intersectsSurface = false; 98 | vec4 localPoint = vec4( sdfRayOrigin + sdfRayDirection * ( distToBox + 1e-5 ), 1.0 ); 99 | vec4 point = sdfTransform * localPoint; 100 | 101 | // ray march 102 | for ( int i = 0; i < MAX_STEPS; i ++ ) { 103 | 104 | // sdf box extends from - 0.5 to 0.5 105 | // transform into the local bounds space [ 0, 1 ] and check if we're inside the bounds 106 | vec3 uv = ( sdfTransformInverse * point ).xyz + vec3( 0.5 ); 107 | if ( uv.x < 0.0 || uv.x > 1.0 || uv.y < 0.0 || uv.y > 1.0 || uv.z < 0.0 || uv.z > 1.0 ) { 108 | 109 | break; 110 | 111 | } 112 | 113 | // get the distance to surface and exit the loop 
if we're close to the surface 114 | float distanceToSurface = texture2D( sdfTex, uv ).r - surface; 115 | if ( distanceToSurface < SURFACE_EPSILON ) { 116 | 117 | intersectsSurface = true; 118 | break; 119 | 120 | } 121 | 122 | // step the ray 123 | point.xyz += rayDirection * abs( distanceToSurface ); 124 | 125 | } 126 | 127 | // find the surface normal 128 | if ( intersectsSurface ) { 129 | 130 | // compute the surface normal 131 | vec3 uv = ( sdfTransformInverse * point ).xyz + vec3( 0.5 ); 132 | float dx = texture( sdfTex, uv + vec3( normalStep.x, 0.0, 0.0 ) ).r - texture( sdfTex, uv - vec3( normalStep.x, 0.0, 0.0 ) ).r; 133 | float dy = texture( sdfTex, uv + vec3( 0.0, normalStep.y, 0.0 ) ).r - texture( sdfTex, uv - vec3( 0.0, normalStep.y, 0.0 ) ).r; 134 | float dz = texture( sdfTex, uv + vec3( 0.0, 0.0, normalStep.z ) ).r - texture( sdfTex, uv - vec3( 0.0, 0.0, normalStep.z ) ).r; 135 | vec3 normal = normalize( vec3( dx, dy, dz ) ); 136 | 137 | // compute some basic lighting effects 138 | vec3 lightDirection = normalize( vec3( 1.0 ) ); 139 | float lightIntensity = 140 | saturate( dot( normal, lightDirection ) ) + 141 | saturate( dot( normal, - lightDirection ) ) * 0.05 + 142 | 0.1; 143 | gl_FragColor.rgb = vec3( lightIntensity ); 144 | gl_FragColor.a = 1.0; 145 | 146 | } 147 | 148 | } 149 | 150 | #include 151 | 152 | } 153 | ` 154 | 155 | } ); 156 | 157 | this.setValues( params ); 158 | 159 | } 160 | 161 | } 162 | -------------------------------------------------------------------------------- /src/utils/ringbuf.js/index.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | Object.defineProperty(exports, '__esModule', { value: true }); 4 | 5 | // Send audio interleaved audio frames between threads, wait-free. 6 | // 7 | // Those classes allow communicating between a non-real time thread (browser 8 | // main thread or worker) and a real-time thread (in an AudioWorkletProcessor). 
// Writer and Reader cannot change role after setup, unless externally
// synchronized.
//
// GC _can_ happen during the initial construction of this object when hopefully
// no audio is being output. This depends on how implementations schedule GC
// passes. After the setup phase no GC is triggered on either side of the queue.

// Interleaved -> Planar audio buffer conversion
//
// `input` is a Float32Array holding n*128 interleaved samples, where n is the
// channel count.
// `output` is an array of n Float32Arrays of 128 frames each.
//
// This is useful to get data from a codec, the network, or anything that is
// interleaved, into planar format, for example a Web Audio API AudioBuffer or
// the output parameter of an AudioWorkletProcessor.
function deinterleave(input, output) {
  // 128 frames per render quantum, so channel count = length / 128.
  // (The original divided by 256, which mis-computed the channel count,
  // and its loop referenced an undefined `channelCount`, so every call
  // threw a ReferenceError.)
  var channel_count = input.length / 128;
  if (output.length != channel_count) {
    throw "not enough space in output arrays";
  }
  for (var i = 0; i < channel_count; i++) {
    let out_channel = output[i];
    // Samples of channel i sit at positions i, i+n, i+2n, ...
    let interleaved_idx = i;
    for (var j = 0; j < 128; ++j) {
      out_channel[j] = input[interleaved_idx];
      interleaved_idx += channel_count;
    }
  }
}
// Planar -> Interleaved audio buffer conversion
//
// Input is an array of `n` 128 frames Float32Array that hold the audio data.
// output is a Float32Array that is n*128 elements long. This function is useful
// to get data from the Web Audio API (that does planar audio), into something
// that codec or network streaming library expect.
45 | function interleave(input, output) { 46 | if (input.length * 128 != output.length) { 47 | throw "input and output of incompatible sizes"; 48 | } 49 | var out_idx = 0; 50 | for (var i = 0; i < 128; i++) { 51 | for (var channel = 0; j < output.length; j++) { 52 | output[out_idx] = input[channel][i]; 53 | out_idx++; 54 | } 55 | } 56 | } 57 | 58 | class AudioWriter { 59 | // From a RingBuffer, build an object that can enqueue enqueue audio in a ring 60 | // buffer. 61 | constructor(ringbuf) { 62 | if (ringbuf.type() != "Float32Array") { 63 | throw "This class requires a ring buffer of Float32Array"; 64 | } 65 | this.ringbuf = ringbuf; 66 | } 67 | // Enqueue a buffer of interleaved audio into the ring buffer. 68 | // Returns the number of samples that have been successfuly written to the 69 | // queue. `buf` is not written to during this call, so the samples that 70 | // haven't been written to the queue are still available. 71 | enqueue(buf) { 72 | return this.ringbuf.push(buf); 73 | } 74 | // Query the free space in the ring buffer. This is the amount of samples that 75 | // can be queued, with a guarantee of success. 76 | available_write() { 77 | return this.ringbuf.available_write(); 78 | } 79 | } 80 | 81 | class AudioReader { 82 | constructor(ringbuf) { 83 | if (ringbuf.type() != "Float32Array") { 84 | throw "This class requires a ring buffer of Float32Array"; 85 | } 86 | this.ringbuf = ringbuf; 87 | } 88 | // Attempt to dequeue at most `buf.length` samples from the queue. This 89 | // returns the number of samples dequeued. If greater than 0, the samples are 90 | // at the beginning of `buf` 91 | dequeue(buf) { 92 | if (this.ringbuf.empty()) { 93 | return 0; 94 | } 95 | return this.ringbuf.pop(buf); 96 | } 97 | // Query the occupied space in the queue. This is the amount of samples that 98 | // can be read with a guarantee of success. 
99 | available_read() { 100 | return this.ringbuf.available_read(); 101 | } 102 | } 103 | 104 | // Communicate parameter changes, lock free, no gc. 105 | // 106 | // between a UI thread (browser main thread or worker) and a real-time thread 107 | // (in an AudioWorkletProcessor). Write and Reader cannot change role after 108 | // setup, unless externally synchronized. 109 | // 110 | // GC can happen during the initial construction of this object when hopefully 111 | // no audio is being output. This depends on the implementation. 112 | // 113 | // Parameter changes are like in the VST framework: an index and a float value 114 | // (no restriction on the value). 115 | // 116 | // This class supports up to 256 parameters, but this is easy to extend if 117 | // needed. 118 | // 119 | // An element is a index, that is an unsigned byte, and a float32, which is 4 120 | // bytes. 121 | 122 | class ParameterWriter { 123 | // From a RingBuffer, build an object that can enqueue a parameter change in 124 | // the queue. 125 | constructor(ringbuf) { 126 | if (ringbuf.type() != "Uint8Array") { 127 | throw "This class requires a ring buffer of Uint8Array"; 128 | } 129 | const SIZE_ELEMENT = 5; 130 | this.ringbuf = ringbuf; 131 | this.mem = new ArrayBuffer(SIZE_ELEMENT); 132 | this.array = new Uint8Array(this.mem); 133 | this.view = new DataView(this.mem); 134 | } 135 | // Enqueue a parameter change for parameter of index `index`, with a new value 136 | // of `value`. 137 | // Returns true if enqueuing suceeded, false otherwise. 
138 | enqueue_change(index, value) { 139 | const SIZE_ELEMENT = 5; 140 | this.view.setUint8(0, index); 141 | this.view.setFloat32(1, value); 142 | if (this.ringbuf.available_write() < SIZE_ELEMENT) { 143 | return false; 144 | } 145 | return this.ringbuf.push(this.array) == SIZE_ELEMENT; 146 | } 147 | } 148 | 149 | class ParameterReader { 150 | constructor(ringbuf) { 151 | const SIZE_ELEMENT = 5; 152 | this.ringbuf = ringbuf; 153 | this.mem = new ArrayBuffer(SIZE_ELEMENT); 154 | this.array = new Uint8Array(this.mem); 155 | this.view = new DataView(this.mem); 156 | } 157 | dequeue_change(o) { 158 | if (this.ringbuf.empty()) { 159 | return false; 160 | } 161 | var rv = this.ringbuf.pop(this.array); 162 | o.index = this.view.getUint8(0); 163 | o.value = this.view.getFloat32(1); 164 | 165 | return true; 166 | } 167 | } 168 | 169 | // A Single Producer - Single Consumer thread-safe wait-free ring buffer. 170 | // 171 | // The producer and the consumer can be separate thread, but cannot change role, 172 | // except with external synchronization. 173 | 174 | class RingBuffer { 175 | static getStorageForCapacity(capacity, type) { 176 | if (!type.BYTES_PER_ELEMENT) { 177 | throw "Pass in a ArrayBuffer subclass"; 178 | } 179 | var bytes = 8 + (capacity + 1) * type.BYTES_PER_ELEMENT; 180 | return new SharedArrayBuffer(bytes); 181 | } 182 | // `sab` is a SharedArrayBuffer with a capacity calculated by calling 183 | // `getStorageForCapacity` with the desired capacity. 184 | constructor(sab, type) { 185 | if (!ArrayBuffer.__proto__.isPrototypeOf(type) && 186 | type.BYTES_PER_ELEMENT !== undefined) { 187 | throw "Pass a concrete typed array class as second argument"; 188 | } 189 | 190 | // Maximum usable size is 1<<32 - type.BYTES_PER_ELEMENT bytes in the ring 191 | // buffer for this version, easily changeable. 
192 | // -4 for the write ptr (uint32_t offsets) 193 | // -4 for the read ptr (uint32_t offsets) 194 | // capacity counts the empty slot to distinguish between full and empty. 195 | this._type = type; 196 | this.capacity = (sab.byteLength - 8) / type.BYTES_PER_ELEMENT; 197 | this.buf = sab; 198 | this.write_ptr = new Uint32Array(this.buf, 0, 1); 199 | this.read_ptr = new Uint32Array(this.buf, 4, 1); 200 | this.storage = new type(this.buf, 8, this.capacity); 201 | } 202 | // Returns the type of the underlying ArrayBuffer for this RingBuffer. This 203 | // allows implementing crude type checking. 204 | type() { 205 | return this._type.name; 206 | } 207 | // Push bytes to the ring buffer. `bytes` is an typed array of the same type 208 | // as passed in the ctor, to be written to the queue. 209 | // Returns the number of elements written to the queue. 210 | push(elements) { 211 | var rd = Atomics.load(this.read_ptr, 0); 212 | var wr = Atomics.load(this.write_ptr, 0); 213 | 214 | if ((wr + 1) % this._storage_capacity() == rd) { 215 | // full 216 | return 0; 217 | } 218 | 219 | let to_write = Math.min(this._available_write(rd, wr), elements.length); 220 | let first_part = Math.min(this._storage_capacity() - wr, to_write); 221 | let second_part = to_write - first_part; 222 | 223 | this._copy(elements, 0, this.storage, wr, first_part); 224 | this._copy(elements, first_part, this.storage, 0, second_part); 225 | 226 | // publish the enqueued data to the other side 227 | Atomics.store( 228 | this.write_ptr, 229 | 0, 230 | (wr + to_write) % this._storage_capacity() 231 | ); 232 | 233 | return to_write; 234 | } 235 | // Read `elements.length` elements from the ring buffer. `elements` is a typed 236 | // array of the same type as passed in the ctor. 237 | // Returns the number of elements read from the queue, they are placed at the 238 | // beginning of the array passed as parameter. 
239 | pop(elements) { 240 | var rd = Atomics.load(this.read_ptr, 0); 241 | var wr = Atomics.load(this.write_ptr, 0); 242 | 243 | if (wr == rd) { 244 | return 0; 245 | } 246 | 247 | let to_read = Math.min(this._available_read(rd, wr), elements.length); 248 | 249 | let first_part = Math.min(this._storage_capacity() - rd, elements.length); 250 | let second_part = to_read - first_part; 251 | 252 | this._copy(this.storage, rd, elements, 0, first_part); 253 | this._copy(this.storage, 0, elements, first_part, second_part); 254 | 255 | Atomics.store(this.read_ptr, 0, (rd + to_read) % this._storage_capacity()); 256 | 257 | return to_read; 258 | } 259 | 260 | // True if the ring buffer is empty false otherwise. This can be late on the 261 | // reader side: it can return true even if something has just been pushed. 262 | empty() { 263 | var rd = Atomics.load(this.read_ptr, 0); 264 | var wr = Atomics.load(this.write_ptr, 0); 265 | 266 | return wr == rd; 267 | } 268 | 269 | // True if the ring buffer is full, false otherwise. This can be late on the 270 | // write side: it can return true when something has just been poped. 271 | full() { 272 | var rd = Atomics.load(this.read_ptr, 0); 273 | var wr = Atomics.load(this.write_ptr, 0); 274 | 275 | return (wr + 1) % this.capacity != rd; 276 | } 277 | 278 | // The usable capacity for the ring buffer: the number of elements that can be 279 | // stored. 280 | capacity() { 281 | return this.capacity - 1; 282 | } 283 | 284 | // Number of elements available for reading. This can be late, and report less 285 | // elements that is actually in the queue, when something has just been 286 | // enqueued. 287 | available_read() { 288 | var rd = Atomics.load(this.read_ptr, 0); 289 | var wr = Atomics.load(this.write_ptr, 0); 290 | return this._available_read(rd, wr); 291 | } 292 | 293 | // Number of elements available for writing. 
This can be late, and report less 294 | // elemtns that is actually available for writing, when something has just 295 | // been dequeued. 296 | available_write() { 297 | var rd = Atomics.load(this.read_ptr, 0); 298 | var wr = Atomics.load(this.write_ptr, 0); 299 | return this._available_write(rd, wr); 300 | } 301 | 302 | // private methods // 303 | 304 | // Number of elements available for reading, given a read and write pointer.. 305 | _available_read(rd, wr) { 306 | if (wr > rd) { 307 | return wr - rd; 308 | } else { 309 | return wr + this._storage_capacity() - rd; 310 | } 311 | } 312 | 313 | // Number of elements available from writing, given a read and write pointer. 314 | _available_write(rd, wr) { 315 | let rv = rd - wr - 1; 316 | if (wr >= rd) { 317 | rv += this._storage_capacity(); 318 | } 319 | return rv; 320 | } 321 | 322 | // The size of the storage for elements not accounting the space for the index. 323 | _storage_capacity() { 324 | return this.capacity; 325 | } 326 | 327 | // Copy `size` elements from `input`, starting at offset `offset_input`, to 328 | // `output`, starting at offset `offset_output`. 
329 | _copy(input, offset_input, output, offset_output, size) { 330 | for (var i = 0; i < size; i++) { 331 | output[offset_output + i] = input[offset_input + i]; 332 | } 333 | } 334 | } 335 | 336 | exports.AudioReader = AudioReader; 337 | exports.AudioWriter = AudioWriter; 338 | exports.ParameterReader = ParameterReader; 339 | exports.ParameterWriter = ParameterWriter; 340 | exports.RingBuffer = RingBuffer; 341 | exports.deinterleave = deinterleave; 342 | exports.interleave = interleave; 343 | //sourceMappingURL=index.js.map 344 | -------------------------------------------------------------------------------- /src/volume-minimal/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | Simple Volume Raycasting 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /src/volume-minimal/main.js: -------------------------------------------------------------------------------- 1 | import '../style.css'; 2 | import * as THREE from 'three'; 3 | import { OrbitControls } from 'three/examples/jsm/controls/OrbitControls.js'; 4 | import Stats from 'three/examples/jsm/libs/stats.module'; 5 | import { GUI } from 'dat.gui/build/dat.gui.min.js'; 6 | 7 | 8 | let camera, 9 | scene, 10 | renderer, 11 | controls, 12 | stats 13 | 14 | let planeMesh, 15 | debugPlaneMesh, 16 | volumeMesh, 17 | pointer, 18 | clock 19 | 20 | 21 | const gui = new GUI({ width: 200 }); 22 | // gui parameters 23 | 24 | const params = { 25 | dt_scale: 0.1, 26 | max_steps: 100, 27 | }; 28 | 29 | // Volume constants 30 | const x_dim = 100; 31 | const y_dim = 100; 32 | const z_dim = 100; 33 | const x_scale = 1; 34 | const y_scale = 1; 35 | const z_scale = 1; 36 | 37 | 38 | // Shaders 39 | const raycastVertexShader = /* glsl */` 40 | out vec3 vray_dir; 41 | flat out vec3 transformed_eye; 42 | 43 | void main(void) { 44 | 45 | gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1); 
46 | transformed_eye = cameraPosition; 47 | vray_dir = position - transformed_eye; 48 | }`; 49 | const raycastFragmentShader = /* glsl */` 50 | precision highp int; 51 | precision highp float; 52 | in vec3 vray_dir; 53 | flat in vec3 transformed_eye; 54 | const float Epsilon = 1e-10; 55 | // Scene 56 | uniform highp sampler3D volume; 57 | uniform highp sampler2D spectrum; 58 | uniform vec3 aabb_min; 59 | uniform vec3 aabb_max; 60 | 61 | 62 | // raycasting volume 63 | uniform float dt_scale; 64 | uniform int max_steps; 65 | uniform ivec3 volume_dims; 66 | 67 | // Axis-Aligned Bounding Box intersection 68 | vec2 intersect_box(vec3 aabbMin, vec3 aabbMax, vec3 orig, vec3 dir) { 69 | 70 | vec3 inv_dir = 1.0 / dir; 71 | vec3 tmin_tmp = (aabbMin - orig) * inv_dir; 72 | vec3 tmax_tmp = (aabbMax - orig) * inv_dir; 73 | vec3 tmin = min(tmin_tmp, tmax_tmp); 74 | vec3 tmax = max(tmin_tmp, tmax_tmp); 75 | float t0 = max(tmin.x, max(tmin.y, tmin.z)); 76 | float t1 = min(tmax.x, min(tmax.y, tmax.z)); 77 | return vec2(t0, t1); 78 | } 79 | 80 | float linear_to_srgb(float x) { 81 | if (x <= 0.0031308f) { 82 | return 12.92f * x; 83 | } 84 | return 1.055f * pow(x, 1.f / 2.4f) - 0.055f; 85 | } 86 | 87 | void main(void) { 88 | vec3 ray_dir = normalize(vray_dir); 89 | vec2 t_hit = intersect_box(aabb_min, aabb_max, transformed_eye, ray_dir); 90 | 91 | if (t_hit.x > t_hit.y) { 92 | discard; 93 | } 94 | 95 | t_hit.x = max(t_hit.x, 0.0); 96 | vec3 dt_vec = 1.0 / (vec3(volume_dims) * abs(ray_dir)); 97 | float dt = dt_scale * min(dt_vec.x, min(dt_vec.y, dt_vec.z)); 98 | vec3 p = transformed_eye + (t_hit.x + dt) * ray_dir; 99 | 100 | vec4 spec_val = texture(volume, p); 101 | 102 | int step = 0; 103 | for (float t = t_hit.x; t < t_hit.y; t += dt) { 104 | if (step > max_steps){ 105 | break; 106 | } 107 | vec4 val_color = spec_val; 108 | 109 | // Opacity correction 110 | val_color.w = 1.0 - pow(1.0 - val_color.w, dt_scale); 111 | 112 | // Alpha-blending 113 | gl_FragColor.rgb += (1.0 - 
gl_FragColor.a) * val_color.w * val_color.xyz; 114 | gl_FragColor.a += (1.0 - gl_FragColor.a) * val_color.w; 115 | if (gl_FragColor.a > 0.99) { 116 | break; 117 | } 118 | if (val_color.w < 0.0) { 119 | discard; 120 | } 121 | // step along the ray direction 122 | p += ray_dir * dt; 123 | step++; 124 | } 125 | 126 | gl_FragColor.r = linear_to_srgb(gl_FragColor.r); 127 | gl_FragColor.g = linear_to_srgb(gl_FragColor.g); 128 | gl_FragColor.b = linear_to_srgb(gl_FragColor.b); 129 | 130 | //gl_FragColor = color; 131 | 132 | } 133 | `; 134 | 135 | // Main body 136 | init(); 137 | animate(); 138 | 139 | function init() { 140 | scene = new THREE.Scene(); 141 | 142 | // Renderer 143 | renderer = new THREE.WebGLRenderer({ antialias: true }); 144 | renderer.setPixelRatio(window.devicePixelRatio); 145 | renderer.setSize(window.innerWidth, window.innerHeight); 146 | document.body.appendChild(renderer.domElement); 147 | 148 | // Camera 149 | // Perspective 150 | const aspect = window.innerWidth / window.innerHeight; 151 | camera = new THREE.PerspectiveCamera(45, aspect, 0.01, 1000); 152 | // // Orthographic 153 | // const width = 5; 154 | // const h = 2 * width; // frustum height 155 | // const aspect = window.innerWidth / window.innerHeight; 156 | // camera = new THREE.OrthographicCamera( - h * aspect / 2, h * aspect / 2, h / 2, - h / 2, 0.01, 1000 ); 157 | camera.position.set(-2, 1, 2); 158 | scene.add(camera); 159 | 160 | // Controls 161 | controls = new OrbitControls(camera, renderer.domElement); 162 | controls.addEventListener('change', render); 163 | controls.minZoom = 0.1; 164 | controls.maxZoom = 10; 165 | controls.enablePan = false; 166 | controls.update(); 167 | 168 | // GUI 169 | addGUI(); 170 | 171 | // Clock 172 | clock = new THREE.Clock(); 173 | 174 | // Debug spectrogram texture 175 | let planeGeo1 = new THREE.PlaneGeometry(2, 2); 176 | let planeMat1 = new THREE.MeshBasicMaterial({ map: createDataTexture(x_dim, y_dim), side: THREE.DoubleSide }); 177 | debugPlaneMesh 
= new THREE.Mesh(planeGeo1, planeMat1); 178 | debugPlaneMesh.position.set(-2, 0, -1); 179 | // scene.add(debugPlaneMesh); 180 | 181 | // Volume 182 | const volumeGeometry = new THREE.BoxGeometry(x_scale, y_scale, z_scale); 183 | 184 | const volumeUniforms = { 185 | 'volume': { value: create3dDataTexture(x_dim, y_dim, z_dim) }, 186 | 'volume_dims': { value: new THREE.Vector3( x_dim, y_dim, z_dim) }, 187 | 'aabb_min': { value: new THREE.Vector3()}, 188 | 'aabb_max': { value: new THREE.Vector3()}, 189 | 'dt_scale': { value: params.dt_scale }, 190 | 'max_steps': { value: params.max_steps }, 191 | }; 192 | 193 | const volumeMaterial = new THREE.ShaderMaterial({ 194 | uniforms: volumeUniforms, 195 | vertexShader: raycastVertexShader, 196 | fragmentShader: raycastFragmentShader, 197 | side: THREE.DoubleSide, 198 | transparent: true 199 | }); 200 | 201 | volumeMesh = new THREE.Mesh(volumeGeometry, volumeMaterial); 202 | volumeMesh.matrixAutoUpdate = true; 203 | volumeMesh.geometry.computeBoundingBox(); 204 | 205 | (volumeMesh.material).uniforms['aabb_min']['value'] = volumeMesh.geometry.boundingBox.min; 206 | (volumeMesh.material).uniforms['aabb_max']['value'] = volumeMesh.geometry.boundingBox.max; 207 | 208 | scene.add(volumeMesh); 209 | 210 | pointer = new THREE.Vector2(); 211 | 212 | window.addEventListener('pointerMove', onPointerMove); 213 | 214 | const planeGeo = new THREE.PlaneGeometry(25, 25); 215 | const planeMat = new THREE.MeshBasicMaterial({ visible: false }); 216 | planeMesh = new THREE.Mesh(planeGeo, planeMat); 217 | planeMesh.rotation.x = -0.5 * Math.PI; 218 | 219 | // Add helpers 220 | //addHelpers(scene); 221 | render(); 222 | document.addEventListener('pointermove', onPointerMove); 223 | window.addEventListener('resize', onWindowResize); 224 | } 225 | 226 | function render() { 227 | renderer.render(scene, camera); 228 | } 229 | 230 | function onWindowResize() { 231 | 232 | // renderer.setSize( window.innerWidth, window.innerHeight ); 233 | 234 | // const 
aspect = window.innerWidth / window.innerHeight; 235 | 236 | // const frustumHeight = camera.top - camera.bottom; 237 | 238 | // camera.left = - frustumHeight * aspect / 2; 239 | // camera.right = frustumHeight * aspect / 2; 240 | 241 | // camera.updateProjectionMatrix(); 242 | camera.aspect = window.innerWidth / window.innerHeight; 243 | camera.updateProjectionMatrix(); 244 | 245 | renderer.setSize(window.innerWidth, window.innerHeight); 246 | 247 | render(); 248 | 249 | } 250 | 251 | function onPointerMove(event) { 252 | 253 | pointer.x = (event.clientX / window.innerWidth) * 2 - 1; 254 | pointer.y = - (event.clientY / window.innerHeight) * 2 + 1; 255 | 256 | } 257 | 258 | function addHelpers(scene) { 259 | // const gridHelper = new THREE.GridHelper(10, 10); 260 | // scene.add(gridHelper); 261 | // stats = Stats(); 262 | //document.body.appendChild(stats.dom) 263 | const axesHelper = new THREE.AxesHelper(3); 264 | scene.add(axesHelper); 265 | } 266 | 267 | 268 | function animate() { 269 | requestAnimationFrame(animate); 270 | //stats.update(); 271 | render(); 272 | } 273 | 274 | // Creates 3D texture with RGB gradient along the XYZ axes 275 | function create3dDataTexture(width, height, depth) { 276 | const d = new Uint8Array(width * height * depth * 4); 277 | let stride = 0; 278 | 279 | for (let z = 0; z < depth; z++) { 280 | for (let y = 0; y < height; y++) { 281 | for (let x = 0; x < width; x++) { 282 | d[stride + 0] = (x / width) * 255; 283 | d[stride + 1] = (y / height) * 255; 284 | d[stride + 2] = (z / depth) * 255; 285 | d[stride + 3] = 255; 286 | stride += 4; 287 | } 288 | } 289 | } 290 | const texture = new THREE.Data3DTexture(d, width, height, depth); 291 | texture.format = THREE.RGBAFormat; 292 | // texture.type = THREE.FloatType; 293 | // texture.minFilter = THREE.NearestFilter; 294 | // texture.magFilter = THREE.NearestFilter; 295 | texture.unpackAlignment = 1; 296 | texture.needsUpdate = true; 297 | 298 | return texture; 299 | } 300 | function 
createDataTexture(width, height) { 301 | 302 | const d = new Uint8Array(width * height * 4); 303 | 304 | let stride = 0; 305 | for (let y = 0; y < height; y++) { 306 | for (let x = 0; x < width; x++) { 307 | d[stride + 0] = 1; 308 | d[stride + 1] = 0; 309 | d[stride + 2] = 0; 310 | d[stride + 3] = 1; 311 | stride += 4; 312 | } 313 | } 314 | const texture = new THREE.DataTexture(d, width, height); 315 | texture.format = THREE.RedFormat; 316 | // texture.type = THREE.FloatType; 317 | // texture.minFilter = THREE.NearestFilter; 318 | // texture.magFilter = THREE.NearestFilter; 319 | texture.unpackAlignment = 1; 320 | 321 | return texture; 322 | } 323 | 324 | 325 | function addGUI() { 326 | // Raycasting 327 | const raycasting_folder = gui.addFolder('raycasting'); 328 | raycasting_folder.add(params, 'dt_scale', 0.005,).step(0.001).name('dt_scale').onChange(function (value) { 329 | (volumeMesh.material).uniforms['dt_scale']['value'] = value; 330 | }); 331 | raycasting_folder.add(params, 'max_steps', 1,).step(1).name('max_steps').onChange(function (value) { 332 | (volumeMesh.material).uniforms['max_steps']['value'] = value; 333 | }); 334 | } 335 | 336 | 337 | 338 | 339 | 340 | 341 | 342 | 343 | 344 | 345 | -------------------------------------------------------------------------------- /src/volume/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | Volume 8 | 9 | 10 | 11 |
12 | 13 |
14 |
15 |
16 | 21 | 53 |
54 |
55 | 56 | 57 | 58 | 59 | -------------------------------------------------------------------------------- /src/volume/melspectrogram-processor.js: -------------------------------------------------------------------------------- 1 | // avoid ES Module imports: not available on workers in Firefox nor Safari 2 | let essentiaExtractor = new EssentiaExtractor(exports.EssentiaWASM); 3 | console.log('parsing as js'); 4 | function Float32Concat(first, second) 5 | { 6 | var firstLength = first.length, 7 | result = new Float32Array(firstLength + second.length); 8 | 9 | result.set(first); 10 | result.set(second, firstLength); 11 | 12 | return result; 13 | } 14 | 15 | class MelspectrogramProcessor extends AudioWorkletProcessor { 16 | constructor(options) { 17 | super(); 18 | this._bufferSize = options.processorOptions.bufferSize; 19 | this._hopSize = options.processorOptions.hopSize; 20 | this._melNumBands = options.processorOptions.melNumBands; 21 | this._sampleRate = options.processorOptions.sampleRate; 22 | this._channelCount = 1; 23 | this._extractor = essentiaExtractor; 24 | // modifying default extractor settings 25 | this._extractor.frameSize = this._bufferSize; 26 | this._extractor.hopSize = this._hopSize; 27 | // settings specific to an algorithm 28 | this._extractor.profile.MelBands.numberBands = this._melNumBands; 29 | this._extractor.profile.MelBands.type = 'power'; 30 | 31 | // buffersize mismatch helpers 32 | this._inputRingBuffer = new ChromeLabsRingBuffer(this._bufferSize, this._channelCount); 33 | this._outputRingBuffer = new ChromeLabsRingBuffer(this._bufferSize, this._channelCount); // changed from 1024 to match block size 34 | 35 | this._accumData = [new Float32Array(this._bufferSize)]; 36 | this._spectrum; 37 | 38 | // SAB config 39 | this.port.onmessage = e => { 40 | this._audio_writer = new AudioWriter(new RingBuffer(e.data.sab, Float32Array)); 41 | }; 42 | } 43 | 44 | process(inputList, outputList, params) { 45 | let input = inputList[0]; 46 | let 
output = outputList[0]; 47 | 48 | this._inputRingBuffer.push(input); 49 | 50 | if (this._inputRingBuffer.framesAvailable >= this._bufferSize) { 51 | 52 | this._inputRingBuffer.pull(this._accumData); 53 | 54 | this._spectrum = this._extractor.melSpectrumExtractor(this._accumData[0], this._sampleRate); 55 | if (this._audio_writer.available_write() >= this._melNumBands) { 56 | this._audio_writer.enqueue(this._spectrum); 57 | } 58 | 59 | let zeros = new Float32Array(128-this._spectrum.length); 60 | let zeroPaddedSpectrum = Float32Concat(this._spectrum, zeros); 61 | 62 | this._outputRingBuffer.push([zeroPaddedSpectrum]); 63 | 64 | // reset variables 65 | this._accumData = [new Float32Array(this._bufferSize)]; 66 | this._spectrum = null; 67 | } 68 | 69 | this._outputRingBuffer.pull(output); // if ringbuffer does not have enough frames, output will be silent 70 | // console.log(output[0]); 71 | return true; 72 | } 73 | } 74 | 75 | registerProcessor("melspectrogram-processor", MelspectrogramProcessor); 76 | 77 | 78 | 79 | // helper classes from https://github.com/GoogleChromeLabs/web-audio-samples/blob/gh-pages/audio-worklet/design-pattern/lib/wasm-audio-helper.js#L170: 80 | 81 | /** 82 | * Copyright 2018 Google LLC 83 | * 84 | * Licensed under the Apache License, Version 2.0 (the "License"); you may not 85 | * use this file except in compliance with the License. You may obtain a copy of 86 | * the License at 87 | * 88 | * http://www.apache.org/licenses/LICENSE-2.0 89 | * 90 | * Unless required by applicable law or agreed to in writing, software 91 | * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 92 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 93 | * License for the specific language governing permissions and limitations under 94 | * the License. 95 | */ 96 | 97 | // Basic byte unit of WASM heap. (16 bit = 2 bytes) 98 | const BYTES_PER_UNIT = Uint16Array.BYTES_PER_ELEMENT; 99 | 100 | // Byte per audio sample. 
(32 bit float) 101 | const BYTES_PER_SAMPLE = Float32Array.BYTES_PER_ELEMENT; 102 | 103 | // The max audio channel on Chrome is 32. 104 | const MAX_CHANNEL_COUNT = 32; 105 | 106 | // WebAudio's render quantum size. 107 | const RENDER_QUANTUM_FRAMES = 128; 108 | 109 | /** 110 | * A JS FIFO implementation for the AudioWorklet. 3 assumptions for the 111 | * simpler operation: 112 | * 1. the push and the pull operation are done by 128 frames. (Web Audio 113 | * API's render quantum size in the speficiation) 114 | * 2. the channel count of input/output cannot be changed dynamically. 115 | * The AudioWorkletNode should be configured with the `.channelCount = k` 116 | * (where k is the channel count you want) and 117 | * `.channelCountMode = explicit`. 118 | * 3. This is for the single-thread operation. (obviously) 119 | * 120 | * @class 121 | */ 122 | class ChromeLabsRingBuffer { 123 | /** 124 | * @constructor 125 | * @param {number} length Buffer length in frames. 126 | * @param {number} channelCount Buffer channel count. 127 | */ 128 | constructor(length, channelCount) { 129 | this._readIndex = 0; 130 | this._writeIndex = 0; 131 | this._framesAvailable = 0; 132 | 133 | this._channelCount = channelCount; 134 | this._length = length; 135 | this._channelData = []; 136 | for (let i = 0; i < this._channelCount; ++i) { 137 | this._channelData[i] = new Float32Array(length); 138 | } 139 | } 140 | 141 | /** 142 | * Getter for Available frames in buffer. 143 | * 144 | * @return {number} Available frames in buffer. 145 | */ 146 | get framesAvailable() { 147 | return this._framesAvailable; 148 | } 149 | 150 | /** 151 | * Push a sequence of Float32Arrays to buffer. 152 | * 153 | * @param {array} arraySequence A sequence of Float32Arrays. 154 | */ 155 | push(arraySequence) { 156 | // The channel count of arraySequence and the length of each channel must 157 | // match with this buffer obejct. 158 | 159 | // Transfer data from the |arraySequence| storage to the internal buffer. 
160 | let sourceLength = arraySequence[0].length; 161 | for (let i = 0; i < sourceLength; ++i) { 162 | let writeIndex = (this._writeIndex + i) % this._length; 163 | for (let channel = 0; channel < this._channelCount; ++channel) { 164 | this._channelData[channel][writeIndex] = arraySequence[channel][i]; 165 | } 166 | } 167 | 168 | this._writeIndex += sourceLength; 169 | if (this._writeIndex >= this._length) { 170 | this._writeIndex = 0; 171 | } 172 | 173 | // For excessive frames, the buffer will be overwritten. 174 | this._framesAvailable += sourceLength; 175 | if (this._framesAvailable > this._length) { 176 | this._framesAvailable = this._length; 177 | } 178 | } 179 | 180 | /** 181 | * Pull data out of buffer and fill a given sequence of Float32Arrays. 182 | * 183 | * @param {array} arraySequence An array of Float32Arrays. 184 | */ 185 | pull(arraySequence) { 186 | // The channel count of arraySequence and the length of each channel must 187 | // match with this buffer obejct. 188 | 189 | // If the FIFO is completely empty, do nothing. 190 | if (this._framesAvailable === 0) { 191 | return; 192 | } 193 | 194 | let destinationLength = arraySequence[0].length; 195 | 196 | // Transfer data from the internal buffer to the |arraySequence| storage. 
197 | for (let i = 0; i < destinationLength; ++i) { 198 | let readIndex = (this._readIndex + i) % this._length; 199 | for (let channel = 0; channel < this._channelCount; ++channel) { 200 | arraySequence[channel][i] = this._channelData[channel][readIndex]; 201 | } 202 | } 203 | 204 | this._readIndex += destinationLength; 205 | if (this._readIndex >= this._length) { 206 | this._readIndex = 0; 207 | } 208 | 209 | this._framesAvailable -= destinationLength; 210 | if (this._framesAvailable < 0) { 211 | this._framesAvailable = 0; 212 | } 213 | } 214 | } // class ChromeLabsRingBuffer -------------------------------------------------------------------------------- /tailwind.config.js: -------------------------------------------------------------------------------- 1 | /** @type {import('tailwindcss').Config} */ 2 | module.exports = { 3 | content: [ 4 | './dist/*.html', 5 | './src/**/*.{html,js}', 6 | ], 7 | theme: { 8 | extend: {}, 9 | }, 10 | variants: { 11 | extend: {}, 12 | }, 13 | plugins: [], 14 | } -------------------------------------------------------------------------------- /webpack.config.js: -------------------------------------------------------------------------------- 1 | 2 | const MiniCssExtractPlugin = require('mini-css-extract-plugin'); 3 | const HtmlWebpackPlugin = require('html-webpack-plugin'); 4 | const pages = ["home", "volume", "volume-minimal", "playground"]; 5 | const path = require('path'); 6 | 7 | module.exports = { 8 | entry: pages.reduce((config, page) => { 9 | if (page == 'home'){ 10 | config[page] = `./src/main.js`; 11 | } 12 | else{ 13 | config[page] = `./src/${page}/main.js`; 14 | } 15 | return config; 16 | }, {}), 17 | output: { 18 | filename: '[name].bundle.js', 19 | path: path.resolve(__dirname, 'dist'), 20 | publicPath: "/assets/" 21 | }, 22 | optimization: { 23 | splitChunks: { 24 | chunks: "all", 25 | }, 26 | }, 27 | devServer: { 28 | headers: { 29 | 'Cross-Origin-Embedder-Policy': 'require-corp', 30 | 'Cross-Origin-Opener-Policy': 
'same-origin', 31 | }, 32 | client: { 33 | logging: 'none', 34 | }, 35 | }, 36 | 37 | plugins: [ 38 | new HtmlWebpackPlugin(), 39 | new MiniCssExtractPlugin()].concat( 40 | pages.map( 41 | (page) => 42 | { 43 | if (page == 'home'){ 44 | return new HtmlWebpackPlugin({ 45 | inject: true, 46 | template: `./src/index.html`, 47 | filename: `index.html`, 48 | chunks: [page], 49 | }) 50 | } 51 | else{ 52 | return new HtmlWebpackPlugin({ 53 | inject: true, 54 | template: `./src/${page}/index.html`, 55 | filename: `${page}/index.html`, 56 | chunks: [page], 57 | }) 58 | } 59 | } 60 | ) 61 | ), 62 | module: { 63 | rules: [ 64 | { 65 | test: /\.(glb|gltf|obj|fbx|stl)$/, 66 | type: 'asset/resource', 67 | }, 68 | { 69 | test: /\.(jpg|png|svg|gif)$/, 70 | type: 'asset/resource', 71 | }, 72 | { 73 | test: /\.css$/, 74 | use: 75 | [ 76 | MiniCssExtractPlugin.loader, 77 | 'css-loader', 78 | 'postcss-loader' 79 | ] 80 | }, 81 | { 82 | test: /\.js$/, 83 | resourceQuery: { not: [/raw/] }, 84 | exclude: /node_modules/, 85 | use: 86 | [ 87 | 'babel-loader' 88 | ] 89 | }, 90 | { 91 | resourceQuery: /raw/, 92 | type: 'asset/source' 93 | }, 94 | { 95 | test: /\.(ogg|mp3|wav|mpe?g)$/, 96 | type: 'asset/resource', 97 | }, 98 | ], 99 | }, 100 | resolve: { 101 | extensions: ['.js', '.jsx', '.ts', '.tsx'], 102 | }, 103 | }; 104 | 105 | --------------------------------------------------------------------------------