├── .github └── workflows │ └── test.yml ├── .gitignore ├── .mise.toml ├── CHANGELOG.md ├── LICENSE ├── README.md ├── assets ├── browser_protocol.json └── js_protocol.json ├── birdie_snapshots ├── created_page_with_reference_html.accepted ├── element_inner_html.accepted ├── element_outer_html.accepted ├── enocded_call_argument_with_dynamic_value.accepted ├── enum_decoder_function.accepted ├── enum_encoder_function.accepted ├── list_of_greetings.accepted ├── list_of_links.accepted ├── opened_sample_page.accepted ├── outer_html.accepted ├── runtime_evaluate_params.accepted └── runtime_evaluate_response.accepted ├── codegen.sh ├── doc_assets └── header_1.png ├── gleam.toml ├── manifest.toml ├── src ├── chrobot.gleam ├── chrobot │ ├── chrome.gleam │ ├── install.gleam │ ├── internal │ │ ├── keymap.gleam │ │ └── utils.gleam │ ├── protocol.gleam │ └── protocol │ │ ├── browser.gleam │ │ ├── debugger.gleam │ │ ├── dom.gleam │ │ ├── dom_debugger.gleam │ │ ├── emulation.gleam │ │ ├── fetch.gleam │ │ ├── input.gleam │ │ ├── io.gleam │ │ ├── log.gleam │ │ ├── network.gleam │ │ ├── page.gleam │ │ ├── performance.gleam │ │ ├── profiler.gleam │ │ ├── runtime.gleam │ │ ├── security.gleam │ │ └── target.gleam └── chrobot_ffi.erl ├── test ├── chrobot_test.gleam ├── chrome_test.gleam ├── codegen │ ├── download_protocol.gleam │ ├── generate_bindings.gleam │ └── generate_bindings_test.gleam ├── mock_server.gleam ├── protocol │ └── runtime_test.gleam └── test_utils.gleam ├── test_assets ├── reference_website.html └── runtime_evaluate_response.json └── vendor └── justin_fork ├── .gitignore ├── CHANGELOG.md ├── README.md ├── gleam.toml ├── manifest.toml ├── src └── justin_fork.gleam └── test └── justin_fork_test.gleam /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: test 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | 9 | jobs: 10 | test: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v4 14 | - uses: erlef/setup-beam@v1 15 | with: 16 | otp-version: "27" 17 | gleam-version: "1.6.2" 18 | rebar3-version: "3" 19 | # elixir-version: "1.15.4" 20 | - uses: browser-actions/setup-chrome@v1 21 | id: setup-chrome 22 | - run: | 23 | ${{ steps.setup-chrome.outputs.chrome-path }} --version 24 | - run: gleam deps download 25 | - run: gleam test 26 | env: 27 | CHROBOT_TEST_BROWSER_PATH: ${{ steps.setup-chrome.outputs.chrome-path }} 28 | - run: gleam format --check src test 29 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.beam 2 | *.ez 3 | /build 4 | erl_crash.dump 5 | /chrome 6 | .DS_Store 7 | test/playground_.gleam 8 | test/debugging_.gleam 9 | -------------------------------------------------------------------------------- /.mise.toml: -------------------------------------------------------------------------------- 1 | [tools] 2 | erlang = "27" 3 | gleam = "1.6.2" 4 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## [3.0.4] 2024-12-10 2 | 3 | - Move to `envoy` since `gleam_erlang` `os.get_env` was removed 4 | - Update to stdlib 0.47 5 | 6 | ## [3.0.3] 2024-11-26 7 | 8 | - Update to `gleam_stlidb` 0.44 9 | 10 | ## [3.0.2] 2024-10-20 11 | 12 | - Use `erlang/port` instead of `otp/port` since it is deprecated 13 | 14 | ## [3.0.1] 2024-10-15 15 | 16 | - 
Ensure compatibility with gleam 1.5 17 | - Update dependency ranges (mainly httpc and json) 18 | 19 | ## [3.0.0] 2024-08-07 20 | 21 | This update restructures the project to move all modules under the `chrobot` namespace. 22 | 23 | - All `protocol` modules are now under `chrobot/protocol` 24 | - `chrome` is now `chrobot/chrome` 25 | - `browser_install` is now `chrobot/install` 26 | 27 | ## [2.3.0] 2024-08-07 28 | 29 | - Add query selectors that run on elements (Remote Objects) 30 | 31 | ## [2.2.5] 2024-08-12 32 | 33 | - Upgrade to Gleam 1.4.1 34 | - Upgrade `simplifile` to 2.0.0 35 | 36 | ## [2.2.4] 2024-06-22 37 | 38 | - Implement more accurate polling 39 | - Make `poll` function part of the public `chrobot` API 40 | 41 | ## [2.2.3] 2024-06-08 42 | 43 | - Add `launch_window` function to launch browser in headful mode 44 | 45 | ## [2.2.2] 2024-06-07 46 | 47 | This update brings basic utilities for integration testing and some conveniences in the high level `chrobot` module 48 | 49 | - Add `click` and `focus` functions 50 | - Add `press_key`, `type_text`, and text input related functions 51 | 52 | ## [2.1.2] 2024-05-29 53 | 54 | - Improve message parsing performance A LOT 🚀 55 | - This should have a big impact on the speed of generating PDFs and taking screenshots 56 | 57 | ## [2.1.1] 2024-05-25 58 | 59 | - Rename the install module to browser_install 60 | 61 | ## [2.1.0] 2024-05-25 62 | 63 | - Allow setting launch config through environment 64 | - Make logging prettier 65 | - Add browser installation script 66 | 67 | ## [2.0.0] 2024-05-17 68 | 69 | - **Breaking Change:** Added `log_level` to `chrome.BrowserConfig`, this means any `launch_with_config` calls must 70 | be amended with this extra parameter 71 | 72 | - Adjusted browser logging behaviour 73 | 74 | ## [1.2.0] 2024-05-16 75 | 76 | - Move codegen scripts to `/test` to fix published package 77 | 78 | ## [1.1.0] 2024-05-16 79 | 80 | - Remove unused `glexec` dependency 81 | - Trying to pass a dynamic value to an enocder now logs a warning 82 | 83 | ## [1.0.0] 2024-05-16 84 | 85 | Initial release -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2024 Jonas Gruenwald 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |
⛭ Typed browser automation for the BEAM ⛭
[The remaining README.md content and the files between it and birdie_snapshots/runtime_evaluate_params.accepted did not survive HTML stripping; the only recoverable fragments are from the reference test page: "Everything there is online about W3 is linked directly or indirectly to this document, including an executive summary of the project, Mailing lists, Policy, November's W3 news, Frequently Asked Questions.", "I am HTML", "I am the hyperstructure", "I am linked to you".]
17 | -------------------------------------------------------------------------------- /birdie_snapshots/runtime_evaluate_params.accepted: -------------------------------------------------------------------------------- 1 | --- 2 | version: 1.1.4 3 | title: Runtime.evaluate params 4 | file: ./test/protocol/runtime_test.gleam 5 | test_name: evaluate_test 6 | --- 7 | {"awaitPromise":false,"userGesture":true,"returnByValue":true,"silent":false,"expression":"document.querySelector(\"h1\")"} -------------------------------------------------------------------------------- /birdie_snapshots/runtime_evaluate_response.accepted: -------------------------------------------------------------------------------- 1 | --- 2 | version: 1.1.4 3 | title: Runtime.evaluate response 4 | file: ./test/protocol/runtime_test.gleam 5 | test_name: evaluate_test 6 | --- 7 | Ok(EvaluateResponse(RemoteObject(RemoteObjectTypeObject, Some(RemoteObjectSubtypeNode), Some("HTMLHeadingElement"), None, None, Some("h1"), Some(RemoteObjectId("8282282834669415287.1.3079"))), None)) -------------------------------------------------------------------------------- /codegen.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # rm -r src/protocol 3 | set -e 4 | gleam run -m codegen/generate_bindings 5 | gleam format 6 | gleam check 7 | echo "Done & Dusted! 🧹" -------------------------------------------------------------------------------- /doc_assets/header_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JonasGruenwald/chrobot/d5399537f64da8cfbfe85eb39090f0bf4ee0ac34/doc_assets/header_1.png -------------------------------------------------------------------------------- /gleam.toml: -------------------------------------------------------------------------------- 1 | name = "chrobot" 2 | version = "3.0.4" 3 | 4 | description = "A browser automation tool and interface to the Chrome DevTools Protocol." 
5 | licences = ["MIT"] 6 | repository = { type = "github", user = "JonasGruenwald", repo = "chrobot" } 7 | links = [] 8 | 9 | [documentation] 10 | pages = [ 11 | { title = "Changelog", path = "changelog.html", source = "./CHANGELOG.md" }, 12 | ] 13 | 14 | [dependencies] 15 | gleam_stdlib = ">= 0.47.0 and < 2.0.0" 16 | gleam_json = ">= 1.0.0 and < 3.0.0" 17 | gleam_erlang = ">= 0.25.0 and < 1.0.0" 18 | gleam_otp = ">= 0.10.0 and < 1.0.0" 19 | gleam_httpc = ">= 2.2.0 and < 4.0.0" 20 | gleam_http = ">= 3.6.0 and < 4.0.0" 21 | filepath = ">= 1.0.0 and < 2.0.0" 22 | simplifile = ">= 2.0.1 and < 3.0.0" 23 | gleam_community_ansi = ">= 1.4.0 and < 2.0.0" 24 | spinner = ">= 1.1.0 and < 2.0.0" 25 | envoy = ">= 1.0.2 and < 2.0.0" 26 | 27 | [dev-dependencies] 28 | gleeunit = ">= 1.0.0 and < 2.0.0" 29 | justin_fork = { path = "./vendor/justin_fork" } 30 | birdie = ">= 1.1.8 and < 2.0.0" 31 | mist = ">= 1.2.0 and < 4.0.0" 32 | gleam_regexp = ">= 1.0.0 and < 2.0.0" 33 | -------------------------------------------------------------------------------- /manifest.toml: -------------------------------------------------------------------------------- 1 | # This file was generated by Gleam 2 | # You typically do not need to edit this file 3 | 4 | packages = [ 5 | { name = "argv", version = "1.0.2", build_tools = ["gleam"], requirements = [], otp_app = "argv", source = "hex", outer_checksum = "BA1FF0929525DEBA1CE67256E5ADF77A7CDDFE729E3E3F57A5BDCAA031DED09D" }, 6 | { name = "birdie", version = "1.2.4", build_tools = ["gleam"], requirements = ["argv", "edit_distance", "filepath", "glance", "gleam_community_ansi", "gleam_erlang", "gleam_stdlib", "justin", "rank", "simplifile", "trie_again"], otp_app = "birdie", source = "hex", outer_checksum = "769AE13AB5B5B84E724E9966037DCCB5BD63B2F43C52EF80B4BF3351F64E469E" }, 7 | { name = "birl", version = "1.7.1", build_tools = ["gleam"], requirements = ["gleam_stdlib", "ranger"], otp_app = "birl", source = "hex", outer_checksum = "5C66647D62BCB11FE327E7A6024907C4A17954EF22865FE0940B54A852446D01" }, 8 | { name = "edit_distance", version = "2.0.1", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "edit_distance", source = "hex", outer_checksum = "A1E485C69A70210223E46E63985FA1008B8B2DDA9848B7897469171B29020C05" }, 9 | { name = "envoy", version = "1.0.2", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "envoy", source = "hex", outer_checksum = "95FD059345AA982E89A0B6E2A3BF1CF43E17A7048DCD85B5B65D3B9E4E39D359" }, 10 | { name = "filepath", version = "1.1.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "filepath", source = "hex", outer_checksum = "67A6D15FB39EEB69DD31F8C145BB5A421790581BD6AA14B33D64D5A55DBD6587" }, 11 | { name = "glance", version = "1.1.0", build_tools = ["gleam"], requirements = ["gleam_stdlib", "glexer"], otp_app = "glance", source = "hex", outer_checksum = "E155BA1A787FD11827048355021C0390D2FE9A518485526F631A9D472858CC6D" }, 12 | { name = "gleam_community_ansi", version = "1.4.1", build_tools = ["gleam"], requirements = ["gleam_community_colour", "gleam_stdlib"], otp_app = "gleam_community_ansi", source = "hex", outer_checksum = "4CD513FC62523053E62ED7BAC2F36136EC17D6A8942728250A9A00A15E340E4B" }, 13 | { name = "gleam_community_colour", version = "1.4.1", build_tools = ["gleam"], requirements = ["gleam_json", "gleam_stdlib"], otp_app = "gleam_community_colour", source = "hex", outer_checksum = "386CB9B01B33371538672EEA8A6375A0A0ADEF41F17C86DDCB81C92AD00DA610" }, 14 | { name = "gleam_crypto", version 
= "1.4.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_crypto", source = "hex", outer_checksum = "8AE56026B3E05EBB1F076778478A762E9EB62B31AEEB4285755452F397029D22" }, 15 | { name = "gleam_erlang", version = "0.33.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_erlang", source = "hex", outer_checksum = "A1D26B80F01901B59AABEE3475DD4C18D27D58FA5C897D922FCB9B099749C064" }, 16 | { name = "gleam_http", version = "3.7.2", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_http", source = "hex", outer_checksum = "8A70D2F70BB7CFEB5DF048A2183FFBA91AF6D4CF5798504841744A16999E33D2" }, 17 | { name = "gleam_httpc", version = "3.0.0", build_tools = ["gleam"], requirements = ["gleam_erlang", "gleam_http", "gleam_stdlib"], otp_app = "gleam_httpc", source = "hex", outer_checksum = "091CDD2BEC8092E82707BEA03FB5205A2BBBDE4A2F551E3C069E13B8BC0C428E" }, 18 | { name = "gleam_json", version = "2.1.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_json", source = "hex", outer_checksum = "0A57FB5666E695FD2BEE74C0428A98B0FC11A395D2C7B4CDF5E22C5DD32C74C6" }, 19 | { name = "gleam_otp", version = "0.16.0", build_tools = ["gleam"], requirements = ["gleam_erlang", "gleam_stdlib"], otp_app = "gleam_otp", source = "hex", outer_checksum = "FA0EB761339749B4E82D63016C6A18C4E6662DA05BAB6F1346F9AF2E679E301A" }, 20 | { name = "gleam_regexp", version = "1.0.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleam_regexp", source = "hex", outer_checksum = "A3655FDD288571E90EE9C4009B719FEF59FA16AFCDF3952A76A125AF23CF1592" }, 21 | { name = "gleam_stdlib", version = "0.47.0", build_tools = ["gleam"], requirements = [], otp_app = "gleam_stdlib", source = "hex", outer_checksum = "3B22D46743C46498C8355365243327AC731ECD3959216344FA9CF9AD348620AC" }, 22 | { name = "glearray", version = "1.0.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "glearray", source = "hex", outer_checksum = "B99767A9BC63EF9CC8809F66C7276042E5EFEACAA5B25188B552D3691B91AC6D" }, 23 | { name = "gleeunit", version = "1.2.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleeunit", source = "hex", outer_checksum = "F7A7228925D3EE7D0813C922E062BFD6D7E9310F0BEE585D3A42F3307E3CFD13" }, 24 | { name = "glexer", version = "2.0.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "glexer", source = "hex", outer_checksum = "25E87F25706749E40C3CDC72D2E52AEA12260B23D14FD9E09A1B524EF393485E" }, 25 | { name = "glisten", version = "6.0.0", build_tools = ["gleam"], requirements = ["gleam_erlang", "gleam_otp", "gleam_stdlib", "logging", "telemetry"], otp_app = "glisten", source = "hex", outer_checksum = "912132751031473CB38F454120124FFC96AF6B0EA33D92C9C90DB16327A2A972" }, 26 | { name = "gramps", version = "2.0.3", build_tools = ["gleam"], requirements = ["gleam_crypto", "gleam_erlang", "gleam_http", "gleam_stdlib"], otp_app = "gramps", source = "hex", outer_checksum = "3CCAA6E081225180D95C79679D383BBF51C8D1FDC1B84DA1DA444F628C373793" }, 27 | { name = "hpack_erl", version = "0.3.0", build_tools = ["rebar3"], requirements = [], otp_app = "hpack", source = "hex", outer_checksum = "D6137D7079169D8C485C6962DFE261AF5B9EF60FBC557344511C1E65E3D95FB0" }, 28 | { name = "justin", version = "1.0.1", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "justin", source = "hex", outer_checksum = "7FA0C6DB78640C6DC5FBFD59BF3456009F3F8B485BF6825E97E1EB44E9A1E2CD" }, 29 | { name = 
"justin_fork", version = "1.0.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], source = "local", path = "vendor/justin_fork" }, 30 | { name = "logging", version = "1.3.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "logging", source = "hex", outer_checksum = "1098FBF10B54B44C2C7FDF0B01C1253CAFACDACABEFB4B0D027803246753E06D" }, 31 | { name = "mist", version = "3.0.0", build_tools = ["gleam"], requirements = ["birl", "gleam_erlang", "gleam_http", "gleam_otp", "gleam_stdlib", "glisten", "gramps", "hpack_erl", "logging"], otp_app = "mist", source = "hex", outer_checksum = "CDA1A74E768419235E16886463EC4722EFF4AB3F8D820A76EAD45D7C167D7282" }, 32 | { name = "ranger", version = "1.3.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "ranger", source = "hex", outer_checksum = "B8F3AFF23A3A5B5D9526B8D18E7C43A7DFD3902B151B97EC65397FE29192B695" }, 33 | { name = "rank", version = "1.0.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "rank", source = "hex", outer_checksum = "5660E361F0E49CBB714CC57CC4C89C63415D8986F05B2DA0C719D5642FAD91C9" }, 34 | { name = "repeatedly", version = "2.1.2", build_tools = ["gleam"], requirements = [], otp_app = "repeatedly", source = "hex", outer_checksum = "93AE1938DDE0DC0F7034F32C1BF0D4E89ACEBA82198A1FE21F604E849DA5F589" }, 35 | { name = "simplifile", version = "2.2.0", build_tools = ["gleam"], requirements = ["filepath", "gleam_stdlib"], otp_app = "simplifile", source = "hex", outer_checksum = "0DFABEF7DC7A9E2FF4BB27B108034E60C81BEBFCB7AB816B9E7E18ED4503ACD8" }, 36 | { name = "spinner", version = "1.3.0", build_tools = ["gleam"], requirements = ["gleam_community_ansi", "gleam_stdlib", "glearray", "repeatedly"], otp_app = "spinner", source = "hex", outer_checksum = "B824C4CFDA6AC912D14365BF365F2A52C4DA63EF2D768D2A1C46D9BF7AF669E7" }, 37 | { name = "telemetry", version = "1.3.0", build_tools = ["rebar3"], requirements = [], otp_app = "telemetry", source = "hex", outer_checksum = "7015FC8919DBE63764F4B4B87A95B7C0996BD539E0D499BE6EC9D7F3875B79E6" }, 38 | { name = "trie_again", version = "1.1.2", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "trie_again", source = "hex", outer_checksum = "5B19176F52B1BD98831B57FDC97BD1F88C8A403D6D8C63471407E78598E27184" }, 39 | ] 40 | 41 | [requirements] 42 | birdie = { version = ">= 1.1.8 and < 2.0.0" } 43 | envoy = { version = ">= 1.0.2 and < 2.0.0" } 44 | filepath = { version = ">= 1.0.0 and < 2.0.0" } 45 | gleam_community_ansi = { version = ">= 1.4.0 and < 2.0.0" } 46 | gleam_erlang = { version = ">= 0.25.0 and < 1.0.0" } 47 | gleam_http = { version = ">= 3.6.0 and < 4.0.0" } 48 | gleam_httpc = { version = ">= 2.2.0 and < 4.0.0" } 49 | gleam_json = { version = ">= 1.0.0 and < 3.0.0" } 50 | gleam_otp = { version = ">= 0.10.0 and < 1.0.0" } 51 | gleam_regexp = { version = ">= 1.0.0 and < 2.0.0" } 52 | gleam_stdlib = { version = ">= 0.47.0 and < 2.0.0" } 53 | gleeunit = { version = ">= 1.0.0 and < 2.0.0" } 54 | justin_fork = { path = "./vendor/justin_fork" } 55 | mist = { version = ">= 1.2.0 and < 4.0.0" } 56 | simplifile = { version = ">= 2.0.1 and < 3.0.0" } 57 | spinner = { version = ">= 1.1.0 and < 2.0.0" } 58 | -------------------------------------------------------------------------------- /src/chrobot/install.gleam: -------------------------------------------------------------------------------- 1 | //// This module provides basic browser installation functionality, allowing you 2 | //// to install a local version of [Google Chrome 
for Testing](https://github.com/GoogleChromeLabs/chrome-for-testing) in the current directory on macOS and Linux. 3 | //// 4 | //// ## Usage 5 | //// 6 | //// You may run browser installation directly with 7 | //// 8 | //// ```sh 9 | //// gleam run -m chrobot/install 10 | //// ``` 11 | //// When running directly, you can configure the browser version to install by setting the `CHROBOT_TARGET_VERSION` environment variable, 12 | //// it will default to `latest`. 13 | //// You may also set the directory to install under, with `CHROBOT_TARGET_PATH`. 14 | //// 15 | //// The browser will be installed into a directory called `chrome` under the target directory. 16 | //// There is no support for managing multiple browser installations, if an installation is already present for the same version, 17 | //// the script will overwrite it. 18 | //// 19 | //// To uninstall browsers installed by this tool just remove the `chrome` directory created by it, or delete an individual browser 20 | //// installation from inside it. 21 | //// 22 | //// ## Caveats 23 | //// 24 | //// This module attempts to rudimentarily mimic the functionality of the [puppeteer install script](https://pptr.dev/browsers-api), 25 | //// the only goal is to have a quick and convenient way to install browsers locally, for more advanced management of browser 26 | //// installations, please seek out other tools. 27 | //// 28 | //// Supported platforms are limited by what the Google Chrome for Testing distribution supports, which is currently: 29 | //// 30 | //// * linux64 31 | //// * mac-arm64 32 | //// * mac-x64 33 | //// * win32 34 | //// * win64 35 | //// 36 | //// Notably, this distribution **unfortunately does not support ARM64 on Linux**. 37 | //// 38 | //// ### Linux Dependencies 39 | //// 40 | //// The tool does **not** install dependencies on Linux, you must install them yourself. 
41 | //// 42 | //// On debian / ubuntu based systems you may install dependencies with the following command: 43 | //// 44 | //// ```sh 45 | //// sudo apt-get update && sudo apt-get install -y \ 46 | //// ca-certificates \ 47 | //// fonts-liberation \ 48 | //// libasound2 \ 49 | //// libatk-bridge2.0-0 \ 50 | //// libatk1.0-0 \ 51 | //// libc6 \ 52 | //// libcairo2 \ 53 | //// libcups2 \ 54 | //// libdbus-1-3 \ 55 | //// libexpat1 \ 56 | //// libfontconfig1 \ 57 | //// libgbm1 \ 58 | //// libgcc1 \ 59 | //// libglib2.0-0 \ 60 | //// libgtk-3-0 \ 61 | //// libnspr4 \ 62 | //// libnss3 \ 63 | //// libpango-1.0-0 \ 64 | //// libpangocairo-1.0-0 \ 65 | //// libstdc++6 \ 66 | //// libx11-6 \ 67 | //// libx11-xcb1 \ 68 | //// libxcb1 \ 69 | //// libxcomposite1 \ 70 | //// libxcursor1 \ 71 | //// libxdamage1 \ 72 | //// libxext6 \ 73 | //// libxfixes3 \ 74 | //// libxi6 \ 75 | //// libxrandr2 \ 76 | //// libxrender1 \ 77 | //// libxss1 \ 78 | //// libxtst6 \ 79 | //// lsb-release \ 80 | //// wget \ 81 | //// xdg-utils 82 | //// ``` 83 | 84 | import chrobot/chrome 85 | import chrobot/internal/utils 86 | import envoy 87 | import filepath as path 88 | import gleam/dynamic 89 | import gleam/erlang/os 90 | import gleam/http/request 91 | import gleam/http/response 92 | import gleam/httpc 93 | import gleam/io 94 | import gleam/json 95 | import gleam/list 96 | import gleam/result 97 | import gleam/string 98 | import simplifile as file 99 | 100 | const version_list_endpoint = "https://googlechromelabs.github.io/chrome-for-testing/known-good-versions-with-downloads.json" 101 | 102 | pub type InstallationError { 103 | InstallationError 104 | } 105 | 106 | pub fn main() { 107 | install() 108 | } 109 | 110 | /// Install a local version of Google Chrome for Testing. 111 | /// This function is meant to be called in a script, it will log errors and warnings to the console. 112 | /// and return a generic error if installation fails. 113 | pub fn install() { 114 | let target_version = 115 | result.unwrap(envoy.get("CHROBOT_TARGET_VERSION"), "latest") 116 | let target_path = result.unwrap(envoy.get("CHROBOT_TARGET_PATH"), ".") 117 | install_with_config(target_path, target_version) 118 | } 119 | 120 | /// Install a specific local version of Google Chrome for Testing to a specific directory. 121 | /// This function is meant to be called in a script, it will log errors and warnings to the console. 122 | /// and return a generic error if installation fails. 
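/// 
/// A minimal usage sketch (the `"./browsers"` target directory below is an example value, 
/// not a default): 
/// 
/// ```gleam 
/// import chrobot/install 
/// import gleam/io 
/// 
/// pub fn main() { 
///   // Installs into ./browsers/chrome/<platform>-<version>/ and returns the executable path 
///   let assert Ok(executable) = 
///     install.install_with_config(to: "./browsers", version: "latest") 
///   io.println("Chrome executable: " <> executable) 
/// } 
/// ``` 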
123 | pub fn install_with_config( 124 | to target_path: String, 125 | version target_version: String, 126 | ) { 127 | let chrome_dir_path = path.join(target_path, "chrome") 128 | let chrome_dir_path = case chrome_dir_path { 129 | "./chrome" -> "chrome" 130 | other -> other 131 | } 132 | io.println( 133 | "\nPreparing to install Chrome for Testing (" 134 | <> target_version 135 | <> ") into " 136 | <> chrome_dir_path 137 | <> ".\n", 138 | ) 139 | 140 | // Existing version sanity check 141 | case chrome.get_local_chrome_path_at(chrome_dir_path) { 142 | Ok(local_chrome_path) -> { 143 | utils.warn( 144 | "You already have a local Chrome installation at this path:\n" 145 | <> local_chrome_path 146 | <> " 147 | Chrobot does not support managing multiple browser installations, 148 | you are encouraged to remove old installations manually if you no longer need them.", 149 | ) 150 | } 151 | Error(_) -> Nil 152 | } 153 | 154 | use platform <- assert_ok(resolve_platform(), "Platform unsupported") 155 | let p = utils.start_progress("Fetching available versions...") 156 | use req <- assert_ok( 157 | request.to(version_list_endpoint), 158 | "Failed to build version request", 159 | ) 160 | use res <- assert_ok( 161 | httpc.send(req), 162 | "Version list request failed, ensure you have an active internet connection.", 163 | ) 164 | use <- assert_true( 165 | res.status == 200, 166 | "Version list request returned a non-200 status code.", 167 | ) 168 | 169 | use <- assert_is_json( 170 | res, 171 | "Version list request returned a response that is not JSON.", 172 | ) 173 | 174 | use payload <- assert_ok( 175 | json.decode(res.body, dynamic.dynamic), 176 | "Failed to parse version list JSON", 177 | ) 178 | 179 | use version_list <- assert_ok( 180 | parse_version_list(payload), 181 | "Failed to decode version list JSON - Maybe the API has changed or is down?", 182 | ) 183 | 184 | use version <- assert_ok( 185 | select_version(target_version, version_list), 186 | "Failed to find version " <> target_version <> " in version list", 187 | ) 188 | 189 | use download <- assert_ok( 190 | select_download(version, platform), 191 | "Failed to find download for platform " 192 | <> platform 193 | <> " in version " 194 | <> version.version, 195 | ) 196 | 197 | utils.stop_progress(p) 198 | 199 | io.println( 200 | "\nSelected version " 201 | <> version.version 202 | <> " for platform " 203 | <> download.platform 204 | <> "\n", 205 | ) 206 | 207 | let p = utils.start_progress("Downloading Chrome for Testing...") 208 | 209 | use download_request <- assert_ok( 210 | new_download_request(download.url), 211 | "Failed to build download request", 212 | ) 213 | use download_res <- assert_ok( 214 | httpc.send_bits(download_request), 215 | "Download request failed, ensure you have an active internet connection", 216 | ) 217 | use <- assert_true( 218 | download_res.status == 200, 219 | "Download request returned a non-200 status code", 220 | ) 221 | 222 | utils.set_progress(p, "Writing download to disk...") 223 | 224 | let download_path = 225 | path.join( 226 | chrome_dir_path, 227 | "chrome_download_" <> download.platform <> version.revision <> ".zip", 228 | ) 229 | 230 | let installation_dir = 231 | path.join(chrome_dir_path, platform <> "-" <> version.version) 232 | 233 | use _ <- assert_ok( 234 | file.create_directory_all(installation_dir), 235 | "Failed to create directory", 236 | ) 237 | 238 | use _ <- assert_ok( 239 | file.write_bits(download_res.body, to: download_path), 240 | "Failed to write download to disk", 241 | ) 242 | 
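  // The archive has been written to disk at this point; the steps below extract it into the 
  // per-version installation directory and then remove the downloaded .zip file. 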
243 | utils.set_progress(p, "Extracting download...") 244 | 245 | use _ <- assert_ok( 246 | unzip(download_path, installation_dir), 247 | "Failed to extract downloaded .zip archive", 248 | ) 249 | 250 | use _ <- assert_ok( 251 | file.delete(download_path), 252 | "Failed to remove downloaded .zip archive! The installation should otherwise have succeeded.", 253 | ) 254 | 255 | // Find the executable binary 256 | use haystack <- assert_ok( 257 | file.get_files(installation_dir), 258 | "Failed to scan installation directory for executable", 259 | ) 260 | 261 | use executable <- assert_ok( 262 | list.find(haystack, fn(file) { 263 | chrome.is_local_chrome_path(file, os.family()) 264 | }), 265 | "Failed to find executable in installation directory", 266 | ) 267 | 268 | utils.stop_progress(p) 269 | 270 | case os.family() { 271 | os.Linux -> { 272 | utils.hint( 273 | "You can run the following command to check wich depencies are missing on your system:", 274 | ) 275 | utils.show_cmd("ldd \"" <> executable <> "\" | grep not") 276 | } 277 | _ -> Nil 278 | } 279 | 280 | utils.info( 281 | "Chrome for Testing (" 282 | <> version.version 283 | <> ") installed successfully! The executable is located at:\n" 284 | <> executable 285 | <> "\n" 286 | <> "When using the `launch` command, chrobot should automatically use this local installation.", 287 | ) 288 | 289 | Ok(executable) 290 | } 291 | 292 | type VersionItem { 293 | VersionItem(version: String, revision: String, downloads: List(DownloadItem)) 294 | } 295 | 296 | type DownloadItem { 297 | DownloadItem(platform: String, url: String) 298 | } 299 | 300 | fn select_version( 301 | target: String, 302 | version_list: List(VersionItem), 303 | ) -> Result(VersionItem, Nil) { 304 | case target { 305 | "latest" -> { 306 | list.last(version_list) 307 | } 308 | _ -> { 309 | case string.contains(target, ".") { 310 | // Try for exact match 311 | True -> { 312 | list.find(version_list, fn(item) { item.version == target }) 313 | } 314 | False -> { 315 | // Try to find first major version matching the target 316 | list.reverse(version_list) 317 | |> list.find(fn(item) { 318 | case string.split(item.version, ".") { 319 | [major, ..] 
if major == target -> True 320 | _ -> False 321 | } 322 | }) 323 | } 324 | } 325 | } 326 | } 327 | } 328 | 329 | fn select_download(version: VersionItem, platform: String) { 330 | list.find(version.downloads, fn(item) { item.platform == platform }) 331 | } 332 | 333 | fn parse_version_list(input: dynamic.Dynamic) { 334 | let download_item_decoder = 335 | dynamic.decode2( 336 | DownloadItem, 337 | dynamic.field("platform", dynamic.string), 338 | dynamic.field("url", dynamic.string), 339 | ) 340 | let download_list_item_decoder = fn(list_item: dynamic.Dynamic) { 341 | dynamic.field("chrome", dynamic.list(download_item_decoder))(list_item) 342 | } 343 | let version_item_decoder = 344 | dynamic.decode3( 345 | VersionItem, 346 | dynamic.field("version", dynamic.string), 347 | dynamic.field("revision", dynamic.string), 348 | dynamic.field("downloads", download_list_item_decoder), 349 | ) 350 | 351 | dynamic.field("versions", dynamic.list(version_item_decoder))(input) 352 | } 353 | 354 | fn resolve_platform() -> Result(String, String) { 355 | case os.family(), get_arch() { 356 | os.Darwin, "aarch64" <> _ -> { 357 | Ok("mac-arm64") 358 | } 359 | os.Darwin, _ -> { 360 | Ok("mac-x64") 361 | } 362 | os.Linux, "x86_64" <> _ -> { 363 | io.println("") 364 | utils.warn( 365 | "You appear to be on linux, just to let you know, dependencies are not installed automatically by this script, 366 | you must install them yourself! Please check the docs of the install module for further information.", 367 | ) 368 | Ok("linux64") 369 | } 370 | os.WindowsNt, "x86_64" <> _ -> { 371 | Ok("win64") 372 | } 373 | os.WindowsNt, _ -> { 374 | utils.warn( 375 | "The installer thinks you are on a 32-bit Windows system and is installing 32-bit Chrome, 376 | this is unusual, please verify this is correct", 377 | ) 378 | Ok("win32") 379 | } 380 | _, architecture -> { 381 | utils.err("Could not resolve an appropriate platform for your system. 382 | Please note that the available platforms are limited by what Google Chrome for Testing supports, 383 | notably, ARM64 on Linux is unfortunately not supported at the moment. 
384 | Your architecture is: " <> architecture <> ".") 385 | Error("Unsupported system: " <> architecture) 386 | } 387 | } 388 | } 389 | 390 | fn assert_is_json( 391 | res, 392 | human_error: String, 393 | apply fun: fn() -> Result(b, InstallationError), 394 | ) { 395 | case response.get_header(res, "content-type") { 396 | Ok("application/json") -> fun() 397 | Ok("application/json" <> _) -> fun() 398 | _ -> { 399 | io.println("") 400 | utils.err(human_error) 401 | Error(InstallationError) 402 | } 403 | } 404 | } 405 | 406 | fn assert_ok( 407 | result: Result(a, e), 408 | human_error: String, 409 | apply fun: fn(a) -> Result(b, InstallationError), 410 | ) -> Result(b, InstallationError) { 411 | case result { 412 | Ok(x) -> fun(x) 413 | Error(err) -> { 414 | io.println("") 415 | utils.err(human_error) 416 | io.debug(err) 417 | Error(InstallationError) 418 | } 419 | } 420 | } 421 | 422 | fn assert_true( 423 | condition: Bool, 424 | human_error: String, 425 | apply fun: fn() -> Result(a, InstallationError), 426 | ) -> Result(a, InstallationError) { 427 | case condition { 428 | True -> fun() 429 | False -> { 430 | io.println("") 431 | utils.err(human_error) 432 | Error(InstallationError) 433 | } 434 | } 435 | } 436 | 437 | /// Attempt unzip of the downloaded file 438 | /// Notes: 439 | /// The erlang standard library unzip function, does not restore file permissions, and 440 | /// chrome consists of a bunch of executables, setting them all to executable 441 | /// manually is a bit annoying. 442 | /// Therefore, we try to use the system unzip command via a shell instead, 443 | /// and only fall back to the erlang unzip if that fails. 444 | fn unzip(from: String, to: String) { 445 | run_command("unzip -q " <> from <> " -d " <> to) 446 | use installation_dir_entries <- result.try( 447 | file.read_directory(to) 448 | |> result.replace_error(Nil), 449 | ) 450 | 451 | let was_extracted = 452 | list.map(installation_dir_entries, fn(i) { 453 | file.is_directory(path.join(to, i)) 454 | }) 455 | |> list.any(fn(check) { 456 | case check { 457 | Ok(True) -> True 458 | _ -> False 459 | } 460 | }) 461 | 462 | case was_extracted { 463 | True -> Ok(Nil) 464 | False -> { 465 | // In this fallback method we extract the zip using erlang unzip, and then set the executable bit on all files 466 | // As you can imagine, this is not ideal, and may cause issues, therefore we warn the user. 467 | utils.warn( 468 | "Failed to extract downloaded .zip archive using system unzip command, falling back to erlang unzip. 
469 | You might run into permission issues when attempting to run the installed binary, this is not ideal!", 470 | ) 471 | use _ <- result.try(erl_unzip(from, to)) 472 | use installation_files <- result.try( 473 | file.get_files(to) 474 | |> result.replace_error(Nil), 475 | ) 476 | list.each(installation_files, fn(i) { 477 | case file.is_file(i) { 478 | Ok(True) -> { 479 | let _ = set_executable(i) 480 | Nil 481 | } 482 | _ -> { 483 | Nil 484 | } 485 | } 486 | }) 487 | Ok(Nil) 488 | } 489 | } 490 | } 491 | 492 | fn new_download_request(url: String) { 493 | use base_req <- result.try(request.to(url)) 494 | Ok(request.set_body(base_req, <<>>)) 495 | } 496 | 497 | @external(erlang, "chrobot_ffi", "get_arch") 498 | fn get_arch() -> String 499 | 500 | @external(erlang, "chrobot_ffi", "unzip") 501 | fn erl_unzip(from: String, to: String) -> Result(Nil, Nil) 502 | 503 | @external(erlang, "chrobot_ffi", "run_command") 504 | fn run_command(command: String) -> String 505 | 506 | @external(erlang, "chrobot_ffi", "set_executable") 507 | fn set_executable(file: String) -> Result(Nil, Nil) 508 | -------------------------------------------------------------------------------- /src/chrobot/internal/utils.gleam: -------------------------------------------------------------------------------- 1 | import envoy 2 | import gleam/erlang/process.{type CallError, type Subject} as p 3 | import gleam/io 4 | import gleam/json 5 | import gleam/option.{type Option, None, Some} 6 | import gleam/string 7 | import gleam_community/ansi 8 | import spinner 9 | 10 | /// Very very naive but should be fine 11 | fn term_supports_color() -> Bool { 12 | case envoy.get("TERM") { 13 | Ok("dumb") -> False 14 | _ -> True 15 | } 16 | } 17 | 18 | pub fn add_optional( 19 | prop_encoders: List(#(String, json.Json)), 20 | value: option.Option(a), 21 | callback: fn(a) -> #(String, json.Json), 22 | ) { 23 | case value { 24 | option.Some(a) -> [callback(a), ..prop_encoders] 25 | option.None -> prop_encoders 26 | } 27 | } 28 | 29 | pub fn alert_encode_dynamic(input_value) { 30 | warn( 31 | "You passed a dymamic value to a protocol encoder! 32 | Dynamic values cannot be encoded, the value will be set to null instead. 33 | This is unlikely to be intentional, you should fix that part of your code.", 34 | ) 35 | io.println("The value was: " <> string.inspect(input_value)) 36 | json.null() 37 | } 38 | 39 | fn align(content: String) { 40 | string.replace(content, "\n", "\n ") 41 | } 42 | 43 | pub fn err(content: String) { 44 | case term_supports_color() { 45 | True -> { 46 | { 47 | "[-_-] ERR! " 48 | |> ansi.bg_red() 49 | |> ansi.white() 50 | |> ansi.bold() 51 | <> " " 52 | <> align(content) 53 | |> ansi.red() 54 | } 55 | |> io.println() 56 | } 57 | False -> { 58 | io.println("[-_-] ERR! " <> content) 59 | } 60 | } 61 | } 62 | 63 | pub fn warn(content: String) { 64 | case term_supports_color() { 65 | True -> { 66 | { 67 | "[O_O] HEY! " 68 | |> ansi.bg_yellow() 69 | |> ansi.black() 70 | |> ansi.bold() 71 | <> " " 72 | <> align(content) 73 | |> ansi.yellow() 74 | } 75 | |> io.println() 76 | } 77 | False -> { 78 | io.println("[O_O] HEY! 
" <> content) 79 | } 80 | } 81 | } 82 | 83 | pub fn hint(content: String) { 84 | case term_supports_color() { 85 | True -> { 86 | { 87 | "[>‿0] HINT " 88 | |> ansi.bg_cyan() 89 | |> ansi.black() 90 | |> ansi.bold() 91 | <> " " 92 | <> align(content) 93 | |> ansi.cyan() 94 | } 95 | |> io.println() 96 | } 97 | False -> { 98 | io.println("[>‿0] HINT " <> content) 99 | } 100 | } 101 | } 102 | 103 | pub fn info(content: String) { 104 | case term_supports_color() { 105 | True -> { 106 | { 107 | "[0‿0] INFO " 108 | |> ansi.bg_white() 109 | |> ansi.black() 110 | |> ansi.bold() 111 | <> " " 112 | <> align(content) 113 | |> ansi.white() 114 | } 115 | |> io.println() 116 | } 117 | False -> { 118 | io.println("[0‿0] INFO " <> content) 119 | } 120 | } 121 | } 122 | 123 | pub fn start_progress(text: String) -> Option(spinner.Spinner) { 124 | case term_supports_color() { 125 | True -> { 126 | let spinner = 127 | spinner.new(text) 128 | |> spinner.with_colour(ansi.blue) 129 | |> spinner.start() 130 | Some(spinner) 131 | } 132 | False -> { 133 | io.println("Progress: " <> text) 134 | None 135 | } 136 | } 137 | } 138 | 139 | pub fn set_progress(spinner: Option(spinner.Spinner), text: String) -> Nil { 140 | case spinner { 141 | Some(spinner) -> spinner.set_text(spinner, text) 142 | None -> { 143 | io.println("Progress: " <> text) 144 | Nil 145 | } 146 | } 147 | } 148 | 149 | pub fn stop_progress(spinner: Option(spinner.Spinner)) -> Nil { 150 | case spinner { 151 | Some(spinner) -> spinner.stop(spinner) 152 | None -> Nil 153 | } 154 | } 155 | 156 | pub fn show_cmd(content: String) { 157 | case term_supports_color() { 158 | True -> { 159 | { "\n " <> ansi.dim("$") <> " " <> ansi.bold(content) <> "\n" } 160 | |> io.println() 161 | } 162 | False -> { 163 | io.println("\n $ " <> content <> "\n") 164 | } 165 | } 166 | } 167 | 168 | pub fn try_call_with_subject( 169 | subject: Subject(request), 170 | make_request: fn(Subject(response)) -> request, 171 | reply_subject: Subject(response), 172 | within timeout: Int, 173 | ) -> Result(response, CallError(response)) { 174 | // Monitor the callee process so we can tell if it goes down (meaning we 175 | // won't get a reply) 176 | let monitor = p.monitor_process(p.subject_owner(subject)) 177 | 178 | // Send the request to the process over the channel 179 | p.send(subject, make_request(reply_subject)) 180 | 181 | // Await a reply or handle failure modes (timeout, process down, etc) 182 | let result = 183 | p.new_selector() 184 | |> p.selecting(reply_subject, Ok) 185 | |> p.selecting_process_down(monitor, fn(down: p.ProcessDown) { 186 | Error(p.CalleeDown(reason: down.reason)) 187 | }) 188 | |> p.select(timeout) 189 | 190 | // Demonitor the process and close the channels as we're done 191 | p.demonitor_process(monitor) 192 | 193 | // Prepare an appropriate error (if present) for the caller 194 | case result { 195 | Error(Nil) -> Error(p.CallTimeout) 196 | Ok(res) -> res 197 | } 198 | } 199 | 200 | @external(erlang, "chrobot_ffi", "get_time_ms") 201 | pub fn get_time_ms() -> Int 202 | -------------------------------------------------------------------------------- /src/chrobot/protocol.gleam: -------------------------------------------------------------------------------- 1 | //// > ⚙️ This module was generated from the Chrome DevTools Protocol version **1.3** 2 | //// For reference: [See the DevTools Protocol API Docs](https://chromedevtools.github.io/devtools-protocol/1-3/) 3 | //// 4 | //// This is the protocol definition entrypoint, it contains an overview of the 
protocol structure, 5 | //// and a function to retrieve the version of the protocol used to generate the current bindings. 6 | //// The protocol version is also displayed in the box above, which appears on every generated module. 7 | //// 8 | //// ## ⚠️ Really Important Notes 9 | //// 10 | //// 1) It's best never to work with the DOM domain for automation, 11 | //// [an explanation of why can be found here](https://github.com/puppeteer/puppeteer/pull/71#issuecomment-314599749). 12 | //// Instead, to automate DOM interaction, JavaScript can be injected using the Runtime domain. 13 | //// 14 | //// 2) Unfortunately, I haven't found a good way to map dynamic properties to gleam attributes bidirectionally. 15 | //// **This means all dynamic values you supply to commands will be silently dropped**! 16 | //// It's important to realize this to avoid confusion; for example, in `runtime.call_function_on` 17 | //// you may want to supply arguments which can be any value, but it won't work. 18 | //// The only path to do that, as far as I can tell, is to write the protocol call logic yourself, 19 | //// perhaps taking the codegen code as a basis. 20 | //// Check the `call_custom_function_on` function from `chrobot`, which does this for the mentioned function. 21 | //// 22 | //// ## Structure 23 | //// 24 | //// Each domain in the protocol is represented as a module under `protocol/`. 25 | //// 26 | //// In general, the bindings are generated through codegen, directly from the JSON protocol schema [published here](https://github.com/ChromeDevTools/devtools-protocol); 27 | //// however, some small adjustments needed to be made to make the protocol schema usable, mainly due to 28 | //// what I believe are minor bugs in the protocol. 29 | //// To see these changes, check the `apply_protocol_patches` function in `chrobot/internal/generate_bindings`. 30 | //// 31 | //// Domains may depend on the types of other domains; these dependencies are mirrored in the generated bindings where possible. 32 | //// In some cases, type references to other modules have been replaced by the respective inner type, because the references would 33 | //// create a circular dependency. 34 | //// 35 | //// ## Types 36 | //// 37 | //// The generated bindings include a mirror of the type definitions of each type in the protocol spec, 38 | //// along with an `encode__` function to encode the type into JSON in order to send it to the browser 39 | //// and a `decode__` function to decode the type out of a payload sent from the browser. Encoders and 40 | //// decoders are marked internal and should be used through the command functions described below. 41 | //// 42 | //// Notes: 43 | //// - Some object properties in the protocol have the type `any`; in this case the value is treated as dynamic 44 | //// by decoders, and encoders will not encode it, setting it to `null` in the payload instead 45 | //// - Object types that don't specify any properties are treated as a `Dict(String,String)` 46 | //// 47 | //// Additional type definitions and encoders / decoders are generated 48 | //// for any enumerable property in the protocol, as well as for the return values of commands. 49 | //// These special type definitions are marked with a comment to indicate 50 | //// that they are not part of the protocol spec, but rather generated dynamically to support the bindings. 51 | //// 52 | //// 53 | //// ## Commands 54 | //// 55 | //// A function is generated for each command, named after the command (in snake case). 
56 | //// The function handles both encoding the parameters to sent to the browser via the protocol, and decoding the response. 57 | //// A `ProtocolError` error is returned if the decoding fails, this would mean there is a bug in the protocol 58 | //// or the generated bindings. 59 | //// 60 | //// The first parameter to the command function is always a `callback` of the form 61 | //// 62 | //// ```gleam 63 | //// fn(method: String, parameters: Option(Json)) -> Result(Dynamic, RequestError) 64 | //// ``` 65 | //// 66 | //// By using this callback you can take advantage of the generated protocol encoders/decoders 67 | //// while also passing in your browser subject to direct the command to, and passing along additional 68 | //// arguments, like the `sessionId` which is required for some operations. 69 | //// 70 | //// 71 | //// ## Events 72 | //// 73 | //// Events are not implemented yet! 74 | //// 75 | //// 76 | //// 77 | 78 | // --------------------------------------------------------------------------- 79 | // | !!!!!! This is an autogenerated file - Do not edit manually !!!!!! | 80 | // | Run `codegen.sh` to regenerate. | 81 | // --------------------------------------------------------------------------- 82 | 83 | const version_major = "1" 84 | 85 | const version_minor = "3" 86 | 87 | /// Get the protocol version as a tuple of major and minor version 88 | pub fn version() { 89 | #(version_major, version_minor) 90 | } 91 | -------------------------------------------------------------------------------- /src/chrobot/protocol/browser.gleam: -------------------------------------------------------------------------------- 1 | //// > ⚙️ This module was generated from the Chrome DevTools Protocol version **1.3** 2 | //// ## Browser Domain 3 | //// 4 | //// The Browser domain defines methods and events for browser managing. 5 | //// 6 | //// [📖 View this domain on the DevTools Protocol API Docs](https://chromedevtools.github.io/devtools-protocol/1-3/Browser/) 7 | 8 | // --------------------------------------------------------------------------- 9 | // | !!!!!! This is an autogenerated file - Do not edit manually !!!!!! | 10 | // | Run `codegen.sh` to regenerate. | 11 | // --------------------------------------------------------------------------- 12 | 13 | import chrobot/chrome 14 | import chrobot/internal/utils 15 | import gleam/dynamic 16 | import gleam/json 17 | import gleam/option 18 | import gleam/result 19 | 20 | /// This type is not part of the protocol spec, it has been generated dynamically 21 | /// to represent the response to the command `get_version` 22 | pub type GetVersionResponse { 23 | GetVersionResponse( 24 | /// Protocol version. 25 | protocol_version: String, 26 | /// Product name. 27 | product: String, 28 | /// Product revision. 29 | revision: String, 30 | /// User-Agent. 31 | user_agent: String, 32 | /// V8 version. 
33 | js_version: String, 34 | ) 35 | } 36 | 37 | @internal 38 | pub fn decode__get_version_response(value__: dynamic.Dynamic) { 39 | use protocol_version <- result.try(dynamic.field( 40 | "protocolVersion", 41 | dynamic.string, 42 | )(value__)) 43 | use product <- result.try(dynamic.field("product", dynamic.string)(value__)) 44 | use revision <- result.try(dynamic.field("revision", dynamic.string)(value__)) 45 | use user_agent <- result.try(dynamic.field("userAgent", dynamic.string)( 46 | value__, 47 | )) 48 | use js_version <- result.try(dynamic.field("jsVersion", dynamic.string)( 49 | value__, 50 | )) 51 | 52 | Ok(GetVersionResponse( 53 | protocol_version: protocol_version, 54 | product: product, 55 | revision: revision, 56 | user_agent: user_agent, 57 | js_version: js_version, 58 | )) 59 | } 60 | 61 | /// Reset all permission management for all origins. 62 | /// 63 | /// Parameters: 64 | /// - `browser_context_id` : BrowserContext to reset permissions. When omitted, default browser context is used. 65 | /// 66 | /// Returns: 67 | /// 68 | pub fn reset_permissions( 69 | callback__, 70 | browser_context_id browser_context_id: option.Option(String), 71 | ) { 72 | callback__( 73 | "Browser.resetPermissions", 74 | option.Some(json.object( 75 | [] 76 | |> utils.add_optional(browser_context_id, fn(inner_value__) { 77 | #("browserContextId", json.string(inner_value__)) 78 | }), 79 | )), 80 | ) 81 | } 82 | 83 | /// Close browser gracefully. 84 | /// 85 | pub fn close(callback__) { 86 | callback__("Browser.close", option.None) 87 | } 88 | 89 | /// Returns version information. 90 | /// - `protocol_version` : Protocol version. 91 | /// - `product` : Product name. 92 | /// - `revision` : Product revision. 93 | /// - `user_agent` : User-Agent. 94 | /// - `js_version` : V8 version. 95 | /// 96 | pub fn get_version(callback__) { 97 | use result__ <- result.try(callback__("Browser.getVersion", option.None)) 98 | 99 | decode__get_version_response(result__) 100 | |> result.replace_error(chrome.ProtocolError) 101 | } 102 | 103 | /// Allows a site to use privacy sandbox features that require enrollment 104 | /// without the site actually being enrolled. Only supported on page targets. 105 | /// 106 | /// Parameters: 107 | /// - `url` 108 | /// 109 | /// Returns: 110 | /// 111 | pub fn add_privacy_sandbox_enrollment_override(callback__, url url: String) { 112 | callback__( 113 | "Browser.addPrivacySandboxEnrollmentOverride", 114 | option.Some(json.object([#("url", json.string(url))])), 115 | ) 116 | } 117 | -------------------------------------------------------------------------------- /src/chrobot/protocol/dom_debugger.gleam: -------------------------------------------------------------------------------- 1 | //// > ⚙️ This module was generated from the Chrome DevTools Protocol version **1.3** 2 | //// ## DOMDebugger Domain 3 | //// 4 | //// DOM debugging allows setting breakpoints on particular DOM operations and events. JavaScript 5 | //// execution will stop on these operations as if there was a regular breakpoint set. 6 | //// 7 | //// [📖 View this domain on the DevTools Protocol API Docs](https://chromedevtools.github.io/devtools-protocol/1-3/DOMDebugger/) 8 | 9 | // --------------------------------------------------------------------------- 10 | // | !!!!!! This is an autogenerated file - Do not edit manually !!!!!! | 11 | // | Run `codegen.sh` to regenerate. 
| 12 | // --------------------------------------------------------------------------- 13 | 14 | import chrobot/chrome 15 | import chrobot/internal/utils 16 | import chrobot/protocol/dom 17 | import chrobot/protocol/runtime 18 | import gleam/dynamic 19 | import gleam/json 20 | import gleam/option 21 | import gleam/result 22 | 23 | /// DOM breakpoint type. 24 | pub type DOMBreakpointType { 25 | DOMBreakpointTypeSubtreeModified 26 | DOMBreakpointTypeAttributeModified 27 | DOMBreakpointTypeNodeRemoved 28 | } 29 | 30 | @internal 31 | pub fn encode__dom_breakpoint_type(value__: DOMBreakpointType) { 32 | case value__ { 33 | DOMBreakpointTypeSubtreeModified -> "subtree-modified" 34 | DOMBreakpointTypeAttributeModified -> "attribute-modified" 35 | DOMBreakpointTypeNodeRemoved -> "node-removed" 36 | } 37 | |> json.string() 38 | } 39 | 40 | @internal 41 | pub fn decode__dom_breakpoint_type(value__: dynamic.Dynamic) { 42 | case dynamic.string(value__) { 43 | Ok("subtree-modified") -> Ok(DOMBreakpointTypeSubtreeModified) 44 | Ok("attribute-modified") -> Ok(DOMBreakpointTypeAttributeModified) 45 | Ok("node-removed") -> Ok(DOMBreakpointTypeNodeRemoved) 46 | Error(error) -> Error(error) 47 | Ok(other) -> 48 | Error([ 49 | dynamic.DecodeError( 50 | expected: "valid enum property", 51 | found: other, 52 | path: ["enum decoder"], 53 | ), 54 | ]) 55 | } 56 | } 57 | 58 | /// Object event listener. 59 | pub type EventListener { 60 | EventListener( 61 | /// `EventListener`'s type. 62 | type_: String, 63 | /// `EventListener`'s useCapture. 64 | use_capture: Bool, 65 | /// `EventListener`'s passive flag. 66 | passive: Bool, 67 | /// `EventListener`'s once flag. 68 | once: Bool, 69 | /// Script id of the handler code. 70 | script_id: runtime.ScriptId, 71 | /// Line number in the script (0-based). 72 | line_number: Int, 73 | /// Column number in the script (0-based). 74 | column_number: Int, 75 | /// Event handler function value. 76 | handler: option.Option(runtime.RemoteObject), 77 | /// Event original handler function value. 78 | original_handler: option.Option(runtime.RemoteObject), 79 | /// Node the listener is added to (if any). 
80 | backend_node_id: option.Option(dom.BackendNodeId), 81 | ) 82 | } 83 | 84 | @internal 85 | pub fn encode__event_listener(value__: EventListener) { 86 | json.object( 87 | [ 88 | #("type", json.string(value__.type_)), 89 | #("useCapture", json.bool(value__.use_capture)), 90 | #("passive", json.bool(value__.passive)), 91 | #("once", json.bool(value__.once)), 92 | #("scriptId", runtime.encode__script_id(value__.script_id)), 93 | #("lineNumber", json.int(value__.line_number)), 94 | #("columnNumber", json.int(value__.column_number)), 95 | ] 96 | |> utils.add_optional(value__.handler, fn(inner_value__) { 97 | #("handler", runtime.encode__remote_object(inner_value__)) 98 | }) 99 | |> utils.add_optional(value__.original_handler, fn(inner_value__) { 100 | #("originalHandler", runtime.encode__remote_object(inner_value__)) 101 | }) 102 | |> utils.add_optional(value__.backend_node_id, fn(inner_value__) { 103 | #("backendNodeId", dom.encode__backend_node_id(inner_value__)) 104 | }), 105 | ) 106 | } 107 | 108 | @internal 109 | pub fn decode__event_listener(value__: dynamic.Dynamic) { 110 | use type_ <- result.try(dynamic.field("type", dynamic.string)(value__)) 111 | use use_capture <- result.try(dynamic.field("useCapture", dynamic.bool)( 112 | value__, 113 | )) 114 | use passive <- result.try(dynamic.field("passive", dynamic.bool)(value__)) 115 | use once <- result.try(dynamic.field("once", dynamic.bool)(value__)) 116 | use script_id <- result.try(dynamic.field( 117 | "scriptId", 118 | runtime.decode__script_id, 119 | )(value__)) 120 | use line_number <- result.try(dynamic.field("lineNumber", dynamic.int)( 121 | value__, 122 | )) 123 | use column_number <- result.try(dynamic.field("columnNumber", dynamic.int)( 124 | value__, 125 | )) 126 | use handler <- result.try(dynamic.optional_field( 127 | "handler", 128 | runtime.decode__remote_object, 129 | )(value__)) 130 | use original_handler <- result.try(dynamic.optional_field( 131 | "originalHandler", 132 | runtime.decode__remote_object, 133 | )(value__)) 134 | use backend_node_id <- result.try(dynamic.optional_field( 135 | "backendNodeId", 136 | dom.decode__backend_node_id, 137 | )(value__)) 138 | 139 | Ok(EventListener( 140 | type_: type_, 141 | use_capture: use_capture, 142 | passive: passive, 143 | once: once, 144 | script_id: script_id, 145 | line_number: line_number, 146 | column_number: column_number, 147 | handler: handler, 148 | original_handler: original_handler, 149 | backend_node_id: backend_node_id, 150 | )) 151 | } 152 | 153 | /// This type is not part of the protocol spec, it has been generated dynamically 154 | /// to represent the response to the command `get_event_listeners` 155 | pub type GetEventListenersResponse { 156 | GetEventListenersResponse( 157 | /// Array of relevant listeners. 158 | listeners: List(EventListener), 159 | ) 160 | } 161 | 162 | @internal 163 | pub fn decode__get_event_listeners_response(value__: dynamic.Dynamic) { 164 | use listeners <- result.try(dynamic.field( 165 | "listeners", 166 | dynamic.list(decode__event_listener), 167 | )(value__)) 168 | 169 | Ok(GetEventListenersResponse(listeners: listeners)) 170 | } 171 | 172 | /// Returns event listeners of the given object. 173 | /// 174 | /// Parameters: 175 | /// - `object_id` : Identifier of the object to return listeners for. 176 | /// - `depth` : The maximum depth at which Node children should be retrieved, defaults to 1. Use -1 for the 177 | /// entire subtree or provide an integer larger than 0. 
178 | /// - `pierce` : Whether or not iframes and shadow roots should be traversed when returning the subtree 179 | /// (default is false). Reports listeners for all contexts if pierce is enabled. 180 | /// 181 | /// Returns: 182 | /// - `listeners` : Array of relevant listeners. 183 | /// 184 | pub fn get_event_listeners( 185 | callback__, 186 | object_id object_id: runtime.RemoteObjectId, 187 | depth depth: option.Option(Int), 188 | pierce pierce: option.Option(Bool), 189 | ) { 190 | use result__ <- result.try(callback__( 191 | "DOMDebugger.getEventListeners", 192 | option.Some(json.object( 193 | [#("objectId", runtime.encode__remote_object_id(object_id))] 194 | |> utils.add_optional(depth, fn(inner_value__) { 195 | #("depth", json.int(inner_value__)) 196 | }) 197 | |> utils.add_optional(pierce, fn(inner_value__) { 198 | #("pierce", json.bool(inner_value__)) 199 | }), 200 | )), 201 | )) 202 | 203 | decode__get_event_listeners_response(result__) 204 | |> result.replace_error(chrome.ProtocolError) 205 | } 206 | 207 | /// Removes DOM breakpoint that was set using `setDOMBreakpoint`. 208 | /// 209 | /// Parameters: 210 | /// - `node_id` : Identifier of the node to remove breakpoint from. 211 | /// - `type_` : Type of the breakpoint to remove. 212 | /// 213 | /// Returns: 214 | /// 215 | pub fn remove_dom_breakpoint( 216 | callback__, 217 | node_id node_id: dom.NodeId, 218 | type_ type_: DOMBreakpointType, 219 | ) { 220 | callback__( 221 | "DOMDebugger.removeDOMBreakpoint", 222 | option.Some( 223 | json.object([ 224 | #("nodeId", dom.encode__node_id(node_id)), 225 | #("type", encode__dom_breakpoint_type(type_)), 226 | ]), 227 | ), 228 | ) 229 | } 230 | 231 | /// Removes breakpoint on particular DOM event. 232 | /// 233 | /// Parameters: 234 | /// - `event_name` : Event name. 235 | /// 236 | /// Returns: 237 | /// 238 | pub fn remove_event_listener_breakpoint( 239 | callback__, 240 | event_name event_name: String, 241 | ) { 242 | callback__( 243 | "DOMDebugger.removeEventListenerBreakpoint", 244 | option.Some(json.object([#("eventName", json.string(event_name))])), 245 | ) 246 | } 247 | 248 | /// Removes breakpoint from XMLHttpRequest. 249 | /// 250 | /// Parameters: 251 | /// - `url` : Resource URL substring. 252 | /// 253 | /// Returns: 254 | /// 255 | pub fn remove_xhr_breakpoint(callback__, url url: String) { 256 | callback__( 257 | "DOMDebugger.removeXHRBreakpoint", 258 | option.Some(json.object([#("url", json.string(url))])), 259 | ) 260 | } 261 | 262 | /// Sets breakpoint on particular operation with DOM. 263 | /// 264 | /// Parameters: 265 | /// - `node_id` : Identifier of the node to set breakpoint on. 266 | /// - `type_` : Type of the operation to stop upon. 267 | /// 268 | /// Returns: 269 | /// 270 | pub fn set_dom_breakpoint( 271 | callback__, 272 | node_id node_id: dom.NodeId, 273 | type_ type_: DOMBreakpointType, 274 | ) { 275 | callback__( 276 | "DOMDebugger.setDOMBreakpoint", 277 | option.Some( 278 | json.object([ 279 | #("nodeId", dom.encode__node_id(node_id)), 280 | #("type", encode__dom_breakpoint_type(type_)), 281 | ]), 282 | ), 283 | ) 284 | } 285 | 286 | /// Sets breakpoint on particular DOM event. 287 | /// 288 | /// Parameters: 289 | /// - `event_name` : DOM Event name to stop on (any DOM event will do). 
290 | /// 291 | /// Returns: 292 | /// 293 | pub fn set_event_listener_breakpoint(callback__, event_name event_name: String) { 294 | callback__( 295 | "DOMDebugger.setEventListenerBreakpoint", 296 | option.Some(json.object([#("eventName", json.string(event_name))])), 297 | ) 298 | } 299 | 300 | /// Sets breakpoint on XMLHttpRequest. 301 | /// 302 | /// Parameters: 303 | /// - `url` : Resource URL substring. All XHRs having this substring in the URL will get stopped upon. 304 | /// 305 | /// Returns: 306 | /// 307 | pub fn set_xhr_breakpoint(callback__, url url: String) { 308 | callback__( 309 | "DOMDebugger.setXHRBreakpoint", 310 | option.Some(json.object([#("url", json.string(url))])), 311 | ) 312 | } 313 | -------------------------------------------------------------------------------- /src/chrobot/protocol/emulation.gleam: -------------------------------------------------------------------------------- 1 | //// > ⚙️ This module was generated from the Chrome DevTools Protocol version **1.3** 2 | //// ## Emulation Domain 3 | //// 4 | //// This domain emulates different environments for the page. 5 | //// 6 | //// [📖 View this domain on the DevTools Protocol API Docs](https://chromedevtools.github.io/devtools-protocol/1-3/Emulation/) 7 | 8 | // --------------------------------------------------------------------------- 9 | // | !!!!!! This is an autogenerated file - Do not edit manually !!!!!! | 10 | // | Run `codegen.sh` to regenerate. | 11 | // --------------------------------------------------------------------------- 12 | 13 | import chrobot/internal/utils 14 | import chrobot/protocol/dom 15 | import gleam/dynamic 16 | import gleam/json 17 | import gleam/option 18 | import gleam/result 19 | 20 | /// Screen orientation. 21 | pub type ScreenOrientation { 22 | ScreenOrientation( 23 | /// Orientation type. 24 | type_: ScreenOrientationType, 25 | /// Orientation angle. 
26 | angle: Int, 27 | ) 28 | } 29 | 30 | /// This type is not part of the protocol spec, it has been generated dynamically 31 | /// to represent the possible values of the enum property `type` of `ScreenOrientation` 32 | pub type ScreenOrientationType { 33 | ScreenOrientationTypePortraitPrimary 34 | ScreenOrientationTypePortraitSecondary 35 | ScreenOrientationTypeLandscapePrimary 36 | ScreenOrientationTypeLandscapeSecondary 37 | } 38 | 39 | @internal 40 | pub fn encode__screen_orientation_type(value__: ScreenOrientationType) { 41 | case value__ { 42 | ScreenOrientationTypePortraitPrimary -> "portraitPrimary" 43 | ScreenOrientationTypePortraitSecondary -> "portraitSecondary" 44 | ScreenOrientationTypeLandscapePrimary -> "landscapePrimary" 45 | ScreenOrientationTypeLandscapeSecondary -> "landscapeSecondary" 46 | } 47 | |> json.string() 48 | } 49 | 50 | @internal 51 | pub fn decode__screen_orientation_type(value__: dynamic.Dynamic) { 52 | case dynamic.string(value__) { 53 | Ok("portraitPrimary") -> Ok(ScreenOrientationTypePortraitPrimary) 54 | Ok("portraitSecondary") -> Ok(ScreenOrientationTypePortraitSecondary) 55 | Ok("landscapePrimary") -> Ok(ScreenOrientationTypeLandscapePrimary) 56 | Ok("landscapeSecondary") -> Ok(ScreenOrientationTypeLandscapeSecondary) 57 | Error(error) -> Error(error) 58 | Ok(other) -> 59 | Error([ 60 | dynamic.DecodeError( 61 | expected: "valid enum property", 62 | found: other, 63 | path: ["enum decoder"], 64 | ), 65 | ]) 66 | } 67 | } 68 | 69 | @internal 70 | pub fn encode__screen_orientation(value__: ScreenOrientation) { 71 | json.object([ 72 | #("type", encode__screen_orientation_type(value__.type_)), 73 | #("angle", json.int(value__.angle)), 74 | ]) 75 | } 76 | 77 | @internal 78 | pub fn decode__screen_orientation(value__: dynamic.Dynamic) { 79 | use type_ <- result.try(dynamic.field("type", decode__screen_orientation_type)( 80 | value__, 81 | )) 82 | use angle <- result.try(dynamic.field("angle", dynamic.int)(value__)) 83 | 84 | Ok(ScreenOrientation(type_: type_, angle: angle)) 85 | } 86 | 87 | pub type DisplayFeature { 88 | DisplayFeature( 89 | /// Orientation of a display feature in relation to screen 90 | orientation: DisplayFeatureOrientation, 91 | /// The offset from the screen origin in either the x (for vertical 92 | /// orientation) or y (for horizontal orientation) direction. 93 | offset: Int, 94 | /// A display feature may mask content such that it is not physically 95 | /// displayed - this length along with the offset describes this area. 96 | /// A display feature that only splits content will have a 0 mask_length. 
97 | mask_length: Int, 98 | ) 99 | } 100 | 101 | /// This type is not part of the protocol spec, it has been generated dynamically 102 | /// to represent the possible values of the enum property `orientation` of `DisplayFeature` 103 | pub type DisplayFeatureOrientation { 104 | DisplayFeatureOrientationVertical 105 | DisplayFeatureOrientationHorizontal 106 | } 107 | 108 | @internal 109 | pub fn encode__display_feature_orientation(value__: DisplayFeatureOrientation) { 110 | case value__ { 111 | DisplayFeatureOrientationVertical -> "vertical" 112 | DisplayFeatureOrientationHorizontal -> "horizontal" 113 | } 114 | |> json.string() 115 | } 116 | 117 | @internal 118 | pub fn decode__display_feature_orientation(value__: dynamic.Dynamic) { 119 | case dynamic.string(value__) { 120 | Ok("vertical") -> Ok(DisplayFeatureOrientationVertical) 121 | Ok("horizontal") -> Ok(DisplayFeatureOrientationHorizontal) 122 | Error(error) -> Error(error) 123 | Ok(other) -> 124 | Error([ 125 | dynamic.DecodeError( 126 | expected: "valid enum property", 127 | found: other, 128 | path: ["enum decoder"], 129 | ), 130 | ]) 131 | } 132 | } 133 | 134 | @internal 135 | pub fn encode__display_feature(value__: DisplayFeature) { 136 | json.object([ 137 | #("orientation", encode__display_feature_orientation(value__.orientation)), 138 | #("offset", json.int(value__.offset)), 139 | #("maskLength", json.int(value__.mask_length)), 140 | ]) 141 | } 142 | 143 | @internal 144 | pub fn decode__display_feature(value__: dynamic.Dynamic) { 145 | use orientation <- result.try(dynamic.field( 146 | "orientation", 147 | decode__display_feature_orientation, 148 | )(value__)) 149 | use offset <- result.try(dynamic.field("offset", dynamic.int)(value__)) 150 | use mask_length <- result.try(dynamic.field("maskLength", dynamic.int)( 151 | value__, 152 | )) 153 | 154 | Ok(DisplayFeature( 155 | orientation: orientation, 156 | offset: offset, 157 | mask_length: mask_length, 158 | )) 159 | } 160 | 161 | pub type DevicePosture { 162 | DevicePosture( 163 | /// Current posture of the device 164 | type_: DevicePostureType, 165 | ) 166 | } 167 | 168 | /// This type is not part of the protocol spec, it has been generated dynamically 169 | /// to represent the possible values of the enum property `type` of `DevicePosture` 170 | pub type DevicePostureType { 171 | DevicePostureTypeContinuous 172 | DevicePostureTypeFolded 173 | } 174 | 175 | @internal 176 | pub fn encode__device_posture_type(value__: DevicePostureType) { 177 | case value__ { 178 | DevicePostureTypeContinuous -> "continuous" 179 | DevicePostureTypeFolded -> "folded" 180 | } 181 | |> json.string() 182 | } 183 | 184 | @internal 185 | pub fn decode__device_posture_type(value__: dynamic.Dynamic) { 186 | case dynamic.string(value__) { 187 | Ok("continuous") -> Ok(DevicePostureTypeContinuous) 188 | Ok("folded") -> Ok(DevicePostureTypeFolded) 189 | Error(error) -> Error(error) 190 | Ok(other) -> 191 | Error([ 192 | dynamic.DecodeError( 193 | expected: "valid enum property", 194 | found: other, 195 | path: ["enum decoder"], 196 | ), 197 | ]) 198 | } 199 | } 200 | 201 | @internal 202 | pub fn encode__device_posture(value__: DevicePosture) { 203 | json.object([#("type", encode__device_posture_type(value__.type_))]) 204 | } 205 | 206 | @internal 207 | pub fn decode__device_posture(value__: dynamic.Dynamic) { 208 | use type_ <- result.try(dynamic.field("type", decode__device_posture_type)( 209 | value__, 210 | )) 211 | 212 | Ok(DevicePosture(type_: type_)) 213 | } 214 | 215 | pub type MediaFeature { 216 | 
MediaFeature(name: String, value: String) 217 | } 218 | 219 | @internal 220 | pub fn encode__media_feature(value__: MediaFeature) { 221 | json.object([ 222 | #("name", json.string(value__.name)), 223 | #("value", json.string(value__.value)), 224 | ]) 225 | } 226 | 227 | @internal 228 | pub fn decode__media_feature(value__: dynamic.Dynamic) { 229 | use name <- result.try(dynamic.field("name", dynamic.string)(value__)) 230 | use value <- result.try(dynamic.field("value", dynamic.string)(value__)) 231 | 232 | Ok(MediaFeature(name: name, value: value)) 233 | } 234 | 235 | /// Clears the overridden device metrics. 236 | /// 237 | pub fn clear_device_metrics_override(callback__) { 238 | callback__("Emulation.clearDeviceMetricsOverride", option.None) 239 | } 240 | 241 | /// Clears the overridden Geolocation Position and Error. 242 | /// 243 | pub fn clear_geolocation_override(callback__) { 244 | callback__("Emulation.clearGeolocationOverride", option.None) 245 | } 246 | 247 | /// Enables CPU throttling to emulate slow CPUs. 248 | /// 249 | /// Parameters: 250 | /// - `rate` : Throttling rate as a slowdown factor (1 is no throttle, 2 is 2x slowdown, etc). 251 | /// 252 | /// Returns: 253 | /// 254 | pub fn set_cpu_throttling_rate(callback__, rate rate: Float) { 255 | callback__( 256 | "Emulation.setCPUThrottlingRate", 257 | option.Some(json.object([#("rate", json.float(rate))])), 258 | ) 259 | } 260 | 261 | /// Sets or clears an override of the default background color of the frame. This override is used 262 | /// if the content does not specify one. 263 | /// 264 | /// Parameters: 265 | /// - `color` : RGBA of the default background color. If not specified, any existing override will be 266 | /// cleared. 267 | /// 268 | /// Returns: 269 | /// 270 | pub fn set_default_background_color_override( 271 | callback__, 272 | color color: option.Option(dom.RGBA), 273 | ) { 274 | callback__( 275 | "Emulation.setDefaultBackgroundColorOverride", 276 | option.Some(json.object( 277 | [] 278 | |> utils.add_optional(color, fn(inner_value__) { 279 | #("color", dom.encode__rgba(inner_value__)) 280 | }), 281 | )), 282 | ) 283 | } 284 | 285 | /// Overrides the values of device screen dimensions (window.screen.width, window.screen.height, 286 | /// window.innerWidth, window.innerHeight, and "device-width"/"device-height"-related CSS media 287 | /// query results). 288 | /// 289 | /// Parameters: 290 | /// - `width` : Overriding width value in pixels (minimum 0, maximum 10000000). 0 disables the override. 291 | /// - `height` : Overriding height value in pixels (minimum 0, maximum 10000000). 0 disables the override. 292 | /// - `device_scale_factor` : Overriding device scale factor value. 0 disables the override. 293 | /// - `mobile` : Whether to emulate mobile device. This includes viewport meta tag, overlay scrollbars, text 294 | /// autosizing and more. 295 | /// - `screen_orientation` : Screen orientation override. 
296 | /// 297 | /// Returns: 298 | /// 299 | pub fn set_device_metrics_override( 300 | callback__, 301 | width width: Int, 302 | height height: Int, 303 | device_scale_factor device_scale_factor: Float, 304 | mobile mobile: Bool, 305 | screen_orientation screen_orientation: option.Option(ScreenOrientation), 306 | ) { 307 | callback__( 308 | "Emulation.setDeviceMetricsOverride", 309 | option.Some(json.object( 310 | [ 311 | #("width", json.int(width)), 312 | #("height", json.int(height)), 313 | #("deviceScaleFactor", json.float(device_scale_factor)), 314 | #("mobile", json.bool(mobile)), 315 | ] 316 | |> utils.add_optional(screen_orientation, fn(inner_value__) { 317 | #("screenOrientation", encode__screen_orientation(inner_value__)) 318 | }), 319 | )), 320 | ) 321 | } 322 | 323 | /// Emulates the given media type or media feature for CSS media queries. 324 | /// 325 | /// Parameters: 326 | /// - `media` : Media type to emulate. Empty string disables the override. 327 | /// - `features` : Media features to emulate. 328 | /// 329 | /// Returns: 330 | /// 331 | pub fn set_emulated_media( 332 | callback__, 333 | media media: option.Option(String), 334 | features features: option.Option(List(MediaFeature)), 335 | ) { 336 | callback__( 337 | "Emulation.setEmulatedMedia", 338 | option.Some(json.object( 339 | [] 340 | |> utils.add_optional(media, fn(inner_value__) { 341 | #("media", json.string(inner_value__)) 342 | }) 343 | |> utils.add_optional(features, fn(inner_value__) { 344 | #("features", json.array(inner_value__, of: encode__media_feature)) 345 | }), 346 | )), 347 | ) 348 | } 349 | 350 | /// Emulates the given vision deficiency. 351 | /// 352 | /// Parameters: 353 | /// - `type_` : Vision deficiency to emulate. Order: best-effort emulations come first, followed by any 354 | /// physiologically accurate emulations for medically recognized color vision deficiencies. 
355 | /// 356 | /// Returns: 357 | /// 358 | pub fn set_emulated_vision_deficiency( 359 | callback__, 360 | type_ type_: SetEmulatedVisionDeficiencyType, 361 | ) { 362 | callback__( 363 | "Emulation.setEmulatedVisionDeficiency", 364 | option.Some( 365 | json.object([ 366 | #("type", encode__set_emulated_vision_deficiency_type(type_)), 367 | ]), 368 | ), 369 | ) 370 | } 371 | 372 | /// This type is not part of the protocol spec, it has been generated dynamically 373 | /// to represent the possible values of the enum property `type` of `setEmulatedVisionDeficiency` 374 | pub type SetEmulatedVisionDeficiencyType { 375 | SetEmulatedVisionDeficiencyTypeNone 376 | SetEmulatedVisionDeficiencyTypeBlurredVision 377 | SetEmulatedVisionDeficiencyTypeReducedContrast 378 | SetEmulatedVisionDeficiencyTypeAchromatopsia 379 | SetEmulatedVisionDeficiencyTypeDeuteranopia 380 | SetEmulatedVisionDeficiencyTypeProtanopia 381 | SetEmulatedVisionDeficiencyTypeTritanopia 382 | } 383 | 384 | @internal 385 | pub fn encode__set_emulated_vision_deficiency_type( 386 | value__: SetEmulatedVisionDeficiencyType, 387 | ) { 388 | case value__ { 389 | SetEmulatedVisionDeficiencyTypeNone -> "none" 390 | SetEmulatedVisionDeficiencyTypeBlurredVision -> "blurredVision" 391 | SetEmulatedVisionDeficiencyTypeReducedContrast -> "reducedContrast" 392 | SetEmulatedVisionDeficiencyTypeAchromatopsia -> "achromatopsia" 393 | SetEmulatedVisionDeficiencyTypeDeuteranopia -> "deuteranopia" 394 | SetEmulatedVisionDeficiencyTypeProtanopia -> "protanopia" 395 | SetEmulatedVisionDeficiencyTypeTritanopia -> "tritanopia" 396 | } 397 | |> json.string() 398 | } 399 | 400 | @internal 401 | pub fn decode__set_emulated_vision_deficiency_type(value__: dynamic.Dynamic) { 402 | case dynamic.string(value__) { 403 | Ok("none") -> Ok(SetEmulatedVisionDeficiencyTypeNone) 404 | Ok("blurredVision") -> Ok(SetEmulatedVisionDeficiencyTypeBlurredVision) 405 | Ok("reducedContrast") -> Ok(SetEmulatedVisionDeficiencyTypeReducedContrast) 406 | Ok("achromatopsia") -> Ok(SetEmulatedVisionDeficiencyTypeAchromatopsia) 407 | Ok("deuteranopia") -> Ok(SetEmulatedVisionDeficiencyTypeDeuteranopia) 408 | Ok("protanopia") -> Ok(SetEmulatedVisionDeficiencyTypeProtanopia) 409 | Ok("tritanopia") -> Ok(SetEmulatedVisionDeficiencyTypeTritanopia) 410 | Error(error) -> Error(error) 411 | Ok(other) -> 412 | Error([ 413 | dynamic.DecodeError( 414 | expected: "valid enum property", 415 | found: other, 416 | path: ["enum decoder"], 417 | ), 418 | ]) 419 | } 420 | } 421 | 422 | /// Overrides the Geolocation Position or Error. Omitting any of the parameters emulates position 423 | /// unavailable. 
424 | /// 425 | /// Parameters: 426 | /// - `latitude` : Mock latitude 427 | /// - `longitude` : Mock longitude 428 | /// - `accuracy` : Mock accuracy 429 | /// 430 | /// Returns: 431 | /// 432 | pub fn set_geolocation_override( 433 | callback__, 434 | latitude latitude: option.Option(Float), 435 | longitude longitude: option.Option(Float), 436 | accuracy accuracy: option.Option(Float), 437 | ) { 438 | callback__( 439 | "Emulation.setGeolocationOverride", 440 | option.Some(json.object( 441 | [] 442 | |> utils.add_optional(latitude, fn(inner_value__) { 443 | #("latitude", json.float(inner_value__)) 444 | }) 445 | |> utils.add_optional(longitude, fn(inner_value__) { 446 | #("longitude", json.float(inner_value__)) 447 | }) 448 | |> utils.add_optional(accuracy, fn(inner_value__) { 449 | #("accuracy", json.float(inner_value__)) 450 | }), 451 | )), 452 | ) 453 | } 454 | 455 | /// Overrides the Idle state. 456 | /// 457 | /// Parameters: 458 | /// - `is_user_active` : Mock isUserActive 459 | /// - `is_screen_unlocked` : Mock isScreenUnlocked 460 | /// 461 | /// Returns: 462 | /// 463 | pub fn set_idle_override( 464 | callback__, 465 | is_user_active is_user_active: Bool, 466 | is_screen_unlocked is_screen_unlocked: Bool, 467 | ) { 468 | callback__( 469 | "Emulation.setIdleOverride", 470 | option.Some( 471 | json.object([ 472 | #("isUserActive", json.bool(is_user_active)), 473 | #("isScreenUnlocked", json.bool(is_screen_unlocked)), 474 | ]), 475 | ), 476 | ) 477 | } 478 | 479 | /// Clears Idle state overrides. 480 | /// 481 | pub fn clear_idle_override(callback__) { 482 | callback__("Emulation.clearIdleOverride", option.None) 483 | } 484 | 485 | /// Switches script execution in the page. 486 | /// 487 | /// Parameters: 488 | /// - `value` : Whether script execution should be disabled in the page. 489 | /// 490 | /// Returns: 491 | /// 492 | pub fn set_script_execution_disabled(callback__, value value: Bool) { 493 | callback__( 494 | "Emulation.setScriptExecutionDisabled", 495 | option.Some(json.object([#("value", json.bool(value))])), 496 | ) 497 | } 498 | 499 | /// Enables touch on platforms which do not support them. 500 | /// 501 | /// Parameters: 502 | /// - `enabled` : Whether the touch event emulation should be enabled. 503 | /// - `max_touch_points` : Maximum touch points supported. Defaults to one. 504 | /// 505 | /// Returns: 506 | /// 507 | pub fn set_touch_emulation_enabled( 508 | callback__, 509 | enabled enabled: Bool, 510 | max_touch_points max_touch_points: option.Option(Int), 511 | ) { 512 | callback__( 513 | "Emulation.setTouchEmulationEnabled", 514 | option.Some(json.object( 515 | [#("enabled", json.bool(enabled))] 516 | |> utils.add_optional(max_touch_points, fn(inner_value__) { 517 | #("maxTouchPoints", json.int(inner_value__)) 518 | }), 519 | )), 520 | ) 521 | } 522 | 523 | /// Overrides default host system timezone with the specified one. 524 | /// 525 | /// Parameters: 526 | /// - `timezone_id` : The timezone identifier. List of supported timezones: 527 | /// https://source.chromium.org/chromium/chromium/deps/icu.git/+/faee8bc70570192d82d2978a71e2a615788597d1:source/data/misc/metaZones.txt 528 | /// If empty, disables the override and restores default host system timezone. 
529 | /// 530 | /// Returns: 531 | /// 532 | pub fn set_timezone_override(callback__, timezone_id timezone_id: String) { 533 | callback__( 534 | "Emulation.setTimezoneOverride", 535 | option.Some(json.object([#("timezoneId", json.string(timezone_id))])), 536 | ) 537 | } 538 | 539 | /// Allows overriding user agent with the given string. 540 | /// `userAgentMetadata` must be set for Client Hint headers to be sent. 541 | /// 542 | /// Parameters: 543 | /// - `user_agent` : User agent to use. 544 | /// - `accept_language` : Browser language to emulate. 545 | /// - `platform` : The platform navigator.platform should return. 546 | /// 547 | /// Returns: 548 | /// 549 | pub fn set_user_agent_override( 550 | callback__, 551 | user_agent user_agent: String, 552 | accept_language accept_language: option.Option(String), 553 | platform platform: option.Option(String), 554 | ) { 555 | callback__( 556 | "Emulation.setUserAgentOverride", 557 | option.Some(json.object( 558 | [#("userAgent", json.string(user_agent))] 559 | |> utils.add_optional(accept_language, fn(inner_value__) { 560 | #("acceptLanguage", json.string(inner_value__)) 561 | }) 562 | |> utils.add_optional(platform, fn(inner_value__) { 563 | #("platform", json.string(inner_value__)) 564 | }), 565 | )), 566 | ) 567 | } 568 | -------------------------------------------------------------------------------- /src/chrobot/protocol/fetch.gleam: -------------------------------------------------------------------------------- 1 | //// > ⚙️ This module was generated from the Chrome DevTools Protocol version **1.3** 2 | //// ## Fetch Domain 3 | //// 4 | //// A domain for letting clients substitute browser's network layer with client code. 5 | //// 6 | //// [📖 View this domain on the DevTools Protocol API Docs](https://chromedevtools.github.io/devtools-protocol/1-3/Fetch/) 7 | 8 | // --------------------------------------------------------------------------- 9 | // | !!!!!! This is an autogenerated file - Do not edit manually !!!!!! | 10 | // | Run `codegen.sh` to regenerate. | 11 | // --------------------------------------------------------------------------- 12 | 13 | import chrobot/chrome 14 | import chrobot/internal/utils 15 | import chrobot/protocol/io 16 | import chrobot/protocol/network 17 | import gleam/dynamic 18 | import gleam/json 19 | import gleam/option 20 | import gleam/result 21 | 22 | /// Unique request identifier. 23 | pub type RequestId { 24 | RequestId(String) 25 | } 26 | 27 | @internal 28 | pub fn encode__request_id(value__: RequestId) { 29 | case value__ { 30 | RequestId(inner_value__) -> json.string(inner_value__) 31 | } 32 | } 33 | 34 | @internal 35 | pub fn decode__request_id(value__: dynamic.Dynamic) { 36 | value__ |> dynamic.decode1(RequestId, dynamic.string) 37 | } 38 | 39 | /// Stages of the request to handle. Request will intercept before the request is 40 | /// sent. Response will intercept after the response is received (but before response 41 | /// body is received). 
42 | pub type RequestStage { 43 | RequestStageRequest 44 | RequestStageResponse 45 | } 46 | 47 | @internal 48 | pub fn encode__request_stage(value__: RequestStage) { 49 | case value__ { 50 | RequestStageRequest -> "Request" 51 | RequestStageResponse -> "Response" 52 | } 53 | |> json.string() 54 | } 55 | 56 | @internal 57 | pub fn decode__request_stage(value__: dynamic.Dynamic) { 58 | case dynamic.string(value__) { 59 | Ok("Request") -> Ok(RequestStageRequest) 60 | Ok("Response") -> Ok(RequestStageResponse) 61 | Error(error) -> Error(error) 62 | Ok(other) -> 63 | Error([ 64 | dynamic.DecodeError( 65 | expected: "valid enum property", 66 | found: other, 67 | path: ["enum decoder"], 68 | ), 69 | ]) 70 | } 71 | } 72 | 73 | pub type RequestPattern { 74 | RequestPattern( 75 | /// Wildcards (`'*'` -> zero or more, `'?'` -> exactly one) are allowed. Escape character is 76 | /// backslash. Omitting is equivalent to `"*"`. 77 | url_pattern: option.Option(String), 78 | /// If set, only requests for matching resource types will be intercepted. 79 | resource_type: option.Option(network.ResourceType), 80 | /// Stage at which to begin intercepting requests. Default is Request. 81 | request_stage: option.Option(RequestStage), 82 | ) 83 | } 84 | 85 | @internal 86 | pub fn encode__request_pattern(value__: RequestPattern) { 87 | json.object( 88 | [] 89 | |> utils.add_optional(value__.url_pattern, fn(inner_value__) { 90 | #("urlPattern", json.string(inner_value__)) 91 | }) 92 | |> utils.add_optional(value__.resource_type, fn(inner_value__) { 93 | #("resourceType", network.encode__resource_type(inner_value__)) 94 | }) 95 | |> utils.add_optional(value__.request_stage, fn(inner_value__) { 96 | #("requestStage", encode__request_stage(inner_value__)) 97 | }), 98 | ) 99 | } 100 | 101 | @internal 102 | pub fn decode__request_pattern(value__: dynamic.Dynamic) { 103 | use url_pattern <- result.try(dynamic.optional_field( 104 | "urlPattern", 105 | dynamic.string, 106 | )(value__)) 107 | use resource_type <- result.try(dynamic.optional_field( 108 | "resourceType", 109 | network.decode__resource_type, 110 | )(value__)) 111 | use request_stage <- result.try(dynamic.optional_field( 112 | "requestStage", 113 | decode__request_stage, 114 | )(value__)) 115 | 116 | Ok(RequestPattern( 117 | url_pattern: url_pattern, 118 | resource_type: resource_type, 119 | request_stage: request_stage, 120 | )) 121 | } 122 | 123 | /// Response HTTP header entry 124 | pub type HeaderEntry { 125 | HeaderEntry(name: String, value: String) 126 | } 127 | 128 | @internal 129 | pub fn encode__header_entry(value__: HeaderEntry) { 130 | json.object([ 131 | #("name", json.string(value__.name)), 132 | #("value", json.string(value__.value)), 133 | ]) 134 | } 135 | 136 | @internal 137 | pub fn decode__header_entry(value__: dynamic.Dynamic) { 138 | use name <- result.try(dynamic.field("name", dynamic.string)(value__)) 139 | use value <- result.try(dynamic.field("value", dynamic.string)(value__)) 140 | 141 | Ok(HeaderEntry(name: name, value: value)) 142 | } 143 | 144 | /// Authorization challenge for HTTP status code 401 or 407. 145 | pub type AuthChallenge { 146 | AuthChallenge( 147 | /// Source of the authentication challenge. 148 | source: option.Option(AuthChallengeSource), 149 | /// Origin of the challenger. 150 | origin: String, 151 | /// The authentication scheme used, such as basic or digest 152 | scheme: String, 153 | /// The realm of the challenge. May be empty. 
154 | realm: String, 155 | ) 156 | } 157 | 158 | /// This type is not part of the protocol spec, it has been generated dynamically 159 | /// to represent the possible values of the enum property `source` of `AuthChallenge` 160 | pub type AuthChallengeSource { 161 | AuthChallengeSourceServer 162 | AuthChallengeSourceProxy 163 | } 164 | 165 | @internal 166 | pub fn encode__auth_challenge_source(value__: AuthChallengeSource) { 167 | case value__ { 168 | AuthChallengeSourceServer -> "Server" 169 | AuthChallengeSourceProxy -> "Proxy" 170 | } 171 | |> json.string() 172 | } 173 | 174 | @internal 175 | pub fn decode__auth_challenge_source(value__: dynamic.Dynamic) { 176 | case dynamic.string(value__) { 177 | Ok("Server") -> Ok(AuthChallengeSourceServer) 178 | Ok("Proxy") -> Ok(AuthChallengeSourceProxy) 179 | Error(error) -> Error(error) 180 | Ok(other) -> 181 | Error([ 182 | dynamic.DecodeError( 183 | expected: "valid enum property", 184 | found: other, 185 | path: ["enum decoder"], 186 | ), 187 | ]) 188 | } 189 | } 190 | 191 | @internal 192 | pub fn encode__auth_challenge(value__: AuthChallenge) { 193 | json.object( 194 | [ 195 | #("origin", json.string(value__.origin)), 196 | #("scheme", json.string(value__.scheme)), 197 | #("realm", json.string(value__.realm)), 198 | ] 199 | |> utils.add_optional(value__.source, fn(inner_value__) { 200 | #("source", encode__auth_challenge_source(inner_value__)) 201 | }), 202 | ) 203 | } 204 | 205 | @internal 206 | pub fn decode__auth_challenge(value__: dynamic.Dynamic) { 207 | use source <- result.try(dynamic.optional_field( 208 | "source", 209 | decode__auth_challenge_source, 210 | )(value__)) 211 | use origin <- result.try(dynamic.field("origin", dynamic.string)(value__)) 212 | use scheme <- result.try(dynamic.field("scheme", dynamic.string)(value__)) 213 | use realm <- result.try(dynamic.field("realm", dynamic.string)(value__)) 214 | 215 | Ok(AuthChallenge(source: source, origin: origin, scheme: scheme, realm: realm)) 216 | } 217 | 218 | /// Response to an AuthChallenge. 219 | pub type AuthChallengeResponse { 220 | AuthChallengeResponse( 221 | /// The decision on what to do in response to the authorization challenge. Default means 222 | /// deferring to the default behavior of the net stack, which will likely either cancel the 223 | /// authentication or display a popup dialog box. 224 | response: AuthChallengeResponseResponse, 225 | /// The username to provide, possibly empty. Should only be set if response is 226 | /// ProvideCredentials. 227 | username: option.Option(String), 228 | /// The password to provide, possibly empty. Should only be set if response is 229 | /// ProvideCredentials. 
230 | password: option.Option(String), 231 | ) 232 | } 233 | 234 | /// This type is not part of the protocol spec, it has been generated dynamically 235 | /// to represent the possible values of the enum property `response` of `AuthChallengeResponse` 236 | pub type AuthChallengeResponseResponse { 237 | AuthChallengeResponseResponseDefault 238 | AuthChallengeResponseResponseCancelAuth 239 | AuthChallengeResponseResponseProvideCredentials 240 | } 241 | 242 | @internal 243 | pub fn encode__auth_challenge_response_response( 244 | value__: AuthChallengeResponseResponse, 245 | ) { 246 | case value__ { 247 | AuthChallengeResponseResponseDefault -> "Default" 248 | AuthChallengeResponseResponseCancelAuth -> "CancelAuth" 249 | AuthChallengeResponseResponseProvideCredentials -> "ProvideCredentials" 250 | } 251 | |> json.string() 252 | } 253 | 254 | @internal 255 | pub fn decode__auth_challenge_response_response(value__: dynamic.Dynamic) { 256 | case dynamic.string(value__) { 257 | Ok("Default") -> Ok(AuthChallengeResponseResponseDefault) 258 | Ok("CancelAuth") -> Ok(AuthChallengeResponseResponseCancelAuth) 259 | Ok("ProvideCredentials") -> 260 | Ok(AuthChallengeResponseResponseProvideCredentials) 261 | Error(error) -> Error(error) 262 | Ok(other) -> 263 | Error([ 264 | dynamic.DecodeError( 265 | expected: "valid enum property", 266 | found: other, 267 | path: ["enum decoder"], 268 | ), 269 | ]) 270 | } 271 | } 272 | 273 | @internal 274 | pub fn encode__auth_challenge_response(value__: AuthChallengeResponse) { 275 | json.object( 276 | [#("response", encode__auth_challenge_response_response(value__.response))] 277 | |> utils.add_optional(value__.username, fn(inner_value__) { 278 | #("username", json.string(inner_value__)) 279 | }) 280 | |> utils.add_optional(value__.password, fn(inner_value__) { 281 | #("password", json.string(inner_value__)) 282 | }), 283 | ) 284 | } 285 | 286 | @internal 287 | pub fn decode__auth_challenge_response(value__: dynamic.Dynamic) { 288 | use response <- result.try(dynamic.field( 289 | "response", 290 | decode__auth_challenge_response_response, 291 | )(value__)) 292 | use username <- result.try(dynamic.optional_field("username", dynamic.string)( 293 | value__, 294 | )) 295 | use password <- result.try(dynamic.optional_field("password", dynamic.string)( 296 | value__, 297 | )) 298 | 299 | Ok(AuthChallengeResponse( 300 | response: response, 301 | username: username, 302 | password: password, 303 | )) 304 | } 305 | 306 | /// This type is not part of the protocol spec, it has been generated dynamically 307 | /// to represent the response to the command `get_response_body` 308 | pub type GetResponseBodyResponse { 309 | GetResponseBodyResponse( 310 | /// Response body. 311 | body: String, 312 | /// True, if content was sent as base64. 
313 | base64_encoded: Bool, 314 | ) 315 | } 316 | 317 | @internal 318 | pub fn decode__get_response_body_response(value__: dynamic.Dynamic) { 319 | use body <- result.try(dynamic.field("body", dynamic.string)(value__)) 320 | use base64_encoded <- result.try(dynamic.field("base64Encoded", dynamic.bool)( 321 | value__, 322 | )) 323 | 324 | Ok(GetResponseBodyResponse(body: body, base64_encoded: base64_encoded)) 325 | } 326 | 327 | /// This type is not part of the protocol spec, it has been generated dynamically 328 | /// to represent the response to the command `take_response_body_as_stream` 329 | pub type TakeResponseBodyAsStreamResponse { 330 | TakeResponseBodyAsStreamResponse(stream: io.StreamHandle) 331 | } 332 | 333 | @internal 334 | pub fn decode__take_response_body_as_stream_response(value__: dynamic.Dynamic) { 335 | use stream <- result.try(dynamic.field("stream", io.decode__stream_handle)( 336 | value__, 337 | )) 338 | 339 | Ok(TakeResponseBodyAsStreamResponse(stream: stream)) 340 | } 341 | 342 | /// Disables the fetch domain. 343 | /// 344 | pub fn disable(callback__) { 345 | callback__("Fetch.disable", option.None) 346 | } 347 | 348 | /// Enables issuing of requestPaused events. A request will be paused until client 349 | /// calls one of failRequest, fulfillRequest or continueRequest/continueWithAuth. 350 | /// 351 | /// Parameters: 352 | /// - `patterns` : If specified, only requests matching any of these patterns will produce 353 | /// a fetchRequested event and will be paused until the client's response. If not set, 354 | /// all requests will be affected. 355 | /// - `handle_auth_requests` : If true, authRequired events will be issued and requests will be paused 356 | /// expecting a call to continueWithAuth. 357 | /// 358 | /// Returns: 359 | /// 360 | pub fn enable( 361 | callback__, 362 | patterns patterns: option.Option(List(RequestPattern)), 363 | handle_auth_requests handle_auth_requests: option.Option(Bool), 364 | ) { 365 | callback__( 366 | "Fetch.enable", 367 | option.Some(json.object( 368 | [] 369 | |> utils.add_optional(patterns, fn(inner_value__) { 370 | #("patterns", json.array(inner_value__, of: encode__request_pattern)) 371 | }) 372 | |> utils.add_optional(handle_auth_requests, fn(inner_value__) { 373 | #("handleAuthRequests", json.bool(inner_value__)) 374 | }), 375 | )), 376 | ) 377 | } 378 | 379 | /// Causes the request to fail with specified reason. 380 | /// 381 | /// Parameters: 382 | /// - `request_id` : An id the client received in requestPaused event. 383 | /// - `error_reason` : Causes the request to fail with the given reason. 384 | /// 385 | /// Returns: 386 | /// 387 | pub fn fail_request( 388 | callback__, 389 | request_id request_id: RequestId, 390 | error_reason error_reason: network.ErrorReason, 391 | ) { 392 | callback__( 393 | "Fetch.failRequest", 394 | option.Some( 395 | json.object([ 396 | #("requestId", encode__request_id(request_id)), 397 | #("errorReason", network.encode__error_reason(error_reason)), 398 | ]), 399 | ), 400 | ) 401 | } 402 | 403 | /// Provides response to the request. 404 | /// 405 | /// Parameters: 406 | /// - `request_id` : An id the client received in requestPaused event. 407 | /// - `response_code` : An HTTP response code. 408 | /// - `response_headers` : Response headers. 409 | /// - `binary_response_headers` : Alternative way of specifying response headers as a \0-separated 410 | /// series of name: value pairs. 
Prefer the above method unless you 411 | /// need to represent some non-UTF8 values that can't be transmitted 412 | /// over the protocol as text. (Encoded as a base64 string when passed over JSON) 413 | /// - `body` : A response body. If absent, original response body will be used if 414 | /// the request is intercepted at the response stage and empty body 415 | /// will be used if the request is intercepted at the request stage. (Encoded as a base64 string when passed over JSON) 416 | /// - `response_phrase` : A textual representation of responseCode. 417 | /// If absent, a standard phrase matching responseCode is used. 418 | /// 419 | /// Returns: 420 | /// 421 | pub fn fulfill_request( 422 | callback__, 423 | request_id request_id: RequestId, 424 | response_code response_code: Int, 425 | response_headers response_headers: option.Option(List(HeaderEntry)), 426 | binary_response_headers binary_response_headers: option.Option(String), 427 | body body: option.Option(String), 428 | response_phrase response_phrase: option.Option(String), 429 | ) { 430 | callback__( 431 | "Fetch.fulfillRequest", 432 | option.Some(json.object( 433 | [ 434 | #("requestId", encode__request_id(request_id)), 435 | #("responseCode", json.int(response_code)), 436 | ] 437 | |> utils.add_optional(response_headers, fn(inner_value__) { 438 | #( 439 | "responseHeaders", 440 | json.array(inner_value__, of: encode__header_entry), 441 | ) 442 | }) 443 | |> utils.add_optional(binary_response_headers, fn(inner_value__) { 444 | #("binaryResponseHeaders", json.string(inner_value__)) 445 | }) 446 | |> utils.add_optional(body, fn(inner_value__) { 447 | #("body", json.string(inner_value__)) 448 | }) 449 | |> utils.add_optional(response_phrase, fn(inner_value__) { 450 | #("responsePhrase", json.string(inner_value__)) 451 | }), 452 | )), 453 | ) 454 | } 455 | 456 | /// Continues the request, optionally modifying some of its parameters. 457 | /// 458 | /// Parameters: 459 | /// - `request_id` : An id the client received in requestPaused event. 460 | /// - `url` : If set, the request url will be modified in a way that's not observable by page. 461 | /// - `method` : If set, the request method is overridden. 462 | /// - `post_data` : If set, overrides the post data in the request. (Encoded as a base64 string when passed over JSON) 463 | /// - `headers` : If set, overrides the request headers. Note that the overrides do not 464 | /// extend to subsequent redirect hops, if a redirect happens. Another override 465 | /// may be applied to a different request produced by a redirect. 
466 | /// 467 | /// Returns: 468 | /// 469 | pub fn continue_request( 470 | callback__, 471 | request_id request_id: RequestId, 472 | url url: option.Option(String), 473 | method method: option.Option(String), 474 | post_data post_data: option.Option(String), 475 | headers headers: option.Option(List(HeaderEntry)), 476 | ) { 477 | callback__( 478 | "Fetch.continueRequest", 479 | option.Some(json.object( 480 | [#("requestId", encode__request_id(request_id))] 481 | |> utils.add_optional(url, fn(inner_value__) { 482 | #("url", json.string(inner_value__)) 483 | }) 484 | |> utils.add_optional(method, fn(inner_value__) { 485 | #("method", json.string(inner_value__)) 486 | }) 487 | |> utils.add_optional(post_data, fn(inner_value__) { 488 | #("postData", json.string(inner_value__)) 489 | }) 490 | |> utils.add_optional(headers, fn(inner_value__) { 491 | #("headers", json.array(inner_value__, of: encode__header_entry)) 492 | }), 493 | )), 494 | ) 495 | } 496 | 497 | /// Continues a request supplying authChallengeResponse following authRequired event. 498 | /// 499 | /// Parameters: 500 | /// - `request_id` : An id the client received in authRequired event. 501 | /// - `auth_challenge_response` : Response to the authChallenge. 502 | /// 503 | /// Returns: 504 | /// 505 | pub fn continue_with_auth( 506 | callback__, 507 | request_id request_id: RequestId, 508 | auth_challenge_response auth_challenge_response: AuthChallengeResponse, 509 | ) { 510 | callback__( 511 | "Fetch.continueWithAuth", 512 | option.Some( 513 | json.object([ 514 | #("requestId", encode__request_id(request_id)), 515 | #( 516 | "authChallengeResponse", 517 | encode__auth_challenge_response(auth_challenge_response), 518 | ), 519 | ]), 520 | ), 521 | ) 522 | } 523 | 524 | /// Causes the body of the response to be received from the server and 525 | /// returned as a single string. May only be issued for a request that 526 | /// is paused in the Response stage and is mutually exclusive with 527 | /// takeResponseBodyForInterceptionAsStream. Calling other methods that 528 | /// affect the request or disabling fetch domain before body is received 529 | /// results in an undefined behavior. 530 | /// Note that the response body is not available for redirects. Requests 531 | /// paused in the _redirect received_ state may be differentiated by 532 | /// `responseCode` and presence of `location` response header, see 533 | /// comments to `requestPaused` for details. 534 | /// 535 | /// Parameters: 536 | /// - `request_id` : Identifier for the intercepted request to get body for. 537 | /// 538 | /// Returns: 539 | /// - `body` : Response body. 540 | /// - `base64_encoded` : True, if content was sent as base64. 541 | /// 542 | pub fn get_response_body(callback__, request_id request_id: RequestId) { 543 | use result__ <- result.try(callback__( 544 | "Fetch.getResponseBody", 545 | option.Some(json.object([#("requestId", encode__request_id(request_id))])), 546 | )) 547 | 548 | decode__get_response_body_response(result__) 549 | |> result.replace_error(chrome.ProtocolError) 550 | } 551 | 552 | /// Returns a handle to the stream representing the response body. 553 | /// The request must be paused in the HeadersReceived stage. 554 | /// Note that after this command the request can't be continued 555 | /// as is -- client either needs to cancel it or to provide the 556 | /// response body. 557 | /// The stream only supports sequential read; IO.read will fail if the position 558 | /// is specified. 
559 | /// This method is mutually exclusive with getResponseBody. 560 | /// Calling other methods that affect the request or disabling fetch 561 | /// domain before body is received results in an undefined behavior. 562 | /// 563 | /// Parameters: 564 | /// - `request_id` 565 | /// 566 | /// Returns: 567 | /// - `stream` 568 | /// 569 | pub fn take_response_body_as_stream( 570 | callback__, 571 | request_id request_id: RequestId, 572 | ) { 573 | use result__ <- result.try(callback__( 574 | "Fetch.takeResponseBodyAsStream", 575 | option.Some(json.object([#("requestId", encode__request_id(request_id))])), 576 | )) 577 | 578 | decode__take_response_body_as_stream_response(result__) 579 | |> result.replace_error(chrome.ProtocolError) 580 | } 581 | -------------------------------------------------------------------------------- /src/chrobot/protocol/io.gleam: -------------------------------------------------------------------------------- 1 | //// > ⚙️ This module was generated from the Chrome DevTools Protocol version **1.3** 2 | //// ## IO Domain 3 | //// 4 | //// Input/Output operations for streams produced by DevTools. 5 | //// 6 | //// [📖 View this domain on the DevTools Protocol API Docs](https://chromedevtools.github.io/devtools-protocol/1-3/IO/) 7 | 8 | // --------------------------------------------------------------------------- 9 | // | !!!!!! This is an autogenerated file - Do not edit manually !!!!!! | 10 | // | Run `codegen.sh` to regenerate. | 11 | // --------------------------------------------------------------------------- 12 | 13 | import chrobot/chrome 14 | import chrobot/internal/utils 15 | import chrobot/protocol/runtime 16 | import gleam/dynamic 17 | import gleam/json 18 | import gleam/option 19 | import gleam/result 20 | 21 | /// This is either obtained from another method or specified as `blob:151 | I am HTML 152 |
153 |154 | I am the hyperstructure 155 |
156 |157 | I am linked to you 158 |
159 | " 160 | let page = 161 | chrobot.create_page(browser, dummy_html, 10_000) 162 | |> should.be_ok() 163 | let result = 164 | chrobot.get_all_html(page) 165 | |> should.be_ok() 166 | birdie.snap(result, title: "Outer HTML") 167 | } 168 | 169 | pub fn select_test() { 170 | use page <- test_utils.with_reference_page() 171 | let object_id = 172 | chrobot.select(page, "#wibble") 173 | |> should.be_ok 174 | let text_content = 175 | chrobot.get_text(page, object_id) 176 | |> should.be_ok() 177 | 178 | text_content 179 | |> should.equal("Wibble") 180 | } 181 | 182 | pub fn select_from_test() { 183 | use page <- test_utils.with_reference_page() 184 | let object_id = 185 | chrobot.select(page, ".greeting") 186 | |> should.be_ok 187 | 188 | let inner_object_id = 189 | chrobot.select_from(page, object_id, "span") 190 | |> should.be_ok 191 | 192 | let text_content = 193 | chrobot.get_text(page, inner_object_id) 194 | |> should.be_ok 195 | 196 | text_content 197 | |> should.equal("Joe") 198 | } 199 | 200 | pub fn get_html_test() { 201 | use page <- test_utils.with_reference_page() 202 | let object = 203 | chrobot.select(page, "header") 204 | |> should.be_ok 205 | 206 | let inner_html = 207 | chrobot.get_inner_html(page, object) 208 | |> should.be_ok 209 | 210 | let outer_html = 211 | chrobot.get_outer_html(page, object) 212 | |> should.be_ok 213 | 214 | birdie.snap(inner_html, title: "Element Inner HTML") 215 | birdie.snap(outer_html, title: "Element Outer HTML") 216 | } 217 | 218 | pub fn get_attribute_test() { 219 | use page <- test_utils.with_reference_page() 220 | let object_id = 221 | chrobot.select(page, "#wobble") 222 | |> should.be_ok 223 | 224 | let attribute = 225 | chrobot.get_attribute(page, object_id, "data-foo") 226 | |> should.be_ok 227 | 228 | attribute 229 | |> should.equal("bar") 230 | } 231 | 232 | pub fn select_all_test() { 233 | use page <- test_utils.with_reference_page() 234 | let object_ids = 235 | chrobot.select_all(page, "a") 236 | |> should.be_ok 237 | 238 | let hrefs = 239 | object_ids 240 | |> list.map(fn(object_id) { 241 | chrobot.get_attribute(page, object_id, "href") 242 | |> should.be_ok 243 | }) 244 | 245 | birdie.snap(string.join(hrefs, "\n"), title: "List of links") 246 | } 247 | 248 | pub fn select_all_from_test() { 249 | use page <- test_utils.with_reference_page() 250 | let object_id = 251 | chrobot.select(page, "ul") 252 | |> should.be_ok 253 | 254 | let inner_object_ids = 255 | chrobot.select_all_from(page, object_id, "li") 256 | |> should.be_ok 257 | 258 | let texts = 259 | inner_object_ids 260 | |> list.map(fn(inner_object_id) { 261 | chrobot.get_text(page, inner_object_id) 262 | |> should.be_ok 263 | }) 264 | 265 | birdie.snap(string.join(texts, "\n"), title: "List of greetings") 266 | } 267 | 268 | pub fn get_property_test() { 269 | use page <- test_utils.with_reference_page() 270 | let object_id = 271 | chrobot.select(page, "#demo-checkbox") 272 | |> should.be_ok 273 | 274 | chrobot.get_property(page, object_id, "checked", dynamic.bool) 275 | |> should.be_ok 276 | |> should.be_true 277 | } 278 | 279 | pub fn click_test() { 280 | use page <- test_utils.with_reference_page() 281 | 282 | // This is just a sanity check, to make sure the checkbox is checked before we click it 283 | let object_id = 284 | chrobot.select(page, "#demo-checkbox") 285 | |> should.be_ok 286 | 287 | chrobot.get_property(page, object_id, "checked", dynamic.bool) 288 | |> should.be_ok 289 | |> should.be_true 290 | 291 | // Click the checkbox 292 | chrobot.click(page, object_id) 293 | 
|> should.be_ok 294 | 295 | // After clicking the checkbox, it should be unchecked 296 | chrobot.get_property(page, object_id, "checked", dynamic.bool) 297 | |> should.be_ok 298 | |> should.be_false 299 | } 300 | 301 | pub fn type_test() { 302 | use page <- test_utils.with_reference_page() 303 | let object_id = 304 | chrobot.select(page, "#demo-text-input") 305 | |> should.be_ok 306 | 307 | chrobot.focus(page, object_id) 308 | |> should.be_ok 309 | 310 | chrobot.type_text(page, "Hello, World!") 311 | |> should.be_ok 312 | 313 | chrobot.get_property(page, object_id, "value", dynamic.string) 314 | |> should.be_ok 315 | |> should.equal("Hello, World!") 316 | } 317 | 318 | pub fn press_key_test() { 319 | use page <- test_utils.with_reference_page() 320 | let object_id = 321 | chrobot.select(page, "#demo-text-input") 322 | |> should.be_ok 323 | 324 | chrobot.focus(page, object_id) 325 | |> should.be_ok 326 | 327 | chrobot.press_key(page, "Enter") 328 | |> should.be_ok 329 | 330 | chrobot.get_property(page, object_id, "value", dynamic.string) 331 | |> should.be_ok 332 | |> should.equal("ENTER KEY PRESSED") 333 | } 334 | 335 | pub fn poll_test() { 336 | let initial_time = utils.get_time_ms() 337 | 338 | // this function will start returning "Success" in 200ms 339 | let poll_function = fn() { 340 | case utils.get_time_ms() - initial_time { 341 | time if time > 200 -> Ok("Success") 342 | _ -> Error(chrome.NotFoundError) 343 | } 344 | } 345 | 346 | chrobot.poll(poll_function, 500) 347 | |> should.be_ok() 348 | |> should.equal("Success") 349 | } 350 | 351 | pub fn poll_failure_test() { 352 | let initial_time = utils.get_time_ms() 353 | 354 | // this function will start returning "Success" in 200ms 355 | let poll_function = fn() { 356 | case utils.get_time_ms() - initial_time { 357 | time if time > 200 -> Ok("Success") 358 | _ -> Error(chrome.NotFoundError) 359 | } 360 | } 361 | 362 | case chrobot.poll(poll_function, 100) { 363 | Error(chrome.NotFoundError) -> { 364 | should.be_true(True) 365 | let elapsed_time = utils.get_time_ms() - initial_time 366 | // timeout should be within a 10ms window of accuracy 367 | { elapsed_time < 105 && elapsed_time > 95 } 368 | |> should.be_true() 369 | } 370 | _ -> { 371 | utils.err("Polling function didn't return the correct error") 372 | should.fail() 373 | } 374 | } 375 | } 376 | 377 | pub fn poll_timeout_failure_test() { 378 | let initial_time = utils.get_time_ms() 379 | 380 | // this function will return errors first 381 | // and after 100ms it will start blocking for 10s 382 | let poll_function = fn() { 383 | case utils.get_time_ms() - initial_time { 384 | time if time > 100 -> { 385 | process.sleep(10_000) 386 | Ok("Success") 387 | } 388 | _ -> Error(chrome.NotFoundError) 389 | } 390 | } 391 | 392 | // the timeout is 300ms, so the polling function will be interrupted 393 | // while it's blocking, it should still return the original error 394 | case chrobot.poll(poll_function, 300) { 395 | Error(chrome.NotFoundError) -> { 396 | should.be_true(True) 397 | let elapsed_time = utils.get_time_ms() - initial_time 398 | // timeout should be within a 10ms window of accuracy 399 | { elapsed_time < 305 && elapsed_time > 295 } 400 | |> should.be_true() 401 | } 402 | _ -> { 403 | utils.err("Polling function didn't return the correct error") 404 | should.fail() 405 | } 406 | } 407 | } 408 | -------------------------------------------------------------------------------- /test/codegen/download_protocol.gleam: 
-------------------------------------------------------------------------------- 1 | //// Download the latest protocol JSON files from the official repository 2 | //// and place them in the local assets folder. 3 | //// 4 | //// Protocol Repo is here: 5 | //// https://github.com/ChromeDevTools/devtools-protocol 6 | //// 7 | //// This script will panic if anything goes wrong; do not import this module anywhere 8 | 9 | import gleam/http/request 10 | import gleam/httpc 11 | import gleam/io 12 | import simplifile as file 13 | 14 | const browser_protocol_url = "https://raw.githubusercontent.com/ChromeDevTools/devtools-protocol/master/json/browser_protocol.json" 15 | 16 | const js_protocol_url = "https://raw.githubusercontent.com/ChromeDevTools/devtools-protocol/master/json/js_protocol.json" 17 | 18 | const destination_dir = "./assets/" 19 | 20 | pub fn main() { 21 | download( 22 | from: browser_protocol_url, 23 | to: destination_dir <> "browser_protocol.json", 24 | ) 25 | download(from: js_protocol_url, to: destination_dir <> "js_protocol.json") 26 | } 27 | 28 | fn download(from origin_url: String, to destination_path: String) -> Nil { 29 | io.println("Making request to " <> origin_url) 30 | let assert Ok(request) = request.to(origin_url) 31 | let assert Ok(res) = httpc.send(request) 32 | case res.status { 33 | 200 -> { 34 | io.println("Writing response to " <> destination_path) 35 | let assert Ok(_) = file.write(res.body, to: destination_path) 36 | Nil 37 | } 38 | _ -> { 39 | io.println("Non-200 response from server!") 40 | panic 41 | } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /test/codegen/generate_bindings_test.gleam: -------------------------------------------------------------------------------- 1 | import birdie 2 | import codegen/generate_bindings.{ 3 | apply_protocol_patches, gen_domain_module, gen_root_module, 4 | get_stable_protocol, merge_protocols, parse_protocol, 5 | } 6 | import gleam/list 7 | import gleam/option 8 | import gleeunit/should 9 | 10 | pub fn parse_browser_protocol_test() { 11 | let assert Ok(protocol) = parse_protocol("./assets/browser_protocol.json") 12 | protocol.version.major 13 | |> should.equal("1") 14 | 15 | // Let's find the DOM domain 16 | let assert Ok(dom_domain) = 17 | list.find(protocol.domains, fn(d) { d.domain == "DOM" }) 18 | 19 | // It should have a types list 20 | option.is_some(dom_domain.types) 21 | |> should.be_true 22 | 23 | // The NodeId type should be on the DOM domain types 24 | let dom_types = option.unwrap(dom_domain.types, []) 25 | let assert Ok(node_id_type) = list.find(dom_types, fn(t) { t.id == "NodeId" }) 26 | 27 | // NodeId should have an inner type of "integer" 28 | let inner_type_is_int = case node_id_type.inner { 29 | generate_bindings.PrimitiveType("integer") -> True 30 | _ -> False 31 | } 32 | inner_type_is_int 33 | |> should.equal(True) 34 | } 35 | 36 | pub fn parse_js_protocol_test() { 37 | let assert Ok(protocol) = parse_protocol("./assets/js_protocol.json") 38 | protocol.version.major 39 | |> should.equal("1") 40 | 41 | // Let's find the Runtime domain 42 | let assert Ok(runtime_domain) = 43 | list.find(protocol.domains, fn(d) { d.domain == "Runtime" }) 44 | 45 | // It should have a types list 46 | option.is_some(runtime_domain.types) 47 | |> should.be_true 48 | 49 | // The DeepSerializedValue type should be on the Runtime domain types 50 | let runtime_types = option.unwrap(runtime_domain.types, []) 51 | let assert Ok(deep_serialized_value_type) = 52 |
list.find(runtime_types, fn(t) { t.id == "DeepSerializedValue" }) 53 | 54 | // DeepSerializedValue should have an inner type of "object" with properties 55 | let assert Ok(target_properties) = case deep_serialized_value_type.inner { 56 | generate_bindings.ObjectType(option.Some(properties)) -> Ok(properties) 57 | _ -> Error("Did not find ObjectType with some properties") 58 | } 59 | 60 | // There should be a property named "type" in there 61 | let assert Ok(type_property) = 62 | list.find(target_properties, fn(p) { p.name == "type" }) 63 | 64 | // This "type" property should be of type string with enum values 65 | let assert Ok(enum_values) = case type_property.inner { 66 | generate_bindings.EnumType(values) -> Ok(values) 67 | _ -> Error("Property was not an EnumType") 68 | } 69 | 70 | // One of the enum values should be "window" 71 | list.any(enum_values, fn(v) { v == "window" }) 72 | |> should.be_true 73 | } 74 | 75 | pub fn gen_enum_encoder_decoder_test() { 76 | let enum_type_name = "CertificateTransparencyCompliance" 77 | let enum_values = ["unknown", "not-compliant", "compliant"] 78 | generate_bindings.gen_enum_encoder(enum_type_name, enum_values) 79 | |> birdie.snap(title: "Enum encoder function") 80 | generate_bindings.gen_enum_decoder(enum_type_name, enum_values) 81 | |> birdie.snap(title: "Enum decoder function") 82 | } 83 | 84 | /// Just run all the functions, see if anything panics. 85 | /// We could snapshot the output here, but then again the output is just the codegen 86 | /// that's written to `protocol/*` and committed to vcs so we already have snapshots of 87 | /// it and would just duplicate those. 88 | pub fn general_bindings_gen_test() { 89 | let assert Ok(browser_protocol) = 90 | parse_protocol("./assets/browser_protocol.json") 91 | let assert Ok(js_protocol) = parse_protocol("./assets/js_protocol.json") 92 | let protocol = 93 | merge_protocols(browser_protocol, js_protocol) 94 | |> apply_protocol_patches() 95 | let stable_protocol = get_stable_protocol(protocol, False, False) 96 | gen_root_module(stable_protocol) 97 | list.each(stable_protocol.domains, fn(domain) { 98 | gen_domain_module(stable_protocol, domain) 99 | }) 100 | } 101 | -------------------------------------------------------------------------------- /test/mock_server.gleam: -------------------------------------------------------------------------------- 1 | //// The mock server listens on localhost and returns some fixed data; 2 | //// it can be used in tests to avoid the need to request an external website 3 | 4 | import chrobot/internal/utils 5 | import gleam/bytes_tree 6 | import gleam/http/request.{type Request} 7 | import gleam/http/response.{type Response} 8 | import gleam/int 9 | import gleam/string 10 | import mist.{type Connection, type ResponseData} 11 | 12 | pub fn get_port() -> Int { 13 | 8182 14 | } 15 | 16 | pub fn get_url() -> String { 17 | "http://localhost:" <> int.to_string(get_port()) <> "/" 18 | } 19 | 20 | pub fn start() { 21 | let not_found = 22 | response.new(404) 23 | |> response.set_body(mist.Bytes(bytes_tree.from_string("Not found!"))) 24 | 25 | let result = 26 | fn(req: Request(Connection)) -> Response(ResponseData) { 27 | case request.path_segments(req) { 28 | [] -> return_test_page(req) 29 | _ -> not_found 30 | } 31 | } 32 | |> mist.new 33 | |> mist.port(get_port()) 34 | |> mist.start_http 35 | 36 | case result { 37 | Ok(_) -> Nil 38 | Error(err) -> { 39 | utils.err("The chrobot test server failed to start! 
40 | The server tries to listen on port " <> int.to_string(get_port()) <> ", perhaps it's in use?") 41 | panic as string.inspect(err) 42 | } 43 | } 44 | } 45 | 46 | pub type MyMessage { 47 | Broadcast(String) 48 | } 49 | 50 | fn return_test_page(_request: Request(Connection)) -> Response(ResponseData) { 51 | let body = 52 | " 53 | 54 | 55 | <p> 19 | Everything there is online about 20 | W3 is linked directly or indirectly 21 | to this document, including an executive 22 | summary of the project, Mailing lists 24 | , Policy , November's W3 news , 26 | Frequently Asked Questions . 27 | </p>
28 |