├── .gitignore ├── README.md ├── astro.config.mjs ├── bin ├── build │ ├── checks │ │ ├── html.pl │ │ ├── latex.pl │ │ ├── links.pl │ │ ├── lint_source_md.mjs │ │ ├── lint_split_md.mjs │ │ ├── repeats.sh │ │ ├── spellcheck.sh │ │ ├── spellcheck_prep.pl │ │ └── whitespace.pl │ ├── dicts │ │ ├── en-common.rws │ │ ├── en_GB-ise-w_accents-only.rws │ │ └── en_GB-ise-w_accents.multi │ ├── make_annotated.pl │ ├── prebuild.js │ ├── process_markdown.sh │ └── split_markdown.pl ├── pdf │ ├── README.md │ ├── filters │ │ ├── codeblocks.lua │ │ ├── figures.lua │ │ ├── linebreaks.lua │ │ ├── links.lua │ │ ├── logging.lua │ │ ├── pagebreaks.lua │ │ ├── printast.lua │ │ └── summaries.lua │ ├── inc │ │ ├── header.tex │ │ ├── json-highlighting.tex │ │ ├── solidity-highlighting.tex │ │ └── title-page.tex │ └── make_pdf └── util │ ├── add_trailing_slashes.pl │ ├── anchors.awk │ ├── check_grammar.sh │ ├── check_spellings_list.sh │ ├── check_urls.sh │ ├── check_urls_parallel.sh │ ├── constants.sh │ ├── functions.awk │ ├── git-pre-commit-hook.sh │ ├── ltex_config.json │ ├── make_spellings_list.sh │ ├── patch_all.sh │ ├── publish.sh │ ├── stats.sh │ ├── update_spellings_list.sh │ └── validate.js ├── eslint.config.js ├── integrations ├── my_add_tooltips.js ├── my_autolink_headings.js ├── my_build_checks.js ├── my_cleanup_html.js ├── my_fixup_links.js ├── my_htaccess.js ├── my_search_index.js └── my_svg_inline.js ├── package-lock.json ├── package.json ├── public ├── dark_230103.css └── light_230103.css ├── src ├── book.md ├── charts │ ├── charts.html │ ├── randao_extend.py │ └── reward_variance.py ├── components │ ├── Banner.astro │ ├── Footer.astro │ ├── FootnoteTooltips.astro │ ├── Matomo.astro │ ├── NestedList.astro │ ├── PageList.astro │ ├── PageNavi.astro │ ├── PrevNext.astro │ ├── PrevNextLink.astro │ ├── Scripts.astro │ ├── Search.astro │ ├── Sidebar.astro │ └── Subsections.astro ├── content.config.js ├── css │ ├── banner.css │ ├── custom.css │ ├── dark-mode-toggle.css │ ├── fonts.css │ ├── footer.css │ ├── footnote-tooltips.css │ ├── page.css │ ├── pagenavi.css │ ├── prevnext.css │ ├── search.css │ ├── sidebar.css │ └── subsections.css ├── diagrams │ ├── aggregators.drawio │ ├── annotated-forkchoice.drawio │ ├── bls.drawio │ ├── committees.drawio │ ├── consensus.drawio │ ├── deposits-withdrawals.drawio │ ├── gasper.drawio │ ├── incentives.drawio │ ├── merkleization.drawio │ ├── randomness.drawio │ ├── shuffling.drawio │ └── ssz.drawio ├── images │ ├── benjaminion.pdf │ ├── benjaminion.svg │ ├── charts │ │ ├── committee_aggregators.svg │ │ ├── hysteresis.svg │ │ ├── inactivity_balances.svg │ │ ├── inactivity_scores.svg │ │ ├── issuance_curve.svg │ │ ├── randao_extend_0.svg │ │ ├── randao_extend_1.svg │ │ ├── randao_extend_2.svg │ │ ├── randao_proposals.svg │ │ ├── randao_proposals_percent.svg │ │ ├── randao_tail.svg │ │ ├── reward_timeliness.svg │ │ ├── reward_variance.svg │ │ └── rewards_curve.svg │ ├── diagrams │ │ ├── aggregators.svg │ │ ├── annotated-forkchoice-bouncing-0.svg │ │ ├── annotated-forkchoice-bouncing-1.svg │ │ ├── annotated-forkchoice-bouncing-2.svg │ │ ├── annotated-forkchoice-correct-justified-0.svg │ │ ├── annotated-forkchoice-correct-justified-1.svg │ │ ├── annotated-forkchoice-filter-0.svg │ │ ├── annotated-forkchoice-filter-1.svg │ │ ├── annotated-forkchoice-filter-2.svg │ │ ├── annotated-forkchoice-filter-3.svg │ │ ├── annotated-forkchoice-gasper.svg │ │ ├── annotated-forkchoice-get-weight-0.svg │ │ ├── annotated-forkchoice-justification-withholding-0.svg │ │ ├── 
annotated-forkchoice-justification-withholding-1.svg │ │ ├── annotated-forkchoice-justification-withholding-2.svg │ │ ├── annotated-forkchoice-late-block-0.svg │ │ ├── annotated-forkchoice-late-block-1.svg │ │ ├── annotated-forkchoice-late-block-2.svg │ │ ├── annotated-forkchoice-lmd-ghost-0.svg │ │ ├── annotated-forkchoice-lmd-ghost-1.svg │ │ ├── annotated-forkchoice-lmd-ghost-2.svg │ │ ├── annotated-forkchoice-processSlots.svg │ │ ├── annotated-forkchoice-pull-up-tip.svg │ │ ├── annotated-forkchoice-the-merge-block.svg │ │ ├── annotated-forkchoice-unrealised-justification-reorg-0.svg │ │ ├── annotated-forkchoice-unrealised-justification-reorg-1.svg │ │ ├── annotated-forkchoice-viable-nonviable.svg │ │ ├── bls-aggregate_verify.svg │ │ ├── bls-aggregation.svg │ │ ├── bls-key.svg │ │ ├── bls-pubkey_aggregation.svg │ │ ├── bls-setup.svg │ │ ├── bls-signature_aggregation.svg │ │ ├── bls-signing.svg │ │ ├── bls-verifying.svg │ │ ├── committees-all.svg │ │ ├── committees-organised.svg │ │ ├── committees-random.svg │ │ ├── committees-selection.svg │ │ ├── consensus-2-finality.svg │ │ ├── consensus-answer-0.svg │ │ ├── consensus-answer-1.svg │ │ ├── consensus-answer-2.svg │ │ ├── consensus-answer-3.svg │ │ ├── consensus-answer-4.svg │ │ ├── consensus-block_chain.svg │ │ ├── consensus-block_tree.svg │ │ ├── consensus-block_tree_resolved.svg │ │ ├── consensus-commandment-1a.svg │ │ ├── consensus-commandment-1b.svg │ │ ├── consensus-commandment-2a.svg │ │ ├── consensus-commandment-2b.svg │ │ ├── consensus-conflict.svg │ │ ├── consensus-conflicting-finalised.svg │ │ ├── consensus-conflicting-justification-0.svg │ │ ├── consensus-conflicting-justification-1.svg │ │ ├── consensus-conflicting-justification-2.svg │ │ ├── consensus-conflicting-justification-3.svg │ │ ├── consensus-exercise-0.svg │ │ ├── consensus-ffg-vote.svg │ │ ├── consensus-finalised.svg │ │ ├── consensus-finality.svg │ │ ├── consensus-issues-ffg-reorg.svg │ │ ├── consensus-justification-chain.svg │ │ ├── consensus-justified.svg │ │ ├── consensus-k-finality-proof.svg │ │ ├── consensus-messages.svg │ │ ├── consensus-nas-0.svg │ │ ├── consensus-nas-1.svg │ │ ├── consensus-partition.svg │ │ ├── consensus-plausible-liveness.svg │ │ ├── consensus-reversion-0.svg │ │ ├── consensus-reversion-1.svg │ │ ├── consensus-slots-epochs-checkpoints.svg │ │ ├── consensus-source-target.svg │ │ ├── consensus-two-rounds.svg │ │ ├── consensus-two-thirds.svg │ │ ├── deposits-withdrawals-deposit-data-root.svg │ │ ├── deposits-withdrawals-deposit-root.svg │ │ ├── deposits-withdrawals-eth-calls.svg │ │ ├── deposits-withdrawals-overview.svg │ │ ├── deposits-withdrawals-update-branch.svg │ │ ├── deposits-withdrawals-zero-hashes.svg │ │ ├── gasper-blocktree.svg │ │ ├── gasper-blocktree_finalised.svg │ │ ├── incentives-inactivity_scores_flow.svg │ │ ├── incentives-reward_split.svg │ │ ├── incentives-rewards_eligibility.svg │ │ ├── incentives-scalability_trilemma.svg │ │ ├── incentives-weights.svg │ │ ├── merkleization-AttestationData.svg │ │ ├── merkleization-AttestingIndices.svg │ │ ├── merkleization-IndexedAttestation.svg │ │ ├── merkleization-IndexedAttestation_all.svg │ │ ├── merkleization-Signature.svg │ │ ├── merkleization-tree.svg │ │ ├── randomness-assignments.svg │ │ ├── randomness-biasing.svg │ │ ├── randomness-lookahead.svg │ │ ├── randomness-propose_probabilities.svg │ │ ├── randomness-reveal.svg │ │ ├── randomness-shuffle.svg │ │ ├── randomness-tail_probabilities.svg │ │ ├── shuffling-0.svg │ │ ├── shuffling-1.svg │ │ ├── shuffling-2.svg │ │ ├── 
shuffling-3.svg │ │ ├── shuffling-4.svg │ │ ├── ssz-bitlist.svg │ │ ├── ssz-examples_AttesterSlashing.svg │ │ ├── ssz-examples_Baz.svg │ │ └── ssz-examples_IndexedAttestation.svg │ ├── moon.svg │ └── sun.svg ├── include │ ├── SiteConfig.js │ └── constants.json ├── layouts │ └── Html.astro ├── md │ ├── 404.md │ ├── contact.md │ ├── contents.md │ ├── pdf.md │ ├── search.md │ └── title_page.md ├── pages │ └── [...path].astro └── spellings.en.pws └── tsconfig.json /.gitignore: -------------------------------------------------------------------------------- 1 | # Deployment utilities 2 | bin/priv/ 3 | 4 | # roughViz library - build it from https://github.com/benjaminion/roughViz 5 | src/charts/roughviz.min.js 6 | 7 | # svg-text-to-path library - get it from https://github.com/paulzi/svg-text-to-path/tree/master/dist 8 | src/charts/svg-text-to-path.js 9 | 10 | # Download and unpack the font file from https://fonts.google.com/specimen/Gaegu 11 | src/charts/font/* 12 | 13 | # Files generated during the build process 14 | src/md/pages/ 15 | src/md/annotated.md 16 | src/cache/ 17 | 18 | # Junk 19 | tmp/ 20 | test* 21 | *.pdf 22 | 23 | # IntelliJ configs 24 | .idea/ 25 | *.iml 26 | 27 | # build output 28 | dist/ 29 | 30 | # generated types 31 | .astro/ 32 | 33 | # dependencies 34 | node_modules/ 35 | 36 | # logs 37 | npm-debug.log* 38 | yarn-debug.log* 39 | yarn-error.log* 40 | pnpm-debug.log* 41 | 42 | # environment variables 43 | .env 44 | .env.production 45 | 46 | # macOS-specific files 47 | .DS_Store 48 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Upgrading Ethereum 2 | 3 | This is my book about Ethereum 2.0: Ethereum on proof of stake and beyond. 4 | 5 | You can read it at [eth2book.info](https://eth2book.info/latest/) (also available at [upgrading-ethereum.info](https://upgrading-ethereum.info/latest/)). 6 | 7 | It is a work in progress. There's more about the roll-out plan in the [preface](https://eth2book.info/latest/preface/). 8 | 9 | ## Licence 10 | 11 | This work is licensed under a [Creative Commons Attribution-ShareAlike 4.0 International](https://creativecommons.org/licenses/by-sa/4.0/) licence. 12 | 13 | ## Contributing 14 | 15 | I am not looking for contributions at this time. That may change in future, but for now I will not be accepting any PRs to _src/book.md_. 16 | 17 | Feel free to raise issues for typos, inaccuracies, omissions, and suggestions, but please don't make PRs for these. I'll happily consider PRs for improvements to the CSS or JavaScript. 18 | 19 | Kindly note that [British spelling](https://www.oxfordinternationalenglish.com/differences-in-british-and-american-spelling/) is not a typo. 20 | 21 | ## Installing 22 | 23 | As of May 2025, I have migrated the entire build from Gatsby to [Astro](https://astro.build/). Please let me know if you spot any issues! 24 | 25 | ### Pre-requisites 26 | 27 | Install `node` and `npm`. These are my versions: 28 | 29 | ``` 30 | > node --version 31 | v22.16.0 32 | > npm --version 33 | 11.4.1 34 | ``` 35 | 36 | You'll also need a working `perl` installed at _/usr/bin/perl_ so that the build can preprocess the book document. 37 | 38 | ### Pre-build checks 39 | 40 | I've implemented a heap of pre-build checks for linting and spelling issues. You can run them standalone with `npm run check`, and they also run as a first step in the build process, though a failure will not stop the build. 
41 | 42 | To cause Git commits to halt when these checks fail, add the following symlink: 43 | 44 | ``` 45 | (cd .git/hooks; ln -s ../../bin/util/git-pre-commit-hook.sh pre-commit) 46 | ``` 47 | 48 | The controlling script for the checks is _bin/build/prebuild.js_. You can enable and disable specific checks there. 49 | 50 | If the $\LaTeX$ linting fails you might need to install the following, or just disable that check. 51 | 52 | ``` 53 | sudo apt install libipc-run3-perl chktex 54 | ``` 55 | 56 | ### Building 57 | 58 | Clone this repo. `cd` into it, then: 59 | 60 | ``` 61 | npm install 62 | npm run build 63 | ``` 64 | 65 | ### Viewing 66 | 67 | After building as above, do 68 | 69 | ``` 70 | npm run serve 71 | ``` 72 | 73 | Astro will tell you where it is serving the content (somewhere like http://localhost:4321/capella). 74 | 75 | Instead of building and serving, you can run `npm run devel` and visit the link Astro shows. This will not pick up real-time changes to _src/book.md_ and will need to be restarted to see them. It is useful, though, for checking CSS and other component changes interactively. 76 | 77 | ## Workflow 78 | 79 | The entire text for the book is in the _src/book.md_ file. Everything under _src/md/pages_ is auto-generated and any changes there will be lost. 80 | 81 | There are various npm script commands to help with building and testing. See `package.json` for the full list. 82 | 83 | - `npm run clean` deletes the output directory (`dist/`) and the Astro cache. 84 | - I recommend doing this often. Astro caches aggressively and will often skip things like rebuilding the search index. 85 | - `npm run check` runs a bunch of custom linting and checking, controlled by the _bin/build/prebuild.js_ script. 86 | - Check all links to internal anchors, image files, and footnotes. 87 | - Spell check. Add any exceptions to _src/spellings.en.pws_ (or use `npm run spfix`). 88 | - Markdown linting on both the original source and the generated pages. 89 | - HTML checks and LaTeX expression linting. 90 | - `npm run build` runs `astro build`. 91 | - `npm run serve` runs `astro preview`. 92 | - `npm run links` checks external links. 93 | - Checks of links to GitHub will fail due to rate-limiting unless you supply GitHub credentials. 94 | - `npm run spell` runs a spell check. 95 | - `npm run spfix` can be used to maintain the list of spellings. 96 | - `npm run valid` submits a page to the [W3C markup validation service](https://validator.w3.org/) and lists any issues above `info` level. 97 | - `npm run pdfit` creates a PDF of the whole thing. See the [README](bin/pdf/README.md). 98 | - `npm run stats` shows some stats about the book. Build the PDF first to get the full set. 99 | - `npm run debug` builds with debugging output for my integrations. 100 | - `npm run minim` does a minimal build with only a couple of pages. See `src/content.config.js`. 101 | 102 | ### Environment variables 103 | 104 | A couple of environment variables can be used to shorten the build time when testing infrastructure changes: 105 | 106 | ``` 107 | UE_MINIMAL= npm run build # Build a minimal version with only a couple of pages 108 | UE_NOCHECK= npm run build # Skip checks on the source markdown when building 109 | ``` 110 | 111 | ## How to 112 | 113 | ### Create a new page 114 | 115 | New pages are created by appending HTML comments to headings (first three levels only): 116 | 117 | ``` 118 | ## Heading <!-- /path/to/page/ --> 119 | ``` 120 | 121 | Take care to get the white space correct.
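For example, page markers look like this (illustrative paths, following the rules in _bin/build/split_markdown.pl_):

```
# Part 2: Technical Overview <!-- /part2/ -->
## The Building Blocks <!-- /part2/building_blocks/ -->
### Randomness <!-- /part2/building_blocks/randomness/ -->
```

The last of these would be split out to _src/md/pages/part2/building_blocks/randomness.md_.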
122 | 123 | ### Make a page unlinkable 124 | 125 | Do this if a page has no content yet. It will appear in the index, but will not be linkable. 126 | 127 | Append a `*` to the path: 128 | 129 | ``` 130 | ## Heading <!-- /path/to/page/* --> 131 | ``` 132 | 133 | ## Images 134 | 135 | All images are SVG, and text elements are replaced by paths for maximum compatibility: it seems that a lot of applications have trouble with embedded fonts. 136 | 137 | ### Diagrams 138 | 139 | Diagrams have been created in [drawio.com](https://www.drawio.com/) and exported to SVG with the following options: 140 | - Border width: 10 (some of the sketched elements go out of bounds) 141 | - Text settings: Convert labels to SVG 142 | 143 | Source files for all diagrams are in the _src/diagrams_ directory. The font used is the _Gloria Hallelujah_ Google font. 144 | 145 | ### Charts 146 | 147 | Charts (graphs, bar charts) are generated using my ~~hacked~~ extended version of the [roughViz](https://github.com/benjaminion/roughViz) library. Load the _src/charts/charts.html_ file in a browser (you might need to fiddle with some browser security settings to allow it to load local files). The charts are downloaded via the link that should appear on each image. If the download link doesn't appear, check the browser console for errors. 148 | 149 | Pre-requisites: 150 | - _roughviz.min.js_ needs to be [downloaded](https://raw.githubusercontent.com/benjaminion/roughViz/master/dist/roughviz.min.js) from my repo and put in the _charts_ directory. 151 | - _svg-text-to-path.js_ needs to be [downloaded](https://raw.githubusercontent.com/paulzi/svg-text-to-path/master/dist/svg-text-to-path.js), also to the _charts_ directory. 152 | - The _Gaegu-Light.ttf_ file needs to be extracted from the [Gaegu](https://fonts.google.com/specimen/Gaegu) Google font and put in the _charts/font_ directory. 153 | 154 | ## Whole book PDF 155 | 156 | There's an experimental pipeline for typesetting the whole text as a PDF book in _bin/pdf/_. See the [README](bin/pdf/README.md) there for instructions. 157 | 158 | ## Coffee 159 | 160 | Kind souls sometimes ask for a way to send me a cup of coffee or make a donation. My account info is below - donations are absolutely not expected or necessary, but are always very encouraging and gratefully received. 161 | 162 | - `0xd262d146e869915444d0f34ecdaabab5ab43007e` on Ethereum, Polygon, Optimism, Arbitrum, Base. 163 | - Also at `benjaminion.eth` 164 | 165 | Any whales or large treasuries out there, I encourage you to take a look at the [Protocol Guild](https://protocol-guild.readthedocs.io/en/latest/index.html) which supports the people developing and maintaining our incredible technology, not just writing about it. 166 | 167 | Finally, all [feedback](https://eth2book.info/latest/contact/) is very welcome!
168 | -------------------------------------------------------------------------------- /astro.config.mjs: -------------------------------------------------------------------------------- 1 | // @ts-check 2 | import { defineConfig } from 'astro/config'; 3 | import { Metadata, SearchOptions } from './src/include/SiteConfig.js'; 4 | import remarkMath from 'remark-math'; 5 | import rehypeKatex from 'rehype-katex'; 6 | import rehypeRaw from 'rehype-raw'; 7 | import Prism from 'prismjs'; 8 | 9 | // Custom integrations 10 | import myBuildChecks from './integrations/my_build_checks'; 11 | import myAutoLinkHeadings from './integrations/my_autolink_headings'; 12 | import mySvgInline from './integrations/my_svg_inline'; 13 | import mySearchIndex from './integrations/my_search_index'; 14 | import myAddTooltips from './integrations/my_add_tooltips'; 15 | import myFixupLinks from './integrations/my_fixup_links'; 16 | import myCleanupHtml from './integrations/my_cleanup_html'; 17 | import myHtaccess from './integrations/my_htaccess'; 18 | 19 | Prism.languages.none = Prism.languages.text; 20 | Prism.languages.code = Prism.languages.text; 21 | 22 | const basePath = '/' + Metadata.version; 23 | 24 | export default defineConfig({ 25 | base: basePath, 26 | integrations: [ 27 | myBuildChecks(), 28 | myAutoLinkHeadings({ headings: ['h2', 'h3', 'h4', 'h5', 'h6'], exclude: '.no-anchor' }), 29 | mySvgInline({ filePath: 'src/', cachePath: 'src/cache/' }), 30 | mySearchIndex(SearchOptions), 31 | myAddTooltips({ constantsFile: 'src/include/constants.json' }), 32 | myFixupLinks(), 33 | myCleanupHtml(), 34 | myHtaccess(basePath), 35 | ], 36 | markdown: { 37 | syntaxHighlight: 'prism', 38 | smartypants: false, 39 | remarkRehype: { clobberPrefix: '' }, // This is ok as we trust the markdown 40 | remarkPlugins: [remarkMath], 41 | rehypePlugins: [ 42 | rehypeRaw, // Insert HTML embedded in MD files into the AST rather than as raw strings 43 | [rehypeKatex, {}], 44 | ], 45 | }, 46 | }); 47 | -------------------------------------------------------------------------------- /bin/build/checks/html.pl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/perl 2 | 3 | # Checks that all HTML tags are balanced 4 | 5 | use strict; 6 | use warnings; 7 | 8 | $\ = "\n"; # set output record separator 9 | 10 | my @html_entities = ('ndash', 'nbsp', 'trade', 'ldquo', 'rdquo'); 11 | my %entities = map { $_ => 1 } @html_entities; 12 | 13 | my $fh = *STDIN; 14 | if (my $file = shift) { 15 | open $fh, '<', $file or die "Can't open $file: $!"; 16 | } 17 | 18 | my @tags = (); 19 | 20 | while(<$fh>) { 21 | 22 | while (/<(\/{0,1})([a-z]+).*?(\/{0,1})>/g) { 23 | my $thisTag = $2; 24 | my $isOpeningTag = $1 ne "/"; 25 | my $isSelfClosed = $3 eq "/"; 26 | 27 | # Ignore self-closed tags 28 | next if $isSelfClosed; 29 | 30 | if ($isOpeningTag) { 31 | push(@tags, $thisTag); 32 | } else { 33 | if (@tags) { 34 | my $tag = pop(@tags); 35 | if ($thisTag ne $tag) { 36 | print "Expected: </$tag>, at line $., got </$thisTag>"; 37 | } 38 | } else { 39 | print "Expected no tag at line $., got </$thisTag>"; 40 | } 41 | } 42 | } 43 | 44 | pos($_) = 0; 45 | while (/&(.+?);/g) { 46 | if (!exists($entities{$1})) { 47 | print "Unknown HTML entity $1 at line $."; 48 | } 49 | } 50 | } 51 | 52 | for (@tags) { 53 | print "Unclosed tag found: <$_>"; 54 | }
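# Illustrative run (hypothetical input and line number; the messages follow the
# print formats above):
#
#   $ bin/build/checks/html.pl src/book.md
#   Expected: </i>, at line 1234, got </b>
#   Unclosed tag found: <div>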
-------------------------------------------------------------------------------- /bin/build/checks/latex.pl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/perl 2 | 3 | # Notes: 4 | # - Need to install run3: sudo apt-get install libipc-run3-perl 5 | # - chktex manual is here: https://www.nongnu.org/chktex/ChkTeX.pdf 6 | 7 | use strict; 8 | use warnings; 9 | use IPC::Run3; 10 | 11 | $\ = "\n"; # set output record separator 12 | 13 | my $fh = *STDIN; 14 | if (my $file = shift) { 15 | open $fh, '<', $file or die "Can't open $file: $!"; 16 | } 17 | 18 | # Add any exclusions here by adding "-n#" where # is the warning number 19 | my @command = ["chktex", "-q"]; 20 | 21 | # Specifically ignore some false positives. 22 | my $ignore = qr/\$(\[1,r\))\$/; 23 | 24 | my $latex = ''; 25 | my $inMath = 0; 26 | while(<$fh>) { 27 | 28 | chomp; 29 | 30 | if ($inMath and !(/^\$\$$/)) { 31 | $latex .= $_ . " % Source line $.\n"; 32 | } 33 | 34 | if (/^\$\$$/) { 35 | $inMath = !$inMath; 36 | $latex .= $inMath ? "\\[\n" : "\\]\n"; 37 | next; 38 | } 39 | 40 | while (/(^|[^\\])(\$.+?\$)/g) { 41 | my $ltx = $2; 42 | if (!($ltx =~ $ignore)) { 43 | $latex .= $ltx . " % Source line $.\n"; 44 | } 45 | } 46 | 47 | pos = 0; 48 | print "Unbalanced \$ on line $." if (() = /(^|[^\\])\$/g) % 2; 49 | } 50 | 51 | $inMath and print "Unbalanced \$\$ detected" or run3 @command, \$latex; 52 | -------------------------------------------------------------------------------- /bin/build/checks/links.pl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/perl 2 | 3 | # Finds internal links that do not point to existing anchors. 4 | # 5 | # Anchors may be 6 | # 1. Full page paths: /part3/introduction 7 | # 2. Headings or <a> elements: #introduction 8 | # 3. A combination: /part3/introduction#introduction 9 | # 10 | # Relative page paths are not supported. 11 | # 12 | # Anchors generated from headings have some rules. See integrations/my_autolink_headings.js 13 | # - Converted to lower case 14 | # - Spaces become "-" 15 | # - Special characters are omitted: ".,?:'`/[]()" and probably others 16 | # - Underscores and dashes are retained. 17 | # 18 | # Other checks: 19 | # - Footnote references have corresponding definitions and vice versa. 20 | # - Linked image files exist. 21 | 22 | use strict; 23 | use warnings; 24 | use Fcntl qw(SEEK_SET); 25 | 26 | $\ = "\n"; # set output record separator 27 | 28 | my ($file) = @ARGV; 29 | die "Usage: $0 FILE\n" if not $file; 30 | open my $fh, '<', $file or die "Can't open $file: $!"; 31 | 32 | my $domainMatch = qr/(localhost|eth2book.info|upgrading-ethereum.info)/; 33 | my $newPagePath = qr/^(#{1,3} ).* <!-- (\/[^ *]*)\*{0,1} -->$/; # page marker: <!-- /path/ -->, optional trailing "*" marks hidden pages 34 | my $filePath = $file =~ s|[^/]+$||r; 35 | my $pagePath; 36 | my $inCode; 37 | 38 | my %anchors = ( 39 | '/contents/' => 1, 40 | '/annotated-spec/' => 1, 41 | ); 42 | my %fns; 43 | 44 | # First pass: build lists of anchors and footnotes 45 | $inCode = 0; 46 | while(<$fh>) { 47 | 48 | $inCode = 1 - $inCode if /^```/; 49 | next if $inCode; 50 | 51 | # Add pages 52 | if (/$newPagePath/) { 53 | $pagePath = $2; 54 | $pagePath =~ /\/$/ or print "Page missing trailing /: $pagePath, line $."; 55 | $anchors{$pagePath} = 1; 56 | } 57 | 58 | # Add headings 59 | if (/^#+ (.*)$/) { 60 | my $name = $1 =~ s/\s+$//r; 61 | $name = lc $name; 62 | $name =~ s/\s+/-/g; 63 | $name =~ s/[^a-z0-9_-]//g; 64 | $anchors{$pagePath . '#' . $name} = 1; 65 | } 66 | 67 | # Add explicit anchors 68 | while (/<a id="(.+?)"><\/a>$/g) { 69 | $anchors{$pagePath . '#' . $1} = 1; 70 | } 71 | 72 | # Add footnote definitions 73 | $fns{$1} = $.
if /^\[\^(.+?)\]:/; 74 | } 75 | 76 | $inCode and die "Error: unbalanced code block markers!"; 77 | 78 | # Reset position to start of file 79 | seek $fh, $. = 0, SEEK_SET; 80 | 81 | # Second pass: check anchors and footnotes exist 82 | while(<$fh>) { 83 | 84 | /^```/ and $inCode = 1 - $inCode; 85 | next if $inCode; 86 | 87 | $pagePath = $2 if /$newPagePath/; 88 | 89 | # Footnote references 90 | while (/.\[\^(.+?)\]/g) { 91 | my $fn = $1; 92 | exists($fns{$fn}) and delete $fns{$fn} or print "Missing footnote: $fn, line $."; 93 | } 94 | 95 | while (/(!{0,1})\[.+?\]\((.*?)\)/g) { 96 | 97 | my $isImg = $1 eq '!'; 98 | my $link = $2; 99 | 100 | if ($isImg) { 101 | unless(-e $filePath . $link) { 102 | print "Nonexistent image file: $link line $."; 103 | } 104 | } else { 105 | if ($link =~ /^\/\.\./) { 106 | if (!($link =~ /^...\/(latest|altair|bellatrix|capella|deneb)/)) { 107 | print "Link to non-existent book version, line $.: $link"; 108 | } 109 | } elsif ($link =~ /^([#\/])/) { 110 | my $anchor = ($1 eq '#') ? $pagePath . $link : $link; 111 | unless (exists($anchors{$anchor})) { 112 | print "Anchor not found, line $.: $link"; 113 | } 114 | } elsif ($link eq '') { 115 | print "Empty link, line $."; 116 | } elsif ($link =~ $domainMatch) { 117 | print "Link to $1, line $." 118 | } elsif ($link =~ /^http:/) { 119 | print "HTTP link, line $."; 120 | } elsif (not $link =~ /^https:\/\//) { 121 | print "Suspicious link, line $.: $link"; 122 | } 123 | } 124 | } 125 | } 126 | 127 | while (my($fn,$line) = each %fns) { 128 | print "Unreferenced footnote: $fn, line $line"; 129 | } 130 | -------------------------------------------------------------------------------- /bin/build/checks/lint_source_md.mjs: -------------------------------------------------------------------------------- 1 | import { lint } from "markdownlint/sync" 2 | 3 | // 4 | // See https://github.com/DavidAnson/markdownlint for the rules and options 5 | // 6 | 7 | // Lint check the source markdown file. 8 | export const lintSourceMarkdown = (file) => { 9 | 10 | const options = { 11 | 'files': [ file ], 12 | 'config': { 13 | 'default': true, 14 | 15 | // Start unordered lists with two spaces of indentation 16 | 'MD007': { 17 | 'indent': 2, 18 | 'start_indented': true, 19 | 'start_indent': 2, 20 | }, 21 | 22 | // We don't want any trailing spaces 23 | 'MD009': { 24 | 'strict': true, 25 | }, 26 | 27 | // Some headings end in ! or ? 28 | 'MD026': { 29 | 'punctuation': '.,;:', 30 | }, 31 | 32 | // We fence all block code with backticks 33 | 'MD046': { 34 | 'style': 'fenced', 35 | }, 36 | 37 | // Emphasis style 38 | 'MD049': { 39 | 'style': 'underscore', 40 | }, 41 | 42 | // 43 | // Disabled rules 44 | // 45 | 46 | // We have long lines 47 | 'MD013': false, 48 | 49 | // Duplicate headings are ok (they appear on different pages after pre-processing) 50 | 'MD024': false, 51 | 52 | // Multiple top-level titles are ok (they appear on different pages after pre-processing) 53 | 'MD025': false, 54 | 55 | // Doesn't work well with blockquoted lists 56 | 'MD027': false, 57 | 58 | // Some lists begin with other index than 1 59 | 'MD029': false, 60 | 61 | // We have inline html 62 | 'MD033': false, 63 | 64 | // link-image-reference-definitions - we use these as TODO comments 65 | 'MD053': false, 66 | 67 | // descriptive-link-text - we like using "here" from time to time 68 | 'MD059': false, 69 | } 70 | } 71 | 72 | const result = lint(options) 73 | 74 | return (result[file].length > 0) ? 
result.toString() : null 75 | } 76 | -------------------------------------------------------------------------------- /bin/build/checks/lint_split_md.mjs: -------------------------------------------------------------------------------- 1 | import { lint } from "markdownlint/sync" 2 | 3 | // 4 | // See https://github.com/DavidAnson/markdownlint for the rules and options 5 | // 6 | 7 | // Lint check the markdown files after they have been split out from the source document. 8 | // The rules differ slightly from the rules for the original source. 9 | export const lintSplitMarkdown = (files) => { 10 | 11 | const options = { 12 | 'files': files, 13 | 'config': { 14 | 'default': true, 15 | 16 | // Start unordered lists with two spaces of indentation 17 | 'MD007': { 18 | 'indent': 2, 19 | 'start_indented': true, 20 | 'start_indent': 2, 21 | }, 22 | 23 | // We don't want any trailing spaces 24 | 'MD009': { 25 | 'strict': true, 26 | }, 27 | 28 | // Some headings end in ! or ? 29 | 'MD026': { 30 | 'punctuation': '.,;:', 31 | }, 32 | 33 | // We fence all block code with backticks 34 | 'MD046': { 35 | 'style': 'fenced', 36 | }, 37 | 38 | // Emphasis style 39 | 'MD049': { 40 | 'style': 'underscore', 41 | }, 42 | 43 | // 44 | // Disabled rules 45 | // 46 | 47 | // Trailing blank lines are hard to avoid when doing the split 48 | 'MD012': false, 49 | 50 | // We have long lines 51 | 'MD013': false, 52 | 53 | // Doesn't work well with blockquoted lists 54 | 'MD027': false, 55 | 56 | // Some lists begin with other index than 1 57 | 'MD029': false, 58 | 59 | // We have inline html 60 | 'MD033': false, 61 | 62 |
// We don't expect the very first line to be a top-level heading (due to inserted `<div>` breadcrumbs) 63 | 'MD041': false, 64 | 65 | // link-image-reference-definitions - we use these as TODO comments 66 | 'MD053': false, 67 | 68 | // descriptive-link-text - we like using "here" from time to time 69 | 'MD059': false, 70 | } 71 | } 72 | 73 | const result = lint(options) 74 | 75 | return (Object.values(result).filter(x => x.length > 0).length > 0) 76 | ? result.toString() 77 | : null 78 | } 79 | -------------------------------------------------------------------------------- /bin/build/checks/repeats.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Check the book source, supplied as $1, for repeated words. 4 | 5 | grep -Enoi '\b([_[:alpha:]]+)\s+\1\b' $1 | awk 'BEGIN {FS="[: ]"} {print "Line " $1 ": " $2 " " $3}' 6 | 7 | # The following allows us to handle the return from grep ("1" is not an error!) 8 | r=("${PIPESTATUS[@]}") 9 | (( ${r[0]} )) && (( ${r[0]} != 1 )) && exit ${r[0]} 10 | exit ${r[1]} 11 | -------------------------------------------------------------------------------- /bin/build/checks/spellcheck.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Spell check the book source, supplied as $1, exceptions are in the file $2. 4 | # 5 | # Aspell has input filters for markdown etc, but it's honestly easier just to preprocess stuff 6 | # with the spellcheck_prep.pl script. 7 | 8 | export LANG=en_GB.UTF-8 9 | here=$(cd $(dirname "$0") && pwd) 10 | 11 | $here/spellcheck_prep.pl $1 \ 12 | | aspell --home-dir . -p $2 --dont-suggest pipe --dict-dir=$here/../dicts -d en_GB-ise-w_accents \ 13 | | tail -n +2 \ 14 | | awk 'BEGIN {n=1} /^$/{n++} /^..+$/{print "Line " n ": "$2}' 15 | -------------------------------------------------------------------------------- /bin/build/checks/spellcheck_prep.pl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/perl 2 | 3 | # Strips markdown stuff from the input file to prepare for spell checking. 4 | # To preserve line numbers, we keep all lines and prefix every output line with a `^` 5 | 6 | use strict; 7 | use warnings; 8 | 9 | $\ = "\n"; # set output record separator 10 | 11 | my $inCode = 0; 12 | my $inMath = 0; 13 | my $inComment = 0; 14 | 15 | while(<>) { 16 | 17 | # Code blocks 18 | if (/^```/) { 19 | $inCode = 1 - $inCode; 20 | print '^'; 21 | next; 22 | } 23 | 24 | # LaTeX blocks 25 | if (/^\$\$/) { 26 | $inMath = 1 - $inMath; 27 | print '^'; 28 | next; 29 | } 30 | 31 | # Multi-line HTML comments (the begin/end matching below is an assumed reconstruction) 32 | if (/^<!--/ and not /-->$/) { 33 | $inComment = 1; 34 | print '^'; 35 | next; 36 | } 37 | 38 | if (/-->$/) { 39 | $inComment = 0; 40 | print '^'; 41 | next; 42 | } 43 | 44 | if ($inMath or $inCode or $inComment) { 45 | print '^'; 46 | next; 47 | } 48 | 49 | chomp; 50 | 51 | # Block quotations 52 | s/^ *>.*$//; 53 | 54 | # Footnote links 55 | s/\[\^.+?\]:?//g; 56 | 57 | # Markdown link stuff 58 | s/^\[//g; 59 | s/[ !"\(]\[/ /g; 60 | s/\]\(.+?\)//g; 61 | 62 | # Backticked text 63 | s/`.+?`//g; 64 | 65 | # Inline maths 66 | s/\$.+?\$//g; 67 | 68 | # Heading prefixes 69 | s/^#+ //g; 70 | 71 | # HTML comments 72 | s/<!--.*?-->//g; 73 | 74 | # HTML tags 75 | s/<.+?>//g; 76 | 77 | # HTML entities 78 | s/&.+?;/ /g; 79 | 80 | # Anything in quote marks 81 | s/".+?"//g; 82 | 83 | print "^ $_"; 84 | } 85 | 86 | die "Code block not closed!" unless $inCode == 0; 87 | die "Math block not closed!" unless $inMath == 0; 88 | die "HTML comment not closed!" unless $inComment == 0;
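# Illustrative transformation (hypothetical input line): given
#   ## Randomness <!-- /part2/building_blocks/randomness/ -->
# the heading prefix, HTML comment, and other markdown are stripped, and the
# line is emitted as
#   ^ Randomness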
-------------------------------------------------------------------------------- /bin/build/checks/whitespace.pl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/perl -l 2 | 3 | # Check given file or STDIN for trailing whitespace 4 | 5 | use strict; 6 | use warnings; 7 | 8 | my $fh = *STDIN; 9 | if (my $file = shift) { 10 | open $fh, '<', $file or die "Can't open $file: $!"; 11 | } 12 | 13 | while(<$fh>) { 14 | print "Line $." if /\h$/; 15 | } 16 | -------------------------------------------------------------------------------- /bin/build/dicts/en-common.rws: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/benjaminion/upgrading-ethereum-book/8697ba063012970a996288db4b9ebab3be1dfd37/bin/build/dicts/en-common.rws -------------------------------------------------------------------------------- /bin/build/dicts/en_GB-ise-w_accents-only.rws: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/benjaminion/upgrading-ethereum-book/8697ba063012970a996288db4b9ebab3be1dfd37/bin/build/dicts/en_GB-ise-w_accents-only.rws -------------------------------------------------------------------------------- /bin/build/dicts/en_GB-ise-w_accents.multi: -------------------------------------------------------------------------------- 1 | # Generated with Aspell Dicts "proc" script version 0.60.4 2 | add en-common.rws 3 | add en_GB-ise-w_accents-only.rws 4 | -------------------------------------------------------------------------------- /bin/build/make_annotated.pl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/perl 2 | 3 | # Split out the annotated spec chapter as a single page doc and update links 4 | # 5 | # Sends output to stdout 6 | 7 | use strict; 8 | use warnings; 9 | 10 | $, = "\n"; # set output field separator 11 | $\ = "\n"; # set output record separator 12 | 13 | my $inPart3 = 0; 14 | 15 | print 16 | "---", 17 | "path: /annotated-spec/", 18 | "titles: [\"One Page Annotated Spec\"]", 19 | "index: [999]", 20 | "sequence: 990", 21 | "---"; 22 | 23 | while(<>) { 24 | 25 | last if /<!-- \/part4\/ -->/; # stop at the next part's page marker (exact path assumed) 26 | 27 | chomp; 28 | 29 | if (/<!-- \/part3\/ -->/) { 30 | $inPart3 = 1; 31 | print 32 | "# One Page Annotated Spec\n", 33 | "**Note:** This page is automatically generated from the chapters in [Part 3](/part3/).
You may find that some internal links are broken."; 34 | next; 35 | } 36 | 37 | if ($inPart3) { 38 | 39 | # Remove page path comments from titles 40 | s/^(#.*) <!--.*?-->$/$1/; 41 | 42 | # Rewrite urls that are internal to the chapter 43 | s/]\(\/part3\/[^#)]*/](/g; 44 | 45 | print $_ 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /bin/build/prebuild.js: -------------------------------------------------------------------------------- 1 | import { execSync } from 'child_process'; 2 | import { glob } from 'glob'; 3 | import { lintSourceMarkdown } from './checks/lint_source_md.mjs'; 4 | import { lintSplitMarkdown } from './checks/lint_split_md.mjs'; 5 | 6 | // Performs the following prebuild tasks: 7 | // - Checks that internal document links look ok 8 | // - Checks that HTML tags are properly balanced 9 | // - Spellcheck 10 | // - Repeated words check 11 | // - Trailing whitespace check 12 | // - Linting of LaTeX expressions 13 | // - Lints the source markdown 14 | // - Splits the source markdown into individual pages 15 | // - Lints the split markdown 16 | 17 | const doInternalLinks = true; 18 | const doHtmlCheck = true; 19 | const doSpellCheck = true; 20 | const doRepeatCheck = true; 21 | const doWhitespaceCheck = true; 22 | const doLatexCheck = true; 23 | const doSourceLint = true; 24 | const doSplitLint = true; 25 | 26 | const linkChecker = 'bin/build/checks/links.pl'; 27 | const htmlChecker = 'bin/build/checks/html.pl'; 28 | const spellChecker = 'bin/build/checks/spellcheck.sh'; 29 | const repeatChecker = 'bin/build/checks/repeats.sh'; 30 | const whitespaceChecker = 'bin/build/checks/whitespace.pl'; 31 | const latexChecker = 'bin/build/checks/latex.pl'; 32 | const mdSplitter = 'bin/build/process_markdown.sh'; 33 | 34 | const sourceMarkdown = 'src/book.md'; 35 | const ourSpellings = 'src/spellings.en.pws'; 36 | 37 | const customReporter = { 38 | // https://tintin.mudhalla.net/info/xterm/ 39 | // https://tintin.mudhalla.net/info/256color/ 40 | info: (m) => { 41 | console.log('\x1b[38;5;19m%s\x1b[0m %s', 'info', m); 42 | }, 43 | warn: (m) => { 44 | console.log('\x1b[38;5;130m%s\x1b[0m %s', 'warn', m); 45 | }, 46 | error: (m) => { 47 | console.log('\x1b[38;5;160m%s\x1b[0m %s', 'error', m); 48 | }, 49 | }; 50 | 51 | function printLines(s, reporter) { 52 | s.split(/\r?\n/).forEach((line, i) => line && reporter.warn(line)); 53 | } 54 | 55 | function runCheck(enabled, checker, messages, reporter) { 56 | let success = true; 57 | if (enabled) { 58 | reporter.info(messages.info); 59 | try { 60 | const out = checker(); 61 | if (out !== '' && out !== null) { 62 | reporter.warn(messages.fail); 63 | printLines(out, reporter); 64 | success = false; 65 | } 66 | } catch (err) { 67 | reporter.warn(messages.error); 68 | printLines(err.toString(), reporter); 69 | success = false; 70 | } 71 | } else { 72 | reporter.warn(messages.skip); 73 | } 74 | return success; 75 | } 76 | 77 | // Set `exitToShell` to false to continue processing after running checks (e.g.
while building) 78 | export default function runChecks( 79 | reporter = customReporter, 80 | exitToShell = true, 81 | ) { 82 | var allOk = true; 83 | 84 | allOk &= runCheck( 85 | doInternalLinks, 86 | () => execSync(`${linkChecker} ${sourceMarkdown}`, { encoding: 'utf8' }), 87 | { 88 | info: 'Checking internal links...', 89 | fail: 'Found some bad internal links:', 90 | error: 'Unable to check internal links:', 91 | skip: 'Skipping internal link check', 92 | }, 93 | reporter, 94 | ); 95 | 96 | allOk &= runCheck( 97 | doHtmlCheck, 98 | () => execSync(`${htmlChecker} ${sourceMarkdown}`, { encoding: 'utf8' }), 99 | { 100 | info: 'Checking HTML...', 101 | fail: 'Found HTML issues:', 102 | error: 'Unable to check HTML:', 103 | skip: 'Skipping HTML check', 104 | }, 105 | reporter, 106 | ); 107 | 108 | allOk &= runCheck( 109 | doSpellCheck, 110 | () => 111 | execSync(`${spellChecker} ${sourceMarkdown} ${ourSpellings}`, { 112 | encoding: 'utf8', 113 | }), 114 | { 115 | info: 'Performing spellcheck...', 116 | fail: 'Found some misspellings:', 117 | error: 'Unable to perform spellcheck:', 118 | skip: 'Skipping spellcheck', 119 | }, 120 | reporter, 121 | ); 122 | 123 | allOk &= runCheck( 124 | doRepeatCheck, 125 | () => execSync(`${repeatChecker} ${sourceMarkdown}`, { encoding: 'utf8' }), 126 | { 127 | info: 'Performing repeated words check...', 128 | fail: 'Found some repeated words:', 129 | error: 'Unable to perform repeat check:', 130 | skip: 'Skipping repeat check', 131 | }, 132 | reporter, 133 | ); 134 | 135 | allOk &= runCheck( 136 | doWhitespaceCheck, 137 | () => 138 | execSync(`${whitespaceChecker} ${sourceMarkdown}`, { encoding: 'utf8' }), 139 | { 140 | info: 'Performing trailing whitespace check...', 141 | fail: 'Found trailing whitespace:', 142 | error: 'Unable to perform whitespace check:', 143 | skip: 'Skipping whitespace check', 144 | }, 145 | reporter, 146 | ); 147 | 148 | allOk &= runCheck( 149 | doLatexCheck, 150 | () => execSync(`${latexChecker} ${sourceMarkdown}`, { encoding: 'utf8' }), 151 | { 152 | info: 'Performing LaTeX check...', 153 | fail: 'Found LaTeX issues:', 154 | error: 'Unable to perform LaTeX check:', 155 | skip: 'Skipping LaTeX check', 156 | }, 157 | reporter, 158 | ); 159 | 160 | let sourceLintSucceeded = runCheck( 161 | doSourceLint, 162 | () => lintSourceMarkdown(sourceMarkdown), 163 | { 164 | info: 'Linting source markdown...', 165 | fail: 'Found some linting issues:', 166 | error: 'Unable to lint check source markdown:', 167 | skip: 'Skipping source markdown linting', 168 | }, 169 | reporter, 170 | ); 171 | allOk &= sourceLintSucceeded; 172 | 173 | reporter.info('Unpacking book source...'); 174 | try { 175 | execSync(`${mdSplitter} ${sourceMarkdown}`); 176 | } catch (err) { 177 | reporter.error('Failed to unpack book source.'); 178 | throw err; 179 | } 180 | 181 | if (sourceLintSucceeded) { 182 | allOk &= runCheck( 183 | doSplitLint, 184 | () => 185 | lintSplitMarkdown( 186 | glob.sync('src/md/**/*.md', { ignore: 'src/md/annotated.md' }), 187 | ), 188 | { 189 | info: 'Linting split markdown...', 190 | fail: 'Found some linting issues:', 191 | error: 'Unable to lint check split markdown:', 192 | skip: 'Skipping split markdown linting', 193 | }, 194 | reporter, 195 | ); 196 | } else { 197 | reporter.warn('Skipping split markdown linting due to earlier errors'); 198 | } 199 | 200 | if (exitToShell) { 201 | process.exit(allOk ? 
0 : 2); 202 | } 203 | } 204 | -------------------------------------------------------------------------------- /bin/build/process_markdown.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Recreate the individual page markdown files from the master document 4 | # in src/book.md 5 | 6 | cd "$(dirname "$0")/../../src" 7 | 8 | # Build the markdown pages 9 | rm -rf md/pages/ 10 | ../bin/build/split_markdown.pl ../$1 11 | 12 | # Build the one page annotated spec 13 | rm -f md/annotated.md 14 | ../bin/build/make_annotated.pl ../$1 > md/annotated.md 15 | -------------------------------------------------------------------------------- /bin/build/split_markdown.pl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/perl 2 | 3 | # Note: run this via "process_markdown.sh" rather than directly. 4 | # 5 | # Split the master markdown file into separate files for each page. 6 | # 7 | # - Creates a directory structure reflecting the path hierarchy of the 8 | # pages. 9 | # - Prepends frontmatter to each file. 10 | # - Rewrites links for image files so that the original file can use 11 | # one path and the split files another. 12 | # 13 | # Rules: 14 | # - New pages can start at markdown heading levels 1, 2, or 3. 15 | # - To start a new page, append " <!-- /path/to/page/ -->" to the heading. 16 | # - The file containing the page will end up in "./md/pages/path/to/page.md" 17 | # - For the page to be marked "hidden", append a "*" to the path. 18 | 19 | use strict; 20 | use warnings; 21 | use File::Path qw(make_path); 22 | 23 | $, = "\n"; # set output field separator 24 | $\ = "\n"; # set output record separator 25 | 26 | my $outFilePrefix = 'md/pages'; 27 | my $sequence = 0; # Search is -2, Contents is -1 28 | my $thisPart = ''; 29 | my $thisChapter = ''; 30 | my $thisSection = ''; 31 | my $thisPartNo = -1; # Number parts from 0 32 | my $thisChapterNo = 0; 33 | my $thisSectionNo = 0; 34 | my $thisPath; 35 | my $idx; 36 | my $ofh; 37 | 38 | while (<>) { 39 | 40 | chomp; 41 | 42 | if (/^(#{1,3} )(.*) <!-- (\/[^ *]*)(\*{0,1}) -->$/) { 43 | 44 | my $level = $1; 45 | my $title = $2; 46 | my $path = $3; 47 | my $hide = $4 eq '*' ? 'true' : 'false'; 48 | 49 | $sequence++; 50 | 51 | my $file = $path; 52 | chop($file) if ($file ne '/'); 53 | my $outFile = $outFilePrefix . $file . '.md'; 54 | my $outDirectory = $outFile =~ s|/[^/]+$||gr; 55 | 56 | unless(-e $outDirectory or make_path($outDirectory)) { 57 | die "Unable to create $outDirectory\n"; 58 | } 59 | 60 | close $ofh if defined $ofh; 61 | open $ofh, '>', $outFile or die "Can't open $outFile for writing: $!"; 62 | 63 | $thisPath = $path; 64 | if ($level eq '# ') { 65 | $thisPart = $title; 66 | $thisChapter = ''; 67 | $thisSection = ''; 68 | $thisPartNo++; 69 | $thisChapterNo = 0; 70 | $idx = $thisPartNo; 71 | } elsif ($level eq '## ') { 72 | $thisChapter = $title; 73 | $thisSection = ''; 74 | $thisChapterNo++; 75 | $thisSectionNo = 0; 76 | $idx = $thisPartNo . ',' . $thisChapterNo; 77 | } elsif ($level eq '### ') { 78 | $thisSection = $title; 79 | $thisSectionNo++; 80 | $idx = $thisPartNo . ',' . $thisChapterNo . ',' .
$thisSectionNo; 81 | } else { 82 | die "Internal error: can't determine heading level."; 83 | } 84 | 85 | my $titles = '"'.join('","', grep($_, ($thisPart, $thisChapter, $thisSection))).'"'; 86 | print $ofh 87 | "---", 88 | "hide: $hide", 89 | "path: $path", 90 | "titles: [$titles]", 91 | "index: [$idx]", 92 | "sequence: $sequence", 93 | "---"; 94 | 95 | # Breadcrumb markup inserted above the heading (the tag and class names below are assumed) 96 | if ($thisSection ne '') { 97 | print $ofh 98 | "\n<div class=\"crumbs\">\n", 99 | "<span class=\"part-title\">$thisPart</span><br/>\n", 100 | "<span class=\"chapter-title\">$thisChapter</span>\n", 101 | "\n</div>\n", 102 | "### $thisSection"; 103 | } elsif ($thisChapter ne '') { 104 | print $ofh 105 | "\n<div class=\"crumbs\">\n", 106 | "<span class=\"part-title\">$thisPart</span>\n", 107 | "\n</div>\n", 108 | "## $thisChapter"; 109 | } else { 110 | print $ofh "# $thisPart"; 111 | } 112 | 113 | } else { 114 | 115 | die "Error: first line of input must be a new page marker" if not defined $ofh; 116 | 117 | print $ofh $_; 118 | } 119 | } 120 | 121 | close $ofh if defined $ofh; 122 | -------------------------------------------------------------------------------- /bin/pdf/README.md: -------------------------------------------------------------------------------- 1 | # Whole book PDF 2 | 3 | This is an experimental pipeline for building the whole text as a single PDF file using `pandoc`. You don't need any of the usual installation to do this (no Gatsby, node, npm, etc.). 4 | 5 | ## Pre-requisites 6 | 7 | Get the latest `pandoc` from the [pandoc repo](https://github.com/jgm/pandoc/releases/) and install it. It needs to be a fairly up-to-date one. (Caveat: Pandoc 3.1.6.2 fails due to [this issue](https://github.com/jgm/pandoc/issues/9014).) 8 | 9 | ``` 10 | > pandoc --version 11 | pandoc 3.7.0.2 12 | ... 13 | ``` 14 | 15 | Install LaTeX and the SVG library. The `xetex` version of LaTeX is not mandatory, but seems to be more reliable for me. 16 | 17 | ``` 18 | sudo apt install librsvg2-bin texlive-xetex 19 | ``` 20 | 21 | The build uses the "DejaVu Sans Mono" font for code display. If you are on Linux you should have this already. If you are not on Linux, I can't help you. Changing it to a different mono-spaced font in _make\_pdf_ shouldn't break anything too badly. 22 | 23 | ## Building the PDF 24 | 25 | Run the script at _bin/pdf/make\_pdf_. It may or may not work, and the results may or may not delight. 26 | 27 | ``` 28 | bin/pdf/make_pdf src/book.md 29 | ``` 30 | 31 | The generated PDF will be written to your current directory as _book.pdf_. 32 | 33 | ## Notes 34 | 35 | `logging.lua` is from the MIT licensed https://github.com/wlupton/pandoc-lua-logging 36 | 37 | ### Configuration 38 | 39 | - To get two-sided output, omit the `--variable classoption:oneside` parameter. 40 | 41 | ### Significant known issues 42 | 43 | - Intermittent: sometimes pages with diagrams overflow off the bottom. 44 | - May be fixed in newer versions of xetex; seems ok in XeTeX 3.141592653-2.6-0.999993 45 | - Footnotes can vanish. 46 | - This happens when the footnote reference is inside block-quoted text. 47 | - It looks to be due to the code that adds a side-bar using `framed`. 48 | - Indeed, the [framed manual](https://anorien.csc.warwick.ac.uk/mirrors/CTAN/macros/latex/contrib/framed/framed.pdf) says that footnotes are not supported. 49 | - Using `mdframed` or other solutions places the footnote with the quote rather than at the bottom of the page. 50 | - As a workaround, I've moved the footnote, but it would be good to fix this somehow. Looks difficult, though. 51 | 52 | ### Pending improvements 53 | 54 | - Keep all headings with their next lines on page breaks. 55 | - Mostly working, but headings before summary boxes are a problem. 56 | - `<details>` blocks need some styling.
57 | - Heading numbers? 58 | -------------------------------------------------------------------------------- /bin/pdf/filters/codeblocks.lua: -------------------------------------------------------------------------------- 1 | -- Codeblocks filter 2 | -- 3 | -- Pandoc's `--listings` mode is good, but also gets applied to inline code, which causes 4 | -- problems. So we don't do that, but instead process code blocks here to insert the 5 | -- `lstlisting` environment. 6 | 7 | local langs = { 8 | ['python'] = 'Python', 9 | ['bash'] = 'bash', 10 | ['solidity'] = 'Solidity', 11 | ['json'] = 'JSON', 12 | ['code'] = '', 13 | ['none'] = '' 14 | } 15 | 16 | function CodeBlock (block) 17 | local lang_string = '[]' 18 | local lang_source = block.classes[1] 19 | local lang = langs[lang_source] 20 | if lang ~= nil then 21 | if (#lang > 0) then 22 | lang_string = '[language=' .. lang .. ']' 23 | end 24 | else 25 | if lang_source then 26 | print('Codeblocks filter: language ' .. lang_source .. ' is unknown') 27 | else 28 | print('Codeblocks filter: language not specified') 29 | end 30 | end 31 | return pandoc.RawBlock('latex', 32 | "\\begin{lstlisting}" .. lang_string .. "\n" 33 | .. block.text .. "\n" 34 | .. "\\end{lstlisting}") 35 | end 36 | -------------------------------------------------------------------------------- /bin/pdf/filters/figures.lua: -------------------------------------------------------------------------------- 1 | local logging = require 'logging' 2 | 3 | -- Figures filter 4 | -- 5 | -- Finds figures in the document and 6 | -- (1) Moves their caption into the right place (by default pandoc uses the SVG's title 7 | -- for the caption) 8 | -- (2) Applies a width to the image, extracted from the figure tag 9 | 10 | -- Return true if a block matches a HTML tag with string `tag` 11 | local function is_tag(block, tag) 12 | return block and block.t == 'RawBlock' 13 | and block.format == 'html' 14 | and block.text:sub(1, #tag) == tag 15 | end 16 | 17 | -- True if the block matches the HTML tag `<figure` 18 | -- (the helpers below are reconstructions, inferred from their uses later in this filter) 19 | local function is_figure_tag(block) 20 | return is_tag(block, '<figure') 21 | end 22 | 23 | -- True if the block is a Para 24 | local function is_para(block) 25 | return block and block.t == 'Para' 26 | end 27 | 28 | -- True if the block is a pandoc Figure 29 | local function is_figure(block) 30 | return block and block.t == 'Figure' 31 | end 32 | 33 | -- True if the block is a Div wrapping a Figure 34 | local function is_div_figure(block) 35 | return block and block.t == 'Div' 36 | and block.content[1] and block.content[1].t == 'Figure' 37 | end 38 | 39 | -- Find index of the figure following index i 40 | local function find_figure(blocks, i) 41 | local j = i + 1 42 | while blocks[j] do 43 | if is_figure(blocks[j]) or is_div_figure(blocks[j]) then 44 | return j 45 | end 46 | j = j + 1 47 | end 48 | assert(false, 'Figures filter: Figure not found\n') 49 | end 50 | 51 | -- Find the caption: a Para between '<figcaption>' and '</figcaption>' 52 | local function find_caption(blocks, i) 53 | local j = i + 1 54 | while blocks[j] do 55 | if is_tag(blocks[j], '<figcaption') and is_para(blocks[j+1]) then
56 | if not is_tag(blocks[j+2], '</figcaption') then
57 | io.stderr:write('Figures filter: Multi-paragraph caption found\n') 58 | end 59 | return j + 1 60 | end 61 | j = j + 1 62 | end 63 | logging.temp('block', blocks[i-1]) 64 | assert(false, 'Figures filter: Caption not found\n') 65 | end 66 | 67 | -- Find index of the paragraph containing the closing </figure> tag 68 | local function find_end_figure_tag(blocks, i) 69 | local j = i 70 | while blocks[j] do 71 | if is_tag(blocks[j], '</figure') then 72 | return j 73 | end 74 | j = j + 1 75 | end 76 | assert(false, 'Figures filter: Closing tag not found\n') 77 | end 78 | 79 | -- Get the image width from the starting figure html block 80 | local function get_figure_width(block) 81 | assert(is_figure_tag(block), "Figures filter: called get_figure_width on non figure tag") 82 | return block.text:match('width:[^%d]*(%d+%%)') 83 | end 84 | 85 | -- The figure might be wrapped in a div, so extract it as necessary 86 | local function get_src_from_figure(block) 87 | if is_figure(block) then 88 | return block.content[1].content[1].src 89 | end 90 | if is_div_figure(block) then 91 | return block.content[1].content[1].content[1].src 92 | end 93 | logging.temp('block', block) 94 | assert(false, "Figures filter: unable to get figure from this block") 95 | end 96 | 97 | -- Create a new figure with the given parameters 98 | local function make_figure(src, caption, attr) 99 | return pandoc.Figure(pandoc.Image(caption, src, '', attr), {caption, ''} , attr) 100 | end 101 | 102 | function Blocks (blocks) 103 | 104 | --[[ 105 | print("***Input***") 106 | logging.temp('Figures', blocks) 107 | --]] 108 | 109 | -- Go from end to start to avoid problems with shifting indices. 110 | for i = #blocks-1, 1, -1 do 111 | 112 | if is_figure_tag(blocks[i]) then 113 | 114 | local figure_width = get_figure_width(blocks[i]) 115 | local figure_close = find_end_figure_tag(blocks, i) 116 | 117 | local figure_idx = find_figure(blocks, i) 118 | local caption_idx = find_caption(blocks, i) 119 | local caption = blocks[caption_idx].content 120 | local src = get_src_from_figure(blocks[figure_idx]) 121 | 122 | -- Sanity checks 123 | if caption_idx <= figure_idx then 124 | logging.temp('block', blocks[i-1]) 125 | assert(false, "Figures filter: Caption index (" .. caption_idx 126 | .. ") is not greater than figure index (" .. figure_idx .. ")") 127 | end 128 | if caption_idx - figure_idx > 5 then 129 | logging.temp('block', blocks[i-1]) 130 | assert(false, "Figures filter: Caption index (" .. caption_idx 131 | .. ") is much greater than figure index (" .. figure_idx .. ")") 132 | end 133 | 134 | -- Remove existing figure and captions (last to first) 135 | blocks:remove(figure_close) -- The closing tag 136 | blocks:remove(caption_idx + 1) -- The closing caption tag (hopefully) 137 | blocks:remove(caption_idx + 0) -- The caption text 138 | blocks:remove(caption_idx - 1) -- The opening caption tag 139 | blocks:remove(figure_idx) -- The figure para 140 | blocks:remove(i) -- The opening <figure> tag
141 | 142 | -- Insert new figure 143 | local attr = figure_width 144 | and pandoc.Attr('', {}, {width = figure_width}) 145 | or pandoc.Attr ('', {}, {}) 146 | blocks:insert(i, make_figure(src, caption, attr)) 147 | 148 | end 149 | end 150 | 151 | --[[ 152 | -- print("***Output***") 153 | -- logging.temp('Figures', blocks) 154 | --]] 155 | 156 | return blocks 157 | 158 | end
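-- For reference, an illustrative source fragment that this filter rewrites
-- (the class and style details are assumed):
--
--   <figure class="diagram" style="width: 80%">
--
--   ![Committees](images/diagrams/committees-all.svg)
--
--   <figcaption>
--
--   Caption text for the diagram.
--
--   </figcaption>
--   </figure>
--
-- The result is a single pandoc Figure whose caption is the <figcaption>
-- text and whose width attribute comes from the <figure> tag's style.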
-------------------------------------------------------------------------------- /bin/pdf/filters/linebreaks.lua: -------------------------------------------------------------------------------- 1 | -- Linebreaks filter 2 | -- 3 | -- Converts 4 | -- - html <br> into pandoc linebreaks, and 5 | -- - html <wbr> into latex suggested linebreaks. 6 | 7 | function RawInline (el) 8 | if el.format == 'html' then 9 | if el.text == '<br>' then
10 | return pandoc.LineBreak() 11 | end 12 | if el.text == '<wbr>' then 13 | return pandoc.RawInline('latex', '\\linebreak[0]') 14 | end 15 | end 16 | return el 17 | end 18 | -------------------------------------------------------------------------------- /bin/pdf/filters/links.lua: -------------------------------------------------------------------------------- 1 | local logging = require 'logging' 2 | 3 | -- Links filter 4 | -- 5 | -- Normalises all the inter- and intra-page links and targets within the document 6 | -- so it all works when it's made into a single page. Also checks for duplicate anchors 7 | -- and dangling links. 8 | -- 9 | -- For best results run this one after the Pagebreaks filter. 10 | 11 | local website = 'https://eth2book.info' 12 | 13 | local links, targets = {}, {} 14 | local skip = { ["/contents/"] = true, ["/annotated-spec/"] = true } 15 | 16 | -- Check that there are no duplicate targets 17 | local function check_target(id) 18 | if targets[id] then 19 | print('Links filter: target "' .. id .. '" is a duplicate') 20 | end 21 | targets[id] = true 22 | end 23 | 24 | -- Check that every link has a target 25 | local function check_links() 26 | for link, v in pairs(links) do 27 | if not skip['/' .. link .. '/'] and not targets[link] then 28 | print('Links filter: link "' .. link .. '" points to nothing') 29 | end 30 | end 31 | end 32 | 33 | -- Normalise anchors: 34 | -- (1) Remove any `-` or `-digit` that may have been introduced by pandoc to deduplicate headers 35 | -- (2) Convert all `_` to `-` 36 | local function norm(id) 37 | return id:gsub('%-+%d*$', ''):gsub('_', '-') 38 | end 39 | 40 | local function is_header(block) 41 | return block.t == 'Header' 42 | end 43 | 44 | local function is_anchor_inline(inline) 45 | return inline.t == 'RawInline' 46 | and inline.format == 'html' 47 | and inline.text:match('^<a id="[^"]*"></a>$')
48 | end 49 | 50 | local function is_anchor_block(block) 51 | return (block.t == 'Para' or block.t == 'Plain') and is_anchor_inline(block.content[1]) 52 | end 53 | 54 | local function get_page(header) 55 | local header_last = header.content[#header.content] 56 | if header_last.t == 'RawInline' then 57 | return norm(header_last.text:match('<!%-%- /(.-)/ %-%->'):gsub('/', '-')) -- page-path comment (pattern reconstructed) 58 | end 59 | return nil 60 | end 61 | 62 | local function get_target(text) 63 | return norm(text:match('^<a id="(.-)"></a>$')) 64 | end 65 | 66 | local function get_header_id(header) 67 | return norm(header.attr.identifier) 68 | end 69 | 70 | local function update_links(block, page) 71 | return block:walk { 72 | Link = function(link) 73 | local s, t = link.target, '' 74 | if s:match('^/%.%.') then 75 | -- Link to a different spec version 76 | t = website .. s:sub(4) 77 | elseif s:match('^/') then 78 | -- Link to another internal page 79 | t = '#' .. norm(s:sub(2):gsub('[/#]', '-'):gsub('%-%-', '-')) 80 | links[t:sub(2)] = true 81 | elseif s:match('^#') then 82 | -- Link within a page 83 | t = '#' .. page .. '-' .. norm(s:sub(2)) 84 | links[t:sub(2)] = true 85 | else 86 | -- External link 87 | t = s 88 | end 89 | if not skip[s] then 90 | return pandoc.Link(link.content, t, link.title, link.attr) 91 | end 92 | end 93 | } 94 | end 95 | 96 | local function new_anchor(id) 97 | check_target(id) 98 | return pandoc.Para({pandoc.Span({}, pandoc.Attr(id, {}, {}))}) 99 | end 100 | 101 | function Pandoc (doc) 102 | 103 | local blocks = doc.blocks 104 | 105 | --[[ 106 | print("***Input***") 107 | logging.temp('Links', blocks) 108 | --]] 109 | 110 | local page = '' 111 | local idx = 1 112 | for i = 1, #blocks do 113 | 114 | assert(idx <= #blocks, "Idx longer than blocks: " .. idx .. " " .. #blocks) 115 | local block = blocks[idx] 116 | 117 | if is_header(block) then 118 | 119 | newpage = get_page(block) 120 | if newpage ~= nil then 121 | page = newpage 122 | 123 | -- Insert anchor for each new page 124 | blocks:insert(idx, new_anchor(page)) 125 | idx = idx + 1 126 | end 127 | 128 | -- Update the header's id to include the page 129 | local id = page .. '-' .. get_header_id(block) 130 | check_target(id) 131 | block.attr.identifier = id 132 | 133 | elseif is_anchor_block(block) then 134 | 135 | -- There may be multiple anchors within the same Para block 136 | for j = 1, #block.content do 137 | local inline = block.content[j] 138 | if is_anchor_inline(inline) then 139 | blocks:insert(idx, new_anchor(page .. '-' ..
get_target(inline.text))) 140 | idx = idx + 1 141 | end 142 | end 143 | 144 | else 145 | 146 | blocks[idx] = update_links(block, page) 147 | 148 | end 149 | 150 | idx = idx + 1 151 | 152 | end 153 | 154 | check_links() 155 | 156 | --[[ 157 | print("***Output***") 158 | logging.temp('Links', blocks) 159 | --]] 160 | 161 | return pandoc.Pandoc(blocks, doc.meta) 162 | 163 | end 164 | -------------------------------------------------------------------------------- /bin/pdf/filters/logging.lua: -------------------------------------------------------------------------------- 1 | --[[ 2 | logging.lua: pandoc-aware logging functions (can also be used standalone) 3 | Copyright: (c) 2022 William Lupton 4 | License: MIT - see LICENSE file for details 5 | Usage: See README.md for details 6 | ]] 7 | 8 | -- if running standalone, create a 'pandoc' global 9 | if not pandoc then 10 | _G.pandoc = {utils = {}} 11 | end 12 | 13 | -- if there's no pandoc.utils, create a local one 14 | if not pcall(require, 'pandoc.utils') then 15 | pandoc.utils = {} 16 | end 17 | 18 | -- if there's no pandoc.utils.type, create a local one 19 | if not pandoc.utils.type then 20 | pandoc.utils.type = function(value) 21 | local typ = type(value) 22 | if not ({table=1, userdata=1})[typ] then 23 | -- unchanged 24 | elseif value.__name then 25 | typ = value.__name 26 | elseif value.tag and value.t then 27 | typ = value.tag 28 | if typ:match('^Meta.') then 29 | typ = typ:sub(5) 30 | end 31 | if typ == 'Map' then 32 | typ = 'table' 33 | end 34 | end 35 | return typ 36 | end 37 | end 38 | 39 | -- namespace 40 | local logging = {} 41 | 42 | -- helper function to return a sensible typename 43 | logging.type = function(value) 44 | -- this can return 'Inlines', 'Blocks', 'Inline', 'Block' etc., or 45 | -- anything that built-in type() can return, namely 'nil', 'number', 46 | -- 'string', 'boolean', 'table', 'function', 'thread', or 'userdata' 47 | local typ = pandoc.utils.type(value) 48 | 49 | -- it seems that it can also return strings like 'pandoc Row'; replace 50 | -- spaces with periods 51 | -- XXX I'm not sure that this is done consistently, e.g. I don't think 52 | -- it's done for pandoc.Attr or pandoc.List? 53 | typ = typ:gsub(' ', '.') 54 | 55 | -- map Inline and Block to the tag name 56 | -- XXX I guess it's intentional that it doesn't already do this? 57 | return ({Inline=1, Block=1})[typ] and value.tag or typ 58 | end 59 | 60 | -- derived from https://www.lua.org/pil/19.3.html pairsByKeys() 61 | logging.spairs = function(list, comp) 62 | local keys = {} 63 | for key, _ in pairs(list) do 64 | table.insert(keys, tostring(key)) 65 | end 66 | table.sort(keys, comp) 67 | local i = 0 68 | local iter = function() 69 | i = i + 1 70 | return keys[i] and keys[i], list[keys[i]] or nil 71 | end 72 | return iter 73 | end 74 | 75 | -- helper function to dump a value with a prefix (recursive) 76 | -- XXX should detect repetition/recursion 77 | -- XXX would like maxlen logic to apply at all levels? 
but not trivial 78 | local function dump_(prefix, value, maxlen, level, add) 79 | local buffer = {} 80 | if prefix == nil then prefix = '' end 81 | if level == nil then level = 0 end 82 | if add == nil then add = function(item) table.insert(buffer, item) end end 83 | local indent = maxlen and '' or (' '):rep(level) 84 | 85 | -- get typename, mapping to pandoc tag names where possible 86 | local typename = logging.type(value) 87 | 88 | -- don't explicitly indicate 'obvious' typenames 89 | local typ = (({boolean=1, number=1, string=1, table=1, userdata=1}) 90 | [typename] and '' or typename) 91 | 92 | -- light userdata is just a pointer (can't iterate over it) 93 | -- XXX is there a better way of checking for light userdata? 94 | if type(value) == 'userdata' and not pcall(pairs(value)) then 95 | value = tostring(value):gsub('userdata:%s*', '') 96 | 97 | -- modify the value heuristically 98 | elseif ({table=1, userdata=1})[type(value)] then 99 | local valueCopy, numKeys, lastKey = {}, 0, nil 100 | for key, val in pairs(value) do 101 | -- pandoc >= 2.15 includes 'tag', nil values and functions 102 | if key ~= 'tag' and val and type(val) ~= 'function' then 103 | valueCopy[key] = val 104 | numKeys = numKeys + 1 105 | lastKey = key 106 | end 107 | end 108 | if numKeys == 0 then 109 | -- this allows empty tables to be formatted on a single line 110 | value = typename == 'Space' and '' or '{}' 111 | elseif numKeys == 1 and lastKey == 'text' then 112 | -- this allows text-only types to be formatted on a single line 113 | typ = typename 114 | value = value[lastKey] 115 | typename = 'string' 116 | else 117 | value = valueCopy 118 | end 119 | end 120 | 121 | -- output the possibly-modified value 122 | local presep = #prefix > 0 and ' ' or '' 123 | local typsep = #typ > 0 and ' ' or '' 124 | local valtyp = type(value) 125 | if valtyp == 'nil' then 126 | add('nil') 127 | elseif ({boolean=1, number=1, string=1})[valtyp] then 128 | typsep = #typ > 0 and valtyp == 'string' and #value > 0 and ' ' or '' 129 | -- don't use the %q format specifier; doesn't work with multi-bytes 130 | local quo = typename == 'string' and '"' or '' 131 | add(string.format('%s%s%s%s%s%s%s%s', indent, prefix, presep, typ, 132 | typsep, quo, value, quo)) 133 | -- light userdata is just a pointer (can't iterate over it) 134 | -- XXX is there a better way of checking for light userdata? 135 | elseif valtyp == 'userdata' and not pcall(pairs(value)) then 136 | add(string.format('%s%s%s%s %s', indent, prefix, presep, typ, 137 | tostring(value):gsub('userdata:%s*', ''))) 138 | elseif ({table=1, userdata=1})[valtyp] then 139 | add(string.format('%s%s%s%s%s{', indent, prefix, presep, typ, typsep)) 140 | -- Attr and Attr.attributes have both numeric and string keys, so 141 | -- ignore the numeric ones 142 | -- XXX this is no longer the case for pandoc >= 2.15, so could remove 143 | -- the special case? 
144 | local first = true 145 | if prefix ~= 'attributes:' and typ ~= 'Attr' then 146 | for i, val in ipairs(value) do 147 | local pre = maxlen and not first and ', ' or '' 148 | local text = dump_(string.format('%s[%s]', pre, i), val, 149 | maxlen, level + 1, add) 150 | first = false 151 | end 152 | end 153 | -- report keys in alphabetical order to ensure repeatability 154 | for key, val in logging.spairs(value) do 155 | -- pandoc >= 2.15 includes 'tag' 156 | if not tonumber(key) and key ~= 'tag' then 157 | local pre = maxlen and not first and ', ' or '' 158 | local text = dump_(string.format('%s%s:', pre, key), val, 159 | maxlen, level + 1, add) 160 | end 161 | first = false 162 | end 163 | add(string.format('%s}', indent)) 164 | end 165 | return table.concat(buffer, maxlen and '' or '\n') 166 | end 167 | 168 | logging.dump = function(value, maxlen) 169 | if maxlen == nil then maxlen = 70 end 170 | local text = dump_(nil, value, maxlen) 171 | if #text > maxlen then 172 | text = dump_(nil, value, nil) 173 | end 174 | return text 175 | end 176 | 177 | logging.output = function(...) 178 | local need_newline = false 179 | for i, item in ipairs({...}) do 180 | -- XXX space logic could be cleverer, e.g. no space after newline 181 | local maybe_space = i > 1 and ' ' or '' 182 | local text = ({table=1, userdata=1})[type(item)] and 183 | logging.dump(item) or tostring(item) 184 | io.stderr:write(maybe_space, text) 185 | need_newline = text:sub(-1) ~= '\n' 186 | end 187 | if need_newline then 188 | io.stderr:write('\n') 189 | end 190 | end 191 | 192 | -- basic logging support (-1=errors, 0=warnings, 1=info, 2=debug, 3=debug2) 193 | -- XXX should support string levels? 194 | logging.loglevel = 0 195 | 196 | -- set log level and return the previous level 197 | logging.setloglevel = function(loglevel) 198 | local oldlevel = logging.loglevel 199 | logging.loglevel = loglevel 200 | return oldlevel 201 | end 202 | 203 | -- verbosity default is WARNING; --quiet -> ERROR and --verbose -> INFO 204 | -- --trace sets TRACE or DEBUG (depending on --verbose) 205 | if type(PANDOC_STATE) == 'nil' then 206 | -- use the default level 207 | elseif PANDOC_STATE.trace then 208 | logging.loglevel = PANDOC_STATE.verbosity == 'INFO' and 3 or 2 209 | elseif PANDOC_STATE.verbosity == 'INFO' then 210 | logging.loglevel = 1 211 | elseif PANDOC_STATE.verbosity == 'WARNING' then 212 | logging.loglevel = 0 213 | elseif PANDOC_STATE.verbosity == 'ERROR' then 214 | logging.loglevel = -1 215 | end 216 | 217 | logging.error = function(...) 218 | if logging.loglevel >= -1 then 219 | logging.output('(E)', ...) 220 | end 221 | end 222 | 223 | logging.warning = function(...) 224 | if logging.loglevel >= 0 then 225 | logging.output('(W)', ...) 226 | end 227 | end 228 | 229 | logging.info = function(...) 230 | if logging.loglevel >= 1 then 231 | logging.output('(I)', ...) 232 | end 233 | end 234 | 235 | logging.debug = function(...) 236 | if logging.loglevel >= 2 then 237 | logging.output('(D)', ...) 238 | end 239 | end 240 | 241 | logging.debug2 = function(...) 242 | if logging.loglevel >= 3 then 243 | logging.warning('debug2() is deprecated; use trace()') 244 | logging.output('(D2)', ...) 245 | end 246 | end 247 | 248 | logging.trace = function(...) 249 | if logging.loglevel >= 3 then 250 | logging.output('(T)', ...) 251 | end 252 | end 253 | 254 | -- for temporary unconditional debug output 255 | logging.temp = function(...) 256 | logging.output('(#)', ...) 
257 | end 258 | 259 | return logging 260 | -------------------------------------------------------------------------------- /bin/pdf/filters/pagebreaks.lua: -------------------------------------------------------------------------------- 1 | local logging = require 'logging' 2 | 3 | -- Pagebreaks filter 4 | -- 5 | -- Inserts a LaTeX `\newpage` before certain headers. 6 | -- For best results run this one before the Links filter. 7 | 8 | -- New pages are signalled in the markdown source by appending an HTML comment to a heading. 9 | local function is_new_page(header) 10 | if (header.content[#header.content].text) then 11 | return string.match(header.content[#header.content].text, '') 12 | end 13 | print('Pagebreaks filter: unable to parse header') 14 | logging.temp('Header', header) 15 | end 16 | 17 | local function should_insert_break(header) 18 | return is_new_page(header) and header.level <= 2 19 | end 20 | 21 | function Header (header) 22 | 23 | if should_insert_break(header) then 24 | return {pandoc.RawBlock('latex', '\\newpage{}'), header} 25 | else 26 | return header 27 | end 28 | 29 | end 30 | -------------------------------------------------------------------------------- /bin/pdf/filters/printast.lua: -------------------------------------------------------------------------------- 1 | local logging = require 'logging' 2 | 3 | function Pandoc (doc) 4 | logging.temp('Doc', doc) 5 | end 6 | -------------------------------------------------------------------------------- /bin/pdf/filters/summaries.lua: -------------------------------------------------------------------------------- 1 | -- Summaries filter 2 | -- 3 | -- Find summary boxes in the text and wrap them in a `summarybox` environment. 4 | 5 | local function is_summary(div) 6 | for _, class in pairs(div.classes) do 7 | if class == 'summary' then 8 | return true 9 | end 10 | end 11 | return false 12 | end 13 | 14 | function Div (div) 15 | 16 | if is_summary(div) then 17 | return { 18 | pandoc.RawInline('latex', '\\begin{summarybox}'), 19 | div, 20 | pandoc.RawInline('latex', '\\end{summarybox}') 21 | } 22 | else 23 | return div 24 | end 25 | 26 | end 27 | -------------------------------------------------------------------------------- /bin/pdf/inc/header.tex: -------------------------------------------------------------------------------- 1 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 2 | % Get environment variables 3 | % 4 | % Based on https://tex.stackexchange.com/a/62032 5 | 6 | \usepackage{xparse} 7 | 8 | \ExplSyntaxOn 9 | 10 | \NewDocumentCommand{\getenv}{om} 11 | { 12 | \sys_get_shell:nnN { kpsewhich ~ --var-value ~ #2 } { } \l_tmpa_tl 13 | \tl_trim_spaces:N \l_tmpa_tl 14 | \IfNoValueTF { #1 } 15 | { 16 | \tl_use:N \l_tmpa_tl 17 | } 18 | { 19 | \tl_set_eq:NN #1 \l_tmpa_tl 20 | } 21 | } 22 | 23 | \ExplSyntaxOff 24 | 25 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 26 | % Environment for chapter summary boxes 27 | % 28 | % mdframed docs are here: 29 | % https://mirror.apps.cam.ac.uk/pub/tex-archive/macros/latex/contrib/mdframed/mdframed.pdf 30 | 31 | \usepackage[framemethod=tikz]{mdframed} 32 | 33 | \mdfdefinestyle{summarybox}{% 34 | leftmargin=1.27cm,rightmargin=1.27cm, 35 | innertopmargin=4ex,innerbottommargin=4ex, 36 | innerleftmargin=1ex,innerrightmargin=2em, 37 | linecolor=white,linewidth=1.3pt, 38 | outerlinecolor=black,outerlinewidth=0.5pt, 39 | innerlinecolor=black,innerlinewidth=0.5pt 40 | } 41 | 42 | 
\newenvironment{summarybox}{\vspace{5ex}\begin{mdframed}[style=summarybox]}{\end{mdframed}\vspace{3ex}} 43 | 44 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 45 | % Add a sidebar to quoted text 46 | 47 | \usepackage{framed} 48 | 49 | \newlength{\leftbarwidth} 50 | \setlength{\leftbarwidth}{3pt} 51 | \newlength{\leftbarsep} 52 | \setlength{\leftbarsep}{12pt} % Text indent 53 | \setlength\OuterFrameSep{0ex} 54 | 55 | \newcommand*{\leftbarcolorcmd}{\color{leftbarcolor}}% 56 | \colorlet{leftbarcolor}{lightgray} 57 | 58 | \renewenvironment{leftbar}{% 59 | \def\FrameCommand{{\leftbarcolorcmd{\vrule width \leftbarwidth\relax\hspace {\leftbarsep}}}}% 60 | \MakeFramed {\advance \hsize -\width \FrameRestore }% 61 | }{% 62 | \endMakeFramed 63 | } 64 | 65 | \renewenvironment{quote}{\vspace{0.5\baselineskip}\begin{leftbar}}{\end{leftbar}} 66 | 67 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 68 | % We don't like Pandoc's \tightlist in itemize environments 69 | 70 | \def\tightlist{} 71 | 72 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 73 | % Fiddle with footnote presentation 74 | 75 | \usepackage[hang,flushmargin]{footmisc} 76 | \footnotemargin 0em 77 | \renewcommand{\hangfootparindent}{0em} 78 | \setlength{\footnotesep}{12pt} 79 | 80 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 81 | % Allow long constant names to wrap at underscore characters 82 | % 83 | % Documentation at 84 | % https://anorien.csc.warwick.ac.uk/mirrors/CTAN/macros/latex/contrib/underscore/underscore.pdf 85 | 86 | \usepackage[strings,nohyphen]{underscore} 87 | 88 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 89 | % Formatting for code blocks 90 | % 91 | % See https://www.overleaf.com/learn/latex/Code_listing 92 | 93 | % We don't use Pandoc's `--listings` flag as it also mutates inline code. 
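% As an illustrative sketch only (these environments are emitted by the
% codeblocks.lua filter during the pandoc run, not written by hand, and the
% exact options it passes are assumed here), code blocks arrive as standard
% listings environments that pick up the style defined below, e.g.:
%
%   \begin{lstlisting}[language=Solidity]
%   contract Example { uint256 public value; }
%   \end{lstlisting}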
94 | \usepackage{listings} 95 | 96 | \definecolor{codecomment}{rgb}{0,0.4,0} 97 | \definecolor{codekeyword}{rgb}{0.6,0,0.8} 98 | \definecolor{codetype}{rgb}{0,0.5,0.5} 99 | \definecolor{codestring}{rgb}{0,0,0.8} 100 | 101 | \lstdefinestyle{mystyle}{ 102 | basicstyle=\ttfamily\small, 103 | commentstyle=\color{codecomment}, 104 | keywordstyle=\color{codekeyword}, 105 | stringstyle=\color{codestring}, 106 | breaklines=true, 107 | breakatwhitespace=true, 108 | keepspaces=true, 109 | showspaces=false, 110 | showstringspaces=false, 111 | showtabs=false, 112 | breakindent=24em, 113 | % prebreak=\raisebox{0ex}[0ex][0ex]{\ensuremath{\hookleftarrow}}, 114 | postbreak=\mbox{\ensuremath{\hookrightarrow}\space}, 115 | } 116 | 117 | \lstset{style=mystyle} 118 | 119 | % Import highlighting for specific language code blocks 120 | \input{solidity-highlighting.tex} 121 | \input{json-highlighting.tex} 122 | 123 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 124 | % Tables 125 | 126 | % Aligned to left margin 127 | \makeatletter 128 | \@ifpackageloaded{longtable}{\setlength{\LTleft}{0pt}}{} 129 | \makeatother 130 | 131 | % Increase row spacing 132 | \renewcommand*{\arraystretch}{1.2} 133 | 134 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 135 | % Figure captions 136 | % 137 | % Docs: https://mirror.ox.ac.uk/sites/ctan.org/macros/latex/contrib/caption/caption.pdf 138 | 139 | \usepackage{caption} 140 | \captionsetup{ 141 | labelformat=empty, 142 | width=1.0\linewidth, 143 | textfont=it, 144 | } 145 | 146 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 147 | % Avoid some headings being orphaned at the bottom of pages 148 | 149 | \ifx\paragraph\undefined\else 150 | \let\prevparagraph\paragraph 151 | \renewcommand{\paragraph}[1]{\prevparagraph{#1}\nopagebreak} 152 | \fi 153 | \ifx\subparagraph\undefined\else 154 | \let\prevsubparagraph\subparagraph 155 | \renewcommand{\subparagraph}[1]{\prevsubparagraph{#1}\nopagebreak} 156 | \fi 157 | -------------------------------------------------------------------------------- /bin/pdf/inc/json-highlighting.tex: -------------------------------------------------------------------------------- 1 | % JSON highlighting 2 | % 3 | % Based on the following: 4 | % https://tex.stackexchange.com/a/433961 5 | % Copyright edi - CC BY-SA 4.0 (https://creativecommons.org/licenses/by-sa/4.0/) 6 | 7 | \colorlet{numb}{magenta!60!black} 8 | 9 | \lstdefinelanguage{JSON}{ 10 | commentstyle=\color{codecomment}, 11 | stringstyle=\color{codestring}, 12 | showstringspaces=false, 13 | breaklines=true, 14 | breakatwhitespace=false, 15 | stepnumber=1, 16 | string=[s]{"}{"}, 17 | comment=[l]{:\ "}, 18 | morecomment=[l]{:"}, 19 | literate= 20 | *{0}{{{\color{numb}0}}}{1} 21 | {1}{{{\color{numb}1}}}{1} 22 | {2}{{{\color{numb}2}}}{1} 23 | {3}{{{\color{numb}3}}}{1} 24 | {4}{{{\color{numb}4}}}{1} 25 | {5}{{{\color{numb}5}}}{1} 26 | {6}{{{\color{numb}6}}}{1} 27 | {7}{{{\color{numb}7}}}{1} 28 | {8}{{{\color{numb}8}}}{1} 29 | {9}{{{\color{numb}9}}}{1} 30 | } 31 | -------------------------------------------------------------------------------- /bin/pdf/inc/solidity-highlighting.tex: -------------------------------------------------------------------------------- 1 | % Solidity highlighting 2 | % 3 | % Based on the following: 4 | % https://github.com/s-tikhomirov/solidity-latex-highlighting/ 5 | % Copyright 2017 Sergei Tikhomirov, MIT License 6 | 7 | \lstdefinelanguage{Solidity}{ 8 | keywords=[1]{anonymous, assembly, assert, 
balance, break, call, callcode, case, catch, class, constant, continue, constructor, contract, debugger, default, delegatecall, delete, do, else, emit, event, experimental, export, external, false, finally, for, function, gas, if, implements, import, in, indexed, instanceof, interface, internal, is, length, library, log0, log1, log2, log3, log4, memory, modifier, new, payable, pragma, private, protected, public, pure, push, require, return, returns, revert, selfdestruct, send, solidity, storage, struct, suicide, super, switch, then, this, throw, transfer, true, try, typeof, using, value, view, while, with, addmod, ecrecover, keccak256, mulmod, ripemd160, sha256, sha3}, % generic keywords including crypto operations 9 | keywordstyle=[1]\color{codekeyword}, 10 | keywords=[2]{address, bool, byte, bytes, bytes1, bytes2, bytes3, bytes4, bytes5, bytes6, bytes7, bytes8, bytes9, bytes10, bytes11, bytes12, bytes13, bytes14, bytes15, bytes16, bytes17, bytes18, bytes19, bytes20, bytes21, bytes22, bytes23, bytes24, bytes25, bytes26, bytes27, bytes28, bytes29, bytes30, bytes31, bytes32, enum, int, int8, int16, int24, int32, int40, int48, int56, int64, int72, int80, int88, int96, int104, int112, int120, int128, int136, int144, int152, int160, int168, int176, int184, int192, int200, int208, int216, int224, int232, int240, int248, int256, mapping, string, uint, uint8, uint16, uint24, uint32, uint40, uint48, uint56, uint64, uint72, uint80, uint88, uint96, uint104, uint112, uint120, uint128, uint136, uint144, uint152, uint160, uint168, uint176, uint184, uint192, uint200, uint208, uint216, uint224, uint232, uint240, uint248, uint256, var, void, ether, finney, szabo, wei, gwei, days, hours, minutes, seconds, weeks, years}, % types; money and time units 11 | keywordstyle=[2]\color{codetype}, 12 | keywords=[3]{block, blockhash, coinbase, difficulty, gaslimit, number, timestamp, msg, data, gas, sender, sig, value, now, tx, gasprice, origin}, % environment variables 13 | keywordstyle=[3]\color{violet}, 14 | identifierstyle=\color{black}, 15 | sensitive=true, 16 | comment=[l]{//}, 17 | morecomment=[s]{/*}{*/}, 18 | commentstyle=\color{codecomment}, 19 | stringstyle=\color{codestring}, 20 | morestring=[b]', 21 | morestring=[b]" 22 | } 23 | -------------------------------------------------------------------------------- /bin/pdf/inc/title-page.tex: -------------------------------------------------------------------------------- 1 | \begin{titlepage} 2 | 3 | \newgeometry{left=5cm,right=5cm} 4 | \begin{center} 5 | 6 | \vspace*{1.0cm} 7 | 8 | \Huge 9 | 10 | \textbf{Upgrading Ethereum} 11 | 12 | \vfill 13 | 14 | \Large 15 | 16 | A technical handbook on Ethereum's move to proof of stake and beyond. 17 | 18 | \vfill 19 | 20 | \textbf{Ben Edgington} 21 | 22 | \vfill 23 | 24 | \includegraphics[width=0.4\textwidth]{images/benjaminion.pdf} 25 | 26 | \vfill 27 | 28 | \textbf{\getenv{UE_GIT_BRANCH} Edition} 29 | 30 | \vfill 31 | 32 | \normalsize 33 | 34 | \today\ - \href{https://github.com/benjaminion/upgrading-ethereum-book}{\texttt{\getenv{UE_GIT_COMMIT}}} 35 | 36 | \end{center} 37 | \restoregeometry 38 | \end{titlepage} 39 | -------------------------------------------------------------------------------- /bin/pdf/make_pdf: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Attempt to make a PDF of the whole text, using pandoc. 4 | # 5 | # All being well, the generated PDF will appear as book.pdf in your current directory. 
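# Example invocation (this is exactly what the `pdfit` script in package.json runs):
#
#   bin/pdf/make_pdf src/book.md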
6 | 7 | if [ $# -eq 0 ]; then 8 | echo "Usage: $0 filename.md" 9 | exit 1 10 | fi 11 | 12 | name=$(basename $1 .md) 13 | outdir=$(pwd) 14 | srcdir=$(cd -- $(dirname "$1") && pwd) 15 | path=$(cd -- $(dirname "$0") && pwd) 16 | 17 | # Git branch name - used on the title page and metadata 18 | git_branch=$(git branch --show-current 2>/dev/null || echo 'unknown') 19 | export UE_GIT_BRANCH=${git_branch^} 20 | 21 | # Git commit hash - used on the title page 22 | export UE_GIT_COMMIT=$(git log -1 --format="%h" 2>/dev/null) 23 | 24 | # For finding the logging script 25 | export LUA_PATH="$path/filters/?.lua;;" 26 | 27 | # For finding the Solidity highlighting 28 | export TEXINPUTS="$path/inc/:" 29 | 30 | cd $srcdir 31 | 32 | echo "Converting $srcdir/$name.md to ./$name.pdf with version $UE_GIT_BRANCH" 33 | 34 | pandoc \ 35 | $name.md \ 36 | --output $outdir/$name.pdf \ 37 | --from markdown \ 38 | --metadata title-meta:"Upgrading Ethereum - ${UE_GIT_BRANCH^} Edition" \ 39 | --metadata author-meta:'Ben Edgington' \ 40 | --metadata lang:en-GB \ 41 | --lua-filter $path/filters/pagebreaks.lua \ 42 | --lua-filter $path/filters/links.lua \ 43 | --lua-filter $path/filters/figures.lua \ 44 | --lua-filter $path/filters/summaries.lua \ 45 | --lua-filter $path/filters/linebreaks.lua \ 46 | --lua-filter $path/filters/codeblocks.lua \ 47 | --variable documentclass:book \ 48 | --variable classoption:oneside \ 49 | --variable linkcolor:violet \ 50 | --variable geometry:a4paper \ 51 | --variable geometry:margin=2.54cm \ 52 | --variable block-headings \ 53 | --variable monofont:'DejaVu Sans Mono' \ 54 | --include-before-body $path/inc/title-page.tex \ 55 | --include-in-header $path/inc/header.tex \ 56 | --toc \ 57 | --toc-depth 3 \ 58 | --no-highlight \ 59 | --pdf-engine xelatex 60 | -------------------------------------------------------------------------------- /bin/util/add_trailing_slashes.pl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/perl 2 | 3 | # Add trailing slashes to all internal URLs and page paths. 4 | # It ought to be idempotent. 
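# For example, given the substitutions below (illustrative link, not taken from
# the book source):
#
#   [SSZ](/part2/building_blocks/ssz#containers)
#     becomes
#   [SSZ](/part2/building_blocks/ssz/#containers)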
5 | 6 | use strict; 7 | use warnings; 8 | 9 | my ($file) = @ARGV; 10 | die "Usage: $0 FILE\n" if not $file; 11 | open my $fh, '<', $file or die "Can't open $file: $!"; 12 | 13 | while(<$fh>) { 14 | 15 | # Add trailing slash to page paths if required 16 | s|^(#{1,3} .* )$|$1/$2|; 17 | 18 | # Add trailing slash to internal URLs if required 19 | s|(\[.+?\])\(/([^\)#]*[^/])((?:#.*?)?)\)|$1(/$2/$3)|g; 20 | 21 | print $_ 22 | } 23 | -------------------------------------------------------------------------------- /bin/util/anchors.awk: -------------------------------------------------------------------------------- 1 | #!/usr/bin/gawk -f 2 | 3 | # Output all eligible anchors in the document 4 | 5 | BEGIN { 6 | # /contents is OK as a page, but will not be picked up automatically 7 | print "/contents" 8 | name = "" 9 | } 10 | 11 | /(^# |^## |^### ).* $/ { 12 | path = gensub(/^#+ .* $/, "\\1", "1") 13 | print path 14 | } 15 | 16 | # Headings 17 | /^#/ { 18 | if ($0 ~ / $/) { 19 | name = gensub(/^#+ (.*) $/, "\\1", "1") 20 | } else { 21 | name = gensub(/^#+ (.*)$/, "\\1", "1") 22 | } 23 | name = tolower(name) 24 | gsub(/ /, "-", name) 25 | gsub(/[^a-z0-9_-]/, "", name) 26 | 27 | print path "#" name 28 | } 29 | 30 | # Explicit anchors - only one per line allowed, at the start of the line 31 | /^<\/a>$/ { 32 | name = gensub(/^<\/a>$/, "\\1", "1") 33 | print path "#" name 34 | } 35 | -------------------------------------------------------------------------------- /bin/util/check_grammar.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Run LTeX grammar checker: https://valentjn.github.io/ltex/index.html 4 | 5 | ltex=~/bin/ltex-ls-15.2.0/bin/ltex-cli 6 | config=$(dirname "$0")/ltex_config.json 7 | temp=$(mktemp /tmp/ltex_XXXXXXXX.md) 8 | 9 | # Image links seem to cause a problem for LTeX, so strip them 10 | cat $1 | sed 's/^!\[.*$//' > $temp 11 | 12 | $ltex --client-configuration=$config $temp | sed "s:^$temp:\n$1:" 13 | 14 | rm -f $temp 15 | -------------------------------------------------------------------------------- /bin/util/check_spellings_list.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Sanitise the spellings list by finding any unused entries 4 | 5 | export LC_ALL=C.UTF-8 6 | 7 | here=$(dirname "$0") 8 | check=$here/../build/checks/spellcheck.sh 9 | source=$here/../../src/book.md 10 | wordlist=$here/../../src/spellings.en.pws 11 | 12 | # Pre-requisite is to pass a normal spell check (no words missing) 13 | output=$($check $source $wordlist) 14 | [[ "$output" == "" ]] || { 15 | echo "Existing spelling errors need to be fixed:" 16 | echo "$output" 17 | exit 1 18 | } 19 | 20 | # Now spell check against an empty list and compare (no extra words) 21 | missing=$(mktemp) 22 | 23 | $here/make_spellings_list.sh > $missing 24 | 25 | tail -n +2 $wordlist | diff $missing - 26 | 27 | rm -f $missing 28 | -------------------------------------------------------------------------------- /bin/util/check_urls.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Check availability of external markdown links in the supplied document. 4 | 5 | # Github severely rate limits unless you use your access creds. 
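# The file read below is assumed to hold curl-style basic-auth credentials in
# the form user:token, since its contents are passed straight to `curl -u`.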
6 | github_secret=$(cat $(dirname "$0")/../priv/github.txt) 7 | 8 | selfserver=https://eth2book.info 9 | 10 | for x in $(cat $1 | sed "s|(/\.\.|($selfserver|g" | grep -Pho '\(\Khttp[^)]+' | sed 's/#.*$//g' | sort -u) 11 | do 12 | echo $x; 13 | 14 | # Include credentials for github.com 15 | [[ "$x" =~ github.com ]] && creds="-u $github_secret" || creds="" 16 | 17 | # beaconcha.in, twitter and reddit don't like HEAD requests 18 | [[ "$x" =~ beaconcha.in || "$x" =~ reddit.com || "$x" =~ twitter.com ]] && head="" || head="--head" 19 | 20 | res=$(curl $creds -Lo /dev/null --max-time 5 --silent $head --write-out '%{http_code}\n' $x) 21 | 22 | if [ "200" -ne "$res" ] 23 | then 24 | echo "*** $res ***" 25 | fi 26 | done 27 | -------------------------------------------------------------------------------- /bin/util/check_urls_parallel.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # 4 | # Check availability of external markdown links in the supplied document. 5 | # 6 | # Gotchas: 7 | # - beaconcha.in is now doing more sophisticated bot detection, so always returns 403 8 | # - archive.org rate-limits quite severely, so some will show up as 000 9 | # - eigenlayer.xyz always returns 403 10 | 11 | if [ $# -eq 0 ]; then 12 | echo "Usage: $0 " 13 | exit 1 14 | fi 15 | 16 | if [ ! -f "$1" ]; then 17 | echo "Error: Cannot read $1" 18 | exit 1 19 | fi 20 | 21 | markdown_file=$1 22 | 23 | # Number of concurrent checks 24 | npara=8 25 | 26 | # Timeout in seconds 27 | timeout=10 28 | 29 | # Github severely rate limits unless you use your access creds. 30 | github_secret=$(cat $(dirname "$0")/../priv/github.txt) 31 | 32 | # File to store non-200 URLs 33 | non_200_urls_tmp=$(mktemp) 34 | 35 | # Where to find the book itself (for relative links that are really absolute) 36 | selfserver=https://eth2book.info 37 | 38 | # Function to check a single URL 39 | check_url() { 40 | x="$1" 41 | echo $x; 42 | 43 | # Include credentials for github.com 44 | [[ "$x" =~ github.com ]] && creds="-u $github_secret" || creds="" 45 | 46 | # beaconcha.in, twitter and reddit don't like HEAD requests 47 | [[ "$x" =~ beaconcha.in || "$x" =~ reddit.com || "$x" =~ twitter.com ]] && head="" || head="--head" 48 | 49 | res=$(curl $creds -Lo /dev/null --max-time $timeout --silent $head --write-out '%{http_code}\n' $x) 50 | 51 | if [ "200" -ne "$res" ] 52 | then 53 | echo "$res $x" >> $non_200_urls_tmp 54 | fi 55 | } 56 | 57 | export -f check_url 58 | export timeout github_secret non_200_urls_tmp 59 | 60 | # Extract URLs and pass them to check_url function in parallel 61 | cat $markdown_file \ 62 | | sed "s|(/\.\.|($selfserver|g" \ 63 | | grep -Pho '\(\Khttp[^)]+' \ 64 | | sed 's/#.*$//g' \ 65 | | sort -u \ 66 | | xargs -P $npara -I {} bash -c 'check_url "$@"' _ {} 67 | 68 | # Print non-200 URLs 69 | exit_code=0 70 | echo 71 | if [ -s $non_200_urls_tmp ] 72 | then 73 | echo "*** Failing URLs: ***" 74 | cat $non_200_urls_tmp 75 | exit_code=1 76 | else 77 | echo "*** All URLs are good ***" 78 | fi 79 | 80 | rm $non_200_urls_tmp 81 | exit $exit_code 82 | -------------------------------------------------------------------------------- /bin/util/constants.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | config=$(mktemp) 4 | phase0=$(mktemp) 5 | altair=$(mktemp) 6 | 7 | wget -O $config https://raw.githubusercontent.com/ethereum/consensus-specs/v1.1.1/configs/mainnet.yaml 8 | wget -O $phase0 
https://raw.githubusercontent.com/ethereum/consensus-specs/v1.1.1/presets/mainnet/phase0.yaml 9 | wget -O $altair https://raw.githubusercontent.com/ethereum/consensus-specs/v1.1.1/presets/mainnet/altair.yaml 10 | 11 | cat $config $phase0 $altair | awk ' 12 | BEGIN { 13 | print "{" 14 | } 15 | 16 | # Skip comment lines 17 | /^#/ { next } 18 | 19 | /: / { 20 | key = gensub(/^(.+): .+$/, "\\1", "1") 21 | value = gensub(/^.+: (.+)$/, "\\1", "1") 22 | 23 | print " \"" key "\": \"" value "\"," 24 | } 25 | 26 | END { 27 | # Lift constants from the spec manually 28 | print " \"GENESIS_SLOT\": \"Slot(0)\"," 29 | print " \"GENESIS_EPOCH\": \"Epoch(0)\"," 30 | print " \"FAR_FUTURE_EPOCH\": \"Epoch(2**64 - 1)\"," 31 | print " \"BASE_REWARDS_PER_EPOCH\": \"uint64(4)\"," 32 | print " \"DEPOSIT_CONTRACT_TREE_DEPTH\": \"uint64(2**5) (= 32)\"," 33 | print " \"JUSTIFICATION_BITS_LENGTH\": \"uint64(4)\"," 34 | print " \"ENDIANNESS\": \"'little'\"," 35 | print " \"BLS_WITHDRAWAL_PREFIX\": \"Bytes1('0x00')\"," 36 | print " \"ETH1_ADDRESS_WITHDRAWAL_PREFIX\": \"Bytes1('0x01')\"," 37 | print " \"TIMELY_SOURCE_FLAG_INDEX\": \"0\"," 38 | print " \"TIMELY_TARGET_FLAG_INDEX\": \"1\"," 39 | print " \"TIMELY_HEAD_FLAG_INDEX\": \"2\"," 40 | print " \"TIMELY_SOURCE_WEIGHT\": \"uint64(14)\"," 41 | print " \"TIMELY_TARGET_WEIGHT\": \"uint64(26)\"," 42 | print " \"TIMELY_HEAD_WEIGHT\": \"uint64(14)\"," 43 | print " \"SYNC_REWARD_WEIGHT\": \"uint64(2)\"," 44 | print " \"PROPOSER_WEIGHT\": \"uint64(8)\"," 45 | print " \"WEIGHT_DENOMINATOR\": \"uint64(64)\"," 46 | print " \"DOMAIN_SYNC_COMMITTEE\": \"DomainType('0x07000000')\"," 47 | print " \"DOMAIN_SYNC_COMMITTEE_SELECTION_PROOF\": \"DomainType('0x08000000')\"," 48 | print " \"DOMAIN_CONTRIBUTION_AND_PROOF\": \"DomainType('0x09000000')\"," 49 | print " \"DOMAIN_BEACON_PROPOSER\": \"DomainType('0x00000000')\"," 50 | print " \"DOMAIN_BEACON_ATTESTER\": \"DomainType('0x01000000')\"," 51 | print " \"DOMAIN_RANDAO\": \"DomainType('0x02000000')\"," 52 | print " \"DOMAIN_DEPOSIT\": \"DomainType('0x03000000')\"," 53 | print " \"DOMAIN_VOLUNTARY_EXIT\": \"DomainType('0x04000000')\"," 54 | print " \"DOMAIN_SELECTION_PROOF\": \"DomainType('0x05000000')\"," 55 | print " \"DOMAIN_AGGREGATE_AND_PROOF\": \"DomainType('0x06000000')\"" 56 | print "}" 57 | }' 58 | -------------------------------------------------------------------------------- /bin/util/functions.awk: -------------------------------------------------------------------------------- 1 | #!/usr/bin/gawk -f 2 | 3 | # Insert an anchor for each function defined in the document, 2 lines ahead. 4 | 5 | NR == 1 { 6 | lm2 = $0 7 | } 8 | 9 | NR == 2 { 10 | lm1 = $0 11 | } 12 | 13 | NR > 2 { 14 | if ($0 ~ /^def /) { 15 | f = gensub(/^def ([^(]+).+$/, "\\1", "1") 16 | print "\n" 17 | } 18 | print lm2 19 | lm2 = lm1 20 | lm1 = $0 21 | } 22 | 23 | END { 24 | print lm2 25 | print lm1 26 | } 27 | -------------------------------------------------------------------------------- /bin/util/git-pre-commit-hook.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # Run the checks only if file `book.md` is staged 4 | if git diff --exit-code -s --staged src/book.md 5 | then 6 | exit 0 7 | fi 8 | 9 | # Run the pre-build checks on the book source 10 | node --input-type=module -e 'import runChecks from "./bin/build/prebuild.js"; runChecks()' 11 | 12 | if [ "$?" 
!= "0" ] 13 | then 14 | echo "\nError: Not committing due to failed checks.\n" >&2 15 | exit 1 16 | fi 17 | -------------------------------------------------------------------------------- /bin/util/ltex_config.json: -------------------------------------------------------------------------------- 1 | { 2 | "language": "en-GB", 3 | "disabledRules": { 4 | "en-GB": [ 5 | "ARROWS", 6 | "COMMA_PARENTHESIS_WHITESPACE", 7 | "ENGLISH_WORD_REPEAT_BEGINNING_RULE", 8 | "EN_WORD_COHERENCY", 9 | "IN_A_X_MANNER", 10 | "LARGE_NUMBER_OF", 11 | "MORFOLOGIK_RULE_EN_GB", 12 | "OXFORD_SPELLING_Z_NOT_S", 13 | "SMALL_NUMBER_OF", 14 | "SOME_OF_THE", 15 | "UPPERCASE_SENTENCE_START", 16 | "WANNA", 17 | "WHITESPACE_RULE" 18 | ] 19 | }, 20 | "hiddenFalsePositives": { 21 | "en-GB": [ 22 | "{\"rule\": \"ADMIT_ENJOY_VB\", \"sentence\": \"i is used\"}", 23 | "{\"rule\": \"ADVERB_VERB_ADVERB_REPETITION\", \"sentence\": \"per slot per shard\"}", 24 | "{\"rule\": \"ADVERB_WORD_ORDER\", \"sentence\": \"happen rarely\"}", 25 | "{\"rule\": \"AGREEMENT_SENT_START\", \"sentence\": \"Slashings balances updates\"}", 26 | "{\"rule\": \"CD_NN\", \"sentence\": \"ETH\"}", 27 | "{\"rule\": \"COMMA_COMPOUND_SENTENCE\", \"sentence\": \"(recovered and we are no longer)\"}", 28 | "{\"rule\": \"COMMA_COMPOUND_SENTENCE_2\", \"sentence\": \"(components are showing and the wires)|(will fail and the block)\"}", 29 | "{\"rule\": \"DID_FOUND_AMBIGUOUS\", \"sentence\": \"bound the score\"}", 30 | "{\"rule\": \"DT_JJ_NO_NOUN\", \"sentence\": \"a boolean is either True or False\"}", 31 | "{\"rule\": \"EN_A_VS_AN\", \"sentence\": \"(an xor)|(a uint)|(the list a)\"}", 32 | "{\"rule\": \"EN_COMPOUNDS\", \"sentence\": \"([Bb]lock chain)|(space saving)|(anti-pattern)\"}", 33 | "{\"rule\": \"EN_SPECIFIC_CASE\", \"sentence\": \"SLOTS_PER_EPOCH times\"}", 34 | "{\"rule\": \"EVERY_EACH_SINGULAR\", \"sentence\": \"[Ee]very (SLOTS_PER|EPOCHS_PER)\"}", 35 | "{\"rule\": \"FINAL_ADVERB_COMMA\", \"sentence\": \"acting honestly\"}", 36 | "{\"rule\": \"HAVE_PART_AGREEMENT\", \"sentence\": \"TARGET_AGGREGATORS\"}", 37 | "{\"rule\": \"I_LOWERCASE\", \"sentence\": \"(counter i)|(index i)|(bits of i)|(i is incremented)|(i is used)\"}", 38 | "{\"rule\": \"LC_AFTER_PERIOD\", \"sentence\": \"(state.slashings)|(validator.slashed)\"}", 39 | "{\"rule\": \"MASS_AGREEMENT\", \"sentence\": \"decrease the score\"}", 40 | "{\"rule\": \"MISSING_GENITIVE\", \"sentence\": \"(Nakamoto consensus)|(Ethereum transaction)|(Merkle roots)\"}", 41 | "{\"rule\": \"PEOPLE_VBZ\", \"sentence\": \"people is a joy\"}", 42 | "{\"rule\": \"PHRASE_REPETITION\", \"sentence\": \"a committee a committee\"}", 43 | "{\"rule\": \"POSSESSIVE_APOSTROPHE\", \"sentence\": \"(specs repo)|(Withdrawals section)|(rewards calculation)|(roots accumulator)\"}", 44 | "{\"rule\": \"QUESTION_MARK\", \"sentence\": \"Were we to\"}", 45 | "{\"rule\": \"SENTENCE_WHITESPACE\", \"sentence\": \"(KeyValidate)|(FastAggregateVerify)|(Verify)\"}", 46 | "{\"rule\": \"SUBSEQUENT_TO\", \"sentence\": \"subsequent to that genesis event\"}", 47 | "{\"rule\": \"THERE_S_MANY\", \"sentence\": \"The only curiosity here is the lines\"}", 48 | "{\"rule\": \"THE_PUNCT\", \"sentence\": \"call it a, rather than attestation\"}", 49 | "{\"rule\": \"THE_SENT_END\", \"sentence\": \"IndexedAttestation, a\"}", 50 | "{\"rule\": \"THE_SUPERLATIVE\", \"sentence\": \"Latest Message Driven\"}", 51 | "{\"rule\": \"THE_WORSE_OF\", \"sentence\": \"the higher of\"}", 52 | "{\"rule\": \"TO_DO_HYPHEN\", \"sentence\": \"to DoS\"}", 53 | "{\"rule\": \"WHETHER\", 
\"sentence\": \"whether or not\"}" 54 | ] 55 | }, 56 | "markdown": { 57 | "nodes": { 58 | "BlockQuote": "ignore", 59 | "Code": "default" 60 | } 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /bin/util/make_spellings_list.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Make a fresh spellings list 4 | 5 | export LC_ALL=C.UTF-8 6 | 7 | here=$(dirname "$0") 8 | check=$here/../build/checks/spellcheck.sh 9 | source=$here/../../src/book.md 10 | 11 | $check $source /dev/null | awk '{print $3}' | sort -u 12 | -------------------------------------------------------------------------------- /bin/util/patch_all.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # 4 | # Apply a git commit to all branches 5 | # 6 | 7 | branches='altair bellatrix capella deneb' 8 | 9 | if [ $# -eq 0 ]; then 10 | echo "Usage: $0 " 11 | exit 1 12 | fi 13 | 14 | # Save the starting branch so we can return to it later 15 | start=$(git branch --show-current) 16 | 17 | for branch in $branches; 18 | do 19 | [[ $branch == $start ]] && continue 20 | echo "*** Patching $branch" 21 | git switch $branch && git cherry-pick --allow-empty $1 22 | if [ $? -ne 0 ] 23 | then 24 | echo "*** Cherry pick failed on $branch" 25 | echo "*** Aborting" 26 | git cherry-pick --abort 27 | fi 28 | echo 29 | done 30 | 31 | git switch $start 32 | -------------------------------------------------------------------------------- /bin/util/publish.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | version=$(git branch --show-current 2>/dev/null || echo 'unknown') 4 | 5 | wait_for_input () { 6 | read -s < /dev/tty 7 | } 8 | 9 | was_it_ok () { 10 | if [ $1 -ne 0 ] 11 | then 12 | echo "Exiting: $2 failed." 13 | exit 1 14 | fi 15 | } 16 | 17 | echo 18 | echo "*** Publishing to path $version ***" 19 | 20 | cd $(dirname "$0")/../.. 21 | 22 | # Set the host variable 23 | source bin/priv/server.sh 24 | 25 | echo 26 | echo "*** Building site..." 27 | 28 | npm run clean 29 | npm run build 30 | was_it_ok $? "npm run build" 31 | 32 | echo 33 | echo "*** Building PDF..." 34 | bin/pdf/make_pdf src/book.md 35 | was_it_ok $? 
"make_pdf" 36 | 37 | mv book.pdf dist/ 38 | 39 | echo 40 | echo "*** Ready to upload - press [ENTER] to continue" 41 | wait_for_input 42 | tar zcf - dist | ssh $host tar zxfC - eth2book 43 | 44 | echo 45 | echo "*** Ready to install - press [ENTER] to continue" 46 | wait_for_input 47 | ssh $host eth2book/install_astro.sh $version 48 | -------------------------------------------------------------------------------- /bin/util/stats.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | here=$(dirname "$0") 4 | 5 | src=$here/../../src 6 | pdf=$here/../../book.pdf 7 | 8 | bytes=$(cat $src/book.md | wc -c) 9 | footnotes=$(grep '^\[^.*\]:' $src/book.md | wc -l) 10 | words=$($here/../build/checks/spellcheck_prep.pl $src/book.md | sed 's/^^ //' | wc -w) 11 | pages=$(pdfinfo $pdf | grep '^Pages' | awk '{print $2}') 12 | external=$(cat $src/book.md | grep -Pho '\(\Khttp[^)]+' | sed 's/#.*$//g' | sort -u | wc -l) 13 | internal=$(cat $src/book.md | grep -Pho '\(\K/[^)]+' | wc -l) 14 | charts=$(ls $src/images/charts/*.svg | wc -l) 15 | diagrams=$(ls $src/images/diagrams/*.svg | wc -l) 16 | 17 | echo Bytes: $bytes 18 | echo Words: $words 19 | echo Pages: $pages 20 | echo Images: $((charts + diagrams)) 21 | echo Internal links: $internal 22 | echo External pages: $external 23 | echo Footnotes: $footnotes 24 | -------------------------------------------------------------------------------- /bin/util/update_spellings_list.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Update the spellings list with all the current "mis-spellings" 4 | 5 | here=$(dirname "$0") 6 | wordlist=$here/../../src/spellings.en.pws 7 | newlist=$(mktemp) 8 | 9 | $here/make_spellings_list.sh > $newlist 10 | 11 | count=$(cat $newlist | wc -l) 12 | 13 | # Add header line 14 | echo "personal_ws-1.1 en $count utf-8" > $wordlist 15 | cat $newlist >> $wordlist 16 | 17 | rm $newlist 18 | -------------------------------------------------------------------------------- /bin/util/validate.js: -------------------------------------------------------------------------------- 1 | import axios from 'axios'; 2 | import fs from 'fs'; 3 | 4 | export default function validateHtml(fileName) { 5 | 6 | const file = fs.readFileSync(fileName) 7 | 8 | axios({ 9 | method: 'post', 10 | url: 'https://validator.w3.org/nu/?out=json', 11 | data: file, 12 | headers: { 13 | 'Content-Type': 'text/html' 14 | }, 15 | maxContentLength: Infinity, 16 | maxBodyLength: Infinity 17 | }).then(function (response) { 18 | console.log(response.data.messages.filter(m => m.type !== 'info')); 19 | }) 20 | } 21 | -------------------------------------------------------------------------------- /eslint.config.js: -------------------------------------------------------------------------------- 1 | import js from '@eslint/js'; 2 | import globals from 'globals'; 3 | import { defineConfig } from 'eslint/config'; 4 | import eslintPluginAstro from 'eslint-plugin-astro'; 5 | 6 | export default defineConfig([ 7 | { 8 | files: ['**/*.{js,mjs,cjs}'], 9 | plugins: { js }, 10 | extends: ['js/recommended'], 11 | }, 12 | { 13 | files: ['**/*.{js,mjs,cjs}'], 14 | languageOptions: { globals: { ...globals.browser, ...globals.node } }, 15 | }, 16 | { 17 | rules: { 18 | 'no-unused-vars': [ 19 | 'error', 20 | { 21 | caughtErrors: 'none', 22 | }, 23 | ], 24 | }, 25 | }, 26 | ...eslintPluginAstro.configs.recommended, 27 | ]); 28 | 
-------------------------------------------------------------------------------- /integrations/my_add_tooltips.js: -------------------------------------------------------------------------------- 1 | import { visit } from 'unist-util-visit'; 2 | import fs from 'fs'; 3 | 4 | // Add a tooltip to constant values in the text according to the mapping in the 5 | // supplied file. 6 | 7 | let constantsMap = {}; 8 | 9 | function addTooltips() { 10 | return function (tree) { 11 | visit(tree, 'inlineCode', (node, index, parent) => { 12 | // HTML in headings causes problems for the page index, so skip these 13 | if (parent.type !== 'heading') { 14 | const text = node.value; 15 | const value = constantsMap[text]; 16 | if (value) { 17 | node.type = 'html'; 18 | node.value = `${text}`; 19 | node.children = undefined; 20 | } 21 | } 22 | }); 23 | }; 24 | } 25 | 26 | export default function (options) { 27 | // Read the constants file and store it for later 28 | const constantsFile = options?.constantsFile || ''; 29 | constantsMap = JSON.parse(fs.readFileSync(constantsFile, 'utf8')); 30 | 31 | return { 32 | name: 'myAddTooltips', 33 | hooks: { 34 | 'astro:config:setup': ({ updateConfig }) => { 35 | updateConfig({ 36 | markdown: { 37 | remarkPlugins: [addTooltips], 38 | }, 39 | }); 40 | }, 41 | }, 42 | }; 43 | } 44 | -------------------------------------------------------------------------------- /integrations/my_autolink_headings.js: -------------------------------------------------------------------------------- 1 | import { CONTINUE, SKIP, visit } from 'unist-util-visit'; 2 | import { fromHtmlIsomorphic } from 'hast-util-from-html-isomorphic'; 3 | import { toString } from 'hast-util-to-string'; 4 | import { isElement } from 'hast-util-is-element'; 5 | import { matches } from 'hast-util-select'; 6 | 7 | // Add IDs and SVG permalinks to headings 8 | // (rehype-autolink-headings is good, but can't be configured to ignore some headings) 9 | 10 | const anchor = fromHtmlIsomorphic( 11 | '', 12 | { fragment: true }, 13 | ).children[0]; 14 | 15 | // Should match the method in bin/build/checks/links.pl 16 | function slugIt(heading) { 17 | return toString(heading) 18 | .trim() 19 | .toLowerCase() 20 | .replace(/\s+/g, '-') 21 | .replace(/[^a-z0-9_-]/g, ''); 22 | } 23 | 24 | function autoLinkHeadings(options) { 25 | const { headings, exclude } = options; 26 | return function (tree) { 27 | visit(tree, 'element', (node) => { 28 | if (!isElement(node, headings) || (exclude && matches(exclude, node))) { 29 | return CONTINUE; 30 | } 31 | const newAnchor = structuredClone(anchor); 32 | if (node.properties.id) { 33 | const id = node.properties.id; 34 | newAnchor.properties = { ...newAnchor.properties, href: '#' + id }; 35 | } else { 36 | const id = slugIt(node); 37 | newAnchor.properties = { ...newAnchor.properties, href: '#' + id }; 38 | node.properties.id = id; 39 | } 40 | node.children.unshift(newAnchor); 41 | return SKIP; 42 | }); 43 | }; 44 | } 45 | 46 | // The headings to process 47 | const defaultHeadings = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6']; 48 | // Headings that match this selector are ignored 49 | const defaultExclude = undefined; 50 | 51 | export default function (options) { 52 | const headings = 53 | options?.headings !== undefined ? options.headings : defaultHeadings; 54 | const exclude = 55 | options?.exclude !== undefined ? 
options.exclude : defaultExclude; 56 | return { 57 | name: 'myAutoLinkHeadings', 58 | hooks: { 59 | 'astro:config:setup': ({ updateConfig, logger }) => { 60 | logger.debug('Headings: ' + headings); 61 | logger.debug('Exclude: ' + exclude); 62 | updateConfig({ 63 | markdown: { 64 | rehypePlugins: [ 65 | [autoLinkHeadings, { headings: headings, exclude: exclude }], 66 | ], 67 | }, 68 | }); 69 | }, 70 | }, 71 | }; 72 | } 73 | -------------------------------------------------------------------------------- /integrations/my_build_checks.js: -------------------------------------------------------------------------------- 1 | import runChecks from '../bin/build/prebuild.js'; 2 | 3 | function buildChecks(logger) { 4 | logger.info('Running pre-build checks:'); 5 | runChecks(logger, false); 6 | } 7 | 8 | export default function () { 9 | let doChecks; 10 | return { 11 | name: 'myBuildChecks', 12 | hooks: { 13 | 'astro:config:setup': ({ command }) => { 14 | doChecks = command === 'build' && process.env.UE_NOCHECK === undefined; 15 | }, 16 | 'astro:config:done': ({ logger }) => { 17 | if (doChecks) { 18 | buildChecks(logger); 19 | } 20 | }, 21 | }, 22 | }; 23 | } 24 | -------------------------------------------------------------------------------- /integrations/my_cleanup_html.js: -------------------------------------------------------------------------------- 1 | import { visit, SKIP } from 'unist-util-visit'; 2 | 3 | // Clean up any weird HTML artefacts, especially those that fail validation 4 | 5 | function cleanupHtml() { 6 | return function (tree) { 7 | // Remove `is:raw=""` that's on `code` elements, probably from Prism. 8 | visit(tree, 'element', (node) => { 9 | if (node.tagName == 'code') { 10 | delete node.properties['is:raw']; 11 | } 12 | }); 13 | 14 | // Remove any comments 15 | visit(tree, 'comment', (node, index, parent) => { 16 | parent.children.splice(index, 1); 17 | return SKIP; 18 | }); 19 | }; 20 | } 21 | 22 | export default function () { 23 | return { 24 | name: 'myCleanupHtml', 25 | hooks: { 26 | 'astro:config:setup': ({ updateConfig }) => { 27 | updateConfig({ 28 | markdown: { 29 | rehypePlugins: [cleanupHtml], 30 | }, 31 | }); 32 | }, 33 | }, 34 | }; 35 | } 36 | -------------------------------------------------------------------------------- /integrations/my_fixup_links.js: -------------------------------------------------------------------------------- 1 | import { visit } from 'unist-util-visit'; 2 | 3 | // Prepend `base` to URLs in the markdown file. 
4 | // It seems that [Astro does not do this](https://github.com/withastro/astro/issues/3626) 5 | 6 | function fixupLinks(basePath) { 7 | return function (tree) { 8 | visit(tree, 'element', (node) => { 9 | if (node.tagName == 'a' && node.properties.href) { 10 | // Add basePath prefix to local URLs that lack it 11 | // [Astro does not do this](https://github.com/withastro/astro/issues/3626) 12 | if ( 13 | node.properties.href.startsWith('/') && 14 | !node.properties.href.startsWith(basePath + '/') 15 | ) { 16 | node.properties.href = basePath + node.properties.href; 17 | } 18 | 19 | // Add rel="external noopener" and target="_blank" attributes to off-site links 20 | if ( 21 | !node.properties.href.startsWith('/') && 22 | !node.properties.href.startsWith('#') 23 | ) { 24 | node.properties.rel = ['external', 'noopener']; 25 | node.properties.target = '_blank'; 26 | } 27 | } 28 | }); 29 | }; 30 | } 31 | 32 | export default function () { 33 | return { 34 | name: 'myFixupLinks', 35 | hooks: { 36 | 'astro:config:setup': ({ config, updateConfig }) => { 37 | updateConfig({ 38 | markdown: { 39 | rehypePlugins: [[fixupLinks, config.base]], 40 | }, 41 | }); 42 | }, 43 | }, 44 | }; 45 | } 46 | -------------------------------------------------------------------------------- /integrations/my_htaccess.js: -------------------------------------------------------------------------------- 1 | import fs from 'fs'; 2 | 3 | // Write a .htaccess file to set the correct 404 page 4 | 5 | function writeHtaccess(base, dir, logger) { 6 | const file = dir.pathname + '.htaccess'; 7 | const contents = `ErrorDocument 404 ${base}/404.html\n`; 8 | fs.writeFileSync(file, contents); 9 | logger.info(`Wrote .htaccess file to ${file}`); 10 | } 11 | 12 | export default function (base) { 13 | return { 14 | name: 'myHtaccess', 15 | hooks: { 16 | 'astro:build:done': ({ dir, logger }) => { 17 | writeHtaccess(base, dir, logger); 18 | }, 19 | }, 20 | }; 21 | } 22 | -------------------------------------------------------------------------------- /integrations/my_search_index.js: -------------------------------------------------------------------------------- 1 | import { visit, SKIP, CONTINUE } from 'unist-util-visit'; 2 | import { matches } from 'hast-util-select'; 3 | import fs from 'fs'; 4 | 5 | // File scoped to accumulate the index across calls to mySearchIndex 6 | const searchIndex = []; 7 | 8 | function isExcludedFrontmatter(frontmatter, exclude) { 9 | return exclude.frontmatter.some( 10 | (test) => frontmatter[test.key] === test.value, 11 | ); 12 | } 13 | 14 | // Recursively concatenate all text in child nodes while respecting exclusions 15 | function getText(node, exclude) { 16 | if (node.type === 'text') { 17 | // [\u202F\u00A0] is a non-breaking space 18 | return node.value.replace(/[\u202F\u00A0]/, ' '); 19 | } 20 | 21 | if (node.type !== 'element' || matches(exclude.ignore, node)) { 22 | return ''; 23 | } 24 | 25 | // Add some minimal formatting for tables 26 | let separator = ''; 27 | if (node.type === 'element') { 28 | if (node.tagName === 'tr') { 29 | separator = ' | '; 30 | } else if (node.tagName === 'tbody' || node.tagName === 'table') { 31 | separator = '
'; 32 | } 33 | } 34 | 35 | return node.children 36 | .map((node) => { 37 | return getText(node, exclude); 38 | }) 39 | .filter((x) => x) 40 | .join(separator); 41 | } 42 | 43 | function getChunks(tree, chunkTypes, exclude) { 44 | const counts = Array(chunkTypes.length).fill(0); 45 | let chunks = []; 46 | 47 | // Walk the tree until we find an element we want to treat as a chunk, then get 48 | // all its text content. 49 | visit(tree, 'element', (node) => { 50 | if (matches(exclude.ignore, node)) { 51 | return SKIP; 52 | } 53 | 54 | for (let idx = 0; idx < chunkTypes.length; idx++) { 55 | const type = chunkTypes[idx]; 56 | if (matches(type.query, node)) { 57 | const text = getText(node, exclude); 58 | if (text !== '') { 59 | const tagName = node.tagName.toLowerCase(); 60 | let id = node.properties?.id; 61 | if (id === undefined) { 62 | // Edit the element's ID so we can find it from the search page later 63 | id = tagName + '_' + counts[idx]; 64 | node.properties.id = id; 65 | ++counts[idx]; 66 | } 67 | 68 | chunks.push({ 69 | type: tagName, 70 | label: type.label, 71 | id: id, 72 | text: text, 73 | weight: type.weight === undefined ? 1 : type.weight, 74 | }); 75 | } 76 | return SKIP; 77 | } 78 | } 79 | 80 | return CONTINUE; 81 | }); 82 | 83 | return chunks; 84 | } 85 | 86 | function includePage(frontmatter, exclude) { 87 | return !isExcludedFrontmatter(frontmatter, exclude); 88 | } 89 | 90 | function buildSearchIndex(options) { 91 | const { chunkTypes, exclude, logger } = { ...options }; 92 | 93 | return function (tree, file) { 94 | const frontmatter = file.data.astro.frontmatter; 95 | 96 | if (includePage(frontmatter, exclude)) { 97 | logger.debug('Processing ' + frontmatter.path); 98 | 99 | const chunks = getChunks(tree, chunkTypes, exclude); 100 | const pageIndexData = { 101 | frontmatter: { 102 | path: frontmatter.path, 103 | titles: frontmatter.titles, 104 | }, 105 | chunks: chunks, 106 | }; 107 | searchIndex.push(pageIndexData); 108 | } else { 109 | logger.debug('Ignoring ' + frontmatter.path); 110 | } 111 | }; 112 | } 113 | 114 | function writeSearchIndex(dir, file, logger) { 115 | const fileName = dir.pathname + file; 116 | 117 | if (searchIndex.length) { 118 | logger.info('Indexed ' + searchIndex.length + ' pages'); 119 | } else { 120 | logger.warn('No pages were indexed'); 121 | } 122 | 123 | fs.writeFileSync(fileName, JSON.stringify(searchIndex)); 124 | logger.info('Wrote search index to ' + fileName); 125 | } 126 | 127 | export default function (options) { 128 | if (options.enabled === false) { 129 | return { name: 'my-search-index' }; 130 | } 131 | 132 | return { 133 | name: 'mySearchIndex', 134 | hooks: { 135 | // We build the search index with rehype 136 | 'astro:config:setup': ({ updateConfig, logger }) => { 137 | updateConfig({ 138 | markdown: { 139 | rehypePlugins: [[buildSearchIndex, { ...options, logger: logger }]], 140 | }, 141 | }); 142 | }, 143 | // We write the search index to a file once the build is complete 144 | 'astro:build:done': ({ dir, logger }) => { 145 | writeSearchIndex(dir, options.indexFile, logger); 146 | }, 147 | }, 148 | }; 149 | } 150 | -------------------------------------------------------------------------------- /integrations/my_svg_inline.js: -------------------------------------------------------------------------------- 1 | import { visit } from 'unist-util-visit'; 2 | import { optimize } from 'svgo'; 3 | import { getHashDigest } from 'loader-utils'; 4 | import path from 'path'; 5 | import fs from 'fs'; 6 | 7 | // Inline SVG files into the 
Markdown AST 8 | 9 | // SVGO doesn't really support adding elements, and the API changes. 10 | // The below is based on code from the "reusePaths" plugin. 11 | const addTitle = { 12 | name: 'addTitle', 13 | type: 'visitor', 14 | active: true, 15 | fn: (ast, params) => { 16 | return { 17 | element: { 18 | exit: (node, parentNode) => { 19 | if (node.name === 'svg' && parentNode.type === 'root') { 20 | const hasTitle = node.children.some( 21 | (child) => child.type === 'element' && child.name === 'title', 22 | ); 23 | if (!hasTitle) { 24 | const titleElement = { 25 | type: 'element', 26 | name: 'title', 27 | attributes: {}, 28 | children: [], 29 | }; 30 | Object.defineProperty(titleElement, 'parentNode', { 31 | writable: true, 32 | value: node, 33 | }); 34 | const titleContents = { 35 | type: 'text', 36 | value: params.titleText, 37 | }; 38 | Object.defineProperty(titleContents, 'parentNode', { 39 | writable: true, 40 | value: titleElement, 41 | }); 42 | titleElement.children.push(titleContents); 43 | node.children.unshift(titleElement); 44 | } 45 | } 46 | }, 47 | }, 48 | }; 49 | }, 50 | }; 51 | 52 | // See https://www.npmjs.com/package/svgo 53 | const plugins = [ 54 | 'preset-default', 55 | 'prefixIds', 56 | 'removeDimensions', 57 | 'removeXMLNS', 58 | { 59 | name: 'addAttributesToSVGElement', 60 | params: { attribute: { role: 'img' } }, 61 | }, 62 | ]; 63 | 64 | const addTitleSettings = { 65 | name: addTitle.name, 66 | type: addTitle.type, 67 | active: addTitle.active, 68 | fn: addTitle.fn, 69 | params: undefined, 70 | }; 71 | 72 | const addAttributes = { 73 | name: 'addAttributesToSVGElement', 74 | params: undefined, 75 | }; 76 | 77 | function inlineSvg(options) { 78 | const filePath = options.filePath || ''; 79 | const cachePathTmp = options.cachePath; 80 | const cachePath = cachePathTmp.endsWith('/') 81 | ? cachePathTmp 82 | : cachePathTmp + '/'; 83 | const { logger, doCache } = options; 84 | 85 | return function (tree) { 86 | visit(tree, 'paragraph', async (node) => { 87 | if (node.children[0].type == 'image') { 88 | const image = node.children[0]; 89 | 90 | if (image.url.endsWith('.svg')) { 91 | const originalSvg = fs.readFileSync(filePath + image.url, 'utf8'); 92 | const basename = path.basename(image.url, '.svg'); 93 | 94 | // We need to distinguish multiple SVGs on the same page by using "prefixIds" 95 | const digest = getHashDigest(basename, 'md5', 'base52', 4); 96 | 97 | // Configure the SVGO addAttributes plugin to add an ID to the SVG element 98 | addAttributes['params'] = { attribute: { id: basename + '-svg' } }; 99 | 100 | // Configure our custom plugin that adds a title element 101 | addTitleSettings['params'] = { titleText: image.alt }; 102 | 103 | // If the cachePath option is provided, we load the optimised SVG from there 104 | // when it exists and is newer than the original SVG. If a cached version 105 | // is not available or is older than the original SVG, we rewrite it. 106 | 107 | const origMtime = fs.statSync(filePath + image.url).mtime; 108 | const cacheFile = doCache ? 
cachePath + basename + '.svg' : null; 109 | const goodCache = 110 | doCache && 111 | fs.existsSync(cacheFile) && 112 | fs.statSync(cacheFile).mtime > origMtime; 113 | 114 | let svg; 115 | if (goodCache) { 116 | svg = fs.readFileSync(cacheFile, 'utf8'); 117 | logger.debug(`Using cached ${basename}.svg`); 118 | } else { 119 | svg = optimize(originalSvg, { 120 | path: digest, 121 | plugins: plugins.concat([addTitleSettings, addAttributes]), 122 | }).data; 123 | logger.debug(`Optimising ${basename}.svg`); 124 | if (doCache) { 125 | fs.writeFileSync(cacheFile, svg); 126 | logger.debug(`Caching ${basename}.svg`); 127 | } else { 128 | logger.debug(`Not caching ${basename}.svg`); 129 | } 130 | } 131 | 132 | // Modify the current node in-place 133 | node.type = 'html'; 134 | node.value = svg; 135 | node.children = []; 136 | } 137 | } 138 | }); 139 | }; 140 | } 141 | 142 | export default function (options) { 143 | return { 144 | name: 'mySvgInline', 145 | hooks: { 146 | 'astro:config:setup': ({ updateConfig, logger }) => { 147 | let doCache = false; 148 | if (options.cachePath) { 149 | try { 150 | if (fs.statSync(options.cachePath).isDirectory()) { 151 | doCache = true; 152 | } else { 153 | logger.warn( 154 | `Not caching SVGs: ${options.cachePath} is not a directory`, 155 | ); 156 | } 157 | } catch (e) { 158 | logger.warn( 159 | `Not caching SVGs: ${options.cachePath} does not exist`, 160 | ); 161 | } 162 | } else { 163 | logger.info('Not caching SVGs: no cachePath provided'); 164 | } 165 | updateConfig({ 166 | markdown: { 167 | remarkPlugins: [ 168 | [inlineSvg, { ...options, logger: logger, doCache: doCache }], 169 | ], 170 | }, 171 | }); 172 | }, 173 | }, 174 | }; 175 | } 176 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "upgrading-ethereum-book", 3 | "version": "0.3.0", 4 | "private": true, 5 | "type": "module", 6 | "description": "A technical handbook on Ethereum's move to proof of stake and beyond", 7 | "author": "Ben Edgington", 8 | "keywords": [], 9 | "scripts": { 10 | "devel": "astro dev", 11 | "build": "astro build", 12 | "minim": "rm -rf dist/ node_modules/.astro && UE_MINIMAL=t astro build", 13 | "serve": "astro preview", 14 | "clean": "rm -rf dist/ .astro/ node_modules/.astro", 15 | "debug": "DEBUG='astro:my*' astro build", 16 | "astro": "astro", 17 | "check": "node --input-type=module -e 'import runChecks from \"./bin/build/prebuild.js\"; runChecks()'", 18 | "links": "bin/util/check_urls.sh src/book.md", 19 | "spell": "bin/util/check_spellings_list.sh", 20 | "spfix": "bin/util/update_spellings_list.sh", 21 | "gramm": "bin/util/check_grammar.sh src/book.md", 22 | "pdfit": "bin/pdf/make_pdf src/book.md", 23 | "stats": "bin/util/stats.sh", 24 | "valid": "node --input-type=module -e 'import validateHtml from \"./bin/util/validate.js\"; validateHtml(\"dist/part2/building_blocks/ssz/index.html\")'" 25 | }, 26 | "dependencies": { 27 | "astro": "^5.8.1", 28 | "glob": "^11.0.2", 29 | "hast-util-select": "^6.0.4", 30 | "hast-util-to-string": "^3.0.1", 31 | "loader-utils": "^3.3.1", 32 | "markdownlint": "^0.38.0", 33 | "rehype-katex": "^7.0.1", 34 | "remark-math": "^6.0.0", 35 | "svgo": "^4.0.0-rc.4" 36 | }, 37 | "devDependencies": { 38 | "@eslint/js": "^9.28.0", 39 | "axios": "^1.9.0", 40 | "eslint": "^9.28.0", 41 | "eslint-plugin-astro": "^1.3.1", 42 | "globals": "^16.1.0", 43 | "prettier": "^3.5.3", 44 | "prettier-plugin-astro": "^0.14.1" 45 | 
} 46 | } 47 | -------------------------------------------------------------------------------- /public/dark_230103.css: -------------------------------------------------------------------------------- 1 | :root { 2 | --background: #111; 3 | --foreground: #eee; 4 | --link-colour: #9cf; 5 | --fn-tt-foreground: #eee; 6 | --fn-tt-background: #333; 7 | --table-border: #999; 8 | --th-background: #444; 9 | --th-border: #999; 10 | --th-colour: #fff; 11 | --sidebar-background: #222; 12 | --sidebar-foreground: #ccc; 13 | --pagenavi-link: #ccc; 14 | --pagenavi-link-hover: #fff; 15 | --search-chunk-background: #222; 16 | --search-text-highlight: #484848; 17 | } 18 | -------------------------------------------------------------------------------- /public/light_230103.css: -------------------------------------------------------------------------------- 1 | :root { 2 | --background: #fff; 3 | --foreground: #000; 4 | --link-colour: #0366d6; 5 | --fn-tt-foreground: #000; 6 | --fn-tt-background: #ddd; 7 | --table-border: #999; 8 | --th-background: #999; 9 | --th-border: #ccc; 10 | --th-colour: #fff; 11 | --sidebar-background: #f3f5f6; 12 | --sidebar-foreground: #000; 13 | --pagenavi-link: #666; 14 | --pagenavi-link-hover: #333; 15 | --search-chunk-background: #eee; 16 | --search-text-highlight: #d8d8d8; 17 | } 18 | -------------------------------------------------------------------------------- /src/charts/randao_extend.py: -------------------------------------------------------------------------------- 1 | def prob_tail_eq(r, k): 2 | return (1 - r) * r**k if k < N else r**k 3 | 4 | # The sum of the products of all the q_i in the hypercube of side j and dimension k 5 | # Recursive is cooler, but written iteratively so that Python doesn't run out of stack 6 | def hyper(q, j, k): 7 | h = 1 8 | for n in range(1, k + 1): 9 | h = sum([q[i] * h for i in range(j)]) 10 | return h 11 | 12 | # Smoke test.
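# hyper(q, j, k) collapses to (q[0] + ... + q[j-1])**k, since every pass of the
# loop multiplies h by the same partial sum. The six entries below sum to exactly
# 1, so the expected result is 1**32 == 1. A smaller worked case:
#   hyper([0.5, 0.25], 2, 2) == (0.5 + 0.25)**2 == 0.5625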
13 | assert abs(hyper([0.9, 0.09, 0.009, 0.0009, 0.00009, 0.00001], 6, 32) - 1.0) < 1e-12 14 | 15 | N = 32 # The number of slots per epoch 16 | KMAX = 12 # The maximum length of prior tail we will consider 17 | NINT = 20 # The number of intervals of r between 0 and 1 to generate 18 | 19 | expected = [[] for i in range(KMAX + 1)] 20 | prob_dec = [[] for i in range(KMAX + 1)] 21 | rs = [i / NINT for i in range(1, NINT)] 22 | for r in rs: 23 | # q[j] = the probability of having a tail of exactly j in one attempt 24 | q = [prob_tail_eq(r, j) for j in range(N + 1)] 25 | for k in range(KMAX + 1): 26 | h = [hyper(q, j, 2**k) for j in range(N + 2)] 27 | # p[j] = the probability that with a tail of k I can achieve a tail of j in the next epoch 28 | p = [h[j + 1] - h[j] for j in range(N + 1)] 29 | # The expected length of tail in the next epoch given r and k 30 | expected[k].append(sum([j * p[j] for j in range(N + 1)])) 31 | # The probability of a decrease in tail length to < k 32 | prob_dec[k].append(h[k]) 33 | print(rs) 34 | print(expected) 35 | print(prob_dec) 36 | -------------------------------------------------------------------------------- /src/charts/reward_variance.py: -------------------------------------------------------------------------------- 1 | # Adapted from https://pintail.xyz/posts/modelling-the-impact-of-altair/ 2 | 3 | import math 4 | from scipy.stats import binom 5 | 6 | def get_quantile(pmf, quantile): 7 | cumulative = 0 8 | for x, prob in sorted(pmf.items()): 9 | cumulative += prob 10 | if cumulative >= quantile: 11 | return x 12 | 13 | NUM_VALIDATORS = 500000 14 | 15 | SECONDS_PER_YEAR = 31557600 16 | SECONDS_PER_SLOT = 12 17 | SLOTS_PER_EPOCH = 32 18 | COMMITTEE_EPOCHS = 256 19 | COMMITTEE_VALIDATORS = 512 20 | 21 | slots_per_year = SECONDS_PER_YEAR / SECONDS_PER_SLOT 22 | epochs_per_year = slots_per_year / SLOTS_PER_EPOCH 23 | committees_per_year = epochs_per_year / COMMITTEE_EPOCHS 24 | 25 | GWEI_PER_ETH = int(1e9) 26 | gwei_per_validator = 32 * GWEI_PER_ETH 27 | BASE_REWARD_FACTOR = 64 28 | 29 | HEAD_WEIGHT = 14 30 | SOURCE_WEIGHT = 14 31 | TARGET_WEIGHT = 26 32 | SYNC_WEIGHT = 2 33 | PROPOSER_WEIGHT = 8 34 | WEIGHT_DENOMINATOR = 64 35 | 36 | base_reward = gwei_per_validator * BASE_REWARD_FACTOR // math.isqrt(NUM_VALIDATORS * gwei_per_validator) 37 | total_reward = base_reward * NUM_VALIDATORS 38 | 39 | altair_proposer_reward = total_reward * PROPOSER_WEIGHT // SLOTS_PER_EPOCH // WEIGHT_DENOMINATOR 40 | altair_att_reward = base_reward * (HEAD_WEIGHT + SOURCE_WEIGHT + TARGET_WEIGHT) // WEIGHT_DENOMINATOR 41 | sync_reward = total_reward * COMMITTEE_EPOCHS * SYNC_WEIGHT // COMMITTEE_VALIDATORS // WEIGHT_DENOMINATOR 42 | 43 | max_committees = 4 44 | max_proposals = 15 45 | 46 | # distribution of committee selections per year 47 | n_committees = [el for el in range(max_committees + 1)] 48 | pmf_committees = binom.pmf(n_committees, committees_per_year, COMMITTEE_VALIDATORS / NUM_VALIDATORS) 49 | 50 | # distribution of block proposal opportunities per year 51 | n_proposals = [el for el in range(max_proposals + 1)] 52 | pmf_proposals = binom.pmf(n_proposals, slots_per_year, 1 / NUM_VALIDATORS) 53 | 54 | # calculate all possible reward levels 55 | altair_pmf = {} 56 | attestation_rewards = epochs_per_year * altair_att_reward 57 | for comms in n_committees: 58 | for props in n_proposals: 59 | reward = comms * sync_reward + props * altair_proposer_reward + attestation_rewards 60 | prob = pmf_committees[comms] * pmf_proposals[props] 61 | if reward in altair_pmf: 62 | 
altair_pmf[reward] += prob 63 | else: 64 | altair_pmf[reward] = prob 65 | 66 | #min_reward = attestation_rewards / GWEI_PER_ETH 67 | #max_reward = (max_committees * sync_reward + max_proposals * altair_proposer_reward + attestation_rewards) / GWEI_PER_ETH 68 | min_reward = 1.1 69 | max_reward = 1.8 70 | n_bins = 35 71 | bins = [min_reward + i * (max_reward - min_reward) / n_bins for i in range(n_bins)] 72 | altair_hist = [0] * n_bins 73 | 74 | # bin the rewards to generate histogram 75 | for reward_gwei, prob in altair_pmf.items(): 76 | reward = reward_gwei / GWEI_PER_ETH 77 | for i, edge in enumerate(bins[1:]): 78 | if reward < edge: 79 | altair_hist[i] += prob 80 | break 81 | 82 | altair_mean = sum([p * r / GWEI_PER_ETH for r, p in altair_pmf.items()]) 83 | altair_sigma = math.sqrt(sum([p * (r / GWEI_PER_ETH)**2 for r, p in altair_pmf.items()]) - altair_mean**2) 84 | altair_median = get_quantile(altair_pmf, 0.5) / GWEI_PER_ETH 85 | 86 | print('\nAltair annual reward statistics (ETH)') 87 | print('-------------------------------------') 88 | print(f' median: {altair_median:.4f}') 89 | print(f' mean: {altair_mean:.4f}') 90 | print(f' standard deviation: {altair_sigma:.4f}') 91 | 92 | print(sum(altair_hist)) # check histogram sums to unity 93 | print(altair_hist) 94 | print(bins) 95 | 96 | c10 = get_quantile(altair_pmf, 0.10) / GWEI_PER_ETH 97 | c90 = get_quantile(altair_pmf, 0.90) / GWEI_PER_ETH 98 | print(f'10th percentile: {c10:.4f}') 99 | print(f'90th percentile: {c90:.4f}') 100 | -------------------------------------------------------------------------------- /src/components/Banner.astro: -------------------------------------------------------------------------------- 1 | --- 2 | import '../css/banner.css'; 3 | 4 | // This will differ between the git branches for the different historical versions 5 | const addBanner = false; 6 | --- 7 | 8 | { 9 | addBanner && ( 10 | 16 | ) 17 | } 18 | -------------------------------------------------------------------------------- /src/components/Footer.astro: -------------------------------------------------------------------------------- 1 | --- 2 | import '../css/footer.css'; 3 | import { Metadata } from '../include/SiteConfig.js'; 4 | --- 5 | 6 | 20 | -------------------------------------------------------------------------------- /src/components/FootnoteTooltips.astro: -------------------------------------------------------------------------------- 1 | --- 2 | import '../css/footnote-tooltips.css'; 3 | --- 4 | 5 | 60 | -------------------------------------------------------------------------------- /src/components/Matomo.astro: -------------------------------------------------------------------------------- 1 | --- 2 | // This is generated by the Matomo installation at eth2book.info and copied and pasted 3 | // from there. 4 | --- 5 | 6 | 24 | -------------------------------------------------------------------------------- /src/components/NestedList.astro: -------------------------------------------------------------------------------- 1 | --- 2 | import { Metadata } from '../include/SiteConfig.js'; 3 | 4 | const { items, level, idx } = Astro.props; 5 | 6 | function conditionalLink(to, noLink, isCurrent, children) { 7 | const active = isCurrent ? ' class="index-active" aria-current="page"' : ''; 8 | const ret = noLink 9 | ? 
children 10 | : `${children}`; 11 | return ret; 12 | } 13 | 14 | function renderNestedList(items, level, idx) { 15 | let result = ''; 16 | let i = idx; 17 | 18 | while (i < items.length) { 19 | const item = items[i]; 20 | const labelSpan = item.label 21 | ? `${item.label}` 22 | : ''; 23 | 24 | if (item.level === level) { 25 | let nestedContent = ''; 26 | if (i + 1 < items.length && items[i + 1].level > level) { 27 | nestedContent = renderNestedList(items, level + 1, i + 1); 28 | } 29 | 30 | result += 31 | `
  • ` + 32 | `${conditionalLink(item.link, item.hide, item.here, labelSpan + ' ' + item.title)}` + 33 | `${nestedContent}` + 34 | `
  • `; 35 | 36 | // Skip child elements 37 | i++; 38 | while (i < items.length && items[i].level > level) i++; 39 | } else { 40 | break; 41 | } 42 | } 43 | return `
      ${result}
    `; 44 | } 45 | --- 46 | 47 | 48 | -------------------------------------------------------------------------------- /src/components/PageList.astro: -------------------------------------------------------------------------------- 1 | --- 2 | import NestedList from './NestedList.astro'; 3 | 4 | const { pages, depth, here } = Astro.props; 5 | 6 | const filteredPages = pages.filter((p) => p.data.index !== null); 7 | const render = filteredPages.length !== 0; 8 | 9 | // Make a flat array of list level and the list info 10 | const layout = filteredPages.map((p) => { 11 | return { 12 | level: p.data.index.length, 13 | label: p.data.index.join('.'), 14 | title: p.data.titles[p.data.titles.length - 1], 15 | link: p.data.path, 16 | hide: p.data.hide, 17 | here: p.data.sequence === here, 18 | }; 19 | }); 20 | --- 21 | 22 | {render && } 23 | -------------------------------------------------------------------------------- /src/components/PageNavi.astro: -------------------------------------------------------------------------------- 1 | --- 2 | import '../css/pagenavi.css'; 3 | import NestedList from './NestedList.astro'; 4 | 5 | const { page } = Astro.props; 6 | 7 | const headings = page.rendered.metadata.headings; 8 | 9 | const items = headings 10 | ? headings 11 | .filter((h) => h.depth >= 2 && h.text !== 'Footnotes') 12 | .map((h) => { 13 | return { 14 | level: h.depth, 15 | label: '', 16 | title: h.text, 17 | link: page.data.path + '#' + h.slug, 18 | hide: false, 19 | here: false, 20 | }; 21 | }) 22 | : []; 23 | --- 24 | 25 | { 26 | headings && ( 27 | 34 | ) 35 | } 36 | -------------------------------------------------------------------------------- /src/components/PrevNext.astro: -------------------------------------------------------------------------------- 1 | --- 2 | import '../css/prevnext.css'; 3 | import { Metadata } from '../include/SiteConfig.js'; 4 | import PrevNextLink from './PrevNextLink.astro'; 5 | 6 | const { seq, pages } = Astro.props; 7 | 8 | const prevPage = seq 9 | ? pages.filter((p) => p.data.sequence === seq - 1)[0] 10 | : null; 11 | const nextPage = seq 12 | ? pages.filter((p) => p.data.sequence === seq + 1)[0] 13 | : null; 14 | --- 15 | 16 | { 17 | seq && ( 18 | 33 | ) 34 | } 35 | -------------------------------------------------------------------------------- /src/components/PrevNextLink.astro: -------------------------------------------------------------------------------- 1 | --- 2 | import { Metadata } from '../include/SiteConfig.js'; 3 | 4 | const { page, rel } = Astro.props; 5 | const skip = page === null || page === undefined || page.data.sequence < 0; 6 | const title = skip ? null : page.data.titles.join(' > '); 7 | --- 8 | 9 | { 10 | !skip && ( 11 | 12 | 13 | 14 | ) 15 | } 16 | -------------------------------------------------------------------------------- /src/components/Scripts.astro: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | --- 4 | 5 | 68 | -------------------------------------------------------------------------------- /src/components/Search.astro: -------------------------------------------------------------------------------- 1 | --- 2 | import '../css/search.css'; 3 | import { Metadata, SearchOptions } from '../include/SiteConfig.js'; 4 | --- 5 | 6 |
    7 |
    8 | 15 | 16 | 17 | 18 | 19 |
    20 |
    21 |

    No results

    22 |
    23 |
24 | 25 | 210 | -------------------------------------------------------------------------------- /src/components/Sidebar.astro: -------------------------------------------------------------------------------- 1 | --- 2 | import '../css/sidebar.css'; 3 | import { Metadata } from '../include/SiteConfig.js'; 4 | import PageList from './PageList.astro'; 5 | 6 | const { pageIndex, pages, here } = Astro.props; 7 | 8 | // List only parts and chapters and immediate children in the sidebar 9 | const index = pageIndex ? pageIndex : []; 10 | const filteredPages = 11 | index.length < 2 12 | ? pages.filter((p) => p.data.index.length <= 2) 13 | : pages.filter( 14 | (p) => 15 | p.data.index.length <= 2 || 16 | (p.data.index.length === 3 && 17 | p.data.index[0] === index[0] && 18 | p.data.index[1] === index[1]), 19 | ); 20 | --- 21 | 22 | 32 | -------------------------------------------------------------------------------- /src/components/Subsections.astro: -------------------------------------------------------------------------------- 1 | --- 2 | import PageList from './PageList.astro'; 3 | import '../css/subsections.css'; 4 | 5 | const { indexArray, pages } = Astro.props; 6 | 7 | let index = indexArray; 8 | 9 | // Only add the auto index for Parts, not any deeper structure 10 | const skip = index === undefined || index === null || index.length > 1; 11 | 12 | // Special hacky handling for the /contents page 13 | if (!skip && index[0] === -1) { 14 | index = []; 15 | } 16 | 17 | // Find pages that are subsections of the page we are on 18 | const indexFilterString = skip || index.length === 0 ? '' : index.join() + ','; 19 | const filteredPages = skip 20 | ? [] 21 | : pages.filter((p) => p.data.index.join().startsWith(indexFilterString)); 22 | --- 23 | 24 | { 25 | filteredPages.length > 0 && ( 26 |
    27 | 28 |
    29 | ) 30 | } 31 | -------------------------------------------------------------------------------- /src/content.config.js: -------------------------------------------------------------------------------- 1 | import { defineCollection, z } from 'astro:content'; 2 | import { glob } from 'astro/loaders'; 3 | 4 | const minimal = import.meta.env.UE_MINIMAL === undefined ? false : true; 5 | if (minimal) { 6 | console.log('Building minimal configuration'); 7 | } 8 | 9 | const pages = defineCollection({ 10 | loader: minimal 11 | ? glob({ pattern: '**/preface.md', base: './src/md/pages' }) 12 | : glob({ pattern: '**/*.md', base: './src/md/pages' }), 13 | schema: z.object({ 14 | hide: z.boolean(), 15 | path: z.string(), 16 | titles: z.array(z.string()), 17 | index: z.array(z.number()), 18 | sequence: z.number(), 19 | search: z.boolean().optional(), 20 | }), 21 | }); 22 | 23 | const special = defineCollection({ 24 | loader: minimal 25 | ? glob({ pattern: 'search.md', base: './src/md' }) 26 | : glob({ pattern: '*.md', base: './src/md' }), 27 | schema: z.object({ 28 | path: z.string(), 29 | titles: z.array(z.string()).optional(), 30 | index: z.array(z.number()).optional(), 31 | sequence: z.number().optional(), 32 | search: z.boolean().optional(), 33 | }), 34 | }); 35 | 36 | export const collections = { pages, special }; 37 | -------------------------------------------------------------------------------- /src/css/banner.css: -------------------------------------------------------------------------------- 1 | @media print { 2 | #banner { 3 | display: none; 4 | } 5 | } 6 | 7 | #banner { 8 | position: sticky; 9 | position: -webkit-sticky; 10 | top: 0; 11 | left: 0; 12 | margin: 0; 13 | padding: 0.5ex 0; 14 | width: 100%; 15 | background: #505; 16 | text-align: center; 17 | z-index: 2; 18 | } 19 | 20 | #banner p { 21 | color: #fff; 22 | margin: 0; 23 | } 24 | 25 | #banner p a { 26 | color: inherit; 27 | text-decoration: underline; 28 | } 29 | -------------------------------------------------------------------------------- /src/css/custom.css: -------------------------------------------------------------------------------- 1 | :root { 2 | --sidebar-width: 20vw; 3 | --content-width: 50vw; 4 | --navi-width: 20vw; 5 | --max-page-width: 1920px; 6 | 7 | /* Dark mode transition */ 8 | --transition: 0.5s ease 0s; 9 | } 10 | 11 | .scrollable { 12 | height: 100vh; 13 | overflow-y: auto; 14 | overscroll-behavior: none; 15 | /* Thin scrollbars on FireFox (seems to be default on Linux, but not Windows) */ 16 | scrollbar-width: thin; 17 | } 18 | 19 | /*** Thin scrollbars on Chrome ***/ 20 | ::-webkit-scrollbar { 21 | width: 9px; 22 | height: 9px; 23 | } 24 | 25 | ::-webkit-scrollbar-track { 26 | background: transparent; 27 | } 28 | 29 | ::-webkit-scrollbar-thumb { 30 | background-color: rgba(155, 155, 155, 0.5); 31 | border-radius: 20px; 32 | border: transparent; 33 | } 34 | -------------------------------------------------------------------------------- /src/css/dark-mode-toggle.css: -------------------------------------------------------------------------------- 1 | aside#dark-mode-toggle { 2 | position: absolute; 3 | left: 0; 4 | top: 0; 5 | padding: 0 0 0 0.5rem; 6 | } 7 | 8 | dark-mode-toggle { 9 | --dark-mode-toggle-dark-icon: url("../images/sun.svg"); 10 | --dark-mode-toggle-light-icon: url("../images/moon.svg"); 11 | --dark-mode-toggle-icon-size: 20px; 12 | } 13 | 14 | @media print { 15 | aside#dark-mode-toggle { 16 | display: none; 17 | } 18 | } 19 | 20 | @media screen and (max-width: 1200px) { 21 | 
aside#dark-mode-toggle { 22 | position: absolute; 23 | display: block; 24 | width: 100vw; 25 | padding: 0; 26 | height: 20px; 27 | overflow-y: visible; 28 | text-align: right; 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /src/css/fonts.css: -------------------------------------------------------------------------------- 1 | @font-face { 2 | font-family: 'Computer Modern Serif'; 3 | src: url('https://eth2book.info/fonts/cmu/cmunrm.woff2') format('woff2'), 4 | url('https://eth2book.info/fonts/cmu/cmunrm.woff') format('woff'); 5 | font-weight: normal; 6 | font-style: normal; 7 | } 8 | 9 | @font-face { 10 | font-family: 'Computer Modern Serif'; 11 | src: url('https://eth2book.info/fonts/cmu/cmunbx.woff2') format('woff2'), 12 | url('https://eth2book.info/fonts/cmu/cmunbx.woff') format('woff'); 13 | font-weight: bold; 14 | font-style: normal; 15 | } 16 | 17 | @font-face { 18 | font-family: 'Computer Modern Serif'; 19 | src: url('https://eth2book.info/fonts/cmu/cmunti.woff2') format('woff2'), 20 | url('https://eth2book.info/fonts/cmu/cmunti.woff') format('woff'); 21 | font-weight: normal; 22 | font-style: italic; 23 | } 24 | 25 | @font-face { 26 | font-family: 'Computer Modern Serif'; 27 | src: url('https://eth2book.info/fonts/cmu/cmunbi.woff2') format('woff2'), 28 | url('https://eth2book.info/fonts/cmu/cmunbi.woff') format('woff'); 29 | font-weight: bold; 30 | font-style: italic; 31 | } 32 | 33 | @font-face { 34 | font-family: 'Computer Modern Typewriter'; 35 | src: url('https://eth2book.info/fonts/cmu/cmuntt.woff2') format('woff2'), 36 | url('https://eth2book.info/fonts/cmu/cmuntt.woff') format('woff'); 37 | font-weight: normal; 38 | font-style: normal; 39 | } 40 | 41 | @font-face { 42 | font-family: 'Computer Modern Typewriter'; 43 | src: url('https://eth2book.info/fonts/cmu/cmuntb.woff2') format('woff2'), 44 | url('https://eth2book.info/fonts/cmu/cmuntb.woff') format('woff'); 45 | font-weight: bold; 46 | font-style: normal; 47 | } 48 | 49 | @font-face { 50 | font-family: 'Computer Modern Typewriter'; 51 | src: url('https://eth2book.info/fonts/cmu/cmunit.woff2') format('woff2'), 52 | url('https://eth2book.info/fonts/cmu/cmunit.woff') format('woff'); 53 | font-weight: normal; 54 | font-style: italic; 55 | } 56 | 57 | @font-face { 58 | font-family: 'Computer Modern Typewriter'; 59 | src: url('https://eth2book.info/fonts/cmu/cmuntx.woff2') format('woff2'), 60 | url('https://eth2book.info/fonts/cmu/cmuntx.woff') format('woff'); 61 | font-weight: bold; 62 | font-style: italic; 63 | } 64 | -------------------------------------------------------------------------------- /src/css/footer.css: -------------------------------------------------------------------------------- 1 | footer { 2 | font-size: 85%; 3 | border-top: solid 1px #999; 4 | padding-top: 1ex; 5 | margin-top: 10ex; 6 | width: 100%; 7 | } 8 | 9 | footer a.githash { 10 | font-family: "Computer Modern Typewriter", monospace; 11 | } 12 | 13 | footer a[rel*="external"]::after { 14 | content: ""; 15 | } 16 | -------------------------------------------------------------------------------- /src/css/footnote-tooltips.css: -------------------------------------------------------------------------------- 1 | @import "custom.css"; 2 | 3 | sup:has(a[data-footnote-ref]) { 4 | position: relative; 5 | display: inline; 6 | } 7 | 8 | sup:hover span.fn-span { 9 | visibility: visible; 10 | opacity: 1; 11 | z-index: 999; 12 | } 13 | 14 | .fn-span { 15 | visibility: hidden; 16 | font-family: 'Open Sans', 
sans-serif; 17 | font-size: 80%; 18 | font-weight: normal; 19 | position: absolute; 20 | top: 30px; 21 | color: var(--fn-tt-foreground); 22 | background: var(--fn-tt-background); 23 | border-radius: 6px; 24 | padding: 5px 10px; 25 | width: calc(var(--content-width) / 3); 26 | white-space: normal; 27 | text-align: left; 28 | opacity: 0; 29 | transition: opacity 0.8s; 30 | } 31 | 32 | .fn-span p { 33 | padding: 0; 34 | margin: 0; 35 | } 36 | 37 | @media print { 38 | .fn-span { 39 | display: none; 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /src/css/page.css: -------------------------------------------------------------------------------- 1 | @import "custom.css"; 2 | @import "fonts.css"; 3 | @import "dark-mode-toggle.css"; 4 | 5 | html { 6 | height: 100vh; 7 | overflow-y: hidden; 8 | } 9 | 10 | body { 11 | margin: 0; 12 | padding: 0; 13 | background-color: var(--background); 14 | color: var(--foreground); 15 | text-rendering: optimizeLegibility; 16 | font-family: system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Ubuntu, Cantarell, "Noto Sans", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol"; 17 | transition: color var(--transition), 18 | background-color var(--transition); 19 | } 20 | 21 | /* Screen reader only content - leave it there but make it take no space */ 22 | .sr-only { 23 | position: absolute; 24 | width: 1px; 25 | height: 1px; 26 | padding: 0; 27 | margin: -1px; 28 | overflow: hidden; 29 | clip-path: rect(0 0 0 0); 30 | border: 0; 31 | } 32 | 33 | #page { 34 | display: flex; 35 | flex-direction: row; 36 | width: 100vw; 37 | height: 100vh; 38 | margin: 0px; 39 | padding: 0px; 40 | overflow: hidden; 41 | } 42 | 43 | #main-content { 44 | display: inline-block; 45 | box-sizing: border-box; 46 | margin: 0; 47 | padding: 0; 48 | font-family: "Computer Modern Serif", serif; 49 | font-size: 20px; 50 | overflow-x: hidden; 51 | } 52 | 53 | #padded-content { 54 | width: var(--content-width); 55 | margin: 0 calc( (100vw - var(--sidebar-width) - var(--content-width) - var(--navi-width)) / 2 ); 56 | padding: 0; 57 | } 58 | 59 | code, pre, tt { 60 | font-family: "Computer Modern Typewriter", SFMono-Regular, Consolas, Liberation Mono, Menlo, monospace; 61 | } 62 | 63 | p { 64 | margin-top: 0px; 65 | margin-bottom: 16px; 66 | line-height: 1.4128; 67 | } 68 | 69 | a { 70 | color: var(--link-colour); 71 | text-decoration: none; 72 | transition: color var(--transition); 73 | } 74 | 75 | a:hover { 76 | text-decoration: underline; 77 | } 78 | 79 | a[rel*="external"]::after { 80 | content: "↗"; 81 | font-size: 10px; 82 | vertical-align: top; 83 | } 84 | 85 | code { 86 | font-size: 95%; 87 | } 88 | 89 | /*** Constant line heights ***/ 90 | 91 | sup, sub { 92 | font-size: 0.75em; 93 | line-height: 0; 94 | vertical-align: baseline; 95 | position: relative; 96 | display: inline-block; 97 | } 98 | 99 | sup { 100 | top: -0.6em; 101 | left: 0.1em; 102 | } 103 | 104 | sub { 105 | bottom: -0.25em; 106 | } 107 | 108 | sup a, sub a, sup span, sub span { 109 | line-height: normal; 110 | font-size: inherit; 111 | vertical-align: baseline; 112 | } 113 | 114 | /*** Headings ***/ 115 | 116 | h2, h3, h4, h5, h6 { 117 | position: relative; 118 | } 119 | 120 | h3 { 121 | padding-bottom: 6px; 122 | margin-bottom: 1ex; 123 | } 124 | 125 | h4, h5, h6 { 126 | margin-top: 4ex; 127 | margin-bottom: 1ex; 128 | } 129 | 130 | h3 + h4 { 131 | margin-top: 1ex; 132 | } 133 | 134 | h1 + p, 135 | h2 + p, 136 | h3 + p,
137 | h4 + p, 138 | h5 + p, 139 | h6 + p { 140 | margin-top: 1ex; 141 | } 142 | 143 | h3 { 144 | font-size: 130%; 145 | } 146 | 147 | h4 { 148 | font-size: 120%; 149 | } 150 | 151 | h5 { 152 | font-size: 110%; 153 | } 154 | 155 | h6 { 156 | font-size: 100%; 157 | } 158 | 159 | h4 code, h5 code, h6 code {font-size: inherit; } 160 | 161 | /*** Heading autolinks ***/ 162 | 163 | a.anchor { 164 | position: absolute; 165 | left: -20px; 166 | display: inline-block; 167 | } 168 | 169 | a.anchor svg { 170 | width: 16px; 171 | height: 16px; 172 | opacity: 0; 173 | transition: opacity 0.3s ease; 174 | 175 | } 176 | 177 | h2:hover svg, h3:hover svg, h4:hover svg, h5:hover svg, h6:hover svg { 178 | opacity: 1; 179 | } 180 | 181 | /*** Others ***/ 182 | 183 | figure { 184 | text-align: center; 185 | padding: 0; 186 | margin: 3ex auto; 187 | font-style: italic; 188 | } 189 | 190 | figcaption { 191 | text-align: center; 192 | padding: 0; 193 | margin: 0 auto 3ex auto; 194 | font-style: italic; 195 | } 196 | 197 | table { 198 | border: solid var(--table-border) 1px; 199 | border-collapse: collapse; 200 | margin: 3ex 0; 201 | } 202 | 203 | td, th { 204 | border: solid var(--table-border) 1px; 205 | padding: 4px 8px; 206 | text-align: left; 207 | vertical-align: top; 208 | } 209 | 210 | th { 211 | color: var(--th-colour); 212 | background-color: var(--th-background); 213 | border: solid var(--th-border) 1px; 214 | border-top-style: inherit; 215 | } 216 | 217 | th:first-child { 218 | border-left-style: inherit; 219 | } 220 | 221 | th:last-child { 222 | border-right-style: inherit; 223 | } 224 | 225 | th:empty { 226 | display: none; 227 | } 228 | 229 | main li { 230 | line-height: 1.4128; 231 | padding-bottom: 3px; 232 | } 233 | 234 | main ol { 235 | list-style-type: decimal; 236 | } 237 | 238 | main ol ol { 239 | list-style-type: lower-alpha; 240 | } 241 | 242 | main ol ol ol { 243 | list-style-type: lower-roman; 244 | } 245 | 246 | blockquote { 247 | margin-left: 0; 248 | padding-left: 2em; 249 | border-left: solid #ccc 4px; 250 | } 251 | 252 | /*** Title page format ***/ 253 | 254 | div.title-page { 255 | display: flex; 256 | min-height: 80vh; 257 | margin: 0 20%; 258 | text-align: center; 259 | flex-direction: column; 260 | justify-content: space-around; 261 | font-weight: bold; 262 | } 263 | 264 | div.title-page .h2, div.title-page .h3, div.title-page .h4 { 265 | border: none; 266 | padding: 0; 267 | margin: 1ex 0; 268 | } 269 | 270 | div.title-page .h2 { 271 | font-size: 30px; 272 | } 273 | 274 | div.title-page .h3 { 275 | font-size: 26px; 276 | } 277 | 278 | div.title-page .h4 { 279 | font-size: 24px; 280 | } 281 | 282 | div.title-page svg { 283 | margin: 1ex auto; 284 | } 285 | 286 | /*** Headings for chapters ***/ 287 | div.chapter-header h1 { 288 | display:inline; 289 | font-size: 90%; 290 | } 291 | 292 | /*** Headings for sections ***/ 293 | div.section-header h1, div.section-header h2 { 294 | display:inline; 295 | font-size: 0.9em; 296 | } 297 | div.section-header h1::after { 298 | content: " | "; 299 | } 300 | 301 | div.section-header, div.chapter-header { 302 | padding: 0; 303 | border-bottom: solid 1px #999; 304 | } 305 | 306 | details { 307 | margin-bottom: 2ex; 308 | } 309 | 310 | details summary { 311 | font-weight: bold; 312 | } 313 | 314 | details p:first-of-type { 315 | padding-top: 2ex; 316 | } 317 | 318 | /*** Code prettifier ***/ 319 | 320 | pre.language-text { 321 | background-color: inherit; 322 | color: inherit; 323 | } 324 | 325 | code.language-text { 326 | color: inherit; 327 | } 
328 | 329 | div.gatsby-highlight { 330 | margin: 1ex 0 3ex 0; 331 | } 332 | 333 | pre[class^="language-"] { 334 | padding: 1ex 1em; 335 | border-radius: 1ex; 336 | line-height: 1.0; 337 | } 338 | 339 | pre[class^="language-"] code { 340 | font-size: 0.8em; 341 | line-height: 1.0; 342 | } 343 | 344 | /*** Equations ***/ 345 | 346 | /* Keep inline equations the same size as surrounding text */ 347 | p span.katex, li span.katex { 348 | font-size: inherit; 349 | } 350 | 351 | /* Control size of the displayed equations - this makes them a little smaller */ 352 | .katex-display .katex { 353 | font-size: 1em; 354 | } 355 | 356 | /*** Footnotes ***/ 357 | section.footnotes { 358 | margin-top: 6ex; 359 | border: 0; 360 | border-top: solid 1px #ccc; 361 | font-size: 85%; 362 | } 363 | 364 | /*** Summary boxes ***/ 365 | div.summary { 366 | width: 80%; 367 | margin: 1ex auto; 368 | padding: 1ex 1em; 369 | border: double #999 3.5px; 370 | } 371 | 372 | /*** SVG stuff - needed for dark mode support. Here be dragons. ***/ 373 | 374 | svg path, svg g, svg use { 375 | transition: stroke var(--transition), 376 | fill var(--transition); 377 | } 378 | 379 | a.anchor svg path { 380 | fill: var(--foreground); 381 | } 382 | 383 | figure svg path { 384 | stroke: var(--foreground); 385 | fill: none; 386 | } 387 | 388 | /* Correct weight for axes and labels in charts */ 389 | figure.chart svg path[class$="Text"], 390 | figure.chart svg g[class*="Axis"] g[class$="tick"] path { 391 | fill: var(--foreground)!important; 392 | stroke: none!important; 393 | } 394 | 395 | /* The order of these two matters. The first makes sure our dashed lines are not fat, 396 | the second that other elements are not messed up */ 397 | figure.diagram svg path[d] { 398 | stroke: none!important; 399 | fill:var(--foreground)!important; 400 | } 401 | 402 | figure.diagram svg path[stroke] { 403 | stroke: var(--foreground)!important; 404 | fill: none!important; 405 | } 406 | 407 | /* Some elements need a fill... */ 408 | figure.diagram svg path[fill] { 409 | fill: var(--background)!important; 410 | } 411 | 412 | /* ...and some don't. */ 413 | figure.diagram svg path[fill=none] { 414 | fill: none!important; 415 | } 416 | 417 | /* Make Firefox show text in diagrams */ 418 | figure.diagram svg use { 419 | fill: var(--foreground); 420 | } 421 | 422 | /* Always display my avatar as positive */ 423 | div.title-page svg g#layer101 path { 424 | fill: #343434!important; 425 | } 426 | 427 | @media print { 428 | html, body, #page, #main-content { 429 | --background: #fff; 430 | --foreground: #000; 431 | display: block !important; 432 | overflow: visible !important; 433 | width: 100% !important; 434 | max-width: initial !important; 435 | min-width: initial !important; 436 | margin: initial !important; 437 | padding: initial !important; 438 | background-color: var(--background); 439 | color: var(--foreground); 440 | } 441 | #main-content { 442 | font-size: 12pt !important; 443 | } 444 | #padded-content { 445 | width: auto !important; 446 | margin: 0 !important; 447 | padding: 0 12mm !important; 448 | } 449 | h1, h2, h3, h4, h5, h6 { 450 | position: initial !important; 451 | break-after: avoid; /* Not working with Chrome? 
*/ 452 | page-break-after: avoid; 453 | } 454 | td, th { 455 | color: inherit; 456 | border: solid #999999 1px; 457 | } 458 | a { 459 | color: black; 460 | } 461 | a[rel*="external"] { 462 | color: blue; 463 | } 464 | a[rel*="external"]::after { 465 | content: ""; 466 | } 467 | a.anchor { 468 | display: none; 469 | } 470 | a.data-footnote-backref { 471 | display: none; 472 | } 473 | /* Undo code styling */ 474 | pre[class*="language-"] { 475 | background: none; 476 | overflow: hidden; 477 | } 478 | pre[class*="language-"] code { 479 | color: black !important; 480 | font-size: 75% !important; 481 | } 482 | pre[class*="language-"] code span { 483 | color: black !important; 484 | } 485 | summary { 486 | list-style: none; 487 | } 488 | } 489 | 490 | /* For wide displays */ 491 | @media screen and (min-width: 1920px) { 492 | body { 493 | position: relative; 494 | width: var(--max-page-width); 495 | margin: 0 auto; 496 | } 497 | #padded-content { 498 | width: calc( var(--max-page-width) / 2 ); 499 | margin: 0 calc( var(--max-page-width) / 20 ); 500 | } 501 | } 502 | 503 | /* For mobile */ 504 | @media screen and (max-width: 1200px) { 505 | #main-content { 506 | width: auto; 507 | } 508 | #padded-content { 509 | width: auto; 510 | margin: 0 auto; 511 | padding: 1.5rem 1.5rem 5rem 1.5rem; 512 | min-width: auto; 513 | } 514 | footer { 515 | padding-bottom: 1rem; 516 | } 517 | div.title-page { 518 | margin: 0; 519 | } 520 | } 521 | 522 | /* Further changes for very narrow screens */ 523 | @media screen and (max-width: 600px) { 524 | #padded-content { 525 | padding: 1.5rem 0.75rem 5rem 0.75rem; 526 | } 527 | figure { 528 | width: 100%!important; 529 | } 530 | footer { 531 | margin-top: 3rem; 532 | } 533 | blockquote { 534 | padding-left: 1rem; 535 | } 536 | ul { 537 | padding-left: 1.2rem; 538 | } 539 | ol { 540 | padding-left: 1.5rem; 541 | } 542 | } 543 | -------------------------------------------------------------------------------- /src/css/pagenavi.css: -------------------------------------------------------------------------------- 1 | @import "custom.css"; 2 | 3 | #page-navi { 4 | display: inline-block; 5 | box-sizing: border-box; 6 | width: var(--navi-width); 7 | padding: 0 1em 0 2em; 8 | margin: 0px; 9 | font-size: 80%; 10 | } 11 | 12 | #page-navi a { 13 | color: var(--pagenavi-link); 14 | transition: color var(--transition); 15 | } 16 | 17 | #page-navi a:hover { 18 | text-decoration: none; 19 | color: var(--pagenavi-link-hover); 20 | } 21 | 22 | #page-navi ul { 23 | padding-left: 1em; 24 | } 25 | 26 | #page-navi li { 27 | list-style-type: none; 28 | padding-bottom: 3px; 29 | font-weight: normal; 30 | } 31 | 32 | #page-navi > ul > li { 33 | padding-top: 1ex; 34 | font-weight: bold; 35 | } 36 | 37 | @media print { 38 | #page-navi { 39 | display: none; 40 | } 41 | } 42 | 43 | @media screen and (min-width: 1920px) { 44 | #page-navi { 45 | width: calc( var(--max-page-width) / 5 ); 46 | } 47 | } 48 | 49 | @media screen and (max-width: 1200px) { 50 | #page-navi { 51 | display: none; 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /src/css/prevnext.css: -------------------------------------------------------------------------------- 1 | .prevnext { 2 | width: 100%; 3 | margin: 0; 4 | padding: 6px 0; 5 | display: flex; 6 | justify-content: space-between; 7 | } 8 | 9 | .prevnext span { 10 | display: flex; 11 | } 12 | 13 | .prevnext span.prev { 14 | justify-content: flex-start; 15 | } 16 | 17 | .prevnext span.next { 18 | justify-content: flex-end; 
19 | } 20 | 21 | .prevnext span.contents { 22 | display: none; 23 | } 24 | 25 | @media print { 26 | .prevnext { 27 | display: none; 28 | } 29 | } 30 | 31 | @media screen and (max-width: 1200px) { 32 | .prevnext span.contents { 33 | display: flex; 34 | justify-content: center; 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /src/css/search.css: -------------------------------------------------------------------------------- 1 | @import "custom.css"; 2 | 3 | #search-parameters { 4 | margin-bottom: 2rem; 5 | } 6 | 7 | #search-parameters input#search-text { 8 | background-color: var(--background); 9 | color: var(--foreground); 10 | padding: 6px; 11 | margin-right: 12px; 12 | border: 1px solid #ccc; 13 | font-size: 1rem; 14 | width: 50%; 15 | transition: color var(--transition), 16 | background-color var(--transition); 17 | } 18 | 19 | #search-parameters input#search-text:focus { 20 | outline: none; 21 | } 22 | 23 | #search-parameters span#checkbox { 24 | display: inline-block; 25 | } 26 | 27 | #search-parameters span#checkbox label { 28 | padding-left: 0.2rem; 29 | font-family: sans-serif; 30 | font-size: 1rem; 31 | } 32 | 33 | #search-parameters input#is-case-sensitive { 34 | width: 1rem; 35 | height: 1rem; 36 | } 37 | 38 | #search-results ul { 39 | padding-left: 0; 40 | margin-left: 0; 41 | list-style-type: none; 42 | } 43 | 44 | #search-results ul li ul { 45 | padding-left: 0.5rem; 46 | } 47 | 48 | #search-results ul li ul li { 49 | margin: 0.8rem 0; 50 | padding: 0.5rem; 51 | background: var(--search-chunk-background); 52 | border-radius: 0.75rem; 53 | font-size: 1.1rem; 54 | } 55 | 56 | #search-results a.label { 57 | display: block; 58 | font-family: sans-serif; 59 | font-size: 0.9rem; 60 | } 61 | 62 | #search-results span.chunk-text.pre { 63 | display: block; 64 | white-space: pre; 65 | font-family: monospace; 66 | font-size: 0.9rem; 67 | line-height: 1.1rem; 68 | overflow-x: auto; 69 | } 70 | 71 | #search-results > ul li { 72 | padding-bottom: 1ex; 73 | } 74 | 75 | #search-results > ul li:last-child { 76 | margin-bottom: 1ex; 77 | } 78 | 79 | #search-results span.match-text { 80 | background-color: var(--search-text-highlight); 81 | } 82 | -------------------------------------------------------------------------------- /src/css/sidebar.css: -------------------------------------------------------------------------------- 1 | @import "custom.css"; 2 | 3 | nav.sidebar { 4 | display: inline-block; 5 | box-sizing: border-box; 6 | width: var(--sidebar-width); 7 | padding: 0 1em 0 2em; 8 | margin: 0px; 9 | border-right: 1px solid rgb(230, 236, 241); 10 | text-align: center; 11 | background-color: var(--sidebar-background); 12 | color: var(--sidebar-foreground); 13 | transition: color var(--transition), 14 | background-color var(--transition); 15 | } 16 | 17 | #index { 18 | margin: 0 1em; 19 | text-align: left; 20 | } 21 | 22 | #index, 23 | #index a:hover { 24 | text-decoration: none; 25 | } 26 | 27 | #index li { 28 | text-indent: -1em; 29 | list-style-type: none; 30 | line-height: 1.4; 31 | font-weight: normal; 32 | } 33 | 34 | #index ul { 35 | padding-left: 1em; 36 | } 37 | 38 | #index > ul > li { 39 | padding-top: 1ex; 40 | font-weight: bold; 41 | } 42 | 43 | #index > ul > li > a > span.label-string { 44 | display: none; 45 | } 46 | 47 | #index a.index-active { 48 | text-decoration: underline; 49 | } 50 | 51 | nav.sidebar div.sidebar-title { 52 | font-size: 14px; 53 | font-weight: bold; 54 | padding: 10px 0; 55 | } 56 | 57 | nav.sidebar
div.sidebar-title a:hover { 58 | text-decoration: none; 59 | } 60 | 61 | @media print { 62 | nav.sidebar { 63 | display: none; 64 | } 65 | } 66 | 67 | @media screen and (min-width: 1920px) { 68 | nav.sidebar { 69 | width: calc( var(--max-page-width) / 5 ); 70 | } 71 | } 72 | 73 | @media screen and (max-width: 1200px) { 74 | nav.sidebar { 75 | display: none; 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /src/css/subsections.css: -------------------------------------------------------------------------------- 1 | .subsection-list .label-string { 2 | display: none; 3 | } 4 | -------------------------------------------------------------------------------- /src/images/benjaminion.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/benjaminion/upgrading-ethereum-book/8697ba063012970a996288db4b9ebab3be1dfd37/src/images/benjaminion.pdf -------------------------------------------------------------------------------- /src/images/benjaminion.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/images/moon.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /src/images/sun.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /src/include/SiteConfig.js: -------------------------------------------------------------------------------- 1 | import { execSync } from 'child_process'; 2 | 3 | function getGitHash() { 4 | try { 5 | return execSync('git log -1 --format="%h" 2>/dev/null', { 6 | encoding: 'utf8', 7 | }).replace(/(\r\n|\n|\r)/, ''); 8 | } catch (e) { 9 | return 'unknown'; 10 | } 11 | } 12 | 13 | function getGitBranch() { 14 | try { 15 | return execSync('git branch --show-current 2>/dev/null', { 16 | encoding: 'utf8', 17 | }).replace(/(\r\n|\n|\r)/, ''); 18 | } catch (e) { 19 | return 'unknown'; 20 | } 21 | } 22 | 23 | const date = new Date().toISOString().substr(0, 16).replace('T', ' ') + ' UTC'; 24 | const version = getGitBranch(); 25 | const hostname = 'https://eth2book.info'; 26 | const canonical = hostname + '/latest'; 27 | 28 | const Metadata = { 29 | title: 'Upgrading Ethereum', 30 | description: 31 | "A technical handbook on Ethereum's move to proof of stake and beyond", 32 | author: 'Ben Edgington', 33 | gitHash: getGitHash(), 34 | gitUrl: 'https://github.com/benjaminion/upgrading-ethereum-book', 35 | date: date, 36 | licenceUrl: 'https://creativecommons.org/licenses/by-sa/4.0/', 37 | licence: 'CC BY-SA 4.0', 38 | hostname: hostname, 39 | version: version, 40 | canonical: canonical, 41 | }; 42 | 43 | const SearchOptions = { 44 | enabled: true, 45 | indexFile: 'search-index.json', 46 | // Matching elements have their text added to the index. First match wins. 47 | // Note that these are not full CSS selectors - they can only match the current element. 
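// For example, a footnote <li> whose id begins "fn-" matches the 'li[id^="fn-"]'
// query before the plain 'li' query, so it is labelled "Footnote" rather than
// "List item".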
48 | // See https://github.com/syntax-tree/hast-util-select#matchesselector-node-space 49 | chunkTypes: [ 50 | { query: 'figcaption', label: 'Figure caption' }, 51 | { query: 'li[id^="fn-"]', label: 'Footnote' }, 52 | { query: 'li', label: 'List item' }, 53 | { query: 'pre', label: 'Code' }, 54 | { query: 'table', label: 'Table' }, 55 | { query: 'h3, h4', label: 'Heading', weight: 10 }, 56 | { query: 'h5, h6', label: 'Minor Heading', weight: 5 }, 57 | { query: 'p', label: 'Paragraph' }, 58 | ], 59 | exclude: { 60 | // Note, only pages under src/md/pages have a "hide" property. 61 | frontmatter: [ 62 | { key: 'hide', value: true }, 63 | { key: 'search', value: false }, 64 | { key: 'path', value: '/annotated-spec/' }, 65 | ], 66 | // Elements matching this query are ignored completely, including their text: 67 | ignore: 68 | 'svg, details, mtable, mrow, [aria-hidden="true"], a[id^="fnref-"], a.data-footnote-backref', 69 | }, 70 | }; 71 | 72 | export { Metadata, SearchOptions }; 73 | -------------------------------------------------------------------------------- /src/include/constants.json: -------------------------------------------------------------------------------- 1 | { 2 | "PRESET_BASE": "'mainnet'", 3 | "MIN_GENESIS_ACTIVE_VALIDATOR_COUNT": "16384", 4 | "MIN_GENESIS_TIME": "1606824000 (Dec 1, 2020, 12pm UTC)", 5 | "GENESIS_FORK_VERSION": "0x00000000", 6 | "GENESIS_DELAY": "604800 seconds (7 days)", 7 | "ALTAIR_FORK_VERSION": "0x01000000", 8 | "ALTAIR_FORK_EPOCH": "74240 (Oct 27, 2021, 10:56:23 UTC)", 9 | "BELLATRIX_FORK_VERSION": "0x02000000", 10 | "BELLATRIX_FORK_EPOCH": "144896 (Sep 6, 2022, 11:34:47 UTC)", 11 | "CAPELLA_FORK_VERSION": "0x03000000", 12 | "CAPELLA_FORK_EPOCH": "194048 (Apr 12, 2023, 22:27:35 UTC)", 13 | "SECONDS_PER_SLOT": "12", 14 | "SECONDS_PER_ETH1_BLOCK": "14", 15 | "MIN_VALIDATOR_WITHDRAWABILITY_DELAY": "256 Epochs", 16 | "SHARD_COMMITTEE_PERIOD": "256 Epochs", 17 | "ETH1_FOLLOW_DISTANCE": "2048", 18 | "INACTIVITY_SCORE_BIAS": "4", 19 | "INACTIVITY_SCORE_RECOVERY_RATE": "16", 20 | "EJECTION_BALANCE": "16000000000", 21 | "MIN_PER_EPOCH_CHURN_LIMIT": "4", 22 | "CHURN_LIMIT_QUOTIENT": "65536", 23 | "DEPOSIT_CHAIN_ID": "1", 24 | "DEPOSIT_NETWORK_ID": "1", 25 | "DEPOSIT_CONTRACT_ADDRESS": "0x00000000219ab540356cBB839Cbe05303d7705Fa", 26 | "MAX_COMMITTEES_PER_SLOT": "64", 27 | "TARGET_COMMITTEE_SIZE": "128", 28 | "MAX_VALIDATORS_PER_COMMITTEE": "2048", 29 | "SHUFFLE_ROUND_COUNT": "90", 30 | "HYSTERESIS_QUOTIENT": "4", 31 | "HYSTERESIS_DOWNWARD_MULTIPLIER": "1", 32 | "HYSTERESIS_UPWARD_MULTIPLIER": "5", 33 | "MIN_DEPOSIT_AMOUNT": "1000000000 Gwei", 34 | "MAX_EFFECTIVE_BALANCE": "32000000000 Gwei", 35 | "EFFECTIVE_BALANCE_INCREMENT": "1000000000 Gwei", 36 | "MIN_ATTESTATION_INCLUSION_DELAY": "1 Epoch", 37 | "SLOTS_PER_EPOCH": "32", 38 | "MIN_SEED_LOOKAHEAD": "1", 39 | "MAX_SEED_LOOKAHEAD": "4", 40 | "EPOCHS_PER_ETH1_VOTING_PERIOD": "64", 41 | "SLOTS_PER_HISTORICAL_ROOT": "8192", 42 | "MIN_EPOCHS_TO_INACTIVITY_PENALTY": "4", 43 | "EPOCHS_PER_HISTORICAL_VECTOR": "65536", 44 | "EPOCHS_PER_SLASHINGS_VECTOR": "8192", 45 | "HISTORICAL_ROOTS_LIMIT": "16777216", 46 | "VALIDATOR_REGISTRY_LIMIT": "1099511627776", 47 | "BASE_REWARD_FACTOR": "64", 48 | "WHISTLEBLOWER_REWARD_QUOTIENT": "512", 49 | "PROPOSER_REWARD_QUOTIENT": "8", 50 | "INACTIVITY_PENALTY_QUOTIENT": "67108864", 51 | "MIN_SLASHING_PENALTY_QUOTIENT": "128", 52 | "PROPORTIONAL_SLASHING_MULTIPLIER": "1", 53 | "MAX_PROPOSER_SLASHINGS": "16", 54 | "MAX_ATTESTER_SLASHINGS": "2", 55 | "MAX_ATTESTATIONS": "128", 56 | 
"MAX_DEPOSITS": "16", 57 | "MAX_VOLUNTARY_EXITS": "16", 58 | "MAX_BLS_TO_EXECUTION_CHANGES": "16", 59 | "INACTIVITY_PENALTY_QUOTIENT_ALTAIR": "50331648", 60 | "MIN_SLASHING_PENALTY_QUOTIENT_ALTAIR": "64", 61 | "PROPORTIONAL_SLASHING_MULTIPLIER_ALTAIR": "2", 62 | "INACTIVITY_PENALTY_QUOTIENT_BELLATRIX": "2**24 (= 16,777,216)", 63 | "MIN_SLASHING_PENALTY_QUOTIENT_BELLATRIX": "2**5 (= 32)", 64 | "PROPORTIONAL_SLASHING_MULTIPLIER_BELLATRIX": "3", 65 | "SYNC_COMMITTEE_SIZE": "512", 66 | "EPOCHS_PER_SYNC_COMMITTEE_PERIOD": "256", 67 | "MIN_SYNC_COMMITTEE_PARTICIPANTS": "1", 68 | "GENESIS_SLOT": "Slot(0)", 69 | "GENESIS_EPOCH": "Epoch(0)", 70 | "FAR_FUTURE_EPOCH": "Epoch(2**64 - 1)", 71 | "BASE_REWARDS_PER_EPOCH": "4", 72 | "DEPOSIT_CONTRACT_TREE_DEPTH": "2**5 (= 32)", 73 | "MAX_DEPOSIT_COUNT": "2**32 - 1", 74 | "JUSTIFICATION_BITS_LENGTH": "4", 75 | "ENDIANNESS": "little", 76 | "BLS_WITHDRAWAL_PREFIX": "Bytes1(0x00)", 77 | "ETH1_ADDRESS_WITHDRAWAL_PREFIX": "Bytes1(0x01)", 78 | "TIMELY_SOURCE_FLAG_INDEX": "0", 79 | "TIMELY_TARGET_FLAG_INDEX": "1", 80 | "TIMELY_HEAD_FLAG_INDEX": "2", 81 | "TIMELY_SOURCE_WEIGHT": "14", 82 | "TIMELY_TARGET_WEIGHT": "26", 83 | "TIMELY_HEAD_WEIGHT": "14", 84 | "SYNC_REWARD_WEIGHT": "2", 85 | "PROPOSER_WEIGHT": "8", 86 | "WEIGHT_DENOMINATOR": "64", 87 | "DOMAIN_BEACON_PROPOSER": "0x00000000", 88 | "DOMAIN_BEACON_ATTESTER": "0x01000000", 89 | "DOMAIN_RANDAO": "0x02000000", 90 | "DOMAIN_DEPOSIT": "0x03000000", 91 | "DOMAIN_VOLUNTARY_EXIT": "0x04000000", 92 | "DOMAIN_SELECTION_PROOF": "0x05000000", 93 | "DOMAIN_AGGREGATE_AND_PROOF": "0x06000000", 94 | "DOMAIN_SYNC_COMMITTEE": "0x07000000", 95 | "DOMAIN_SYNC_COMMITTEE_SELECTION_PROOF": "0x08000000", 96 | "DOMAIN_CONTRIBUTION_AND_PROOF": "0x09000000", 97 | "DOMAIN_BLS_TO_EXECUTION_CHANGE": "0x0A000000", 98 | "TARGET_AGGREGATORS_PER_COMMITTEE": "16", 99 | "RANDOM_SUBNETS_PER_VALIDATOR": "1", 100 | "EPOCHS_PER_RANDOM_SUBNET_SUBSCRIPTION": "256", 101 | "ATTESTATION_SUBNET_COUNT": "64", 102 | "TARGET_AGGREGATORS_PER_SYNC_SUBCOMMITTEE": "16", 103 | "SYNC_COMMITTEE_SUBNET_COUNT": "4", 104 | "MAX_BYTES_PER_TRANSACTION": "2**30 (= 1,073,741,824)", 105 | "MAX_TRANSACTIONS_PER_PAYLOAD": "2**20 (= 1,048,576)", 106 | "BYTES_PER_LOGS_BLOOM": "2**8 (= 256)", 107 | "MAX_EXTRA_DATA_BYTES": "2**5 (= 32)", 108 | "TERMINAL_TOTAL_DIFFICULTY": "58750000000000000000000", 109 | "TERMINAL_BLOCK_HASH": "Hash32()", 110 | "TERMINAL_BLOCK_HASH_ACTIVATION_EPOCH": "FAR_FUTURE_EPOCH", 111 | "INTERVALS_PER_SLOT": "3", 112 | "SAFE_SLOTS_TO_UPDATE_JUSTIFIED": "8 slots, 96 seconds", 113 | "PROPOSER_SCORE_BOOST": "40", 114 | "MAX_WITHDRAWALS_PER_PAYLOAD": "16", 115 | "MAX_VALIDATORS_PER_WITHDRAWALS_SWEEP": "16384" 116 | } 117 | -------------------------------------------------------------------------------- /src/layouts/Html.astro: -------------------------------------------------------------------------------- 1 | --- 2 | import 'katex/dist/katex.min.css'; 3 | import 'prismjs/themes/prism-tomorrow.min.css' 4 | import { Metadata } from '../include/SiteConfig.js'; 5 | import Matomo from '../components/Matomo.astro'; 6 | 7 | const { pageTitle, canonical, pageUrl } = Astro.props; 8 | --- 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | {/* Dark mode stuff */} 39 | 40 | 41 |