├── LICENSE
├── script.js
├── README.md
└── paywalls.json

/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2022 Emre

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------------------
/script.js:
--------------------------------------------------------------------------------
const projects = [
  {
    name: "Archive.is",
    url: "https://archive.is/",
  },
  {
    name: "12ft.io",
    url: "https://12ft.io/",
  },
  {
    name: "Archive.org",
    url: "https://web.archive.org/web/",
  },
];

const paywalls = []; // Add paywall domains from paywalls.json here
let titles = document.querySelectorAll("table tr.athing");
let postTitle = document.querySelector("tbody table.fatitem tr.athing");

// The main function that updates the UI with the links.
const passTheButter = (node) => {
  let meta = node.nextElementSibling.querySelector(".subtext");
  let link = node.querySelector(".titleline a").href;
  let domain = node.querySelector("span.sitestr") ? node.querySelector("span.sitestr").innerText : "";
  let paywall = paywalls.find((paywall) => domain.includes(paywall));

  if (paywall) {
    let paywallSpan = document.createElement("span");
    paywallSpan.appendChild(document.createTextNode(" | 💰"));

    projects.forEach((project) => {
      const anchor = document.createElement("a");
      const line = document.createElement("span");
      line.textContent = " | ";
      anchor.setAttribute("href", `${project.url}${link}`);
      anchor.setAttribute("target", "_blank");
      anchor.setAttribute("rel", "noopener noreferrer");
      anchor.textContent = project.name;
      paywallSpan.appendChild(line);
      paywallSpan.appendChild(anchor);
    });
    paywallSpan.appendChild(document.createTextNode(" | "));
    paywallSpan.appendChild(
      Object.assign(document.createElement("a"), {
        href: `https://github.com/MostlyEmre/hn-anti-paywall`,
        target: "_blank",
        rel: "noopener noreferrer",
        textContent: "ℹ",
      })
    );

    meta.appendChild(paywallSpan);
  }
};

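// On a single item page HN wraps the post in table.fatitem, so postTitle is set
// and only that post is handled; on list pages postTitle is null and every
// "tr.athing" row is handled instead.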
postTitle
  ? passTheButter(postTitle)
  : titles.forEach((title) => {
      passTheButter(title);
    });
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)

Note: if you build the script yourself instead of installing it through GreasyFork, copy the array of paywall domains from `paywalls.json` into the `paywalls` const in `script.js`. The code won't work otherwise.

**GreasyFork**: https://greasyfork.org/en/scripts/452024-hacker-news-anti-paywall
**Detailed write-up**: https://gist.github.com/MostlyEmre/ddec18c4a5b18413994ff9e179bf00ac
**Show HN**: https://news.ycombinator.com/item?id=33794672

- [Usage](#usage)
- [Screenshots](#screenshots)
- [Anti-Paywall Archive Link Providers](#anti-paywall-archive-link-providers)
- [Paywall List](#paywall-list)

## Usage

- Install through [GreasyFork](https://greasyfork.org/en/scripts/452024-hacker-news-anti-paywall)
- Browse Hacker News
- When you see a paywalled article, click one of the archive links to read it without the paywall.
- If the specific article isn't available, try another archive link.
- If the article isn't available through any of the links, please archive it yourself via the `archive.is` link.

## Screenshots

![A screenshot of the script on the post list page](https://i.imgur.com/YFkP8qW.png)
![A screenshot of the script on a single post page](https://i.imgur.com/BAJnlAF.png)

## Anti-Paywall Archive Link Providers

Main: `https://archive.is/`
Alternative 1: `https://12ft.io/`
Alternative 2: `archive.org`

**Feel free to recommend more.** `Archive.is` and `Archive.org` don't have every link archived, and `12ft.io` works differently but doesn't support every website. In my experience, the majority of paywalled articles can be reached through at least one of these providers. Each generated link is simply the provider URL followed by the article URL, as sketched below.

**Make the world a better place:** when you open the `archive.is` link and no pre-existing archive is available, please click `archive this url` so that later readers find an archived copy. Kinda like `BE KIND REWIND`.
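
A minimal sketch of how those links are built (the article URL here is a hypothetical example, not taken from the repository):

```js
// Each provider link is the provider URL with the article URL appended directly after it.
const providers = ["https://archive.is/", "https://12ft.io/", "https://web.archive.org/web/"];
const articleUrl = "https://example.com/some-paywalled-story"; // hypothetical article URL

const links = providers.map((base) => `${base}${articleUrl}`);
console.log(links);
// [
//   "https://archive.is/https://example.com/some-paywalled-story",
//   "https://12ft.io/https://example.com/some-paywalled-story",
//   "https://web.archive.org/web/https://example.com/some-paywalled-story"
// ]
```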

## Paywall List

The paywall list (`paywalls.json`) is copied from the repository below and kept as a separate local file. At first I fetched the list dynamically, but manually inserting it turned out to be much simpler.

[iamadamdev/bypass-paywalls-chrome](https://github.com/iamadamdev/bypass-paywalls-chrome/blob/master/src/js/sites.js)

**If you wish to update the paywall list (`paywalls.json`), feel free to edit it and send a pull request.**
--------------------------------------------------------------------------------
/paywalls.json:
--------------------------------------------------------------------------------
[
  "adweek.com",
  "ad.nl",
  "ambito.com",
  "americanbanker.com",
  "baltimoresun.com",
  "barrons.com",
  "bloomberg.com",
  "bloombergquint.com",
  "bndestem.nl",
  "bostonglobe.com",
  "bd.nl",
  "brisbanetimes.com.au",
  "businessinsider.com",
  "caixinglobal.com",
  "centralwesterndaily.com.au",
  "cen.acs.org",
  "chicagotribune.com",
  "corriere.it",
  "chicagobusiness.com",
  "dailypress.com",
  "gelderlander.nl",
  "groene.nl",
  "demorgen.be",
  "denverpost.com",
  "speld.nl",
  "destentor.nl",
  "tijd.be",
  "volkskrant.nl",
  "df.cl",
  "editorialedomani.it",
  "dynamed.com",
  "ed.nl",
  "elmercurio.com",
  "elmundo.es",
  "elpais.com",
  "elperiodico.com",
  "elu24.ee",
  "britannica.com",
  "estadao.com.br",
  "examiner.com.au",
  "expansion.com",
  "fnlondon.com",
  "financialpost.com",
  "ft.com",
  "firstthings.com",
  "foreignpolicy.com",
  "fortune.com",
  "genomeweb.com",
  "glassdoor.com",
  "globes.co.il",
  "grubstreet.com",
  "haaretz.com",
  "haaretz.co.il",
  "harpers.org",
  "courant.com",
  "hbr.org",
  "hbrchina.org",
  "heraldsun.com.au",
  "fd.nl",
  "historyextra.com",
  "humo.be",
  "ilmanifesto.it",
  "inc.com",
  "interest.co.nz",
  "investorschronicle.co.uk",
  "lanacion.com.ar",
  "repubblica.it",
  "lastampa.it",
  "latercera.com",
  "lavoixdunord.fr",
  "lecho.be",
  "ledevoir.com",
  "leparisien.fr",
  "lesechos.fr",
  "loebclassics.com",
  "lrb.co.uk",
  "labusinessjournal.com",
  "latimes.com",
  "medium.com",
  "medscape.com",
  "mexiconewsdaily.com",
  "sloanreview.mit.edu",
  "technologyreview.com",
  "mv-voice.com",
  "nationalgeographic.com",
  "nationalpost.com",
  "nzz.ch",
  "newstatesman.com",
  "nydailynews.com",
  "nymag.com",
  "nzherald.co.nz",
  "nrc.nl",
  "ntnews.com.au",
  "ocregister.com",
  "orlandosentinel.com",
  "paloaltoonline.com",
  "parool.nl",
  "postimees.ee",
  "pzc.nl",
  "qz.com",
  "quora.com",
  "gelocal.it",
  "republic.ru",
  "reuters.com",
  "sandiegouniontribune.com",
  "sfchronicle.com",
  "scientificamerican.com",
  "seekingalpha.com",
  "slate.com",
  "sofrep.com",
  "startribune.com",
  "statista.com",
  "stuff.co.nz",
  "sueddeutsche.de",
  "sun-sentinel.com",
  "techinasia.com",
  "telegraaf.nl",
  "time.com",
  "adelaidenow.com.au",
  "theadvocate.com.au",
  "theage.com.au",
  "the-american-interest.com",
  "theathletic.com",
  "theathletic.co.uk",
  "theatlantic.com",
  "afr.com",
  "theaustralian.com.au",
  "bizjournals.com",
  "canberratimes.com.au",
  "thecourier.com.au",
  "couriermail.com.au",
  "thecut.com",
  "dailytelegraph.com.au",
  "thediplomat.com",
  "economist.com",
  "theglobeandmail.com",
  "theherald.com.au",
"thehindu.com", 140 | "irishtimes.com", 141 | "japantimes.co.jp", 142 | "kansascity.com", 143 | "themarker.com", 144 | "mercurynews.com", 145 | "themercury.com.au", 146 | "mcall.com", 147 | "thenation.com", 148 | "thenational.scot", 149 | "news-gazette.com", 150 | "newyorker.com", 151 | "nytimes.com", 152 | "theolivepress.es", 153 | "inquirer.com", 154 | "thesaturdaypaper.com.au", 155 | "seattletimes.com", 156 | "spectator.com.au", 157 | "spectator.co.uk", 158 | "spectator.us", 159 | "smh.com.au", 160 | "telegraph.co.uk", 161 | "thestar.com", 162 | "wsj.com", 163 | "washingtonpost.com", 164 | "thewrap.com", 165 | "the-tls.co.uk", 166 | "towardsdatascience.com", 167 | "trouw.nl", 168 | "tubantia.nl", 169 | "vanityfair.com", 170 | "vn.nl", 171 | "vulture.com", 172 | "journalnow.com", 173 | "wired.com", 174 | "zeit.de" 175 | ] 176 | --------------------------------------------------------------------------------