├── .gitignore ├── w3c.json ├── .github ├── dependabot.yml ├── workflows │ ├── request-pr-review.yml │ ├── monitor-specs.yml │ ├── check-base-url.yml │ ├── report-new-specs.yml │ ├── lint.yml │ ├── release-package.yml │ ├── submit-suggested-spec.yml │ ├── build.yml │ ├── build-skip-iso.yml │ └── check-suggested-spec.yml ├── incorrect-base-url.md └── ISSUE_TEMPLATE │ └── suggest-spec.yml ├── packages ├── web-specs │ ├── package.json │ └── README.md └── browser-specs │ ├── package.json │ └── README.md ├── src ├── load-json.js ├── split-issue-body.js ├── extract-pages.js ├── graphql.js ├── fetch-json.js ├── compute-standing.js ├── octokit.js ├── determine-filename.js ├── compute-prevnext.js ├── check-base-url.js ├── compute-alternate-urls.js ├── compute-currentlevel.js ├── compute-shorttitle.js ├── data │ └── multispecs-repos.json ├── request-pr-review.js ├── prepare-packages.js ├── parse-spec-url.js ├── compute-series-urls.js ├── compute-categories.js ├── lint.js ├── fetch-iso-info.js ├── bump-packages-minor.js ├── throttled-queue.js ├── load-spec.js ├── release-package.js ├── determine-testpath.js ├── compute-repository.js ├── fetch-groups.js └── monitor-specs.js ├── test ├── cli.js ├── determine-filename.js ├── data.js ├── shortname-continuity.js ├── extract-pages.js ├── compute-standing.js ├── compute-categories.js ├── compute-shorttitle.js ├── compute-repository.js ├── compute-currentlevel.js ├── compute-prevnext.js ├── fetch-iso-info.js ├── lint.js ├── compute-series-urls.js ├── fetch-groups.js ├── specs.js └── compute-shortname.js ├── schema ├── data.json ├── index.json ├── specs.json └── definitions.json ├── package.json ├── index.js └── LICENSE.md /.gitignore: -------------------------------------------------------------------------------- 1 | .buildsteps 2 | .cache 3 | node_modules/ 4 | config.json 5 | packages/**/index.json -------------------------------------------------------------------------------- /w3c.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "contacts": ["dontcallmedom", "tidoust"], 3 | "repo-type": "tool" 4 | } 5 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: npm 4 | directory: "/" 5 | schedule: 6 | interval: daily 7 | time: '10:00' 8 | open-pull-requests-limit: 10 -------------------------------------------------------------------------------- /packages/web-specs/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "web-specs", 3 | "version": "3.75.0", 4 | "description": "Curated list of technical Web specifications", 5 | "repository": { 6 | "type": "git", 7 | "url": "https://github.com/w3c/browser-specs.git" 8 | }, 9 | "bugs": { 10 | "url": "https://github.com/w3c/browser-specs/issues" 11 | }, 12 | "license": "CC0-1.0", 13 | "files": [ 14 | "index.json" 15 | ], 16 | "main": "index.json" 17 | } -------------------------------------------------------------------------------- /packages/browser-specs/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "browser-specs", 3 | "version": "4.65.0", 4 | "description": "Curated list of technical Web specifications that are directly implemented or that will be implemented by Web browsers.", 5 | "repository": { 6 | "type": "git", 7 | "url": "https://github.com/w3c/browser-specs.git" 8 | }, 9 | "bugs": { 10 | "url": "https://github.com/w3c/browser-specs/issues" 11 | }, 12 | "license": "CC0-1.0", 13 | "files": [ 14 | "index.json" 15 | ], 16 | "main": "index.json" 17 | } -------------------------------------------------------------------------------- /src/load-json.js: -------------------------------------------------------------------------------- 1 | import { 
readFile } from 'node:fs/promises'; 2 | 3 | /** 4 | * Load a JSON file as JS object. 5 | * 6 | * @function 7 | * @param {String} filename The path to the file to require 8 | * @return {Object} The result of loading and parsing the file relative to the 9 | * current working directory. 10 | */ 11 | export default async function (filename) { 12 | try { 13 | const json = await readFile(filename, 'utf8'); 14 | return JSON.parse(json); 15 | } 16 | catch (err) { 17 | return null; 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /src/split-issue-body.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Helper function to split an issue body (in markdown) into sections 3 | */ 4 | export default function splitIssueBodyIntoSections(body) { 5 | return body.split(/^### /m) 6 | .filter(section => !!section) 7 | .map(section => section.split(/\r?\n/)) 8 | .map(section => { 9 | let value = section.slice(1).join('\n').trim(); 10 | if (value.replace(/^_(.*)_$/, '$1') === 'No response') { 11 | value = null; 12 | } 13 | return { 14 | title: section[0].replace(/ \(Optional\)$/, ''), 15 | value 16 | }; 17 | }); 18 | } -------------------------------------------------------------------------------- /test/cli.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Make sure that the browser-specs CLI runs as intended. 
3 | */ 4 | import { describe, it } from "node:test"; 5 | import assert from "node:assert"; 6 | import path from "node:path"; 7 | import { fileURLToPath } from "node:url"; 8 | import { exec as execCb } from 'node:child_process'; 9 | import util from "node:util"; 10 | const exec = util.promisify(execCb); 11 | 12 | const scriptPath = path.dirname(fileURLToPath(import.meta.url)); 13 | const cwd = path.join(scriptPath, '..', 'src'); 14 | 15 | describe("The browser-specs CLI", () => { 16 | it("runs without errors", async () => { 17 | await exec("node cli.js --help", { cwd }); 18 | }); 19 | }); -------------------------------------------------------------------------------- /.github/workflows/request-pr-review.yml: -------------------------------------------------------------------------------- 1 | name: "NPM release: Request review of pre-release PR" 2 | 3 | on: 4 | schedule: 5 | - cron: '0 5 * * 4' 6 | workflow_dispatch: 7 | 8 | jobs: 9 | review: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Checkout latest version of release script 13 | uses: actions/checkout@v4 14 | 15 | - name: Setup node.js 16 | uses: actions/setup-node@v4 17 | with: 18 | node-version: 20 19 | cache: 'npm' 20 | 21 | - name: Install dependencies 22 | run: npm ci 23 | 24 | - name: Request review of pre-release PR 25 | run: node src/request-pr-review.js 26 | env: 27 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} -------------------------------------------------------------------------------- /.github/incorrect-base-url.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Base URL mismatch 3 | assignees: tidoust, dontcallmedom 4 | labels: bug 5 | --- 6 | [check-base-url](../blob/main/src/check-base-url.js) has detected that the base URL (i.e. 
the one that appears in the root `url` property in `index.json`) of the following specifications does not match the `release` URL or the `nightly` URL: 7 | 8 | {{ env.check_list }} 9 | 10 | Please review the above list. For each specification, consider updating the URL in [specs.json](../blob/main/specs.json) or fixing the info at the source (the W3C API, Specref, or the spec itself). If the discrepancy seems warranted, the specification should be hardcoded as an exception to the rule in the [check-base-url](../blob/main/src/check-base-url.js) script. -------------------------------------------------------------------------------- /.github/workflows/monitor-specs.yml: -------------------------------------------------------------------------------- 1 | name: Monitor specs 2 | 3 | on: 4 | schedule: 5 | - cron: '0 0 1 */2 *' 6 | workflow_dispatch: 7 | 8 | jobs: 9 | find-specs: 10 | name: Update the list of monitored specs and highlights those that have changed 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Checkout latest version of release script 14 | uses: actions/checkout@v4 15 | 16 | - name: Setup node.js 17 | uses: actions/setup-node@v4 18 | with: 19 | node-version: 20 20 | cache: 'npm' 21 | 22 | - name: Install dependencies 23 | run: npm ci 24 | 25 | - name: Check specs that changed since last review 26 | run: node src/monitor-specs.js --update 27 | env: 28 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 29 | -------------------------------------------------------------------------------- /.github/workflows/check-base-url.yml: -------------------------------------------------------------------------------- 1 | name: Check base URL 2 | 3 | on: 4 | schedule: 5 | - cron: '30 0 * * 1' 6 | workflow_dispatch: 7 | 8 | jobs: 9 | find-specs: 10 | name: Check base URL 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Checkout repo 14 | uses: actions/checkout@v4 15 | 16 | - name: Setup node.js 17 | uses: actions/setup-node@v4 18 | with: 19 | node-version: 20 20 | 
cache: 'npm' 21 | 22 | - name: Setup environment 23 | run: npm ci 24 | 25 | - name: Check base URL 26 | run: node src/check-base-url.js # sets check_list env variable 27 | 28 | - name: Report any mismatch in an issue 29 | uses: JasonEtco/create-an-issue@v2 30 | if: ${{ env.check_list }} 31 | env: 32 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 33 | with: 34 | filename: .github/incorrect-base-url.md -------------------------------------------------------------------------------- /src/extract-pages.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Module that exports a function that takes the URL of the index page of a 3 | * multi-page spec as input and that returns the list of pages referenced in 4 | * the table of contents, in document order, excluding the index page. 5 | */ 6 | 7 | import loadSpec from './load-spec.js'; 8 | 9 | export default async function (url, browser) { 10 | const page = await browser.newPage(); 11 | try { 12 | await loadSpec(url, page); 13 | const allPages = await page.evaluate(_ => 14 | [...document.querySelectorAll('.toc a[href]')] 15 | .map(link => link.href) 16 | .map(url => url.split('#')[0]) 17 | .filter(url => url !== window.location.href) 18 | ); 19 | const pageSet = new Set(allPages); 20 | return [...pageSet]; 21 | } 22 | catch (err) { 23 | throw new Error(`Could not extract pages from ${url}: ${err.message}`); 24 | } 25 | finally { 26 | await page.close(); 27 | } 28 | }; -------------------------------------------------------------------------------- /src/graphql.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Send a GraphQL request to the GitHub GraphQL endpoint, authenticating using 3 | * the provided token. 
4 | */ 5 | export default async function (query, variables, graphqlToken) { 6 | if (typeof variables === 'string') { 7 | graphqlToken = variables; 8 | variables = null; 9 | } 10 | const res = await fetch("https://api.github.com/graphql", { 11 | method: "POST", 12 | headers: { 13 | "Content-Type": "application/json", 14 | "Authorization": `bearer ${graphqlToken}` 15 | }, 16 | body: JSON.stringify({ query, variables }, null, 2) 17 | }); 18 | if (res.status !== 200) { 19 | if (res.status >= 500) { 20 | throw new Error(`GraphQL server error, ${res.status} status received`); 21 | } 22 | if (res.status === 403) { 23 | throw new Error(`GraphQL server reports that the API key is invalid, ${res.status} status received`); 24 | } 25 | throw new Error(`GraphQL server returned an unexpected HTTP status ${res.status}`); 26 | } 27 | return res.json(); 28 | } 29 | -------------------------------------------------------------------------------- /schema/data.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/schema#", 3 | "$id": "https://w3c.github.io/browser-specs/schema/data.json", 4 | 5 | "type": "object", 6 | "propertyNames": { 7 | "type": "string", 8 | "pattern": "^[\\w\\-\\.]+\\/[\\w\\-\\.]+$" 9 | }, 10 | "additionalProperties": { 11 | "type": "object", 12 | "properties": { 13 | "url": { 14 | "$ref": "definitions.json#/$defs/url" 15 | }, 16 | "shortname": { 17 | "type": "object", 18 | "properties": { 19 | "pattern": { 20 | "type": "string" 21 | }, 22 | "prefix": { 23 | "type": "string" 24 | } 25 | }, 26 | "required": ["pattern"], 27 | "additionalProperties": false 28 | }, 29 | "path": { 30 | "type": "string", 31 | "pattern": "[\\w\\-]+" 32 | }, 33 | "exclude": { 34 | "type": "array", 35 | "items": { 36 | "type": "string", 37 | "pattern": "[\\w\\-]+" 38 | } 39 | } 40 | }, 41 | "required": ["url", "shortname", "exclude"], 42 | "additionalProperties": false 43 | } 44 | } 45 | 
-------------------------------------------------------------------------------- /.github/workflows/report-new-specs.yml: -------------------------------------------------------------------------------- 1 | name: Report new specs 2 | 3 | on: 4 | schedule: 5 | - cron: '0 0 * * 1' 6 | workflow_dispatch: 7 | 8 | jobs: 9 | find-specs: 10 | name: Find potential new specs 11 | runs-on: ubuntu-latest 12 | steps: 13 | # Starting with Ubuntu 23+, a security feature prevents running Puppeteer 14 | # by default. It needs to be disabled. Using the "easiest" option, see: 15 | # https://chromium.googlesource.com/chromium/src/+/main/docs/security/apparmor-userns-restrictions.md 16 | # https://github.com/puppeteer/puppeteer/pull/13196/files 17 | - name: Disable AppArmor 18 | run: echo 0 | sudo tee /proc/sys/kernel/apparmor_restrict_unprivileged_userns 19 | 20 | - name: Checkout latest version of release script 21 | uses: actions/checkout@v4 22 | 23 | - name: Setup node.js 24 | uses: actions/setup-node@v4 25 | with: 26 | node-version: 20 27 | cache: 'npm' 28 | 29 | - name: Install dependencies 30 | run: npm ci 31 | 32 | - name: Find new candidate specs 33 | run: npx find-specs --github --repos 34 | env: 35 | GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }} 36 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "browser-specs", 3 | "version": "2.27.0", 4 | "repository": { 5 | "type": "git", 6 | "url": "git+https://github.com/w3c/browser-specs.git" 7 | }, 8 | "files": [ 9 | "index.json" 10 | ], 11 | "license": "CC0-1.0", 12 | "main": "index.json", 13 | "scripts": { 14 | "build": "node src/build-index.js", 15 | "build-skip-iso": "node src/build-index.js --skip-fetch=iso", 16 | "lint": "node src/lint.js", 17 | "lint-fix": "node src/lint.js --fix", 18 | "test": "node --test --test-reporter=spec", 19 | "test-index": "node --test --test-reporter=spec 
test/index.js" 20 | }, 21 | "type": "module", 22 | "bin": { 23 | "browser-specs": "./src/cli.js", 24 | "find-specs": "./src/find-specs.js" 25 | }, 26 | "devDependencies": { 27 | "@actions/core": "^2.0.1", 28 | "@jsdevtools/npm-publish": "^4.1.1", 29 | "@octokit/plugin-throttling": "^11.0.3", 30 | "@octokit/rest": "^22.0.1", 31 | "ajv": "^8.17.1", 32 | "ajv-formats": "^3.0.1", 33 | "commander": "^14.0.2", 34 | "puppeteer": "^24.33.0", 35 | "reffy": "^20.0.1", 36 | "rimraf": "^6.1.2", 37 | "undici": "^7.16.0" 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /src/fetch-json.js: -------------------------------------------------------------------------------- 1 | import ThrottledQueue from "./throttled-queue.js"; 2 | 3 | // Make sure we remain "friendly" with servers 4 | // In particular, we're going to have to fetch a number of w3c.json files from 5 | // https://raw.githubusercontent.com which seems to restrict the total number 6 | // of allowed requests to ~5000 per hour and per IP address. 
7 | const fetchQueue = new ThrottledQueue({ 8 | maxParallel: 4, 9 | sleepInterval: 1000 10 | }); 11 | 12 | // Maintain a cache of fetched JSON resources in memory to avoid sending the 13 | // same fetch request again and again 14 | const cache = {}; 15 | 16 | /** 17 | * Fetch a JSON URL 18 | */ 19 | export default async function (url, options) { 20 | if (cache[url]) { 21 | return structuredClone(cache[url]); 22 | } 23 | const res = await fetchQueue.runThrottledPerOrigin(url, fetch, url, options); 24 | if (res.status === 404) { 25 | return null; 26 | } 27 | if (res.status !== 200) { 28 | throw new Error(`Server returned an error for ${url}, status code is ${res.status}`); 29 | } 30 | 31 | try { 32 | const body = await res.json(); 33 | cache[url] = body; 34 | return structuredClone(body); 35 | } 36 | catch (err) { 37 | throw new Error(`Server returned invalid JSON for ${url}`); 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Test and lint 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | branches: 9 | - main 10 | workflow_dispatch: 11 | 12 | jobs: 13 | lint: 14 | runs-on: ubuntu-latest 15 | steps: 16 | # Starting with Ubuntu 23+, a security feature prevents running Puppeteer 17 | # by default. It needs to be disabled. 
Using the "easiest" option, see: 18 | # https://chromium.googlesource.com/chromium/src/+/main/docs/security/apparmor-userns-restrictions.md 19 | # https://github.com/puppeteer/puppeteer/pull/13196/files 20 | - name: Disable AppArmor 21 | run: echo 0 | sudo tee /proc/sys/kernel/apparmor_restrict_unprivileged_userns 22 | 23 | - name: Checkout latest version of release script 24 | uses: actions/checkout@v4 25 | 26 | - name: Setup node.js 27 | uses: actions/setup-node@v4 28 | with: 29 | node-version: 20 30 | cache: 'npm' 31 | 32 | - name: Install dependencies 33 | run: npm ci 34 | 35 | - name: Test 36 | env: 37 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 38 | if: ${{ env.GITHUB_TOKEN }} 39 | run: | 40 | npm run test 41 | 42 | - name: Lint 43 | run: npm run lint 44 | -------------------------------------------------------------------------------- /src/compute-standing.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Module that exports a function that takes a spec object that already has its 3 | * `nightly.status` (and `release.status` for released specs) properties set as 4 | * input, and that returns the "standing" of the spec. 5 | * 6 | * Note (2023-01-06): The definition of "standing" remains fuzzy and this 7 | * property should be regarded as unstable. 8 | */ 9 | 10 | // List of spec statuses that are not "official" ones, in the sense that the 11 | // specs have not been officially adopted by a group as a deliverable. 12 | const unofficialStatuses = [ 13 | "A Collection of Interesting Ideas", 14 | "Unofficial Proposal Draft" 15 | ]; 16 | 17 | 18 | /** 19 | * Exports main function that takes a spec object and returns the standing of 20 | * the spec. 21 | */ 22 | export default function (spec) { 23 | if (!spec) { 24 | throw "Invalid spec object passed as parameter"; 25 | } 26 | 27 | // If spec is already explicit about its standing, who are we to disagree? 
28 | if (spec.standing) { 29 | return spec.standing; 30 | } 31 | 32 | const status = spec.release?.status ?? spec.nightly?.status; 33 | if (status === "Discontinued Draft") { 34 | return "discontinued"; 35 | } 36 | else { 37 | return unofficialStatuses.includes(status) ? "pending" : "good"; 38 | } 39 | } -------------------------------------------------------------------------------- /src/octokit.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Wrapper around Octokit to add throttling and avoid hitting rate limits 3 | */ 4 | 5 | import { throttling } from "@octokit/plugin-throttling"; 6 | import { Octokit as OctokitRest } from "@octokit/rest"; 7 | const Octokit = OctokitRest.plugin(throttling); 8 | 9 | const MAX_RETRIES = 3; 10 | 11 | export default function (params) { 12 | params = params || {}; 13 | 14 | const octoParams = Object.assign({ 15 | throttle: { 16 | onRateLimit: (retryAfter, options) => { 17 | if (options.request.retryCount < MAX_RETRIES) { 18 | console.warn(`Rate limit exceeded, retrying after ${retryAfter} seconds`) 19 | return true; 20 | } else { 21 | console.error(`Rate limit exceeded, giving up after ${MAX_RETRIES} retries`); 22 | return false; 23 | } 24 | }, 25 | onSecondaryRateLimit: (retryAfter, options) => { 26 | if (options.request.retryCount < MAX_RETRIES) { 27 | console.warn(`Abuse detection triggered, retrying after ${retryAfter} seconds`) 28 | return true; 29 | } else { 30 | console.error(`Abuse detection triggered, giving up after ${MAX_RETRIES} retries`); 31 | return false; 32 | } 33 | } 34 | } 35 | }, params); 36 | 37 | return new Octokit(octoParams); 38 | } 39 | -------------------------------------------------------------------------------- /test/determine-filename.js: -------------------------------------------------------------------------------- 1 | import { describe, it } from "node:test"; 2 | import assert from "node:assert"; 3 | import determineFilename from 
"../src/determine-filename.js"; 4 | 5 | describe("determine-filename module", function () { 6 | // Long timeout since tests need to send network requests 7 | const timeout = { 8 | timeout: 30000 9 | }; 10 | 11 | it("extracts filename from URL (.html)", timeout, async () => { 12 | const url = "https://example.org/spec/filename.html"; 13 | const filename = await determineFilename(url); 14 | assert.equal(filename, "filename.html"); 15 | }); 16 | 17 | it("extracts filename from URL (.pdf)", timeout, async () => { 18 | const url = "https://example.org/spec/filename.pdf"; 19 | const filename = await determineFilename(url); 20 | assert.equal(filename, "filename.pdf"); 21 | }); 22 | 23 | it("finds index.html filenames", timeout, async () => { 24 | const url = "https://w3c.github.io/presentation-api/"; 25 | const filename = await determineFilename(url); 26 | assert.equal(filename, "index.html"); 27 | }); 28 | 29 | it("finds Overview.html filenames", timeout, async () => { 30 | const url = "https://www.w3.org/TR/presentation-api/"; 31 | const filename = await determineFilename(url); 32 | assert.equal(filename, "Overview.html"); 33 | }); 34 | }); 35 | -------------------------------------------------------------------------------- /test/data.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Make sure that the src/data/*.json files respect the right JSON schema 3 | */ 4 | 5 | import { describe, it } from "node:test"; 6 | import assert from "node:assert"; 7 | import path from "node:path"; 8 | import { fileURLToPath } from "node:url"; 9 | import schema from "../schema/data.json" with { type: "json" }; 10 | import dfnsSchema from "../schema/definitions.json" with { type: "json" }; 11 | import loadJSON from "../src/load-json.js"; 12 | import Ajv from "ajv"; 13 | import addFormats from "ajv-formats"; 14 | const ajv = (new Ajv()).addSchema(dfnsSchema); 15 | addFormats(ajv); 16 | 17 | const scriptPath = 
path.dirname(fileURLToPath(import.meta.url)); 18 | const multiReposFile = path.resolve(scriptPath, "..", "src", "data", "multispecs-repos.json"); 19 | 20 | describe("Data files", () => { 21 | describe("The JSON schema", () => { 22 | it("is valid", () => { 23 | const isSchemaValid = ajv.validateSchema(schema); 24 | assert.ok(isSchemaValid); 25 | }); 26 | }); 27 | 28 | describe("The multispecs-repos.json list", () => { 29 | it("respects the JSON schema", async () => { 30 | const list = await loadJSON(multiReposFile); 31 | const validate = ajv.compile(schema); 32 | const isValid = validate(list, { format: "full" }); 33 | assert.strictEqual(validate.errors, null); 34 | }); 35 | }); 36 | }); 37 | -------------------------------------------------------------------------------- /test/shortname-continuity.js: -------------------------------------------------------------------------------- 1 | // Tests may run against a test version of the index file 2 | import { describe, it, before } from "node:test"; 3 | import assert from "node:assert"; 4 | import os from "node:os"; 5 | import fs from "node:fs"; 6 | import path from "node:path"; 7 | import util from "node:util"; 8 | import { exec as execCb } from "node:child_process"; 9 | import { fileURLToPath } from "node:url"; 10 | const exec = util.promisify(execCb); 11 | import loadJSON from "../src/load-json.js"; 12 | 13 | const scriptPath = path.dirname(fileURLToPath(import.meta.url)); 14 | const specsFile = process.env.testIndex ?? 
path.resolve(scriptPath, "..", "index.json"); 15 | const specs = await loadJSON(specsFile); 16 | 17 | describe("The build", {timeout: 60000}, function () { 18 | let tmpdir; 19 | 20 | before(async () => { 21 | tmpdir = await fs.promises.mkdtemp(path.join(os.tmpdir(), "web-specs-")); 22 | await exec("npm install web-specs", { cwd: tmpdir }); 23 | }); 24 | 25 | it("preserves shortnames", async () => { 26 | const lastPublishedSpecs = await loadJSON(path.join( 27 | tmpdir, "node_modules", "web-specs", "index.json")); 28 | 29 | const shortnames = lastPublishedSpecs.map(spec => spec.shortname); 30 | const wrong = shortnames.filter(shortname => !specs.find(spec => 31 | spec.shortname === shortname || 32 | spec.formerNames?.includes(shortname)) 33 | ); 34 | assert.deepStrictEqual(wrong, []); 35 | }); 36 | }); 37 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | import specs from "./index.json" with { type: "json" }; 4 | import { fileURLToPath } from 'node:url'; 5 | import process from 'node:process'; 6 | 7 | 8 | /** 9 | * Return the list of specs that match the specified filter. 10 | * 11 | * - If the filter is an integer, return the spec at that index in the list 12 | * - If the filter is full or delta, return specs with same level composition 13 | * - If the filter is empty, return the whole list 14 | * - return specs that have the same URL, name, shortname, or source otherwise 15 | */ 16 | function getSpecs(filter) { 17 | if (filter) { 18 | const res = filter.match(/^\d+$/) ? 
19 | [specs[parseInt(filter, 10)]] : 20 | specs.filter(s => 21 | s.url === filter || 22 | s.name === filter || 23 | s.seriesComposition === filter || 24 | s.source === filter || 25 | s.title === filter || 26 | (s.series && s.series.shortname === filter) || 27 | (s.release && s.release.url === filter) || 28 | (s.nightly && s.nightly.url === filter)); 29 | return res; 30 | } 31 | else { 32 | return specs; 33 | } 34 | } 35 | 36 | export { getSpecs }; 37 | 38 | if (process.argv[1] === fileURLToPath(import.meta.url)) { 39 | // Code used as command-line interface (CLI), output info about known specs. 40 | const res = getSpecs(process.argv[2]); 41 | console.log(JSON.stringify(res.length === 1 ? res[0] : res, null, 2)); 42 | } 43 | -------------------------------------------------------------------------------- /.github/workflows/release-package.yml: -------------------------------------------------------------------------------- 1 | # Publish a new package when a pre-release PR is merged. 2 | # 3 | # Job does nothing if PR that was merged is not a pre-release PR. 
4 | 5 | name: "Publish NPM package if needed" 6 | 7 | permissions: 8 | # Required to create/update references (release tags), 9 | # includes "read", which is needed to retrieve a PR: 10 | # https://docs.github.com/en/rest/git/refs#create-a-reference--fine-grained-access-tokens 11 | # https://docs.github.com/en/rest/pulls/pulls#get-a-pull-request--fine-grained-access-tokens 12 | contents: write 13 | 14 | # Required for Open ID Connect (OIDC) authentication for npm publication: 15 | # https://docs.npmjs.com/trusted-publishers#github-actions-configuration 16 | id-token: write 17 | 18 | on: 19 | pull_request: 20 | branches: 21 | - main 22 | types: 23 | - closed 24 | 25 | jobs: 26 | release: 27 | if: startsWith(github.head_ref, 'release-') && github.event.pull_request.merged == true 28 | runs-on: ubuntu-latest 29 | steps: 30 | - name: Checkout latest version of release script 31 | uses: actions/checkout@v6 32 | with: 33 | ref: main 34 | 35 | - name: Setup node.js 36 | uses: actions/setup-node@v6 37 | with: 38 | node-version: 24 39 | cache: 'npm' 40 | 41 | - name: Install dependencies 42 | run: npm ci 43 | 44 | - name: Release package if needed 45 | run: node src/release-package.js ${{ github.event.pull_request.number }} 46 | env: 47 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 48 | -------------------------------------------------------------------------------- /src/determine-filename.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Module that takes the URL of the index page of a spec as input, possibly 3 | * without a filename, and that tries to determine the underlying filename. 4 | * 5 | * For instance: 6 | * - given "https://w3c.github.io/webrtc-identity/identity.html", the function 7 | * would return "identity.html" 8 | * - given "https://compat.spec.whatwg.org/", the function would determine that 9 | * the filename is "index.html". 
10 | */ 11 | 12 | export default async function (url) { 13 | // Extract filename directly from the URL when possible 14 | const match = url.match(/\/([^/]+\.(html|pdf|txt))$/); 15 | if (match) { 16 | return match[1]; 17 | } 18 | 19 | // RFC-editor HTML rendering 20 | const rfcMatch = url.match(/\/rfc\/(rfc[0-9]+)$/); 21 | if (rfcMatch) { 22 | return rfcMatch[1] + '.html'; 23 | } 24 | 25 | // Make sure that url ends with a "/" 26 | const urlWithSlash = url.endsWith("/") ? url : url + "/"; 27 | 28 | // Check common candidates 29 | const candidates = [ 30 | "index.html", 31 | "Overview.html" 32 | ]; 33 | 34 | for (const candidate of candidates) { 35 | const res = await fetch(urlWithSlash + candidate, { method: "HEAD" }); 36 | if (res.status >= 200 && res.status < 300) { 37 | return candidate; 38 | } 39 | else if (res.status !== 404) { 40 | console.warn(`[warning] fetching "${urlWithSlash + candidate}" returned unexpected HTTP status ${res.status}`); 41 | } 42 | } 43 | 44 | // Not found? Look at Content-Location header 45 | const res = await fetch(url, { method: "HEAD" }); 46 | const filename = res.headers.get("Content-Location"); 47 | return filename; 48 | } 49 | -------------------------------------------------------------------------------- /src/compute-prevnext.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Module that exports a function that takes a spec object that already has a 3 | * "shortname", "series" and "level" properties (if needed) as input along with 4 | * a list of specs with the same info for each spec, and that returns an object 5 | * with "seriesPrevious" and "seriesNext" properties as needed, that point 6 | * to the "shortname" of the spec object that describes the previous and next 7 | * level for the spec in the list. 
8 | */ 9 | 10 | /** 11 | * Exports main function that takes a spec object and a list of specs (which 12 | * may contain the spec object itself) and returns an object with properties 13 | * "seriesPrevious" and/or "seriesNext" set. Function only sets the 14 | * properties when needed, so returned object may be empty. 15 | */ 16 | export default function (spec, list) { 17 | if (!spec || !spec.shortname || !spec.series || !spec.series.shortname) { 18 | throw "Invalid spec object passed as parameter"; 19 | } 20 | 21 | list = list || []; 22 | const level = spec.seriesVersion || "0"; 23 | 24 | return list 25 | .filter(s => s.series.shortname === spec.series.shortname && s.seriesComposition !== "fork") 26 | .sort((a, b) => (a.seriesVersion || "0").localeCompare(b.seriesVersion || "0")) 27 | .reduce((res, s) => { 28 | if ((s.seriesVersion || "0") < level) { 29 | // Previous level is the last spec with a lower level 30 | res.seriesPrevious = s.shortname; 31 | } 32 | else if ((s.seriesVersion || "0") > level) { 33 | // Next level is the first spec with a greater level 34 | if (!res.seriesNext) { 35 | res.seriesNext = s.shortname; 36 | } 37 | } 38 | return res; 39 | }, {}); 40 | } -------------------------------------------------------------------------------- /src/check-base-url.js: -------------------------------------------------------------------------------- 1 | /** 2 | * CLI tool that parses the generated index of specifications to make sure that 3 | * the base URL either matches the release URL if there is one, or the nightly 4 | * URL otherwise. 5 | * 6 | * The CLI tool returns Markdown that can typically be used to create an issue. 7 | * It also sets a check_list environment variable that can be used in GitHub 8 | * actions. 9 | * 10 | * No content is returned when everything looks good. 
import core from "@actions/core";
import specs from "../index.json" with { type: "json" };

// Compile the list of specs whose base "url" does not match the URL it is
// supposed to mirror: the release URL when the spec has a release, the
// nightly URL otherwise. Each problem is a Markdown task list item.
const problems = specs
  // A subset of the IETF RFCs are crawled from their httpwg.org rendering
  // see https://github.com/tobie/specref/issues/672 and
  // https://github.com/w3c/browser-specs/issues/280
  // Also, the revision for CSS2 is ignored on purpose to squash CSS 2.1 and
  // CSS 2.2 into a single entry
  .filter(s => s.nightly &&
    !s.nightly.url.startsWith('https://httpwg.org') &&
    !s.nightly.url.startsWith('https://www.ietf.org/') &&
    !s.nightly.url.startsWith('https://explainers-by-googlers.github.io/CHIPS-spec/'))
  // Keep only the specs whose base URL diverges from the expected URL
  .filter(s => (s.release && s.url !== s.release.url) || (!s.release && s.url !== s.nightly.url))
  .filter(s => s.shortname !== 'CSS2')
  .map(s => {
    const expected = s.release ? "release" : "nightly";
    const expectedUrl = s.release ? s.release.url : s.nightly.url;
    return `- [ ] [${s.title}](${s.url}): expected ${expected} URL ${expectedUrl} to match base URL ${s.url}`;
  });

// Report problems, if any, to the console and through a "check_list"
// environment variable that GitHub actions workflows can use, e.g. to file
// an issue. No output at all means everything looks good.
if (problems.length > 0) {
  const res = problems.join("\n");
  core.exportVariable("check_list", res);
  console.log(res);
}
11 | 12 | - type: input 13 | id: url 14 | attributes: 15 | label: URL 16 | description: | 17 | *Somewhat* stable spec URL. See [`url`](https://github.com/w3c/browser-specs/#url) for details. 18 | value: "https://" 19 | validations: 20 | required: true 21 | 22 | - type: textarea 23 | id: rationale 24 | attributes: 25 | label: Rationale 26 | description: | 27 | Please explain how the spec meets the [spec selection criteria](https://github.com/w3c/browser-specs/#spec-selection-criteria), e.g., with a pointer to an intent to implement or to a sister project issue. You may leave this field blank if that seems straightforward. 28 | validations: 29 | required: false 30 | 31 | - type: textarea 32 | id: custom 33 | attributes: 34 | label: Additional properties 35 | description: | 36 | A JSON object with the spec properties that cannot be determined automatically, e.g., [`seriesComposition`](https://github.com/w3c/browser-specs/#seriescomposition). Please **ignore this field** unless you know what you're doing. We will check and set properties as needed afterwards. 37 | render: json 38 | value: "{}" 39 | validations: 40 | required: false 41 | -------------------------------------------------------------------------------- /src/compute-alternate-urls.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Module that exports a function that takes a spec object as input that already 3 | * has most of its info filled out and returns an object with "alternativeUrls" 4 | * based on well-known patterns for certain publishers. 
import computeShortname from "./compute-shortname.js";

/**
 * Compute alternate URLs of a spec based on well-known patterns for certain
 * publishers (IETF mirrors, w3c.github.io renderings of CSS drafts).
 *
 * NOTE(review): despite what the file header suggests, this function does
 * not return anything; it updates spec.nightly.alternateUrls in place,
 * preserving any alternate URLs already listed there.
 *
 * @param {Object} spec Spec object with "url" and "nightly" properties
 * @throws {String} When the spec object misses mandatory properties
 */
export default function (spec) {
  if (!spec?.url || !spec?.nightly) {
    throw "Invalid spec object passed as parameter";
  }
  // Use a Set to avoid duplicating URLs that are already listed
  const alternate = new Set(spec.nightly.alternateUrls);

  // Document well-known patterns also used in other specs
  // datatracker and (now deprecated) tools.ietf.org
  if (spec.organization === "IETF" && spec.url.startsWith("https://www.rfc-editor.org/rfc/")) {
    alternate.add(spec.url.replace("https://www.rfc-editor.org/rfc/", "https://datatracker.ietf.org/doc/html/"));
    alternate.add(spec.url.replace("https://www.rfc-editor.org/rfc/", "https://tools.ietf.org/html/"));
  }

  // Add alternate w3c.github.io URLs for CSS specs
  // (Note drafts of CSS Houdini and Visual effects task forces don't have a
  // w3c.github.io version)
  // (Also note the CSS WG uses the "css" series shortname for CSS snapshots
  // and not for the CSS 2.x series)
  if (spec?.nightly?.url.match(/\/drafts\.csswg\.org/)) {
    const draft = computeShortname(spec.nightly.url);
    alternate.add(`https://w3c.github.io/csswg-drafts/${draft.shortname}/`);
    // Also add the unversioned URL when the spec is the current level of its
    // series (except for the "css" snapshot series, see note above)
    if ((spec.series.currentSpecification === spec.shortname) &&
        (draft.shortname !== draft.series.shortname) &&
        (draft.series.shortname !== 'css')) {
      alternate.add(`https://w3c.github.io/csswg-drafts/${draft.series.shortname}/`);
    }
  }
  spec.nightly.alternateUrls = Array.from(alternate);
};
/**
 * Compute the "currentSpecification" of a spec within its series.
 *
 * Takes a spec object and a list of specs that contains it, and returns an
 * object with a "currentSpecification" property set to the "shortname" of
 * the spec that should be seen as the current level for the set of specs
 * with the same series shortname in the list, along with the "forceCurrent"
 * flag of that spec (undefined when not set).
 *
 * Each spec in the list must have "shortname", "series" and "seriesVersion"
 * (if needed) properties.
 *
 * By default, the current level is the last level that is not a delta/fork
 * spec, unless a level is explicitly flagged with a "forceCurrent" property.
 * The property is always set (to the spec's own shortname when the spec is
 * the current level itself).
 *
 * @param {Object} spec Spec object to assess
 * @param {Array} list List of specs (must contain the spec object itself)
 * @returns {Object} Object with "currentSpecification" and "forceCurrent"
 * @throws {String} When no spec object is given
 */
export default function computeCurrentLevel(spec, list) {
  list = list || [];
  if (!spec) {
    // String throw kept on purpose: tests match the raw message
    throw "Invalid spec object passed as parameter";
  }

  // Starting from the spec itself, a candidate is replaced by another spec
  // in the same series when the candidate is not flagged as current and the
  // other spec is not a delta/fork AND is either flagged as current, has a
  // greater level, or replaces a delta/fork candidate.
  const current = list.reduce((candidate, curr) => {
    if (curr.series.shortname === candidate.series.shortname &&
        !candidate.forceCurrent &&
        curr.seriesComposition !== "fork" &&
        curr.seriesComposition !== "delta" &&
        (curr.forceCurrent ||
          candidate.seriesComposition === "delta" ||
          candidate.seriesComposition === "fork" ||
          (curr.seriesVersion || "0") > (candidate.seriesVersion || "0"))) {
      return curr;
    }
    else {
      return candidate;
    }
  }, spec);

  return {
    currentSpecification: current.shortname,
    forceCurrent: current.forceCurrent
  };
};
"definitions.json#/$defs/shortname" }, 11 | "forkOf": { "$ref": "definitions.json#/$defs/shortname" }, 12 | "forks": { "$ref": "definitions.json#/$defs/forks" }, 13 | "series": { "$ref": "definitions.json#/$defs/series" }, 14 | "seriesVersion": { "$ref": "definitions.json#/$defs/seriesVersion" }, 15 | "seriesComposition": { "$ref": "definitions.json#/$defs/seriesComposition" }, 16 | "seriesPrevious": { "$ref": "definitions.json#/$defs/shortname" }, 17 | "seriesNext": { "$ref": "definitions.json#/$defs/shortname" }, 18 | "nightly": { "$ref": "definitions.json#/$defs/nightly" }, 19 | "tests": { "$ref": "definitions.json#/$defs/tests" }, 20 | "release": { "$ref": "definitions.json#/$defs/indexfile/release" }, 21 | "title": { "$ref": "definitions.json#/$defs/title" }, 22 | "shortTitle": { "$ref": "definitions.json#/$defs/title" }, 23 | "source": { "$ref": "definitions.json#/$defs/source" }, 24 | "organization": { "$ref": "definitions.json#/$defs/organization" }, 25 | "groups": { "$ref": "definitions.json#/$defs/groups" }, 26 | "categories": { "$ref": "definitions.json#/$defs/categories" }, 27 | "standing": { "$ref": "definitions.json#/$defs/standing" }, 28 | "obsoletedBy": { "$ref": "definitions.json#/$defs/obsoletedBy" }, 29 | "formerNames": { "$ref": "definitions.json#/$defs/formerNames" } 30 | }, 31 | "required": [ 32 | "url", "shortname", "series", "seriesComposition", 33 | "title", "shortTitle", "source", "organization", "groups", "categories", 34 | "standing" 35 | ], 36 | "additionalProperties": false 37 | }, 38 | "minItems": 1 39 | } 40 | -------------------------------------------------------------------------------- /schema/specs.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/schema#", 3 | "$id": "https://w3c.github.io/browser-specs/schema/specs.json", 4 | 5 | "type": "array", 6 | "items": { 7 | "oneOf": [ 8 | { 9 | "type": "string", 10 | "pattern": 
"^https://[^\\s]+(\\s(delta|current|multipage))?$" 11 | }, 12 | { 13 | "type": "object", 14 | "properties": { 15 | "url": { "$ref": "definitions.json#/$defs/url" }, 16 | "shortname": { "$ref": "definitions.json#/$defs/shortname" }, 17 | "forkOf": { "$ref": "definitions.json#/$defs/shortname" }, 18 | "series": { "$ref": "definitions.json#/$defs/series" }, 19 | "seriesVersion": { "$ref": "definitions.json#/$defs/seriesVersion" }, 20 | "seriesComposition": { "$ref": "definitions.json#/$defs/seriesComposition" }, 21 | "nightly": { "$ref": "definitions.json#/$defs/nightly" }, 22 | "release": { "$ref": "definitions.json#/$defs/specsfile/release" }, 23 | "tests": { "$ref": "definitions.json#/$defs/tests" }, 24 | "title": { "$ref": "definitions.json#/$defs/title" }, 25 | "shortTitle": { "$ref": "definitions.json#/$defs/title" }, 26 | "organization": { "$ref": "definitions.json#/$defs/organization" }, 27 | "groups": { "$ref": "definitions.json#/$defs/groups" }, 28 | "categories": { "$ref": "definitions.json#/$defs/categories-specs" }, 29 | "standing": { "$ref": "definitions.json#/$defs/standing" }, 30 | "obsoletedBy": { "$ref": "definitions.json#/$defs/obsoletedBy" }, 31 | "formerNames": { "$ref": "definitions.json#/$defs/formerNames" }, 32 | "forceCurrent": { "type": "boolean" }, 33 | "multipage": { 34 | "type": "string", 35 | "enum": [ 36 | "all", "release", "nightly" 37 | ] 38 | } 39 | }, 40 | "required": ["url"], 41 | "additionalProperties": false 42 | } 43 | ] 44 | }, 45 | "minItems": 1 46 | } 47 | -------------------------------------------------------------------------------- /test/extract-pages.js: -------------------------------------------------------------------------------- 1 | import { describe, it, before, after } from "node:test"; 2 | import assert from "node:assert"; 3 | import puppeteer from "puppeteer"; 4 | import extractPages from "../src/extract-pages.js"; 5 | 6 | describe("extract-pages module", function () { 7 | // Long timeout since tests need to send 
network requests 8 | const timeout = { 9 | timeout: 30000 10 | }; 11 | 12 | let browser; 13 | 14 | before(async () => { 15 | browser = await puppeteer.launch(); 16 | }); 17 | 18 | after(async () => { 19 | await browser.close(); 20 | }); 21 | 22 | it("extracts pages from the SVG2 spec", timeout, async () => { 23 | const url = "https://svgwg.org/svg2-draft/"; 24 | const pages = await extractPages(url, browser); 25 | assert.ok(pages.length > 20); 26 | }); 27 | 28 | it("extracts pages from the HTML spec", timeout, async () => { 29 | const url = "https://html.spec.whatwg.org/multipage/"; 30 | const pages = await extractPages(url, browser); 31 | assert.ok(pages.length > 20); 32 | }); 33 | 34 | it("extracts pages from the CSS 2.1 spec", timeout, async () => { 35 | const url = "https://www.w3.org/TR/CSS21/"; 36 | const pages = await extractPages(url, browser); 37 | assert.ok(pages.length > 20); 38 | }); 39 | 40 | it("does not include the index page as first page", timeout, async () => { 41 | const url = "https://svgwg.org/svg2-draft/" 42 | const pages = await extractPages(url, browser); 43 | assert.ok(!pages.find(page => page.url)); 44 | }); 45 | 46 | it("does not get lost when given a single-page ReSpec spec", timeout, async () => { 47 | const url = "https://w3c.github.io/presentation-api/"; 48 | const pages = await extractPages(url, browser); 49 | assert.deepStrictEqual(pages, []); 50 | }); 51 | 52 | it("does not get lost when given a single-page Bikeshed spec", timeout, async () => { 53 | const url = "https://w3c.github.io/mediasession/"; 54 | const pages = await extractPages(url, browser); 55 | assert.deepStrictEqual(pages, []); 56 | }); 57 | }); 58 | -------------------------------------------------------------------------------- /src/compute-shorttitle.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Module that exports a function that takes a title as input and returns a 3 | * meaningful short title out of it, or the 
/**
 * Compute a meaningful short title out of a full spec title, or return the
 * full title as-is when it cannot be abbreviated.
 *
 * The function drops common qualifiers such as "Module", "Level",
 * "Specification", "(Living) Standard", edition/version indications, and
 * re-appends the level number when the title ends with one. When the title
 * contains a parenthesized abbreviation, the abbreviation is used instead.
 *
 * @param {String} title Full spec title
 * @returns {String} Short title (the input itself when falsy)
 */
export default function computeShortTitle(title) {
  if (!title) {
    return title;
  }

  // Handle HTTP/1.1 specs separately to preserve feature name after "HTTP/1.1"
  const httpStart = 'Hypertext Transfer Protocol (HTTP/1.1): ';
  if (title.startsWith(httpStart)) {
    return 'HTTP/1.1 ' + title.substring(httpStart.length);
  }

  // Remember the level number, re-appended after abbreviation
  const level = title.match(/\s(\d+(\.\d+)?)$/);
  const shortTitle = title
    .trim()
    .replace(/\s/g, ' ')                  // Replace non-breaking spaces
    .replace(/ \d+(\.\d+)?$/, '')         // Drop level number for now
    .replace(/(:| -)? Level$/i, '')       // Drop "Level"
    .replace(/ \(v\d+(\.\d+)?\)/i, '')    // Drop "(vx.y)". Fixed: "\(\v"
                                          // matched a vertical tab, not "v",
                                          // so "(v1.1)" was never stripped
    .replace(/\(Draft\)/i, '')            // Drop "(Draft)" indication
    .replace(/ Module$/i, '')             // Drop "Module" (now followed by level)
    .replace(/ Proposal$/i, '')           // Drop "Proposal" (TC39 proposals)
    .replace(/ Specification$/i, '')      // Drop "Specification"
    .replace(/ Standard$/i, '')           // Drop "Standard" and "Living Standard"
    .replace(/ Living$/i, '')
    .replace(/ \([^\)]+ Edition\)/i, '')  // Drop edition indication
    .replace(/^.*\(([^\)]+)\).*$/, '$1')  // Use abbr between parentheses
    .trim();

  if (level) {
    return shortTitle + " " + level[1];
  }
  else {
    return shortTitle;
  }
};
| 32 | it("returns `pending` for a Collection of Interesting Ideas", function () { 33 | const spec = { nightly: { status: "A Collection of Interesting Ideas" } }; 34 | assert.strictEqual(computeStanding(spec), "pending"); 35 | }); 36 | 37 | it("returns `pending` for an Unofficial Proposal Draft", function () { 38 | const spec = { nightly: { status: "Unofficial Proposal Draft" } }; 39 | assert.strictEqual(computeStanding(spec), "pending"); 40 | }); 41 | 42 | it("returns `discontinued` for an Discontinued Draft", function () { 43 | const spec = { nightly: { status: "Discontinued Draft" } }; 44 | assert.strictEqual(computeStanding(spec), "discontinued"); 45 | }); 46 | 47 | it("returns `good` for an ISO spec", function () { 48 | const spec = { url: "https://www.iso.org/standard/85253.html" }; 49 | assert.strictEqual(computeStanding(spec), "good"); 50 | }); 51 | 52 | it("returns the standing that the spec says it has", function () { 53 | const spec = { 54 | standing: "good", 55 | nightly: { status: "Unofficial Proposal Draft" } 56 | }; 57 | assert.strictEqual(computeStanding(spec), "good"); 58 | }); 59 | }); 60 | -------------------------------------------------------------------------------- /.github/workflows/submit-suggested-spec.yml: -------------------------------------------------------------------------------- 1 | name: Create a PR for the suggested spec 2 | 3 | on: 4 | issue_comment: 5 | # Details for types below can be found at: 6 | # https://docs.github.com/en/webhooks-and-events/webhooks/webhook-events-and-payloads?actionType=edited#issues 7 | types: 8 | # Job triggered when an issue comment is created (or edited) 9 | - created 10 | - edited 11 | 12 | jobs: 13 | prepare: 14 | name: Create pull request if needed 15 | runs-on: ubuntu-latest 16 | # TODO: Check comment user permissions more thoroughly, for instance using 17 | # the REST API (see link below). 
This is going to be needed if we decide to 18 | # remove the PR step and have the bot merge directly to the `main` branch. 19 | # https://docs.github.com/en/rest/collaborators/collaborators?apiVersion=2022-11-28#get-repository-permissions-for-a-user 20 | if: ${{ github.event.comment.author_association == 'MEMBER' && contains(github.event.comment.body, '@browser-specs-bot ') }} 21 | steps: 22 | # Starting with Ubuntu 23+, a security feature prevents running Puppeteer 23 | # by default. It needs to be disabled. Using the "easiest" option, see: 24 | # https://chromium.googlesource.com/chromium/src/+/main/docs/security/apparmor-userns-restrictions.md 25 | # https://github.com/puppeteer/puppeteer/pull/13196/files 26 | - name: Disable AppArmor 27 | run: echo 0 | sudo tee /proc/sys/kernel/apparmor_restrict_unprivileged_userns 28 | 29 | - name: Setup node.js 30 | uses: actions/setup-node@v4 31 | with: 32 | node-version: 20 33 | 34 | - name: Checkout code 35 | uses: actions/checkout@v4 36 | with: 37 | ref: main 38 | 39 | - name: Install dependencies 40 | run: npm ci 41 | 42 | - name: Setup git 43 | run: | 44 | git config user.name "browser-specs-bot" 45 | git config user.email "<>" 46 | git remote set-url --push origin https://x-access-token:$GITHUB_TOKEN@github.com/$GITHUB_REPOSITORY 47 | env: 48 | GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }} 49 | 50 | - name: Add suggested spec to specs.json 51 | if: ${{ contains(github.event.comment.body, '@browser-specs-bot pr') }} 52 | run: npx browser-specs build $NUMBER --pr 53 | env: 54 | GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }} 55 | NUMBER: ${{ github.event.issue.number }} 56 | -------------------------------------------------------------------------------- /src/data/multispecs-repos.json: -------------------------------------------------------------------------------- 1 | { 2 | "w3c/css-houdini-drafts": { 3 | "url": "https://drafts.css-houdini.org/$path/", 4 | "shortname": { 5 | "pattern": "drafts\\.css-houdini\\.org/([^/]+)/" 6 | }, 7 | 
"exclude": [ 8 | "images" 9 | ] 10 | }, 11 | "w3c/csswg-drafts": { 12 | "url": "https://drafts.csswg.org/$path/", 13 | "shortname": { 14 | "pattern": "drafts\\.csswg\\.org/([^/]+)/" 15 | }, 16 | "exclude": [ 17 | "shared", 18 | "indexes", 19 | "bin", 20 | "css-module", 21 | "css-module-bikeshed" 22 | ] 23 | }, 24 | "w3c/data-shapes": { 25 | "url": "https://w3c.github.io/data-shapes/$path/", 26 | "shortname": { 27 | "pattern": "w3c\\.github\\.io/data-shapes/([^/]+)/" 28 | }, 29 | "exclude": [ 30 | "admin", 31 | "unmaintained" 32 | ] 33 | }, 34 | "w3c/fxtf-drafts": { 35 | "url": "https://drafts.fxtf.org/$path/", 36 | "shortname": { 37 | "pattern": "drafts\\.fxtf\\.org/([^/]+)/" 38 | }, 39 | "exclude": [ 40 | "shared", 41 | "web-anim" 42 | ] 43 | }, 44 | "w3c/pronunciation": { 45 | "url": "https://w3c.github.io/pronunciation/$path/", 46 | "shortname": { 47 | "pattern": "w3c\\.github\\.io/pronunciation/([^/]+)/", 48 | "prefix": "pronunciation-" 49 | }, 50 | "exclude": [ 51 | "common", 52 | "docs", 53 | "presentations", 54 | "samples", 55 | "scripts" 56 | ] 57 | }, 58 | "w3c/sdw": { 59 | "url": "https://w3c.github.io/sdw/$path/", 60 | "shortname": { 61 | "pattern": "w3c\\.github\\.io/sdw/([^/]+)/", 62 | "prefix": "sdw-" 63 | }, 64 | "exclude": [ 65 | "UseCases", 66 | "jwoc", 67 | "meetings", 68 | "proposals", 69 | "publishing-snapshots", 70 | "resources", 71 | "roadmap", 72 | "sdw-sosa-ssn", 73 | "subsetting" 74 | ] 75 | }, 76 | "w3c/sdw-sosa-ssn": { 77 | "url": "https://w3c.github.io/sdw-sosa-ssn/$path/", 78 | "shortname": { 79 | "pattern": "w3c\\.github\\.io/sdw-sosa-ssn/([^/]+)/", 80 | "prefix": "sdw-" 81 | }, 82 | "exclude": [ 83 | "ogcapi-sosa" 84 | ] 85 | }, 86 | "w3c/svgwg": { 87 | "url": "https://svgwg.org/$path/", 88 | "shortname": { 89 | "pattern": "svgwg\\.org/specs/([^/]+)/", 90 | "prefix": "svg-" 91 | }, 92 | "path": "specs", 93 | "exclude": [ 94 | "specs/template" 95 | ] 96 | } 97 | } 
-------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: Update spec info (full build) 2 | 3 | on: 4 | schedule: 5 | # At 6:10PM on Saturday and Sunday 6 | # (see build-skip-iso.yml workflow for rest of the week) 7 | - cron: '10 18 * * SAT,SUN' 8 | workflow_dispatch: 9 | 10 | jobs: 11 | fetch: 12 | runs-on: ubuntu-latest 13 | steps: 14 | # Starting with Ubuntu 23+, a security feature prevents running Puppeteer 15 | # by default. It needs to be disabled. Using the "easiest" option, see: 16 | # https://chromium.googlesource.com/chromium/src/+/main/docs/security/apparmor-userns-restrictions.md 17 | # https://github.com/puppeteer/puppeteer/pull/13196/files 18 | - name: Disable AppArmor 19 | run: echo 0 | sudo tee /proc/sys/kernel/apparmor_restrict_unprivileged_userns 20 | 21 | - name: Checkout repo 22 | uses: actions/checkout@v4 23 | with: 24 | # Need to checkout all history as job also needs to access the 25 | # xxx-specs@latest branches 26 | fetch-depth: 0 27 | 28 | - name: Setup node.js 29 | uses: actions/setup-node@v4 30 | with: 31 | node-version: 20 32 | cache: 'npm' 33 | 34 | - name: Setup environment 35 | run: npm ci 36 | 37 | - name: Build new index file 38 | env: 39 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 40 | run: npm run build 41 | 42 | - name: Test new index file 43 | env: 44 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 45 | run: | 46 | npm run test-index 47 | 48 | - name: Bump minor version of packages if needed 49 | run: node src/bump-packages-minor.js 50 | 51 | - name: Commit updates 52 | run: | 53 | git config user.name "fetch-info bot" 54 | git config user.email "<>" 55 | git commit -m "[data] Update spec info" -a || true 56 | 57 | - name: Push changes 58 | uses: ad-m/github-push-action@v0.8.0 59 | with: 60 | github_token: ${{ secrets.GITHUB_TOKEN }} 61 | branch: main 62 | 63 | - name: Prepare packages data 
64 | run: node src/prepare-packages.js 65 | 66 | - name: Create/Update pre-release PR for web-specs 67 | run: node src/prepare-release.js web-specs 68 | env: 69 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 70 | 71 | - name: Create/Update pre-release PR for browser-specs 72 | run: node src/prepare-release.js browser-specs 73 | env: 74 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 75 | 76 | # Following step runs even if a previous step failed to upload intermediary 77 | # build files 78 | - name: Store intermediary steps as artifacts 79 | if: ${{ !cancelled() }} 80 | uses: actions/upload-artifact@v4 81 | with: 82 | name: buildsteps 83 | path: | 84 | .buildsteps 85 | -------------------------------------------------------------------------------- /.github/workflows/build-skip-iso.yml: -------------------------------------------------------------------------------- 1 | name: Update spec info (skip ISO specs) 2 | 3 | on: 4 | schedule: 5 | # Every 6 hours from Monday to Friday 6 | # (see build.yml workflow for Saturday and Sunday) 7 | - cron: '10 */6 * * MON-FRI' 8 | workflow_dispatch: 9 | 10 | jobs: 11 | fetch: 12 | runs-on: ubuntu-latest 13 | steps: 14 | # Starting with Ubuntu 23+, a security feature prevents running Puppeteer 15 | # by default. It needs to be disabled. 
Using the "easiest" option, see: 16 | # https://chromium.googlesource.com/chromium/src/+/main/docs/security/apparmor-userns-restrictions.md 17 | # https://github.com/puppeteer/puppeteer/pull/13196/files 18 | - name: Disable AppArmor 19 | run: echo 0 | sudo tee /proc/sys/kernel/apparmor_restrict_unprivileged_userns 20 | 21 | - name: Checkout repo 22 | uses: actions/checkout@v4 23 | with: 24 | # Need to checkout all history as job also needs to access the 25 | # xxx-specs@latest branches 26 | fetch-depth: 0 27 | 28 | - name: Setup node.js 29 | uses: actions/setup-node@v4 30 | with: 31 | node-version: 20 32 | cache: 'npm' 33 | 34 | - name: Setup environment 35 | run: npm ci 36 | 37 | - name: Build new index file 38 | env: 39 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 40 | run: npm run build-skip-iso 41 | 42 | - name: Test new index file 43 | env: 44 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 45 | run: | 46 | npm run test-index 47 | 48 | - name: Bump minor version of packages if needed 49 | run: node src/bump-packages-minor.js 50 | 51 | - name: Commit updates 52 | run: | 53 | git config user.name "fetch-info bot" 54 | git config user.email "<>" 55 | git commit -m "[data] Update spec info" -a || true 56 | 57 | - name: Push changes 58 | uses: ad-m/github-push-action@v0.8.0 59 | with: 60 | github_token: ${{ secrets.GITHUB_TOKEN }} 61 | branch: main 62 | 63 | - name: Prepare packages data 64 | run: node src/prepare-packages.js 65 | 66 | - name: Create/Update pre-release PR for web-specs 67 | run: node src/prepare-release.js web-specs 68 | env: 69 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 70 | 71 | - name: Create/Update pre-release PR for browser-specs 72 | run: node src/prepare-release.js browser-specs 73 | env: 74 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 75 | 76 | # Following step runs even if a previous step failed to upload intermediary 77 | # build files 78 | - name: Store intermediary steps as artifacts 79 | if: ${{ !cancelled() }} 80 | uses: 
actions/upload-artifact@v4 81 | with: 82 | name: buildsteps 83 | path: | 84 | .buildsteps 85 | -------------------------------------------------------------------------------- /test/compute-categories.js: -------------------------------------------------------------------------------- 1 | import { describe, it } from "node:test"; 2 | import assert from "node:assert"; 3 | import computeCategories from "../src/compute-categories.js"; 4 | 5 | describe("compute-categories module", () => { 6 | it("sets `browser` category when group targets browsers", function () { 7 | const spec = { 8 | groups: [ { name: "Web Applications Working Group" } ] 9 | }; 10 | assert.deepStrictEqual(computeCategories(spec), ["browser"]); 11 | }); 12 | 13 | it("sets `browser` category when one of the groups targets browsers", function () { 14 | const spec = { 15 | groups: [ 16 | { name: "Accessible Platform Architectures Working Group" }, 17 | { name: "Web Applications Working Group" } 18 | ] 19 | }; 20 | assert.deepStrictEqual(computeCategories(spec), ["browser"]); 21 | }); 22 | 23 | it("does not set a `browser` category when group does not target browsers", function () { 24 | const spec = { 25 | groups: [ { name: "Accessible Platform Architectures Working Group" } ] 26 | }; 27 | assert.deepStrictEqual(computeCategories(spec), []); 28 | }); 29 | 30 | it("does not set a `browser` category when all groups does not target browsers", function () { 31 | const spec = { 32 | groups: [ { name: "Accessible Platform Architectures Working Group" } ] 33 | }; 34 | assert.deepStrictEqual(computeCategories(spec), []); 35 | }); 36 | 37 | it("resets categories when asked to", function () { 38 | const spec = { 39 | groups: [ { name: "Web Applications Working Group" } ], 40 | categories: "reset" 41 | }; 42 | assert.deepStrictEqual(computeCategories(spec), []); 43 | }); 44 | 45 | it("drops browser when asked to", function () { 46 | const spec = { 47 | groups: [ { name: "Web Applications Working Group" } ], 48 | 
/**
 * Request a review on the pending pre-release pull request for the given
 * package, if one exists.
 *
 * The function looks for an open PR whose head branch starts with
 * `release-<type>-`, then requests a review from each user in the
 * module-level `reviewers` list who is neither already a requested reviewer
 * nor the PR author. The function does nothing when no pending PR is found.
 *
 * @function
 * @param {String} type Package name ("web-specs" or "browser-specs")
 */
async function requestReview(type) {
  console.log(`Check pre-release PR for the ${type} package`);
  const searchResponse = await octokit.search.issuesAndPullRequests({
    q: `repo:${owner}/${repo} type:pr state:open head:release-${type}-`
  });
  // Only the first match is considered (at most one pending PR is expected)
  const found = searchResponse?.data?.items?.[0];

  // Fetch full PR details (search results lack requested_reviewers info)
  const pendingPRResponse = found ?
    await octokit.pulls.get({
      owner, repo,
      pull_number: found.number
    }) :
    null;
  const pendingPR = pendingPRResponse?.data;
  console.log(pendingPR ?
    `- Found pending pre-release PR: ${pendingPR.title} (#${pendingPR.number})` :
    "- No pending pre-release PR");
  if (!pendingPR) {
    return;
  }

  console.log(`- Targeted list of reviewers: ${reviewers.join(", ")}`);
  console.log(`- Pending PR was created by: ${pendingPR.user.login}`);
  const currentReviewers = pendingPR.requested_reviewers.map(r => r.login);
  console.log(`- Current reviewers: ${currentReviewers.length > 0 ? currentReviewers.join(", ") : "none"}`);
  // Skip reviewers already requested, and the PR author (a user cannot
  // review their own PR)
  const reviewersToAdd = reviewers.filter(login => !currentReviewers.includes(login) && pendingPR.user.login !== login);
  console.log(`- Reviewers to add: ${reviewersToAdd.length > 0 ? reviewersToAdd.join(", ") : "none"}`);
  if (reviewersToAdd.length > 0) {
    await octokit.pulls.requestReviewers({
      owner,
      repo,
      pull_number: pendingPR.number,
      reviewers: reviewersToAdd
    });
    console.log("- Reviewers added");
  }
}
process.env.GITHUB_TOKEN; 64 | if (!GITHUB_TOKEN) { 65 | console.error("GITHUB_TOKEN must be set to some personal access token as an env variable or in a config.json file"); 66 | process.exit(1); 67 | } 68 | 69 | const octokit = new Octokit({ 70 | auth: GITHUB_TOKEN, 71 | //log: console 72 | }); 73 | 74 | requestReview("web-specs") 75 | .then(() => console.log()) 76 | .then(() => requestReview("browser-specs")) 77 | .then(() => { 78 | console.log(); 79 | console.log("== The end =="); 80 | }) 81 | .catch(err => { 82 | console.error(err); 83 | process.exit(1); 84 | }); -------------------------------------------------------------------------------- /src/prepare-packages.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Prepare the contents of the NPM packages 3 | * 4 | * NPM packages include browser-specs. 5 | * 6 | * These packages contain a filtered view of the list of specs. 7 | * 8 | * The script copies relevant files to the "packages" folders. 
9 | * 10 | * node src/prepare-packages.js 11 | */ 12 | 13 | import fs from 'node:fs/promises'; 14 | import path from 'node:path'; 15 | import util from 'node:util'; 16 | import { fileURLToPath } from 'node:url'; 17 | import loadJSON from './load-json.js'; 18 | 19 | const scriptPath = path.dirname(fileURLToPath(import.meta.url)); 20 | 21 | async function preparePackages() { 22 | console.log('Load index file'); 23 | const index = await loadJSON(path.join(scriptPath, '..', 'index.json')); 24 | console.log(`- ${index.length} specs in index file`); 25 | 26 | const packages = [ 27 | { 28 | name: 'web-specs', 29 | filter: spec => true 30 | }, 31 | { 32 | name: 'browser-specs', 33 | filter: spec => 34 | spec.categories?.includes('browser') && 35 | spec.standing === 'good' 36 | } 37 | ]; 38 | 39 | for (const { name, filter } of packages) { 40 | console.log(); 41 | console.log(`Prepare the ${name} package`); 42 | 43 | // Only keep relevant specs targeted at browsers 44 | const specs = index.filter(filter); 45 | console.log(`- ${specs.length}/${index.length} specs to include in the package`); 46 | 47 | // Write packages/${name}/index.json 48 | await fs.writeFile( 49 | path.resolve(scriptPath, '..', 'packages', name, 'index.json'), 50 | JSON.stringify(specs, null, 2), 51 | 'utf8'); 52 | console.log(`- packages/${name}/index.json updated`); 53 | 54 | // Update README.md 55 | const commonReadme = await fs.readFile(path.resolve(scriptPath, '..', 'README.md'), 'utf8'); 56 | const packageReadmeFile = path.resolve(scriptPath, '..', 'packages', name, 'README.md'); 57 | let packageReadme = await fs.readFile(packageReadmeFile, 'utf8'); 58 | const commonBlocks = [ 59 | { start: '<!-- COMMON-TOC: start -->', end: '<!-- COMMON-TOC: end -->' }, 60 | { start: '<!-- COMMON-BODY: start -->', end: '<!-- COMMON-BODY: end -->' } 61 | ]; 62 | for (const { start, end } of commonBlocks) { 63 | const [commonStart, commonEnd] = [commonReadme.indexOf(start), commonReadme.indexOf(end)]; 64 | const 
[packageStart, packageEnd] = [packageReadme.indexOf(start), packageReadme.indexOf(end)]; 65 | const commonBlock = commonReadme.substring(commonStart, commonEnd); 66 | packageReadme = packageReadme.substring(0, packageStart) + 67 | commonBlock + 68 | packageReadme.substring(packageEnd); 69 | } 70 | await fs.writeFile(packageReadmeFile, packageReadme, 'utf8'); 71 | console.log(`- packages/${name}/README.md updated`); 72 | } 73 | } 74 | 75 | /******************************************************************************* 76 | Kick things off 77 | *******************************************************************************/ 78 | preparePackages() 79 | .then(() => { 80 | console.log(); 81 | console.log("== The end =="); 82 | }) 83 | .catch(err => { 84 | console.error(err); 85 | process.exit(1); 86 | }); -------------------------------------------------------------------------------- /packages/web-specs/README.md: -------------------------------------------------------------------------------- 1 | # Web browser specifications 2 | 3 | This repository contains a curated list of technical Web specifications. 4 | 5 | This list is meant to be an up-to-date input source for projects that run 6 | analyses on web technologies to create reports on test coverage, 7 | cross-references, WebIDL, quality, etc. 8 | 9 | 10 | ## Table of Contents 11 | 12 | - [Installation and usage](#installation-and-usage) 13 | - [Upgrading](#upgrading) 14 | <!-- COMMON-TOC: start --><!-- COMMON-TOC: end --> 15 | - [Spec selection criteria](#spec-selection-criteria) 16 | 17 | 18 | ## Installation and usage 19 | 20 | The list is distributed as an NPM package. 
To incorporate it into your project,
run:

```bash
npm install web-specs
```

You can then retrieve the list from your Node.js program:

```js
const specs = require("web-specs");
console.log(JSON.stringify(specs, null, 2));
```

Alternatively, you can fetch [`index.json`](https://w3c.github.io/browser-specs/index.json)
or retrieve the list from the [`web-specs@latest` branch](https://github.com/w3c/browser-specs/tree/web-specs%40latest).

## Upgrading

The only breaking change in version `3.x` is that some spec entries may not
have a `nightly` property. This happens for specs that are not public. An
example of a non-public spec is an ISO standard. In such cases, the `url`
property targets the public page that describes the spec on the standardization
organization's web site. To upgrade from version `2.x` to version `3.x`, make
sure that your code can handle specs without a `nightly` property.

<!-- COMMON-BODY: start -->
<!-- COMMON-BODY: end -->

## Spec selection criteria

This repository contains a curated list of technical Web specifications that are
deemed relevant for the Web platform. Roughly speaking, this list should match
the list of web specs actively developed by W3C, the WHATWG and a few other
organizations.

To try to make things more concrete, the following criteria are used to assess
whether a spec should a priori appear in the list:

1. The spec is stable or in development. Superseded and abandoned specs will not
appear in the list. For instance, the list contains the HTML LS spec, but not
HTML 4.01 or HTML 5.
2. The spec is being developed by a well-known standardization or
pre-standardization group. Today, this means a W3C Working Group or Community
Group, the WHATWG, the IETF, the TC39 group or the Khronos Group.
4. 
The spec sits at the application layer or is "close to it". For instance, 66 | most IETF specs are likely out of scope, but some that are exposed to Web developers are in scope. 67 | 5. The spec defines normative content (terms, CSS, IDL), or it contains 68 | informative content that other specs often need to refer to (e.g. guidelines 69 | from horizontal activities such as accessibility, internationalization, privacy 70 | and security). 71 | 72 | There are and there will be exceptions to the rule. Besides, some of these 73 | criteria remain fuzzy and/or arbitrary, and we expect them to evolve over time, 74 | typically driven by needs expressed by projects that may want to use the list. -------------------------------------------------------------------------------- /.github/workflows/check-suggested-spec.yml: -------------------------------------------------------------------------------- 1 | name: Check suggested spec 2 | 3 | on: 4 | issues: 5 | # Details for types below can be found at: 6 | # https://docs.github.com/en/webhooks-and-events/webhooks/webhook-events-and-payloads?actionType=edited#issues 7 | types: 8 | # Job triggered when an issue is created or re-opened 9 | - opened 10 | - reopened 11 | 12 | # or gets "edited" (title or body updated) 13 | - edited 14 | workflow_dispatch: 15 | inputs: 16 | issueNumber: 17 | description: 'Issue number' 18 | required: true 19 | type: string 20 | 21 | jobs: 22 | check-spec: 23 | name: Check suggested spec 24 | runs-on: ubuntu-latest 25 | # We're only interested in "new spec" issues 26 | if: ${{ github.event_name == 'workflow_dispatch' || contains(github.event.issue.labels.*.name, 'new spec') }} 27 | steps: 28 | # Starting with Ubuntu 23+, a security feature prevents running Puppeteer 29 | # by default. It needs to be disabled. 
Using the "easiest" option, see: 30 | # https://chromium.googlesource.com/chromium/src/+/main/docs/security/apparmor-userns-restrictions.md 31 | # https://github.com/puppeteer/puppeteer/pull/13196/files 32 | - name: Disable AppArmor 33 | run: echo 0 | sudo tee /proc/sys/kernel/apparmor_restrict_unprivileged_userns 34 | 35 | - name: Setup node.js 36 | uses: actions/setup-node@v4 37 | with: 38 | node-version: 20 39 | 40 | - name: Checkout code 41 | uses: actions/checkout@v4 42 | with: 43 | ref: main 44 | 45 | - name: Install dependencies 46 | run: npm ci 47 | 48 | - name: Check suggested spec 49 | run: npx browser-specs build $NUMBER --reffy > res.md 50 | env: 51 | GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }} 52 | NUMBER: ${{ github.event_name == 'workflow_dispatch' && inputs.issueNumber || github.event.issue.number }} 53 | 54 | # Report the result within a comment, using the `gh` command. 55 | # Unfortunately, the `--edit-last` option does not create a comment if 56 | # none exists. Ideally, we would check whether we created a comment first 57 | # and set/reset the option accordingly. To avoid creating more logic, 58 | # we'll just try the command with the option, and then without if that 59 | # fails. 
Note the nuance between "conclusion" and "outcome" in: 60 | # https://docs.github.com/en/actions/learn-github-actions/contexts#steps-context 61 | - name: Update last comment with result (if possible) 62 | id: update 63 | continue-on-error: true 64 | run: gh issue comment "$NUMBER" --body-file res.md --edit-last 65 | env: 66 | GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }} 67 | NUMBER: ${{ github.event_name == 'workflow_dispatch' && inputs.issueNumber || github.event.issue.number }} 68 | 69 | - name: Add new comment with result (if none existed) 70 | if: ${{ steps.update.outcome == 'failure' }} 71 | run: gh issue comment "$NUMBER" --body-file res.md 72 | env: 73 | GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }} 74 | NUMBER: ${{ github.event_name == 'workflow_dispatch' && inputs.issueNumber || github.event.issue.number }} 75 | -------------------------------------------------------------------------------- /src/parse-spec-url.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Helper method that parses a spec URL and returns some information on the 3 | * type of URL (github, /TR, WHATWG, etc.), the owning organization on GitHub 4 | * and the likely GitHub repository name. 5 | * 6 | * Note that the repository name may be incorrect for /TR specs (as spec 7 | * shortnames do not always match the name of the actual repo). 
/**
 * Helper method that parses a spec URL and returns some information on the
 * type of URL (github, /TR, WHATWG, etc.), the owning organization on GitHub
 * and the likely GitHub repository name.
 *
 * Note that the repository name may be incorrect for /TR specs (as spec
 * shortnames do not always match the name of the actual repo).
 *
 * @param {String} url Spec URL to parse
 * @returns {Object?} An object with "type", "owner" and "name" properties,
 *   or null when no information can be derived from the URL
 */
export default function parseSpecUrl(url) {
  if (!url) {
    // A string (not an Error) is thrown on purpose to preserve the
    // existing contract with callers
    throw "No URL passed as parameter";
  }

  const githubcom = url.match(/^https:\/\/github\.com\/([^\/]*)\/([^\/]*)\/?/);
  if (githubcom) {
    return { type: "github", owner: githubcom[1], name: githubcom[2] };
  }

  const githubio = url.match(/^https:\/\/([^\.]*)\.github\.io\/([^\/]*)\/?/);
  if (githubio) {
    let name = githubio[2];
    if ((name.endsWith("-aam") || name.endsWith("-aria")) && name !== "html-aria") {
      // AAM and ARIA specs moved to the ARIA mono-repo, except HTML-ARIA,
      // which is maintained by the Web Apps WG.
      name = "aria";
    }
    return { type: "github", owner: githubio[1], name };
  }

  // Note: dots in host names are escaped in all patterns below so that,
  // e.g., "tc39-es" or "specXwhatwgYorg" hosts do not match by accident
  const whatwg = url.match(/^https:\/\/([^\.]*)\.spec\.whatwg\.org\//);
  if (whatwg) {
    return { type: "custom", owner: "whatwg", name: whatwg[1] };
  }

  const tc39 = url.match(/^https:\/\/tc39\.es\/([^\/]*)\//);
  if (tc39) {
    return { type: "custom", owner: "tc39", name: tc39[1] };
  }

  const csswg = url.match(/^https?:\/\/drafts\.csswg\.org\/([^\/]*)\/?/);
  if (csswg) {
    return { type: "custom", owner: "w3c", name: "csswg-drafts" };
  }

  const ghfxtf = url.match(/^https:\/\/drafts\.fxtf\.org\/([^\/]*)\/?/);
  if (ghfxtf) {
    return { type: "custom", owner: "w3c", name: "fxtf-drafts" };
  }

  const houdini = url.match(/^https:\/\/drafts\.css-houdini\.org\/([^\/]*)\/?/);
  if (houdini) {
    return { type: "custom", owner: "w3c", name: "css-houdini-drafts" };
  }

  // All SVG specs, including the main SVG2 draft, live in the svgwg repo
  const svgwg = url.match(/^https:\/\/svgwg\.org\/specs\/([^\/]*)\/?/);
  if (svgwg) {
    return { type: "custom", owner: "w3c", name: "svgwg" };
  }
  if (url === "https://svgwg.org/svg2-draft/") {
    return { type: "custom", owner: "w3c", name: "svgwg" };
  }

  // WebGL needs to be matched before the generic Khronos registry pattern
  // to return the actual repository name
  const webgl = url.match(/^https:\/\/registry\.khronos\.org\/webgl\//);
  if (webgl) {
    return { type: "custom", owner: "khronosgroup", name: "WebGL" };
  }

  const khronos = url.match(/^https:\/\/registry\.khronos\.org\/([^\/]+)\//);
  if (khronos) {
    return { type: "custom", owner: "khronosgroup", name: khronos[1] };
  }

  const httpwg = url.match(/^https:\/\/httpwg\.org\/specs\/rfc[0-9]+\.html$/);
  if (httpwg) {
    return { type: "custom", owner: "httpwg", name: "httpwg.github.io" };
  }

  const w3cTr = url.match(/^https?:\/\/(?:www\.)?w3\.org\/TR\/([^\/]+)\/$/);
  if (w3cTr) {
    return { type: "tr", owner: "w3c", name: w3cTr[1] };
  }

  const tag = url.match(/^https?:\/\/(?:www\.)?w3\.org\/2001\/tag\/doc\/([^\/]+)\/?$/);
  if (tag) {
    return { type: "custom", owner: "w3ctag", name: tag[1] };
  }

  return null;
}
1.2", 32 | "WAI-ARIA 1.2"); 33 | }); 34 | 35 | it("drops 'Level' from title but keeps level number", () => { 36 | assertTitle( 37 | "CSS Foo Level 42", 38 | "CSS Foo 42"); 39 | }); 40 | 41 | it("drops 'Module' from title but keeps level number", () => { 42 | assertTitle( 43 | "CSS Foo Module Level 42", 44 | "CSS Foo 42"); 45 | }); 46 | 47 | it("drops '- Level' from title", () => { 48 | assertTitle( 49 | "Foo - Level 2", 50 | "Foo 2"); 51 | }); 52 | 53 | it("drops ': Level' from title", () => { 54 | assertTitle( 55 | "Foo: Level 7", 56 | "Foo 7"); 57 | }); 58 | 59 | it("drops 'Module - Level' from title", () => { 60 | assertTitle( 61 | "Foo Module - Level 3", 62 | "Foo 3"); 63 | }); 64 | 65 | it("drops 'Specification' from end of title", () => { 66 | assertTitle( 67 | "Foo Specification", 68 | "Foo"); 69 | }); 70 | 71 | it("drops 'Standard' from end of title", () => { 72 | assertTitle( 73 | "Foo Standard", 74 | "Foo"); 75 | }); 76 | 77 | it("drops 'Living Standard' from end of title", () => { 78 | assertTitle( 79 | "Foo Living Standard", 80 | "Foo"); 81 | }); 82 | 83 | it("drops edition indications", () => { 84 | assertTitle( 85 | "Foo (Second Edition) Bar", 86 | "Foo Bar"); 87 | }); 88 | 89 | it("drops '(Draft)' from title", () => { 90 | assertTitle( 91 | "(Draft) Beer", 92 | "Beer"); 93 | }); 94 | 95 | it("preserves title when needed", () => { 96 | assertTitle( 97 | "Edition Module Standard Foo", 98 | "Edition Module Standard Foo"); 99 | }); 100 | 101 | it("drops 'Proposal' from end of title", () => { 102 | assertTitle( 103 | "Hello world API Proposal", 104 | "Hello world API"); 105 | }); 106 | 107 | it("preserves scope in HTTP/1.1 spec titles", () => { 108 | assertTitle( 109 | "Hypertext Transfer Protocol (HTTP/1.1): Foo bar", 110 | "HTTP/1.1 Foo bar") 111 | }); 112 | 113 | it("applies rules in order", () => { 114 | assertTitle( 115 | " AOMedia Film Grain Synthesis (v1.0) (AFGS1) specification (Draft) ", 116 | "AFGS1") 117 | }); 118 | }); 119 | 
/**
 * Module that exports a function that takes a spec object as input that already
 * has most of its info filled out ("series", but also "release" and "nightly"
 * properties filled out) and that returns an object with a "releaseUrl" and
 * "nightlyUrl" property when possible that target the unversioned versions of
 * the spec, in other words the series itself.
 *
 * The function also takes the list of spec objects as second parameter. When
 * computing the release URL, it will iterate through the specs in the same
 * series to find one that has a release URL.
 */

/**
 * Compute series URLs from one individual spec object.
 *
 * @param {Object} spec Spec object with "shortname" and "series.shortname"
 * @returns {Object} Object with "releaseUrl" and/or "nightlyUrl" when they
 *   could be computed (may be empty)
 */
function computeSeriesUrls(spec) {
  if (!spec?.shortname || !spec.series?.shortname) {
    throw "Invalid spec object passed as parameter";
  }

  const res = {};

  // We create a "CSS" series in browser-specs for CSS2 different from the
  // "css" series for CSS snapshots but the W3C API mixes both, and the URL
  // https://www.w3.org/TR/CSS/ that would logically be computed as series URL
  // actually returns a CSS Snapshot. Let's use the spec URL instead
  // (https://www.w3.org/TR/CSS2/).
  if (spec.shortname === "CSS2") {
    res.releaseUrl = spec.url;
    // Guard against a missing nightly, as done in the branches below
    if (spec.nightly?.url) {
      res.nightlyUrl = spec.nightly.url;
    }
  }

  // If spec shortname and series shortname match, then series URLs match the
  // spec URLs.
  else if (spec.shortname === spec.series.shortname) {
    if (spec.release?.url) {
      res.releaseUrl = spec.release.url;
    }
    if (spec.nightly?.url) {
      res.nightlyUrl = spec.nightly.url;
    }
  }

  // When shortnames do not match, replace the spec shortname by the series
  // shortname in the URL
  else {
    if (spec.release?.url) {
      res.releaseUrl = spec.release.url.replace(
        new RegExp(`/${spec.shortname}/`),
        `/${spec.series.shortname}/`);
    }
    if (spec.nightly?.url) {
      res.nightlyUrl = spec.nightly.url.replace(
        new RegExp(`/${spec.shortname}/`),
        `/${spec.series.shortname}/`);
    }
  }

  return res;
}

/**
 * Exports main function that takes a spec object and returns an object with
 * properties "releaseUrl" and "nightlyUrl". Function only sets the properties
 * when needed, so returned object may be empty.
 *
 * Function also takes the list of spec objects as input. It iterates through
 * the list to look for previous versions of a spec to find a suitable release
 * URL when the latest version does not have one.
 */
export default function computeSeriesUrlsWithFallback(spec, list) {
  list = list || [];

  // Compute series info for current version of the spec if it is in the list
  const currentSpec = list.find(s => s.shortname === spec.series?.currentSpecification);
  const res = computeSeriesUrls(currentSpec ?? spec);

  // Look for a release URL in given spec and previous versions, walking the
  // "seriesPrevious" links back through the list
  if (!res.releaseUrl) {
    while (spec) {
      const prev = computeSeriesUrls(spec);
      if (prev.releaseUrl) {
        res.releaseUrl = prev.releaseUrl;
        break;
      }
      spec = list.find(s => s.shortname === spec.seriesPrevious);
      if (!spec) {
        break;
      }
    }
  }
  return res;
}
/**
 * Module that exports a function that takes a spec object that already has its
 * `groups` property set as input, and that returns a list of categories for
 * the spec.
 *
 * Some Working Groups do not develop specifications directly targeted at
 * browsers. Specs from these Working Groups should not be flagged with a
 * "browser" category. Ideally, we'd gather that information from some
 * authoritative source, but that information is not available, so let's
 * maintain a short list of working groups to catch main cases.
 *
 * Note: Closed groups may still need to appear in that list when specs they
 * developed are in browser-specs (for example, the Audiobooks Working Group)
 */
const nonBrowserGroups = [
  "Accessibility Guidelines Working Group",
  "Accessible Platform Architectures Working Group",
  "Advisory Board",
  "Audiobooks Working Group",
  "Data Shapes Working Group",
  "Dataset Exchange Working Group",
  "Decentralized Identifier Working Group",
  "Distributed Tracing Working Group",
  "JSON-LD Working Group",
  "Linked Web Storage Working Group",
  "MiniApps Working Group",
  "Patents and Standards Interest Group",
  "Publishing Maintenance Working Group",
  "RDF & SPARQL Working Group",
  "RDF Dataset Canonicalization and Hash Working Group",
  "RDF-star Working Group",
  "Spatial Data on the Web Interest Group",
  "Spatio-temporal Data on the Web Working Group",
  "Technical Architecture Group",
  "Verifiable Credentials Working Group",
  "Web of Things Working Group"
];


/**
 * Exports main function that takes a spec object and returns a list of
 * categories for the spec.
 *
 * Function may return an empty array. If the spec object contains a
 * `categories` property, the list of categories is adjusted accordingly. For
 * instance, if the spec object contains `+browser`, `browser` is added to the
 * list. If it contains `-browser`, `browser` won't appear in the list. If it
 * contains `reset`, the function does not attempt to compute a list but rather
 * returns the list of categories in the spec object.
 *
 * @param {Object} spec Spec object with a `groups` property
 * @returns {Array(String)} Computed list of categories (no duplicates)
 * @throws {String} when the spec object is empty or has no `groups` property
 */
export default function computeCategories(spec) {
  if (!spec || !spec.groups) {
    throw "Invalid spec object passed as parameter";
  }

  let list = [];
  // The `categories` property may be a single string or an array of
  // incremental updates
  const requestedCategories = (typeof spec.categories === "string") ?
    [spec.categories] :
    (spec.categories || []);

  // All specs target browsers by default unless the spec object says otherwise
  if (!requestedCategories.includes("reset")) {
    const browserGroup = spec.groups.find(group => !nonBrowserGroups.includes(group.name));
    if (browserGroup) {
      list.push("browser");
    }
  }

  // Apply requested incremental updates, making sure that a "+" increment
  // does not create a duplicate entry (e.g. "+browser" on a spec that
  // already got "browser" from its group)
  requestedCategories.filter(incr => (incr !== "reset")).forEach(incr => {
    const category = incr.substring(1);
    if (incr.startsWith("+")) {
      if (!list.includes(category)) {
        list.push(category);
      }
    }
    else {
      list = list.filter(cat => cat !== category);
    }
  });

  return list;
}
To incorporate it into your project,
run:

```bash
npm install browser-specs
```

You can then retrieve the list from your Node.js program:

```js
const specs = require("browser-specs");
console.log(JSON.stringify(specs, null, 2));
```

Alternatively, you can fetch [`index.json`](https://w3c.github.io/browser-specs/index.json)
or retrieve the list from the [`web-specs@latest` branch](https://github.com/w3c/browser-specs/tree/web-specs%40latest),
and filter the resulting list to only preserve specs that have `"browser"` in
their `categories` property.

## Upgrading

The only breaking change in version `4.x` is that some spec entries may not
have a `nightly` property. This happens for specs that are not public. An
example of a non-public spec is an ISO standard. In such cases, the `url`
property targets the public page that describes the spec on the standardization
organization's web site. To upgrade from version `3.x` to version `4.x`, make
sure that your code can handle specs without a `nightly` property.

<!-- COMMON-BODY: start -->
<!-- COMMON-BODY: end -->

## Spec selection criteria

This repository contains a curated list of technical Web specifications that are
deemed relevant for Web browsers. Roughly speaking, this list should match the
list of specs that appear in projects such as [Web Platform
Tests](https://github.com/web-platform-tests/wpt) or
[MDN](https://developer.mozilla.org/).

To try to make things more concrete, the following criteria are used to assess
whether a spec should a priori appear in the list:

1. The spec is stable or in development. Superseded and abandoned specs will not
appear in the list. For instance, the list contains the HTML LS spec, but not
HTML 4.01 or HTML 5.
2. 
The spec is being developed by a well-known standardization or 66 | pre-standardization group. Today, this means a W3C Working Group or Community 67 | Group, the WHATWG, the IETF, the TC39 group or the Khronos Group. 68 | 3. Web browsers expressed some level of support for the spec, e.g. through a 69 | public intent to implement. 70 | 4. The spec sits at the application layer or is "close to it". For instance, 71 | most IETF specs are likely out of scope, but some that are exposed to Web developers are in scope. 72 | 5. The spec defines normative content (terms, CSS, IDL), or it contains 73 | informative content that other specs often need to refer to (e.g. guidelines 74 | from horizontal activities such as accessibility, internationalization, privacy 75 | and security). 76 | 77 | There are and there will be exceptions to the rule. Besides, some of these 78 | criteria remain fuzzy and/or arbitrary, and we expect them to evolve over time, 79 | typically driven by needs expressed by projects that may want to use the list. 
--------------------------------------------------------------------------------
/test/compute-repository.js:
--------------------------------------------------------------------------------
import { describe, it } from "node:test";
import assert from "node:assert";
import computeRepo from "../src/compute-repository.js";

// Tests for the compute-repository module: the GitHub repository URL should
// be derived from a spec's nightly URL for the various hosting patterns
// (github.com, *.github.io, WHATWG, TC39, CSS drafts, Khronos, etc.)
describe("compute-repository module", async () => {
  // Helper: run computeRepo on a single one-off spec object built from the
  // given nightly URL, and return the computed repository
  async function computeSingleRepo(url) {
    const spec = { nightly: { url } };
    const result = await computeRepo([spec]);
    return result[0].nightly.repository;
  };

  it("handles github.com URLs", async () => {
    assert.equal(
      await computeSingleRepo("https://github.com/orgname/specname"),
      "https://github.com/orgname/specname");
  });

  it("handles xxx.github.io URLs", async () => {
    assert.equal(
      await computeSingleRepo("https://orgname.github.io/specname"),
      "https://github.com/orgname/specname");
  });

  it("handles xxx.github.io URLs with trailing slash", async () => {
    assert.equal(
      await computeSingleRepo("https://orgname.github.io/specname/"),
      "https://github.com/orgname/specname");
  });

  it("handles WHATWG URLs", async () => {
    assert.equal(
      await computeSingleRepo("https://specname.spec.whatwg.org/"),
      "https://github.com/whatwg/specname");
  });

  it("handles TC39 URLs", async () => {
    assert.equal(
      await computeSingleRepo("https://tc39.es/js-ftw/"),
      "https://github.com/tc39/js-ftw");
  });

  it("handles CSS WG URLs", async () => {
    assert.equal(
      await computeSingleRepo("https://drafts.csswg.org/css-everything-42/"),
      "https://github.com/w3c/csswg-drafts");
  });

  it("handles FX TF URLs", async () => {
    assert.equal(
      await computeSingleRepo("https://drafts.fxtf.org/wow/"),
      "https://github.com/w3c/fxtf-drafts");
  });

  it("handles CSS Houdini URLs", async () => {
    assert.equal(
      await computeSingleRepo("https://drafts.css-houdini.org/magic-11/"),
      "https://github.com/w3c/css-houdini-drafts");
  });

  it("handles SVG WG URLs", async () => {
    assert.equal(
      await computeSingleRepo("https://svgwg.org/specs/svg-ftw"),
      "https://github.com/w3c/svgwg");
  });

  it("handles the SVG2 URL", async () => {
    assert.equal(
      await computeSingleRepo("https://svgwg.org/svg2-draft/"),
      "https://github.com/w3c/svgwg");
  });

  it("handles WebGL URLs", async () => {
    assert.equal(
      await computeSingleRepo("https://registry.khronos.org/webgl/specs/latest/1.0/"),
      "https://github.com/khronosgroup/WebGL");
  });

  it("handles IETF HTTP WG URLs", async () => {
    assert.equal(
      await computeSingleRepo("https://httpwg.org/http-extensions/draft-ietf-httpbis-digest-headers.html"),
      "https://github.com/httpwg/http-extensions");
  });

  it("handles specs without nightly URLs", async () => {
    // Specs that are not public (e.g. ISO standards) have no nightly object
    const spec = { url: "https://www.iso.org/standard/85253.html" };
    const result = await computeRepo([spec]);
    assert.equal(result[0].nightly, undefined);
  });

  it("returns null when repository cannot be derived from URL", async () => {
    assert.equal(
      await computeSingleRepo("https://example.net/repoless"),
      null);
  });

  it("reports right ARIA mono-repository for relevant specs", async () => {
    assert.equal(
      await computeSingleRepo("https://w3c.github.io/core-aam"),
      "https://github.com/w3c/aria");
  });
});
--------------------------------------------------------------------------------
/src/lint.js:
--------------------------------------------------------------------------------
"use strict";

import fs from "node:fs/promises";
import process from "node:process";
import { fileURLToPath } from "node:url";
import computeShortname from 
"./compute-shortname.js";
import computePrevNext from "./compute-prevnext.js";

// Comparator used to keep the list of specs sorted by URL
function compareSpecs(a, b) {
  return a.url.localeCompare(b.url);
}


// Shorten the definition of a spec to a more human-readable version:
// a bare URL string when possible, a "url flag" string when the spec only
// carries one non-default flag, and the filtered object otherwise.
function shortenDefinition(spec) {
  // Properties whose value matches the default can be dropped entirely
  const isDefaultValue = (property, value) =>
    (property === "seriesComposition" && (value === "full" || value === "fork")) ||
    (property === "multipage" && !value) ||
    (property === "forceCurrent" && !value);

  const short = {};
  for (const [property, value] of Object.entries(spec)) {
    if (!isDefaultValue(property, value)) {
      short[property] = value;
    }
  }

  const nbProperties = Object.keys(short).length;
  if (nbProperties === 1) {
    return short.url;
  }
  if (nbProperties === 2) {
    if (spec.seriesComposition === "delta") {
      return `${spec.url} delta`;
    }
    if (spec.forceCurrent) {
      return `${spec.url} current`;
    }
    if (spec.multipage === "all") {
      return `${spec.url} multipage`;
    }
  }
  return short;
}


// Lint a specs list given as a JSON string. Returns the normalized JSON
// string when the input needs fixing, null when the input already passes.
function lintStr(specsStr) {
  const specs = JSON.parse(specsStr);

  // Normalize end of lines, different across platforms, for comparison
  const normalizedStr = specsStr.replace(/\r\n/g, "\n");

  // Convert a raw entry ("url", "url flag" or object) to a spec object
  const toSpecObject = entry => {
    if (typeof entry === "string") {
      const [url, flag] = entry.split(" ");
      return {
        url: new URL(url).toString(),
        seriesComposition: (flag === "delta") ? "delta" : "full",
        forceCurrent: (flag === "current"),
        multipage: (flag === "multipage") ? "all" : undefined
      };
    }
    return Object.assign({}, entry, { url: new URL(entry.url).toString() });
  };

  // Convert entries to spec objects, drop duplicates (keeping the first
  // occurrence), and sort per URL
  const sorted = specs
    .map(toSpecObject)
    .filter((spec, idx, list) =>
      list.findIndex(s => compareSpecs(s, spec) === 0) === idx)
    .sort(compareSpecs);

  // Generate names and links between levels. The result is not used
  // directly, but computeShortname throws on invalid shortnames, so this
  // acts as a validation pass.
  sorted
    .map(s => Object.assign({}, s, computeShortname(s.shortname || s.url)))
    .map((s, _, list) => Object.assign({}, s, computePrevNext(s, list)));

  // Shorten definitions when possible and serialize
  const linted = JSON.stringify(sorted.map(shortenDefinition), null, 2) + "\n";
  return (linted !== normalizedStr) ? linted : null;
}


// Lint by normalizing specs.json and comparing it to the original,
// fixing it in place if |fix| is true. Resolves to true when the file
// passes lint, false otherwise.
async function lint(fix = false) {
  const contents = await fs.readFile("./specs.json", "utf8");
  const linted = lintStr(contents);
  if (!linted) {
    console.log("specs.json passed lint");
    return true;
  }
  if (fix) {
    console.log("specs.json has lint issues, updating in place");
    await fs.writeFile("./specs.json", linted, "utf8");
  }
  else {
    console.log("specs.json has lint issues, run with --fix");
  }
  return false;
}

export {
  lintStr,
  lint
};

if (process.argv[1] === fileURLToPath(import.meta.url)) {
  // Code used as command-line interface (CLI), run linting process
  lint(process.argv.includes("--fix")).then(
    ok => {
      process.exit(ok ? 0 : 1);
    },
    reason => {
      console.error(reason);
      process.exit(1);
    }
  );
}
0 : 1); 111 | }, 112 | reason => { 113 | console.error(reason); 114 | process.exit(1); 115 | } 116 | ); 117 | } 118 | -------------------------------------------------------------------------------- /test/compute-currentlevel.js: -------------------------------------------------------------------------------- 1 | import { describe, it } from "node:test"; 2 | import assert from "node:assert"; 3 | import computeCurrentLevel from "../src/compute-currentlevel.js"; 4 | 5 | describe("compute-currentlevel module", () => { 6 | function getCurrentName(spec, list) { 7 | return computeCurrentLevel(spec, list).currentSpecification; 8 | } 9 | function getSpec(options) { 10 | options = options || {}; 11 | const res = { 12 | shortname: options.shortname ?? (options.seriesVersion ? `spec-${options.seriesVersion}` : "spec"), 13 | series: { shortname: "spec" }, 14 | }; 15 | for (const property of Object.keys(options)) { 16 | res[property] = options[property]; 17 | } 18 | return res; 19 | } 20 | function getOther(options) { 21 | options = options || {}; 22 | const res = { 23 | shortname: (options.seriesVersion ? 
`other-${options.seriesVersion}` : "other"), 24 | series: { shortname: "other" }, 25 | }; 26 | for (const property of Object.keys(options)) { 27 | res[property] = options[property]; 28 | } 29 | return res; 30 | } 31 | 32 | it("returns the spec name if list is empty", () => { 33 | const spec = getSpec(); 34 | assert.equal(getCurrentName(spec), spec.shortname); 35 | }); 36 | 37 | it("returns the name of the latest level", () => { 38 | const spec = getSpec({ seriesVersion: "1" }); 39 | const current = getSpec({ seriesVersion: "2" }); 40 | assert.equal( 41 | getCurrentName(spec, [spec, current]), 42 | current.shortname); 43 | }); 44 | 45 | it("returns the name of the latest level that is not a delta spec", () => { 46 | const spec = getSpec({ seriesVersion: "1" }); 47 | const delta = getSpec({ seriesVersion: "2", seriesComposition: "delta" }); 48 | assert.equal( 49 | getCurrentName(spec, [spec, delta]), 50 | spec.shortname); 51 | }); 52 | 53 | it("returns the name of the latest level that is not a fork spec", () => { 54 | const spec = getSpec({ seriesVersion: "1" }); 55 | const fork = getSpec({ seriesVersion: "2", seriesComposition: "fork" }); 56 | assert.equal( 57 | getCurrentName(spec, [spec, fork]), 58 | spec.shortname); 59 | }); 60 | 61 | it("gets back to the latest level when spec is a delta spec", () => { 62 | const spec = getSpec({ seriesVersion: "1" }); 63 | const delta = getSpec({ seriesVersion: "2", seriesComposition: "delta" }); 64 | assert.equal( 65 | getCurrentName(delta, [spec, delta]), 66 | spec.shortname); 67 | }); 68 | 69 | it("gets back to the latest level when spec is a fork spec", () => { 70 | const spec = getSpec({ seriesVersion: "1" }); 71 | const fork = getSpec({ seriesVersion: "2", seriesComposition: "fork" }); 72 | assert.equal( 73 | getCurrentName(fork, [spec, fork]), 74 | spec.shortname); 75 | }); 76 | 77 | it("returns the spec name if it is flagged as current", () => { 78 | const spec = getSpec({ seriesVersion: "1", forceCurrent: true }); 79 
| const last = getSpec({ seriesVersion: "2" }); 80 | assert.equal( 81 | getCurrentName(spec, [spec, last]), 82 | spec.shortname); 83 | }); 84 | 85 | it("returns the name of the level flagged as current", () => { 86 | const spec = getSpec({ seriesVersion: "1" }); 87 | const current = getSpec({ seriesVersion: "2", forceCurrent: true }); 88 | const last = getSpec({ seriesVersion: "3" }); 89 | assert.equal( 90 | getCurrentName(spec, [spec, current, last]), 91 | current.shortname); 92 | }); 93 | 94 | it("does not take other shortnames into account", () => { 95 | const spec = getSpec({ seriesVersion: "1" }); 96 | const other = getOther({ seriesVersion: "2"}); 97 | assert.equal( 98 | getCurrentName(spec, [spec, other]), 99 | spec.shortname); 100 | }); 101 | 102 | it("does not take forks into account", () => { 103 | const spec = getSpec({ shortname: "spec-1-fork-1", seriesVersion: "1", seriesComposition: "fork" }); 104 | const base = getSpec({ seriesVersion: "1" }); 105 | assert.equal( 106 | getCurrentName(spec, [spec, base]), 107 | base.shortname); 108 | }); 109 | }); 110 | -------------------------------------------------------------------------------- /src/fetch-iso-info.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Module that exports a function that takes an array of specifications objects 3 | * that each have at least a "url" property. The function completes 4 | * specification objects that target ISO specifications with title, 5 | * organization and group info from the ISO Open Data catalog: 6 | * https://www.iso.org/open-data.html 7 | * 8 | * Having to download ~100MB of data for a fairly restricted number of ISO 9 | * specifications in browser-specs that will hardly ever change is not 10 | * fantastic. But ISO has put several restrictions in place that make fetching 11 | * individual pages harder with a bot. 
12 | * 13 | * To avoid wasting time and resources, function may be called with a 14 | * "skipFetch" option. When set to "iso" or "all", code skips fetch and reuses 15 | * previous data instead. The expectation is that this option will be set for 16 | * most automated builds. 17 | */ 18 | 19 | async function fetchInfoFromISO(specs, options) { 20 | if (!specs || specs.find(spec => !spec.url)) { 21 | throw "Invalid list of specifications passed as parameter"; 22 | } 23 | 24 | const isoRe = /\.iso\.org\//; 25 | const isoSpecs = specs.filter(spec => spec.url.match(isoRe)); 26 | if (isoSpecs.length === 0) { 27 | return specs; 28 | } 29 | 30 | if (!['all', 'iso'].includes(options?.skipFetch)) { 31 | // Fetch the list of technical committees 32 | const tcUrl = 'https://isopublicstorageprod.blob.core.windows.net/opendata/_latest/iso_technical_committees/json/iso_technical_committees.jsonl'; 33 | const tcResponse = await fetch(tcUrl, options); 34 | const tc = (await tcResponse.text()).split('\n') 35 | .map(line => line ? 
JSON.parse(line) : {}); 36 | 37 | // Fetch the ISO catalog 38 | // TODO: read lines one by one instead of reading the entire body at once 39 | const catalogUrl = `https://isopublicstorageprod.blob.core.windows.net/opendata/_latest/iso_deliverables_metadata/json/iso_deliverables_metadata.jsonl`; 40 | const catalogResponse = await fetch(catalogUrl, options); 41 | const catalog = (await catalogResponse.text()).split('\n'); 42 | 43 | for (const line of catalog) { 44 | if (!line) { 45 | continue; 46 | } 47 | const json = JSON.parse(line); 48 | const spec = isoSpecs.find(s => s.url.endsWith(`/${json.id}.html`)); 49 | if (!spec) { 50 | continue; 51 | } 52 | const group = tc.find(c => c.reference === json.ownerCommittee); 53 | if (!group) { 54 | throw new Error(`Inconsistent information in catalog, could not find group "${json.ownerCommittee}"`); 55 | } 56 | 57 | // Let's compute a nice shortname using the reference number, 58 | // e.g., iso10918-5, excluding the release year if it exists. 59 | const match = json.reference.match(/ ([\d\-]+)(:\d+)?$/); 60 | if (!match) { 61 | throw new Error(`Could not extract ISO shortname from reference "${json.reference}"`); 62 | } 63 | const shortname = 'iso' + match[1]; 64 | 65 | spec.__iso = { 66 | shortname, 67 | organization: json.ownerCommittee.startsWith('ISO/IEC') ? 'ISO/IEC' : 'ISO', 68 | groups: [{ 69 | name: group.reference, 70 | url: `https://www.iso.org/committee/${group.id}.html` 71 | }], 72 | title: json.title.en, 73 | source: 'iso' 74 | }; 75 | } 76 | } 77 | 78 | return specs.map(spec => { 79 | if (spec.url.match(isoRe) && (spec.__iso || spec.__last)) { 80 | const isoInfo = spec.__iso; 81 | delete spec.__iso; 82 | const copy = Object.assign({}, spec, { 83 | shortname: isoInfo?.shortname ?? spec.__last?.shortname, 84 | organization: isoInfo?.organization ?? spec.__last?.organization, 85 | groups: isoInfo?.groups ?? spec.__last?.groups, 86 | title: isoInfo?.title ?? spec.__last?.title, 87 | source: isoInfo?.source ?? 
spec.__last?.source ?? spec.source 88 | }); 89 | if (!copy.series) { 90 | copy.series = {}; 91 | } 92 | copy.series.shortname = isoInfo?.shortname ?? spec.__last?.series.shortname; 93 | copy.series.currentSpecification = isoInfo?.shortname ?? spec.__last?.series.currentSpecification; 94 | return copy; 95 | } 96 | else { 97 | return spec; 98 | } 99 | }); 100 | } 101 | 102 | export default fetchInfoFromISO; 103 | -------------------------------------------------------------------------------- /src/bump-packages-minor.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Bump the minor version of packages when the list of specs has changed. 3 | * 4 | * node src/bump-packages-minor.js 5 | * 6 | * This script is intended to be run at the end of a build before committing 7 | * the result back to the main branch to automatically bump the minor version 8 | * in the `package.json` files under the `packages` folders when the new index 9 | * files contains new/deleted specs to commit. 10 | * 11 | * The script does not bump a version that matches x.y.0 because such a version 12 | * means a minor bump is already pending release. 
13 | */ 14 | 15 | import fs from 'node:fs/promises'; 16 | import path from 'node:path'; 17 | import { execSync } from 'node:child_process'; 18 | import { fileURLToPath } from "node:url"; 19 | import loadJSON from './load-json.js'; 20 | 21 | const scriptPath = path.dirname(fileURLToPath(import.meta.url)); 22 | 23 | function specsMatch(s1, s2) { 24 | return s1.url === s2.url && s1.shortname === s2.shortname; 25 | } 26 | 27 | async function isMinorBumpNeeded(type) { 28 | // Retrieve the fullname of the remote ref "${type}@latest" 29 | const refs = execSync(`git show-ref ${type}@latest`, { encoding: 'utf8' }) 30 | .trim().split('\n').map(ref => ref.split(' ')[1]) 31 | .filter(ref => ref.startsWith('refs/remotes/')); 32 | if (refs.length > 1) { 33 | throw new Error(`More than one remote refs found for ${type}@latest`); 34 | } 35 | if (refs === 0) { 36 | throw new Error(`No remote ref found for ${type}@latest`); 37 | } 38 | 39 | // Retrieve contents of last released index file 40 | const res = execSync( 41 | `git show ${refs[0]}:index.json`, 42 | { encoding: 'utf8' }).trim(); 43 | let lastIndexFile = JSON.parse(res); 44 | 45 | // Load new file 46 | let newIndexFile = await loadJSON(path.resolve(scriptPath, '..', 'index.json')); 47 | 48 | // Filter specs if needed 49 | if (type === "browser-specs") { 50 | lastIndexFile = lastIndexFile.filter(s => !s.categories || s.categories.includes('browser')); 51 | newIndexFile = newIndexFile.filter(s => s.categories.includes('browser')); 52 | } 53 | 54 | return !!( 55 | lastIndexFile.find(spec => !newIndexFile.find(s => specsMatch(spec, s))) || 56 | newIndexFile.find(spec => !lastIndexFile.find(s => specsMatch(spec, s))) 57 | ); 58 | } 59 | 60 | 61 | async function checkPackage(type) { 62 | console.log(`Check ${type} package`); 63 | const packageFile = path.join(scriptPath, '..', 'packages', type, 'package.json'); 64 | const packageContents = await loadJSON(packageFile); 65 | const version = packageContents.version; 66 | 
console.log(`- Current version: ${version}`); 67 | 68 | // Loosely adapted from semver: 69 | // https://github.com/npm/node-semver/blob/cb1ca1d5480a6c07c12ac31ba5f2071ed530c4ed/internal/re.js#L37 70 | // (not using semver directly to avoid having to install dependencies in job) 71 | const reVersion = /^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)$/; 72 | const versionTokens = version.match(reVersion); 73 | const major = parseInt(versionTokens[1], 10); 74 | const minor = parseInt(versionTokens[2], 10); 75 | const patch = parseInt(versionTokens[3], 10); 76 | 77 | if (patch === 0) { 78 | console.log('- No bump needed, minor bump already pending'); 79 | return; 80 | } 81 | 82 | const bumpNeeded = await isMinorBumpNeeded(type); 83 | if (bumpNeeded) { 84 | console.log('- new/deleted spec(s) found'); 85 | const newVersion = `${major}.${minor+1}.0`; 86 | packageContents.version = newVersion; 87 | await fs.writeFile(path.resolve(scriptPath, packageFile), JSON.stringify(packageContents, null, 2), 'utf8'); 88 | console.log(`- Version bumped to ${newVersion}`); 89 | } 90 | else { 91 | console.log('- No bump needed'); 92 | } 93 | } 94 | 95 | 96 | async function checkPackages() { 97 | const packagesFolder = path.resolve(scriptPath, '..', 'packages'); 98 | const types = await fs.readdir(packagesFolder); 99 | for (const type of types) { 100 | const stat = await fs.lstat(path.join(packagesFolder, type)); 101 | if (stat.isDirectory()) { 102 | await checkPackage(type); 103 | } 104 | } 105 | } 106 | 107 | 108 | /******************************************************************************* 109 | Main loop 110 | *******************************************************************************/ 111 | checkPackages() 112 | .then(() => { 113 | console.log(); 114 | console.log("== The end =="); 115 | }) 116 | .catch(err => { 117 | console.error(err); 118 | process.exit(1); 119 | }); 120 | -------------------------------------------------------------------------------- /src/throttled-queue.js: 
/**
 * Helper function to sleep for a specified number of milliseconds
 */
function sleep(ms) {
  return new Promise(resolve => setTimeout(resolve, ms, 'slept'));
}


/**
 * Helper function that returns the "origin" of a URL, defined in a loose way
 * as the part of the true origin that identifies the server that's going to
 * serve the resource.
 *
 * For example "github.io" for all specs under github.io, "whatwg.org" for
 * all WHATWG specs, "csswg.org" for CSS specs at large (including Houdini
 * and FXTF specs since they are served by the same server).
 *
 * Returns an empty string when no URL is given.
 */
function getOrigin(url) {
  if (!url) {
    return '';
  }
  const origin = (new URL(url)).origin;
  if (origin.endsWith('.whatwg.org')) {
    return 'https://whatwg.org';
  }
  else if (origin.endsWith('.github.io')) {
    return 'https://github.io';
  }
  else if (origin.endsWith('.csswg.org') ||
           origin.endsWith('.css-houdini.org') ||
           origin.endsWith('.fxtf.org')) {
    return 'https://csswg.org';
  }
  else {
    return origin;
  }
}


/**
 * The ThrottledQueue class can be used to run a series of tasks that send
 * network requests to an origin server in parallel, up to a certain limit,
 * while guaranteeing that only one request will be sent to a given origin
 * server at a time.
 */
export default class ThrottledQueue {
  originQueue = {};       // per-origin serialization chains
  maxParallel = 4;        // max number of tasks running at the same time
  sleepInterval = 2000;   // ms to sleep after each per-origin task
  ongoing = 0;            // number of tasks currently running
  pending = [];           // tasks waiting for a free slot

  constructor(options = { maxParallel: 4, sleepInterval: 2000 }) {
    if (options.maxParallel >= 0) {
      this.maxParallel = options.maxParallel;
    }
    if (options.sleepInterval) {
      this.sleepInterval = options.sleepInterval;
    }
  }

  /**
   * Run the given processing function with the given parameters, immediately
   * if possible or as soon as possible when too many tasks are already running
   * in parallel.
   *
   * Note this function has no notion of origin. Users may call the function
   * directly if they don't need any throttling per origin.
   */
  async runThrottled(processFunction, ...params) {
    if (this.ongoing >= this.maxParallel) {
      // No free slot, park the task; it gets re-run when a slot frees up
      return new Promise((resolve, reject) => {
        this.pending.push({ params, resolve, reject });
      });
    }

    this.ongoing += 1;
    try {
      return await processFunction.call(null, ...params);
    }
    finally {
      // Fix: the counter used to be decremented (and the next pending task
      // triggered) only when processFunction succeeded. A rejected task
      // would leak the slot forever and starve the pending queue.
      this.ongoing -= 1;

      // Done with current task, trigger next pending task in the background
      setTimeout(_ => {
        if (this.pending.length && this.ongoing < this.maxParallel) {
          const next = this.pending.shift();
          this.runThrottled(processFunction, ...next.params)
            .then(result => next.resolve(result))
            .catch(err => next.reject(err));
        }
      }, 0);
    }
  }

  /**
   * Run the given processing function with the given parameters, immediately
   * if possible or as soon as possible when too many tasks are already running
   * in parallel, or when there's already a task being run against the same
   * origin as that of the provided URL.
   *
   * Said differently, the function serializes tasks per origin, and calls
   * "runThrottled" to restrict the number of tasks that run in parallel to the
   * requested maximum.
   *
   * Additionally, the function forces a sleep after processing to keep a low
   * network profile (sleeping time can be adjusted per origin when the
   * sleepInterval parameter passed to the constructor is a function).
   */
  async runThrottledPerOrigin(url, processFunction, ...params) {
    const origin = getOrigin(url);
    if (!this.originQueue[origin]) {
      this.originQueue[origin] = Promise.resolve(true);
    }
    return new Promise((resolve, reject) => {
      this.originQueue[origin] = this.originQueue[origin]
        .then(async _ => this.runThrottled(processFunction, ...params))
        .then(async result => {
          const interval = (typeof this.sleepInterval === 'function') ?
            this.sleepInterval(origin) :
            this.sleepInterval;
          await sleep(interval);
          return result;
        })
        .then(resolve)
        .catch(reject);
    });
  }
}
"other" }, 26 | seriesVersion 27 | }; 28 | } 29 | else { 30 | return { 31 | shortname: `other-${seriesVersion}`, 32 | series: { shortname: "other" } 33 | }; 34 | } 35 | } 36 | 37 | it("sets previous link if it exists", () => { 38 | const prev = getSpec("1"); 39 | const spec = getSpec("2"); 40 | assert.deepStrictEqual( 41 | computePrevNext(spec, [prev]), 42 | { seriesPrevious: prev.shortname }); 43 | }); 44 | 45 | it("sets next link if it exists", () => { 46 | const spec = getSpec("1"); 47 | const next = getSpec("2"); 48 | assert.deepStrictEqual( 49 | computePrevNext(spec, [next]), 50 | { seriesNext: next.shortname }); 51 | }); 52 | 53 | it("sets previous/next links when both exist", () => { 54 | const prev = getSpec("1"); 55 | const spec = getSpec("2"); 56 | const next = getSpec("3"); 57 | assert.deepStrictEqual( 58 | computePrevNext(spec, [next, prev, spec]), 59 | { seriesPrevious: prev.shortname, seriesNext: next.shortname }); 60 | }); 61 | 62 | it("sets previous/next links when level are version numbers", () => { 63 | const prev = getSpec("1.1"); 64 | const spec = getSpec("1.2"); 65 | const next = getSpec("1.3"); 66 | assert.deepStrictEqual( 67 | computePrevNext(spec, [next, prev, spec]), 68 | { seriesPrevious: prev.shortname, seriesNext: next.shortname }); 69 | }); 70 | 71 | it("selects the right previous level when multiple exist", () => { 72 | const old = getSpec("1"); 73 | const prev = getSpec("2"); 74 | const spec = getSpec("4"); 75 | assert.deepStrictEqual( 76 | computePrevNext(spec, [spec, prev, old]), 77 | { seriesPrevious: prev.shortname }); 78 | }); 79 | 80 | it("selects the right next level when multiple exist", () => { 81 | const spec = getSpec("1"); 82 | const next = getSpec("2"); 83 | const last = getSpec("3"); 84 | assert.deepStrictEqual( 85 | computePrevNext(spec, [spec, last, next]), 86 | { seriesNext: next.shortname }); 87 | }); 88 | 89 | it("considers absence of level to be level 0", () => { 90 | const spec = getSpec(); 91 | const next = 
getSpec("1"); 92 | assert.deepStrictEqual( 93 | computePrevNext(spec, [next]), 94 | { seriesNext: next.shortname }); 95 | }); 96 | 97 | it("is not affected by presence of other specs", () => { 98 | const prev = getSpec("1"); 99 | const spec = getSpec("3"); 100 | const next = getSpec("5"); 101 | assert.deepStrictEqual( 102 | computePrevNext(spec, [next, getOther("2"), spec, getOther("4"), prev]), 103 | { seriesPrevious: prev.shortname, seriesNext: next.shortname }); 104 | }); 105 | 106 | it("returns an empty object if list is empty", () => { 107 | const spec = getSpec(); 108 | assert.deepStrictEqual(computePrevNext(spec), {}); 109 | }); 110 | 111 | it("returns an empty object if list is the spec to check", () => { 112 | const spec = getSpec(); 113 | assert.deepStrictEqual(computePrevNext(spec, [spec]), {}); 114 | }); 115 | 116 | it("returns an empty object in absence of other levels", () => { 117 | const spec = getSpec("2"); 118 | assert.deepStrictEqual( 119 | computePrevNext(spec, [getOther("1"), spec, getOther("3")]), {}); 120 | }); 121 | 122 | it("throws if spec object is not given", () => { 123 | assert.throws( 124 | () => computePrevNext(), 125 | /^Invalid spec object passed as parameter$/); 126 | }); 127 | 128 | it("throws if spec object is empty", () => { 129 | assert.throws( 130 | () => computePrevNext({}), 131 | /^Invalid spec object passed as parameter$/); 132 | }); 133 | 134 | it("throws if spec object does not have a name", () => { 135 | assert.throws( 136 | () => computePrevNext({ shortname: "spec" }), 137 | /^Invalid spec object passed as parameter$/); 138 | }); 139 | 140 | it("throws if spec object does not have a shortname", () => { 141 | assert.throws( 142 | () => computePrevNext({ name: "spec" }), 143 | /^Invalid spec object passed as parameter$/); 144 | }); 145 | }); 146 | -------------------------------------------------------------------------------- /src/load-spec.js: 
/**
 * Loads a spec into a Puppeteer page, avoiding fetching resources that are not
 * useful for our needs (images, streams) and that, once in a while, tend to
 * cause timeout issues on CSS servers.
 */

import process from "node:process";
import { Buffer } from "node:buffer";

/**
 * Error thrown when the server responds with a non-200 HTTP status.
 * Exposes the requested URL and the status code/text so that callers can
 * branch on specific statuses.
 */
class HttpStatusError extends Error {
  constructor(url, status, statusText) {
    super(`Could not fetch ${url}, got HTTP status ${status} ${statusText}`);
    this.name = 'HttpStatusError';
    this.url = url;
    this.status = status;
    this.statusText = statusText;
  }
}

/**
 * Load the spec at the given URL into the given Puppeteer page.
 *
 * Network requests are intercepted through a CDP session to skip images,
 * fonts, stylesheets and known streaming endpoints. The function waits for
 * ReSpec generation to complete (when the spec uses ReSpec) before returning.
 *
 * @param {String} url - URL of the spec to load
 * @param {Object} page - Puppeteer page to load the spec into
 * @returns {Promise<Object>} the same page, once loading is complete
 * @throws {HttpStatusError} when the server does not answer with HTTP 200
 */
export default async function (url, page) {
  // Inner function that returns a network interception method for Puppeteer,
  // to avoid downloading images and getting stuck on streams.
  // NB: this is a simplified version of the code used in Reffy:
  // https://github.com/w3c/reffy/blob/25bb1be05be63cae399d2648ecb1a5ea5ab8430a/src/lib/util.js#L351
  function interceptRequest(cdp) {
    return async function ({ requestId, request }) {
      try {
        // Abort network requests to common image/font/style formats.
        // (The regex is anchored on the extension at the end of the URL, so
        // URLs with a query string are not caught — acceptable here.)
        if (/\.(gif|ico|jpg|jpeg|png|ttf|woff|svg|css)$/i.test(request.url)) {
          await cdp.send('Fetch.failRequest', { requestId, errorReason: 'Failed' });
          return;
        }

        // Abort network requests that return a "stream", they don't
        // play well with Puppeteer's "networkidle0" option
        if (request.url.startsWith('https://drafts.csswg.org/api/drafts/') ||
            request.url.startsWith('https://drafts.css-houdini.org/api/drafts/') ||
            request.url.startsWith('https://drafts.fxtf.org/api/drafts/') ||
            request.url.startsWith('https://api.csswg.org/shepherd/') ||
            request.url.startsWith('https://test.csswg.org/harness/')) {
          await cdp.send('Fetch.failRequest', { requestId, errorReason: 'Failed' });
          return;
        }

        // Test mode, process the request within the Node.js process so that we
        // can mock the HTTP agent
        if (process.env.BROWSER_SPECS_MOCK_HTTP) {
          const response = await fetch(request.url, {
            headers: request.headers
          });
          const body = Buffer.from(await response.arrayBuffer());
          const headers = [];
          response.headers.forEach((value, name) => {
            headers.push({ name, value });
          });

          // Answer the browser's request from within Node.js; CDP expects
          // the body to be base64-encoded
          await cdp.send('Fetch.fulfillRequest', {
            requestId,
            responseCode: response.status,
            responseHeaders: headers,
            body: body.toString('base64')
          });
          return;
        }

        // Proceed with the network request otherwise
        await cdp.send('Fetch.continueRequest', { requestId });
      }
      catch (err) {
        // Interception failures are logged but never fatal: the page load
        // itself will surface real errors
        console.warn(`[warn] Network request to ${request.url} failed`, err);
      }
    }
  }

  // Intercept network requests to avoid downloading images and streams
  const cdp = await page.target().createCDPSession();

  try {
    await cdp.send('Fetch.enable');
    cdp.on('Fetch.requestPaused', interceptRequest(cdp));

    const response = await page.goto(url, { timeout: 120000, waitUntil: 'networkidle0' });

    if (response.status() !== 200) {
      throw new HttpStatusError(url, response.status(), response.statusText());
    }
    // Wait until the generation of the spec is completely over
    // (same code as in Reffy, except Reffy forces the latest version of
    // Respec and thus does not need to deal with older specs that rely
    // on a version that sets `respecIsReady` and not `respec.ready`).
    await page.evaluate(async () => {
      // A spec "uses ReSpec" when a respecConfig exists and a respec script
      // is present in the document head. The `window.eval` fallback catches
      // configs declared as non-window globals.
      const usesRespec =
        (window.respecConfig || window.eval('typeof respecConfig !== "undefined"')) &&
        window.document.head.querySelector("script[src*='respec']");

      function sleep(ms) {
        return new Promise(resolve => setTimeout(resolve, ms, 'slept'));
      }

      // Poll for ReSpec readiness, at most ~60 times with 1s pauses, then
      // wait for the ready promise itself (capped at 60s)
      async function isReady(counter) {
        counter = counter || 0;
        if (counter > 60) {
          throw new Error(`Respec generation took too long for ${window.location.toString()}`);
        }
        if (document.respec?.ready || document.respecIsReady) {
          // Wait for resolution of ready promise
          const res = await Promise.race([document.respec?.ready ?? document.respecIsReady, sleep(60000)]);
          if (res === 'slept') {
            throw new Error(`Respec generation took too long for ${window.location.toString()}`);
          }
        }
        else if (usesRespec) {
          await sleep(1000);
          await isReady(counter + 1);
        }
      }

      await isReady();
    });

    return page;
  }
  finally {
    // Always tear the CDP session down, even when loading failed
    await cdp.detach();
  }
}
(JFIF) — Part 5:","fr":"Technologies de l'information — Compression numérique et codage des images fixes à modelé continu: Format d'échange de fichiers JPEG (JFIF) — Partie 5:"},"publicationDate":"2013-04-26","edition":1,"icsCode":["35.040.30"],"ownerCommittee":"ISO/IEC JTC 1/SC 29","currentStage":9060,"replaces":null,"replacedBy":null,"languages":["en"],"pages":{"en":9},"scope":{"en":"<p>ISO/IEC 10918-5:2013 specifies the JPEG File Interchange Format (JFIF).</p>\\n"}} 18 | {"id":61292,"deliverableType":"IS","supplementType":null,"reference":"ISO 18074:2015","title":{"en":"Textiles — Identification of some animal fibres by DNA analysis method — Cashmere, wool, yak and their blends","fr":"Textiles — Identification de certaines fibres animales par la méthode d'analyse de l'ADN — Cachemire, laine, yack et leurs mélanges"},"publicationDate":"2015-11-19","edition":1,"icsCode":["59.080.01"],"ownerCommittee":"ISO/TC 38","currentStage":9093,"replaces":null,"replacedBy":null,"languages":["en","fr"],"pages":{"en":22},"scope":{"en":"<p>ISO 18074:2015 specifies a testing method for DNA analysis of some animal fibres to identify cashmere, wool, yak, and their blends by using extraction, amplification by the polymerase chain reaction (PCR) method and DNA detection processes.</p>\\n<p>ISO 18084:2015 is applicable to cashmere, yak, and wool and their blends as a qualitative method.</p>\\n\\n"}} 19 | `; 20 | 21 | describe("The ISO catalog module", async function () { 22 | // Long time out since tests need to send network requests 23 | const timeout = { 24 | timeout: 60000 25 | }; 26 | 27 | let defaultDispatcher = getGlobalDispatcher(); 28 | let mockAgent = new MockAgent(); 29 | 30 | function initIntercepts() { 31 | const mockPool = mockAgent 32 | .get("https://isopublicstorageprod.blob.core.windows.net"); 33 | mockPool 34 | .intercept({ path: "/opendata/_latest/iso_technical_committees/json/iso_technical_committees.jsonl" }) 35 | .reply(200, tcResponse); 36 | mockPool 37 | 
.intercept({ path: "/opendata/_latest/iso_deliverables_metadata/json/iso_deliverables_metadata.jsonl" }) 38 | .reply(200, catalogResponse); 39 | } 40 | 41 | before(() => { 42 | setGlobalDispatcher(mockAgent); 43 | mockAgent.disableNetConnect(); 44 | }); 45 | 46 | after(() => { 47 | setGlobalDispatcher(defaultDispatcher); 48 | }) 49 | 50 | it("extracts spec info for an ISO spec", timeout, async () => { 51 | initIntercepts(); 52 | const spec = { url: "https://www.iso.org/standard/61292.html" }; 53 | const specs = await fetchInfoFromISO([spec]); 54 | assert.ok(specs[0]); 55 | assert.equal(specs[0].shortname, "iso18074"); 56 | assert.equal(specs[0].series?.shortname, "iso18074"); 57 | assert.equal(specs[0].series?.currentSpecification, "iso18074"); 58 | assert.equal(specs[0].source, "iso"); 59 | assert.equal(specs[0].title, "Textiles — Identification of some animal fibres by DNA analysis method — Cashmere, wool, yak and their blends"); 60 | assert.equal(specs[0].organization, "ISO"); 61 | assert.equal(specs[0].groups[0].url, "https://www.iso.org/committee/48148.html"); 62 | assert.equal(specs[0].groups[0].name, "ISO/TC 38"); 63 | assert.equal(specs[0].nightly, undefined); 64 | }); 65 | 66 | it("extracts spec info for an ISO/IEC spec", timeout, async () => { 67 | initIntercepts(); 68 | const spec = { url: "https://www.iso.org/standard/54989.html" }; 69 | const specs = await fetchInfoFromISO([spec]); 70 | assert.ok(specs[0]); 71 | assert.equal(specs[0].shortname, "iso10918-5"); 72 | assert.equal(specs[0].series?.shortname, "iso10918-5"); 73 | assert.equal(specs[0].series?.currentSpecification, "iso10918-5"); 74 | assert.equal(specs[0].source, "iso"); 75 | assert.equal(specs[0].title, "Information technology — Digital compression and coding of continuous-tone still images: JPEG File Interchange Format (JFIF) — Part 5:"); 76 | assert.equal(specs[0].organization, "ISO/IEC"); 77 | assert.equal(specs[0].groups[0].url, "https://www.iso.org/committee/45316.html"); 78 | 
assert.equal(specs[0].groups[0].name, "ISO/IEC JTC 1/SC 29"); 79 | assert.equal(specs[0].nightly, undefined); 80 | }); 81 | 82 | it("skips fetch in the absence of specs from ISO", timeout, async () => { 83 | // Note: as we don't call initIntercepts(), mock agent will throw if 84 | // code attempts to fetch something from the network 85 | const spec = { url: "https://www.w3.org/TR/from-w3c-with-love/" }; 86 | const specs = await fetchInfoFromISO([spec]); 87 | assert.ok(specs[0]); 88 | }); 89 | 90 | it("skips fetch when asked", timeout, async () => { 91 | // Note: as we don't call initIntercepts(), mock agent will throw if 92 | // code attempts to fetch something from the network. 93 | const spec = { url: "https://www.iso.org/standard/54989.html" }; 94 | const specs = await fetchInfoFromISO([spec], { skipFetch: 'iso' }); 95 | assert.ok(specs[0]); 96 | }); 97 | }); 98 | -------------------------------------------------------------------------------- /src/release-package.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Publish an NPM package when pre-release PR is merged, using the commit on 3 | * which the pre-release PR is based as source of data. 4 | */ 5 | 6 | import Octokit from "./octokit.js"; 7 | import fs from "node:fs"; 8 | import path from "node:path"; 9 | import os from "node:os"; 10 | import { execSync } from "node:child_process"; 11 | import { rimraf } from "rimraf"; 12 | import { npmPublish } from "@jsdevtools/npm-publish"; 13 | import loadJSON from "./load-json.js"; 14 | 15 | const owner = "w3c"; 16 | const repo = "browser-specs"; 17 | 18 | 19 | /** 20 | * Release package at the requested version. 
21 | * 22 | * @function 23 | * @param {Number} prNumber Pre-release PR number 24 | */ 25 | async function releasePackage(prNumber) { 26 | console.log(`Retrieve pre-release PR`); 27 | const prResponse = await octokit.pulls.get({ 28 | owner, repo, 29 | pull_number: prNumber 30 | }); 31 | const preReleasePR = prResponse?.data; 32 | if (!preReleasePR) { 33 | console.log("- Given PR does not seem to exist, nothing to release"); 34 | return; 35 | } 36 | 37 | // Extract type from PR title 38 | console.log(`- Given PR title: ${preReleasePR.title}`); 39 | const match = preReleasePR.title.match(/^📦 Release (.*)@(.*)$/); 40 | if (!match) { 41 | console.log("- Given PR is not a pre-release PR, nothing to release"); 42 | return; 43 | } 44 | const type = match[1]; 45 | 46 | if (!["web-specs", "browser-specs"].includes(type)) { 47 | console.log(`- Unknown package type "${type}", nothing to release`); 48 | return; 49 | } 50 | 51 | // Extract commit to release from PR 52 | const preReleaseSha = preReleasePR.base.sha; 53 | console.log(`- Found commit to release: ${preReleaseSha}`); 54 | 55 | console.log(); 56 | console.log("Publish package to npm"); 57 | console.log("- Checkout repo at right commit in temporary folder"); 58 | const tmpFolder = fs.mkdtempSync(path.join(os.tmpdir(), `${repo}-`)); 59 | 60 | try { 61 | execSync(`git clone https://github.com/${owner}/${repo}`, { 62 | cwd: tmpFolder 63 | }); 64 | const installFolder = path.join(tmpFolder, repo); 65 | execSync(`git reset --hard ${preReleaseSha}`, { 66 | cwd: installFolder 67 | }); 68 | console.log(`- Installation folder: ${installFolder}`); 69 | 70 | console.log("- Prepare package files"); 71 | execSync("npm ci", { cwd: installFolder }); 72 | execSync("node src/prepare-packages.js", { cwd: installFolder }); 73 | 74 | console.log(`- Publish packages/${type} folder to npm`); 75 | const packageFolder = path.join(installFolder, "packages", type, "package.json"); 76 | const pubOptions = { 77 | package: packageFolder 78 | //, 
debug: console.debug 79 | }; 80 | if (NPM_TOKEN) { 81 | pubOptions.token = NPM_TOKEN; 82 | } 83 | const pubResult = await npmPublish(pubOptions); 84 | console.log(`- Published version was ${pubResult.oldVersion}`); 85 | console.log(`- Version bump: ${pubResult.type}`); 86 | console.log(`- Published version is ${pubResult.version}`); 87 | 88 | console.log(); 89 | console.log("Add release tag to commit"); 90 | if (pubResult.version === pubResult.oldVersion) { 91 | console.log("- Skip, no actual package released"); 92 | } 93 | else { 94 | const rawTag = `${type}@${pubResult.version}`; 95 | await octokit.git.createRef({ 96 | owner, repo, 97 | ref: `refs/tags/${rawTag}`, 98 | sha: preReleaseSha 99 | }); 100 | console.log(`- Tagged released commit ${preReleaseSha} with tag "${rawTag}"`); 101 | 102 | await octokit.git.updateRef({ 103 | owner, repo, 104 | ref: `heads/${type}@latest`, 105 | sha: preReleaseSha 106 | }); 107 | console.log(`- Updated ${type}-latest to point to released commit ${preReleaseSha}`); 108 | } 109 | } 110 | finally { 111 | console.log("Clean temporary folder"); 112 | try { 113 | rimraf.sync(tmpFolder); 114 | console.log("- done"); 115 | } 116 | catch { 117 | } 118 | } 119 | } 120 | 121 | 122 | /******************************************************************************* 123 | Retrieve tokens from environment, prepare Octokit and kick things off 124 | *******************************************************************************/ 125 | const config = await loadJSON("config.json"); 126 | const GITHUB_TOKEN = config?.GITHUB_TOKEN ?? process.env.GITHUB_TOKEN; 127 | if (!GITHUB_TOKEN) { 128 | console.error("GITHUB_TOKEN must be set to some personal access token as an env variable or in a config.json file"); 129 | process.exit(1); 130 | } 131 | 132 | // An NPM token is needed to run the script from a local machine. 
133 | // Authentication from a GitHub workflow rather relies on OpenID Connect 134 | // and the release workflow must be added as a trusted publisher for each 135 | // npm package that can be released, see: 136 | // https://docs.npmjs.com/trusted-publishers 137 | const NPM_TOKEN = config?.NPM_TOKEN ?? process.env.NPM_TOKEN; 138 | 139 | // Note: npm-publish has a bug and needs an "INPUT_TOKEN" env variable: 140 | // https://github.com/JS-DevTools/npm-publish/issues/15 141 | // (we're passing the token to the function directly, no need to set it here) 142 | process.env.INPUT_TOKEN = ""; 143 | 144 | const octokit = new Octokit({ 145 | auth: GITHUB_TOKEN 146 | //, log: console 147 | }); 148 | 149 | const prereleasePR = parseInt(process.argv[2], 10); 150 | 151 | releasePackage(prereleasePR) 152 | .then(() => { 153 | console.log(); 154 | console.log("== The end =="); 155 | }) 156 | .catch(err => { 157 | console.error(err); 158 | process.exit(1); 159 | }); 160 | -------------------------------------------------------------------------------- /test/lint.js: -------------------------------------------------------------------------------- 1 | import { describe, it } from "node:test"; 2 | import assert from "node:assert"; 3 | import { lintStr } from "../src/lint.js"; 4 | 5 | describe("Linter", () => { 6 | describe("lintStr()", () => { 7 | function toStr(specs) { 8 | return JSON.stringify(specs, null, 2) + "\n"; 9 | } 10 | 11 | it("passes if specs contains a valid URL", () => { 12 | const specs = ["https://www.w3.org/TR/spec/"]; 13 | assert.equal(lintStr(toStr(specs)), null); 14 | }); 15 | 16 | it("passes if specs contains multiple sorted URLs", () => { 17 | const specs = [ 18 | "https://www.w3.org/TR/spec1/", 19 | "https://www.w3.org/TR/spec2/" 20 | ]; 21 | assert.equal(lintStr(toStr(specs)), null); 22 | }); 23 | 24 | it("passes if specs contains a URL with a delta spec", () => { 25 | const specs = [ 26 | "https://www.w3.org/TR/spec-1/", 27 | "https://www.w3.org/TR/spec-2/ 
delta" 28 | ]; 29 | assert.equal(lintStr(toStr(specs)), null); 30 | }); 31 | 32 | it("passes if specs contains a URL with a spec flagged as current", () => { 33 | const specs = [ 34 | "https://www.w3.org/TR/spec-1/ current", 35 | "https://www.w3.org/TR/spec-2/" 36 | ]; 37 | assert.equal(lintStr(toStr(specs)), null); 38 | }); 39 | 40 | it("passes if specs contains a URL with a spec flagged as multipage", () => { 41 | const specs = [ 42 | "https://www.w3.org/TR/spec-1/ multipage" 43 | ]; 44 | assert.equal(lintStr(toStr(specs)), null); 45 | }); 46 | 47 | it("sorts URLs", () => { 48 | const specs = [ 49 | "https://www.w3.org/TR/spec2/", 50 | "https://www.w3.org/TR/spec1/" 51 | ]; 52 | assert.equal( 53 | lintStr(toStr(specs)), 54 | toStr([ 55 | "https://www.w3.org/TR/spec1/", 56 | "https://www.w3.org/TR/spec2/" 57 | ])); 58 | }); 59 | 60 | it("lints a URL", () => { 61 | const specs = [ 62 | { "url": "https://example.org", "shortname": "test" } 63 | ]; 64 | assert.equal(lintStr(toStr(specs)), toStr([ 65 | { "url": "https://example.org/", "shortname": "test" } 66 | ])); 67 | }); 68 | 69 | it("lints an object with only a URL to a URL", () => { 70 | const specs = [ 71 | { "url": "https://www.w3.org/TR/spec/" } 72 | ]; 73 | assert.equal(lintStr(toStr(specs)), toStr([ 74 | "https://www.w3.org/TR/spec/" 75 | ])); 76 | }); 77 | 78 | it("lints an object with only a URL and a delta flag to a string", () => { 79 | const specs = [ 80 | "https://www.w3.org/TR/spec-1/", 81 | { "url": "https://www.w3.org/TR/spec-2/", seriesComposition: "delta" } 82 | ]; 83 | assert.equal(lintStr(toStr(specs)), toStr([ 84 | "https://www.w3.org/TR/spec-1/", 85 | "https://www.w3.org/TR/spec-2/ delta" 86 | ])); 87 | }); 88 | 89 | it("lints an object with only a URL and a current flag to a string", () => { 90 | const specs = [ 91 | { "url": "https://www.w3.org/TR/spec-1/", "forceCurrent": true }, 92 | "https://www.w3.org/TR/spec-2/" 93 | ]; 94 | assert.equal(lintStr(toStr(specs)), toStr([ 95 | 
"https://www.w3.org/TR/spec-1/ current", 96 | "https://www.w3.org/TR/spec-2/" 97 | ])); 98 | }); 99 | 100 | it("lints an object with only a URL and a multipage flag to a string", () => { 101 | const specs = [ 102 | { "url": "https://www.w3.org/TR/spec-1/", "multipage": "all" } 103 | ]; 104 | assert.equal(lintStr(toStr(specs)), toStr([ 105 | "https://www.w3.org/TR/spec-1/ multipage" 106 | ])); 107 | }); 108 | 109 | it("lints an object with a 'full' flag", () => { 110 | const specs = [ 111 | { "url": "https://www.w3.org/TR/spec/", "seriesComposition": "full" } 112 | ]; 113 | assert.equal(lintStr(toStr(specs)), toStr([ 114 | "https://www.w3.org/TR/spec/" 115 | ])); 116 | }); 117 | 118 | it("lints an object with a current flag set to false", () => { 119 | const specs = [ 120 | { "url": "https://www.w3.org/TR/spec/", "forceCurrent": false } 121 | ]; 122 | assert.equal(lintStr(toStr(specs)), toStr([ 123 | "https://www.w3.org/TR/spec/" 124 | ])); 125 | }); 126 | 127 | it("lints an object with a multipage flag set to null", () => { 128 | const specs = [ 129 | { "url": "https://www.w3.org/TR/spec/", "multipage": null } 130 | ]; 131 | assert.equal(lintStr(toStr(specs)), toStr([ 132 | "https://www.w3.org/TR/spec/" 133 | ])); 134 | }); 135 | 136 | it("drops duplicate URLs", () => { 137 | const specs = [ 138 | "https://www.w3.org/TR/duplicate/", 139 | "https://www.w3.org/TR/duplicate/" 140 | ]; 141 | assert.equal( 142 | lintStr(toStr(specs)), 143 | toStr(["https://www.w3.org/TR/duplicate/"])); 144 | }); 145 | 146 | it("drops duplicate URLs defined as string and object", () => { 147 | const specs = [ 148 | { "url": "https://www.w3.org/TR/duplicate/" }, 149 | "https://www.w3.org/TR/duplicate/" 150 | ]; 151 | assert.equal( 152 | lintStr(toStr(specs)), 153 | toStr(["https://www.w3.org/TR/duplicate/"])); 154 | }); 155 | 156 | it("lints an object with a forkOf and a seriesComposition property", () => { 157 | const specs = [ 158 | "https://www.w3.org/TR/spec-1/", 159 | { "url": 
"https://www.w3.org/TR/spec-2/", seriesComposition: "fork", forkOf: "spec-1" } 160 | ]; 161 | assert.equal(lintStr(toStr(specs)), toStr([ 162 | "https://www.w3.org/TR/spec-1/", 163 | { "url": "https://www.w3.org/TR/spec-2/", forkOf: "spec-1" } 164 | ])); 165 | }); 166 | }); 167 | }); 168 | -------------------------------------------------------------------------------- /schema/definitions.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/schema#", 3 | "$id": "https://w3c.github.io/browser-specs/schema/definitions.json", 4 | 5 | "$defs": { 6 | "url": { 7 | "type": "string", 8 | "format": "uri" 9 | }, 10 | 11 | "filename": { 12 | "type": "string", 13 | "pattern": "^[\\w\\-\\.]+\\.(html|pdf|txt)$" 14 | }, 15 | 16 | "relativePath": { 17 | "type": "string", 18 | "pattern": "^[\\w\\-\\.]+(\\/[\\w\\-\\.]+)*$" 19 | }, 20 | 21 | "shortname": { 22 | "type": "string", 23 | "pattern": "^[\\w\\-]+((?<=v?\\d+)\\.\\d+)?$" 24 | }, 25 | 26 | "series": { 27 | "type": "object", 28 | "properties": { 29 | "shortname": { 30 | "type": "string", 31 | "pattern": "^[\\w\\-]+$" 32 | }, 33 | "title": { "$ref": "#/$defs/title" }, 34 | "shortTitle": { "$ref": "#/$defs/title" }, 35 | "currentSpecification": { "$ref": "#/$defs/shortname" }, 36 | "releaseUrl": { "$ref": "#/$defs/url" }, 37 | "nightlyUrl": { "$ref": "#/$defs/url" } 38 | }, 39 | "required": ["shortname"], 40 | "additionalProperties": false 41 | }, 42 | 43 | "seriesVersion": { 44 | "type": "string", 45 | "pattern": "^\\d+(\\.\\d+){0,2}$" 46 | }, 47 | 48 | "seriesComposition": { 49 | "type": "string", 50 | "enum": ["full", "delta", "fork"] 51 | }, 52 | 53 | "forceCurrent": { 54 | "type": "boolean" 55 | }, 56 | 57 | "title": { 58 | "type": "string" 59 | }, 60 | 61 | "source": { 62 | "type": "string", 63 | "enum": ["w3c", "spec", "ietf", "whatwg", "iso"] 64 | }, 65 | 66 | "nightly": { 67 | "type": "object", 68 | "properties": { 69 | "url": { "$ref": 
"#/$defs/url" }, 70 | "status": { 71 | "type": "string", 72 | "enum": [ 73 | "A Collection of Interesting Ideas", 74 | "Draft Community Group Report", 75 | "Draft Deliverable", 76 | "Draft Finding", 77 | "Draft Registry", 78 | "Editor's Draft", 79 | "Experimental", 80 | "Final Deliverable", 81 | "Informational", 82 | "Internet Standard", 83 | "Living Standard", 84 | "Proposed Standard", 85 | "TAG Finding", 86 | "Unofficial Proposal Draft", 87 | "Working Group Approved Draft" 88 | ] 89 | }, 90 | "alternateUrls": { 91 | "type": "array", 92 | "items": { "$ref": "#/$defs/url" } 93 | }, 94 | "filename": { "$ref": "#/$defs/filename" }, 95 | "sourcePath": { "$ref": "#/$defs/relativePath" }, 96 | "pages": { 97 | "type": "array", 98 | "items": { "$ref": "#/$defs/url" } 99 | }, 100 | "repository": { "$ref": "#/$defs/url" } 101 | }, 102 | "additionalProperties": false 103 | }, 104 | 105 | "tests": { 106 | "type": "object", 107 | "properties": { 108 | "repository": { "$ref": "#/$defs/url" }, 109 | "testPaths": { 110 | "type": "array", 111 | "items": { "$ref": "#/$defs/relativePath" }, 112 | "minItems": 1 113 | }, 114 | "excludePaths": { 115 | "type": "array", 116 | "items": { "$ref": "#/$defs/relativePath" }, 117 | "minItems": 1 118 | } 119 | }, 120 | "required": ["repository"], 121 | "additionalProperties": false 122 | }, 123 | 124 | "groups": { 125 | "type": "array", 126 | "items": { 127 | "type": "object", 128 | "properties": { 129 | "name": { "type": "string" }, 130 | "url": { "$ref": "#/$defs/url" } 131 | }, 132 | "required": ["name", "url"], 133 | "additionalProperties": false 134 | } 135 | }, 136 | 137 | "organization": { 138 | "type": "string" 139 | }, 140 | 141 | "categories": { 142 | "type": "array", 143 | "items": { 144 | "type": "string", 145 | "enum": ["browser"] 146 | } 147 | }, 148 | 149 | "categories-specs": { 150 | "oneOf": [ 151 | { 152 | "type": "string", 153 | "enum": ["reset", "+browser", "-browser"] 154 | }, 155 | { 156 | "type": "array", 157 | "items": { 
158 | "type": "string", 159 | "enum": ["reset", "+browser", "-browser"] 160 | }, 161 | "minItems": 1 162 | } 163 | ] 164 | }, 165 | 166 | "forks": { 167 | "type": "array", 168 | "items": { "$ref": "#/$defs/shortname" } 169 | }, 170 | 171 | "standing": { 172 | "type": "string", 173 | "enum": ["good", "pending", "discontinued"] 174 | }, 175 | 176 | "obsoletedBy": { 177 | "type": "array", 178 | "items": { "$ref": "#/$defs/shortname" }, 179 | "minItems": 1 180 | }, 181 | 182 | "formerNames": { 183 | "type": "array", 184 | "items": { "$ref": "#/$defs/shortname" }, 185 | "minItems": 1 186 | }, 187 | 188 | "specsfile": { 189 | "release": { 190 | "type": "object", 191 | "properties": { 192 | "url": { "$ref": "#/$defs/url" }, 193 | "status": { 194 | "type": "string", 195 | "enum": [ 196 | "Candidate Recommendation Draft", 197 | "Candidate Recommendation Snapshot", 198 | "Discontinued Draft", 199 | "Draft Note", 200 | "Draft Registry", 201 | "Final Deliverable", 202 | "First Public Working Draft", 203 | "Note", 204 | "Proposed Recommendation", 205 | "Recommendation", 206 | "Statement", 207 | "Working Draft" 208 | ] 209 | }, 210 | "filename": { "$ref": "#/$defs/filename" }, 211 | "pages": { 212 | "type": "array", 213 | "items": { "$ref": "#/$defs/url" } 214 | } 215 | }, 216 | "additionalProperties": false 217 | } 218 | }, 219 | 220 | "indexfile": { 221 | "release": { 222 | "$ref": "#/$defs/specsfile/release", 223 | "required": ["url", "status"] 224 | } 225 | } 226 | } 227 | } -------------------------------------------------------------------------------- /src/determine-testpath.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Module that takes a list of spec objects as input and returns, for each spec, 3 | * the URL of the repository that contains the test suite of the spec along with 4 | * the path under which the tests are to be found in that repository. 
5 | * 6 | * The function will run git commands on the command-line and populate the local 7 | * ".cache" folder. 8 | */ 9 | 10 | import fs from "node:fs"; 11 | import path from "node:path"; 12 | import { execSync } from "node:child_process"; 13 | import { fileURLToPath } from "node:url"; 14 | 15 | // Cache folder under which the WPT repository will be cloned 16 | const scriptPath = path.dirname(fileURLToPath(import.meta.url)); 17 | const cacheFolder = path.resolve(scriptPath, "..", ".cache"); 18 | const wptFolder = path.resolve(cacheFolder, "wpt"); 19 | 20 | /** 21 | * Helper function to setup the cache folder 22 | */ 23 | function setupCacheFolder() { 24 | try { 25 | fs.mkdirSync(cacheFolder); 26 | } 27 | catch (err) { 28 | if (err.code !== 'EEXIST') { 29 | throw err; 30 | } 31 | } 32 | } 33 | 34 | /** 35 | * Helper function that returns true when the WPT folder already exists 36 | * (which is taken to mean that the repository has already been cloned) 37 | */ 38 | function wptFolderExists() { 39 | try { 40 | fs.accessSync(wptFolder); 41 | return true; 42 | } 43 | catch (err) { 44 | if (err.code !== "ENOENT") { 45 | throw err; 46 | } 47 | return false; 48 | } 49 | } 50 | 51 | /** 52 | * Helper function that fetches the latest version of the WPT repository, 53 | * restricting the checkout to META.yml files 54 | */ 55 | function fetchWPT() { 56 | setupCacheFolder(); 57 | if (wptFolderExists()) { 58 | // Pull latest commit from master branch 59 | execSync("git pull origin master", { cwd: wptFolder }); 60 | } 61 | else { 62 | // Clone repo using sparse mode: the repo is huge and we're only interested 63 | // in META.yml files 64 | execSync("git clone https://github.com/web-platform-tests/wpt.git --depth 1 --sparse", { cwd: cacheFolder }); 65 | execSync("git sparse-checkout set --no-cone", { cwd: wptFolder }); 66 | execSync("git sparse-checkout add **/META.yml", { cwd: wptFolder }); 67 | } 68 | } 69 | 70 | /** 71 | * Helper function that reads "spec" entries in all 
META.yml files of the WPT 72 | * repository. 73 | * 74 | * Note the function parses META.yml files as regular text files. That works 75 | * well but a proper YAML parser would be needed if we need to handle things 76 | * such as comments. 77 | */ 78 | async function readWptMetaFiles() { 79 | async function readFolder(folder) { 80 | let res = []; 81 | const contents = await fs.promises.readdir(folder); 82 | for (const name of contents) { 83 | const filename = path.resolve(folder, name); 84 | const stat = await fs.promises.stat(filename); 85 | if (stat.isDirectory()) { 86 | const nestedFiles = await readFolder(filename); 87 | res = res.concat(nestedFiles); 88 | } 89 | else if (name === "META.yml") { 90 | const file = await fs.promises.readFile(filename, "utf8"); 91 | const match = file.match(/(?:^|\n)spec: (.*)$/m); 92 | if (match) { 93 | res.push({ 94 | folder: folder.substring(wptFolder.length + 1).replace(/\\/g, "/"), 95 | spec: match[1] 96 | }); 97 | } 98 | } 99 | } 100 | return res; 101 | } 102 | 103 | fetchWPT(); 104 | return await readFolder(wptFolder); 105 | } 106 | 107 | 108 | /** 109 | * Returns the first item in the list found in the array, or null if none of 110 | * the items exists in the array. 111 | */ 112 | function getFirstFoundInArray(paths, ...items) { 113 | for (const item of items) { 114 | const path = paths.find(p => p === item); 115 | if (path) { 116 | return path; 117 | } 118 | } 119 | return null; 120 | } 121 | 122 | 123 | /** 124 | * Exports main function that takes a list of specs as input, completes entries 125 | * with a tests property when possible and returns the list. 126 | * 127 | * The options parameter is used to specify the GitHub API authentication token. 
128 | */ 129 | export default async function (specs, options) { 130 | if (!specs || specs.find(spec => !spec.shortname || !spec.series || !spec.series.shortname)) { 131 | throw "Invalid list of specifications passed as parameter"; 132 | } 133 | options = options || {}; 134 | 135 | const wptFolders = await readWptMetaFiles(); 136 | 137 | function determineTestInfo(spec) { 138 | const info = { 139 | repository: "https://github.com/web-platform-tests/wpt" 140 | }; 141 | 142 | if (spec.tests) { 143 | return Object.assign(info, spec.tests); 144 | } 145 | 146 | if (spec.url.startsWith("https://registry.khronos.org/webgl/")) { 147 | info.repository = "https://github.com/KhronosGroup/WebGL"; 148 | info.testPaths = ["conformance-suites"]; 149 | // TODO: Be more specific, tests for extensions should one of the files in: 150 | // https://github.com/KhronosGroup/WebGL/tree/master/conformance-suites/2.0.0/conformance2/extensions 151 | // https://github.com/KhronosGroup/WebGL/tree/master/conformance-suites/2.0.0/conformance/extensions 152 | // https://github.com/KhronosGroup/WebGL/tree/master/conformance-suites/1.0.3/conformance/extensions 153 | return info; 154 | } 155 | 156 | if (spec.url.startsWith("https://tc39.es/proposal-") || !spec.nightly) { 157 | // TODO: proposals may or may not have tests under tc39/test262, it would 158 | // be good to have that info here. However, that seems hard to assess 159 | // automatically and tedious to handle as exceptions in specs.json. 160 | return null; 161 | } 162 | 163 | // Note the use of startsWith below, needed to cover cases where a META.yml 164 | // file targets a specific page in a multipage spec (for HTML, typically), 165 | // or a fragment within a spec. 
166 | const folders = wptFolders 167 | .filter(item => 168 | item.spec.startsWith(spec.nightly.url) || 169 | item.spec.startsWith(spec.nightly.url.replace(/-\d+\/$/, "/"))) 170 | .map(item => item.folder); 171 | if (folders.length > 0) { 172 | // Don't list subfolders when parent folder is already in the list 173 | info.testPaths = folders.filter(p1 => !folders.find(p2 => (p1 !== p2) && p1.startsWith(p2))); 174 | 175 | // Exclude subfolders of listed folders when they map to another spec 176 | const excludePaths = folders 177 | .map(path => wptFolders.filter(item => 178 | (item.folder !== path) && 179 | item.folder.startsWith(path + "/") && 180 | !item.spec.startsWith(spec.nightly.url) && 181 | !item.spec.startsWith(spec.nightly.url.replace(/-\d+\/$/, "/")))) 182 | .flat() 183 | .map(item => item.folder); 184 | if (excludePaths.length > 0) { 185 | info.excludePaths = excludePaths; 186 | } 187 | 188 | return info; 189 | } 190 | return null; 191 | } 192 | 193 | const testInfos = specs.map(determineTestInfo); 194 | for (const spec of specs) { 195 | const testInfo = testInfos.shift(); 196 | if (testInfo) { 197 | spec.tests = testInfo; 198 | } 199 | } 200 | 201 | return specs; 202 | }; 203 | -------------------------------------------------------------------------------- /test/compute-series-urls.js: -------------------------------------------------------------------------------- 1 | import { describe, it } from "node:test"; 2 | import assert from "node:assert"; 3 | import computeSeriesUrls from "../src/compute-series-urls.js"; 4 | 5 | describe("compute-series-urls module", () => { 6 | it("returns spec URLs when spec has no level", () => { 7 | const spec = { 8 | url: "https://www.w3.org/TR/preload/", 9 | shortname: "preload", 10 | series: { shortname: "preload" }, 11 | release: { url: "https://www.w3.org/TR/preload/" }, 12 | nightly: { url: "https://w3c.github.io/preload/" } 13 | }; 14 | assert.deepStrictEqual(computeSeriesUrls(spec), 15 | { releaseUrl: 
"https://www.w3.org/TR/preload/", 16 | nightlyUrl: "https://w3c.github.io/preload/" }); 17 | }); 18 | 19 | 20 | it("does not return a release URL if spec has none", () => { 21 | const spec = { 22 | url: "https://compat.spec.whatwg.org/", 23 | shortname: "compat", 24 | series: { shortname: "compat" }, 25 | nightly: { url: "https://compat.spec.whatwg.org/" } 26 | }; 27 | assert.deepStrictEqual(computeSeriesUrls(spec), 28 | { nightlyUrl: "https://compat.spec.whatwg.org/" }); 29 | }); 30 | 31 | 32 | it("does not return a nightly URL if spec has none", () => { 33 | const spec = { 34 | url: "https://compat.spec.whatwg.org/", 35 | shortname: "compat", 36 | series: { shortname: "compat" }, 37 | }; 38 | assert.deepStrictEqual(computeSeriesUrls(spec), {}); 39 | }); 40 | 41 | 42 | it("returns series URLs for Houdini specs", () => { 43 | const spec = { 44 | url: "https://www.w3.org/TR/css-paint-api-1/", 45 | shortname: "css-paint-api-1", 46 | series: { shortname: "css-paint-api" }, 47 | release: { url: "https://www.w3.org/TR/css-paint-api-1/" }, 48 | nightly: { url: "https://drafts.css-houdini.org/css-paint-api-1/" } 49 | }; 50 | assert.deepStrictEqual(computeSeriesUrls(spec), 51 | { releaseUrl: "https://www.w3.org/TR/css-paint-api/", 52 | nightlyUrl: "https://drafts.css-houdini.org/css-paint-api/" }); 53 | }); 54 | 55 | 56 | it("returns series URLs for CSS specs", () => { 57 | const spec = { 58 | url: "https://www.w3.org/TR/css-fonts-4/", 59 | shortname: "css-fonts-4", 60 | series: { shortname: "css-fonts" }, 61 | release: { url: "https://www.w3.org/TR/css-fonts-4/" }, 62 | nightly: { url: "https://drafts.csswg.org/css-fonts-4/" } 63 | }; 64 | assert.deepStrictEqual(computeSeriesUrls(spec), 65 | { releaseUrl: "https://www.w3.org/TR/css-fonts/", 66 | nightlyUrl: "https://drafts.csswg.org/css-fonts/" }); 67 | }); 68 | 69 | 70 | it("handles CSS2 correctly", () => { 71 | const spec = { 72 | url: "https://www.w3.org/TR/CSS2/", 73 | shortname: "CSS2", 74 | series: { shortname: 
"CSS" }, 75 | release: { url: "https://www.w3.org/TR/CSS2/" }, 76 | nightly: { url: "https://drafts.csswg.org/css2/" } 77 | }; 78 | assert.deepStrictEqual(computeSeriesUrls(spec), 79 | { releaseUrl: "https://www.w3.org/TR/CSS2/", 80 | nightlyUrl: "https://drafts.csswg.org/css2/" }); 81 | }); 82 | 83 | 84 | it("returns right nightly URL for series when spec's nightly has no level", () => { 85 | const spec = { 86 | url: "https://www.w3.org/TR/pointerlock-2/", 87 | shortname: "pointerlock-2", 88 | series: { shortname: "pointerlock" }, 89 | release: { url: "https://www.w3.org/TR/pointerlock-2/" }, 90 | nightly: { url: "https://w3c.github.io/pointerlock/" } 91 | }; 92 | assert.deepStrictEqual(computeSeriesUrls(spec), 93 | { releaseUrl: "https://www.w3.org/TR/pointerlock/", 94 | nightlyUrl: "https://w3c.github.io/pointerlock/" }); 95 | }); 96 | 97 | 98 | it("does not invent an unversioned nightly URL for SVG 2", () => { 99 | const spec = { 100 | url: "https://www.w3.org/TR/SVG2/", 101 | shortname: "SVG2", 102 | series: { shortname: "SVG" }, 103 | release: { url: "https://www.w3.org/TR/SVG2/" }, 104 | nightly: { url: "https://svgwg.org/svg2-draft/" } 105 | }; 106 | assert.deepStrictEqual(computeSeriesUrls(spec), 107 | { releaseUrl: "https://www.w3.org/TR/SVG/", 108 | nightlyUrl: "https://svgwg.org/svg2-draft/" }); 109 | }); 110 | 111 | 112 | it("looks for a release URL in previous versions", () => { 113 | const spec = { 114 | url: "https://drafts.csswg.org/css-fonts-5/", 115 | shortname: "css-fonts-5", 116 | series: { shortname: "css-fonts" }, 117 | seriesPrevious: "css-fonts-4", 118 | nightly: { url: "https://drafts.csswg.org/css-fonts-5/" } 119 | }; 120 | 121 | const list = [ 122 | spec, 123 | { 124 | url: "https://drafts.csswg.org/css-fonts-4/", 125 | shortname: "css-fonts-4", 126 | series: { shortname: "css-fonts" }, 127 | seriesPrevious: "css-fonts-3", 128 | nightly: { url: "https://drafts.csswg.org/css-fonts-4/" } 129 | }, 130 | { 131 | url: 
"https://drafts.csswg.org/css-fonts-3/", 132 | shortname: "css-fonts-3", 133 | series: { shortname: "css-fonts" }, 134 | release: { url: "https://www.w3.org/TR/css-fonts-3/" }, 135 | nightly: { url: "https://drafts.csswg.org/css-fonts-3/" } 136 | } 137 | ]; 138 | 139 | assert.deepStrictEqual(computeSeriesUrls(spec, list), 140 | { releaseUrl: "https://www.w3.org/TR/css-fonts/", 141 | nightlyUrl: "https://drafts.csswg.org/css-fonts/" }); 142 | }); 143 | 144 | 145 | it("looks for a release URL in the provided spec if not the current one", () => { 146 | const spec = { 147 | url: "https://drafts.fxtf.org/compositing-1/", 148 | shortname: "compositing-1", 149 | series: { shortname: "compositing", currentSpecification: "compositing-2" }, 150 | nightly: { url: "https://drafts.fxtf.org/compositing-1/" }, 151 | release: { url: "https://www.w3.org/TR/compositing-1/" } 152 | }; 153 | 154 | const list = [ 155 | spec, 156 | { 157 | url: "https://drafts.fxtf.org/compositing-2/", 158 | shortname: "compositing-2", 159 | series: { shortname: "compositing", currentSpecification: "compositing-2" }, 160 | seriesPrevious: "compositing-1", 161 | nightly: { url: "https://drafts.fxtf.org/compositing-2/" } 162 | } 163 | ]; 164 | 165 | assert.deepStrictEqual(computeSeriesUrls(spec, list), 166 | { releaseUrl: "https://www.w3.org/TR/compositing/", 167 | nightlyUrl: "https://drafts.fxtf.org/compositing/" }); 168 | }); 169 | 170 | 171 | it("computes info based on current specification", () => { 172 | const spec = { 173 | url: "https://www.w3.org/TR/SVG11/", 174 | seriesComposition: "full", 175 | shortname: "SVG11", 176 | series: { shortname: "SVG", currentSpecification: "SVG2" }, 177 | release: { url: "https://www.w3.org/TR/SVG11/" }, 178 | nightly: { url: "https://www.w3.org/TR/SVG11/" } 179 | }; 180 | 181 | const list = [ 182 | spec, 183 | { 184 | url: "https://www.w3.org/TR/SVG2/", 185 | seriesComposition: "full", 186 | shortname: "SVG2", 187 | series: { shortname: "SVG", 
currentSpecification: "SVG2" }, 188 | release: { url: "https://www.w3.org/TR/SVG2/" }, 189 | nightly: { url: "https://svgwg.org/svg2-draft/" } 190 | } 191 | ]; 192 | 193 | assert.deepStrictEqual(computeSeriesUrls(spec, list), 194 | { releaseUrl: "https://www.w3.org/TR/SVG/", 195 | nightlyUrl: "https://svgwg.org/svg2-draft/" }); 196 | }); 197 | }); 198 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | ## Applicable licenses 2 | 3 | This software and associated documentation files (the "Software") are licensed under the terms of the [MIT License](#mit-license). 4 | 5 | Additionally, the list of technical Web specifications (the [index.json](index.json) file) is published under the terms of the [CC0 license](#cc0-license). 6 | 7 | 8 | ## MIT License 9 | 10 | Copyright (c) 2020 World Wide Web Consortium 11 | 12 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 13 | 14 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
17 | 18 | 19 | ## CC0 License 20 | 21 | ### Statement of Purpose 22 | 23 | The laws of most jurisdictions throughout the world automatically confer exclusive Copyright and Related Rights (defined below) upon the creator and subsequent owner(s) (each and all, an "owner") of an original work of authorship and/or a database (each, a "Work"). 24 | 25 | Certain owners wish to permanently relinquish those rights to a Work for the purpose of contributing to a commons of creative, cultural and scientific works ("Commons") that the public can reliably and without fear of later claims of infringement build upon, modify, incorporate in other works, reuse and redistribute as freely as possible in any form whatsoever and for any purposes, including without limitation commercial purposes. These owners may contribute to the Commons to promote the ideal of a free culture and the further production of creative, cultural and scientific works, or to gain reputation or greater distribution for their Work in part through the use and efforts of others. 26 | 27 | For these and/or other purposes and motivations, and without any expectation of additional consideration or compensation, the person associating CC0 with a Work (the "Affirmer"), to the extent that he or she is an owner of Copyright and Related Rights in the Work, voluntarily elects to apply CC0 to the Work and publicly distribute the Work under its terms, with knowledge of his or her Copyright and Related Rights in the Work and the meaning and intended legal effect of CC0 on those rights. 28 | 29 | ### Copyright and Related Rights 30 | 31 | A Work made available under CC0 may be protected by copyright and related or neighboring rights ("Copyright and Related Rights"). Copyright and Related Rights include, but are not limited to, the following: 32 | 33 | i. the right to reproduce, adapt, distribute, perform, display, communicate, and translate a Work; 34 | ii. 
moral rights retained by the original author(s) and/or performer(s); 35 | iii. publicity and privacy rights pertaining to a person's image or likeness depicted in a Work; 36 | iv. rights protecting against unfair competition in regards to a Work, subject to the limitations in paragraph 4(a), below; 37 | v. rights protecting the extraction, dissemination, use and reuse of data in a Work; 38 | vi. database rights (such as those arising under Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, and under any national implementation thereof, including any amended or successor version of such directive); and 39 | vii. other similar, equivalent or corresponding rights throughout the world based on applicable law or treaty, and any national implementations thereof. 40 | 41 | ### Waiver 42 | 43 | To the greatest extent permitted by, but not in contravention of, applicable law, Affirmer hereby overtly, fully, permanently, irrevocably and unconditionally waives, abandons, and surrenders all of Affirmer's Copyright and Related Rights and associated claims and causes of action, whether now known or unknown (including existing as well as future claims and causes of action), in the Work (i) in all territories worldwide, (ii) for the maximum duration provided by applicable law or treaty (including future time extensions), (iii) in any current or future medium and for any number of copies, and (iv) for any purpose whatsoever, including without limitation commercial, advertising or promotional purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each member of the public at large and to the detriment of Affirmer's heirs and successors, fully intending that such Waiver shall not be subject to revocation, rescission, cancellation, termination, or any other legal or equitable action to disrupt the quiet enjoyment of the Work by the public as contemplated by Affirmer's express Statement of Purpose. 
44 | 45 | ### Public License Fallback 46 | 47 | Should any part of the Waiver for any reason be judged legally invalid or ineffective under applicable law, then the Waiver shall be preserved to the maximum extent permitted taking into account Affirmer's express Statement of Purpose. In addition, to the extent the Waiver is so judged Affirmer hereby grants to each affected person a royalty-free, non transferable, non sublicensable, non exclusive, irrevocable and unconditional license to exercise Affirmer's Copyright and Related Rights in the Work (i) in all territories worldwide, (ii) for the maximum duration provided by applicable law or treaty (including future time extensions), (iii) in any current or future medium and for any number of copies, and (iv) for any purpose whatsoever, including without limitation commercial, advertising or promotional purposes (the "License"). The License shall be deemed effective as of the date CC0 was applied by Affirmer to the Work. Should any part of the License for any reason be judged legally invalid or ineffective under applicable law, such partial invalidity or ineffectiveness shall not invalidate the remainder of the License, and in such case Affirmer hereby affirms that he or she will not (i) exercise any of his or her remaining Copyright and Related Rights in the Work or (ii) assert any associated claims and causes of action with respect to the Work, in either case contrary to Affirmer's express Statement of Purpose. 48 | 49 | ### Limitations and Disclaimers 50 | 51 | a. No trademark or patent rights held by Affirmer are waived, abandoned, surrendered, licensed or otherwise affected by this document. 52 | b. 
Affirmer offers the Work as-is and makes no representations or warranties of any kind concerning the Work, express, implied, statutory or otherwise, including without limitation warranties of title, merchantability, fitness for a particular purpose, non infringement, or the absence of latent or other defects, accuracy, or the present or absence of errors, whether or not discoverable, all to the greatest extent permissible under applicable law. 53 | c. Affirmer disclaims responsibility for clearing rights of other persons that may apply to the Work or any use thereof, including without limitation any person's Copyright and Related Rights in the Work. Further, Affirmer disclaims responsibility for obtaining any necessary consents, permissions or other rights required for any use of the Work. 54 | d. Affirmer understands and acknowledges that Creative Commons is not a party to this document and has no duty or obligation with respect to this CC0 or use of the Work. 55 | -------------------------------------------------------------------------------- /src/compute-repository.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Module that exports a function that takes a list of specifications as input 3 | * and computes, for each of them, the URL of the repository that contains the 4 | * source code for this, as well as the source file of the specification at the 5 | * HEAD of default branch in the repository. 6 | * 7 | * The function needs an authentication token for the GitHub API. 8 | */ 9 | 10 | import Octokit from "./octokit.js"; 11 | import parseSpecUrl from "./parse-spec-url.js"; 12 | 13 | 14 | /** 15 | * Returns the first item in the list found in the Git tree, or null if none of 16 | * the items exists in the array. 
17 | */ 18 | function getFirstFoundInTree(paths, ...items) { 19 | for (const item of items) { 20 | const path = paths.find(p => p.path === item); 21 | if (path) { 22 | return path; 23 | } 24 | } 25 | return null; 26 | } 27 | 28 | 29 | /** 30 | * Exports main function that takes a list of specs (with a nighly.url property) 31 | * as input, completes entries with a nightly.repository property when possible 32 | * and returns the list. 33 | * 34 | * The options parameter is used to specify the GitHub API authentication token. 35 | * In the absence of it, the function does not go through the GitHub API and 36 | * thus cannot set most of the information. This is useful to run tests without 37 | * an authentication token (but obviously means that the owner name returned 38 | * by the function will remain the lowercased version, and that the returned 39 | * info won't include the source file). 40 | */ 41 | export default async function (specs, options) { 42 | if (!specs) { 43 | throw "Invalid list of specifications passed as parameter"; 44 | } 45 | options = options || {}; 46 | 47 | const octokit = new Octokit({ auth: options.githubToken }); 48 | const repoCache = new Map(); 49 | const repoPathCache = new Map(); 50 | const userCache = new Map(); 51 | 52 | /** 53 | * Take a GitHub repo owner name (lowercase version) and retrieve the real 54 | * owner name (with possible uppercase characters) from the GitHub API. 55 | */ 56 | async function fetchRealGitHubOwnerName(username) { 57 | if (!userCache.has(username)) { 58 | const { data } = await octokit.users.getByUsername({ username }); 59 | if (data.message) { 60 | // Alert when user does not exist 61 | throw res.message; 62 | } 63 | userCache.set(username, data.login); 64 | } 65 | return userCache.get(username); 66 | } 67 | 68 | /** 69 | * Determine the name of the file that contains the source of the spec in the 70 | * default branch of the GitHub repository associated with the specification. 
71 | */ 72 | async function determineSourcePath(spec, repo) { 73 | // Retrieve all paths of the GitHub repository 74 | const cacheKey = `${repo.owner}/${repo.name}`; 75 | if (!repoPathCache.has(cacheKey)) { 76 | const { data } = await octokit.git.getTree({ 77 | owner: repo.owner, 78 | repo: repo.name, 79 | tree_sha: "HEAD", 80 | recursive: true 81 | }); 82 | const paths = data.tree; 83 | repoPathCache.set(cacheKey, paths); 84 | } 85 | const paths = repoPathCache.get(cacheKey); 86 | 87 | // Extract filename from nightly URL when there is one 88 | const match = spec.nightly.url.match(/\/([\w\-]+)\.html$/); 89 | const nightlyFilename = match ? match[1] : ""; 90 | 91 | const sourcePath = getFirstFoundInTree(paths, 92 | // Common paths for CSS specs 93 | `${spec.shortname}.bs`, 94 | `${spec.shortname}/Overview.bs`, 95 | `${spec.shortname}/Overview.src.html`, 96 | `${spec.series.shortname}/Overview.bs`, 97 | `${spec.series.shortname}/Overview.src.html`, 98 | 99 | // Used for SHACL specs 100 | `${spec.shortname}/index.html`, 101 | 102 | // Used for ARIA specs 103 | `${spec.series.shortname}/index.html`, 104 | 105 | // Named after the nightly filename 106 | `${nightlyFilename}.bs`, 107 | `${nightlyFilename}.html`, 108 | `${nightlyFilename}.src.html`, 109 | `${nightlyFilename}.md`, 110 | 111 | // WebGL extensions 112 | `extensions/${spec.shortname}/extension.xml`, 113 | 114 | // WebAssembly specs 115 | `document/${spec.series.shortname.replace(/^wasm-/, '')}/index.bs`, 116 | 117 | // SVG specs 118 | `specs/${spec.shortname.replace(/^svg-/, '')}/master/Overview.html`, 119 | `master/Overview.html`, 120 | 121 | // HTTPWG specs 122 | `specs/${spec.shortname}.xml`, 123 | 124 | // Following patterns are used in a small number of cases, but could 125 | // perhaps appear again in the future, so worth handling here. 
126 | "spec/index.bs", 127 | "spec/index.html", // Only one TC39 spec 128 | "spec/Overview.html", // Only WebCrypto 129 | "docs/index.bs", // Only ServiceWorker 130 | "spec.html", // Most TC39 specs 131 | "spec.emu", // Some TC39 specs 132 | `${spec.shortname}/Overview.html`, // css-color-3, mediaqueries-3 133 | 134 | // Most common patterns, checking on "index.html" last as some repos 135 | // include such a file to store the generated spec from the source. 136 | "index.src.html", 137 | "index.bs", 138 | "spec.bs", 139 | "index.md", 140 | "index.html" 141 | ); 142 | 143 | if (!sourcePath) { 144 | return null; 145 | } 146 | 147 | // Fetch target file for symlinks 148 | if (sourcePath.mode === "120000") { 149 | const { data } = await octokit.git.getBlob({ 150 | owner: repo.owner, 151 | repo: repo.name, 152 | file_sha: sourcePath.sha 153 | }); 154 | return Buffer.from(data.content, "base64").toString("utf8"); 155 | } 156 | return sourcePath.path; 157 | } 158 | 159 | async function isRealRepo(repo) { 160 | if (!options.githubToken) { 161 | // Assume the repo exists if we can't check 162 | return true; 163 | } 164 | const cacheKey = `${repo.owner}/${repo.name}`; 165 | if (!repoCache.has(cacheKey)) { 166 | try { 167 | await octokit.repos.get({ 168 | owner: repo.owner, 169 | repo: repo.name 170 | }); 171 | repoCache.set(cacheKey, true); 172 | } 173 | catch (err) { 174 | if (err.status === 404) { 175 | repoCache.set(cacheKey, false); 176 | } 177 | else { 178 | throw err; 179 | } 180 | } 181 | } 182 | return repoCache.get(cacheKey); 183 | } 184 | 185 | // Compute GitHub repositories with lowercase owner names 186 | const repos = specs.map(spec => spec.nightly ? 187 | parseSpecUrl(spec.nightly.repository ?? 
spec.nightly.url) : 188 | null); 189 | 190 | if (options.githubToken) { 191 | // Fetch the real name of repository owners (preserving case) 192 | for (const repo of repos) { 193 | if (repo) { 194 | repo.owner = await fetchRealGitHubOwnerName(repo.owner); 195 | } 196 | } 197 | } 198 | 199 | // Compute final repo URL and add source file if possible 200 | for (const spec of specs) { 201 | const repo = repos.shift(); 202 | if (repo && await isRealRepo(repo)) { 203 | spec.nightly.repository = `https://github.com/${repo.owner}/${repo.name}`; 204 | 205 | if (options.githubToken && !spec.nightly.sourcePath) { 206 | const sourcePath = await determineSourcePath(spec, repo); 207 | if (sourcePath) { 208 | spec.nightly.sourcePath = sourcePath; 209 | } 210 | } 211 | } 212 | else if (spec.nightly?.url.match(/\/httpwg\.org\//)) { 213 | const draftName = spec.nightly.url.match(/\/(draft-ietf-(.+))\.html$/); 214 | spec.nightly.repository = 'https://github.com/httpwg/http-extensions'; 215 | spec.nightly.sourcePath = `${draftName[1]}.md`; 216 | } 217 | } 218 | 219 | return specs; 220 | }; 221 | -------------------------------------------------------------------------------- /test/fetch-groups.js: -------------------------------------------------------------------------------- 1 | import { describe, it } from "node:test"; 2 | import assert from "node:assert"; 3 | import fetchGroups from "../src/fetch-groups.js"; 4 | 5 | const githubToken = (function () { 6 | try { 7 | return require("../config.json").GITHUB_TOKEN; 8 | } 9 | catch (err) { 10 | return null; 11 | } 12 | })() ?? 
process.env.GITHUB_TOKEN; 13 | 14 | describe("fetch-groups module (without API keys)", function () { 15 | // Long timeout since tests may need to send network requests 16 | const timeout = { timeout: 30000 }; 17 | 18 | async function fetchGroupsFor(url, options) { 19 | const spec = { url }; 20 | const result = await fetchGroups([spec], options); 21 | return result[0]; 22 | }; 23 | 24 | it("handles WHATWG URLs", timeout, async () => { 25 | const res = await fetchGroupsFor("https://url.spec.whatwg.org/"); 26 | assert.equal(res.organization, "WHATWG"); 27 | assert.deepStrictEqual(res.groups, [{ 28 | name: "URL Workstream", 29 | url: "https://url.spec.whatwg.org/" 30 | }]); 31 | }); 32 | 33 | it("handles TC39 URLs", timeout, async () => { 34 | const res = await fetchGroupsFor("https://tc39.es/proposal-relative-indexing-method/"); 35 | assert.equal(res.organization, "Ecma International"); 36 | assert.deepStrictEqual(res.groups, [{ 37 | name: "TC39", 38 | url: "https://tc39.es/" 39 | }]); 40 | }); 41 | 42 | it("handles W3C TAG URLs", timeout, async () => { 43 | const res = await fetchGroupsFor("https://www.w3.org/2001/tag/doc/promises-guide"); 44 | assert.equal(res.organization, "W3C"); 45 | assert.deepStrictEqual(res.groups, [{ 46 | name: "Technical Architecture Group", 47 | url: "https://www.w3.org/2001/tag/" 48 | }]); 49 | }); 50 | 51 | it("handles WebGL URLs", timeout, async () => { 52 | const res = await fetchGroupsFor("https://registry.khronos.org/webgl/extensions/EXT_clip_cull_distance/"); 53 | assert.equal(res.organization, "Khronos Group"); 54 | assert.deepStrictEqual(res.groups, [{ 55 | name: "WebGL Working Group", 56 | url: "https://www.khronos.org/webgl/" 57 | }]); 58 | }); 59 | 60 | it("handles IETF RFCs", timeout, async () => { 61 | const res = await fetchGroupsFor("https://www.rfc-editor.org/rfc/rfc9110"); 62 | assert.equal(res.organization, "IETF"); 63 | assert.deepStrictEqual(res.groups, [{ 64 | name: "HTTP Working Group", 65 | url: 
"https://datatracker.ietf.org/wg/httpbis/" 66 | }]); 67 | }); 68 | 69 | it("handles IETF group drafts", timeout, async () => { 70 | const res = await fetchGroupsFor("https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-digest-headers"); 71 | assert.equal(res.organization, "IETF"); 72 | assert.deepStrictEqual(res.groups, [{ 73 | name: "HTTP Working Group", 74 | url: "https://datatracker.ietf.org/wg/httpbis/" 75 | }]); 76 | }); 77 | 78 | it("handles IETF individual drafts", timeout, async () => { 79 | const res = await fetchGroupsFor("https://datatracker.ietf.org/doc/html/draft-cutler-httpbis-partitioned-cookies"); 80 | assert.equal(res.organization, "IETF"); 81 | assert.deepStrictEqual(res.groups, [{ 82 | name: "Individual Submissions", 83 | url: "https://datatracker.ietf.org/wg/none/" 84 | }]); 85 | }); 86 | 87 | it("handles IETF area drafts", timeout, async () => { 88 | const res = await fetchGroupsFor("https://datatracker.ietf.org/doc/html/draft-zern-webp"); 89 | assert.equal(res.organization, "IETF"); 90 | assert.deepStrictEqual(res.groups, [{ 91 | name: "Applications and Real-Time Area", 92 | url: "https://datatracker.ietf.org/wg/art/" 93 | }]); 94 | }); 95 | 96 | it("handles AOM specs", timeout, async () => { 97 | const res = await fetchGroupsFor("https://aomediacodec.github.io/afgs1-spec/"); 98 | assert.equal(res.organization, "Alliance for Open Media"); 99 | assert.deepStrictEqual(res.groups, [{ 100 | name: "Codec Working Group", 101 | url: "https://aomedia.org/about/#codec-working-group" 102 | }]); 103 | }); 104 | 105 | it("preserves provided info", timeout, async () => { 106 | const spec = { 107 | url: "https://url.spec.whatwg.org/", 108 | organization: "Acme Corporation", 109 | groups: [{ 110 | name: "Road Runner Group", 111 | url: "https://en.wikipedia.org/wiki/Wile_E._Coyote_and_the_Road_Runner" 112 | }] 113 | }; 114 | const res = await fetchGroups([spec]); 115 | assert.equal(res[0].organization, spec.organization); 116 | 
assert.deepStrictEqual(res[0].groups, spec.groups); 117 | }); 118 | 119 | it("preserves provided info for Patent Policy", timeout, async () => { 120 | const spec = { 121 | "url": "https://www.w3.org/Consortium/Patent-Policy/", 122 | "shortname": "w3c-patent-policy", 123 | "groups": [ 124 | { 125 | "name": "Patents and Standards Interest Group", 126 | "url": "https://www.w3.org/2004/pp/psig/" 127 | } 128 | ] 129 | }; 130 | const res = await fetchGroups([spec]); 131 | assert.equal(res[0].organization, "W3C"); 132 | assert.deepStrictEqual(res[0].groups, spec.groups); 133 | }); 134 | 135 | describe("fetch from W3C API", () => { 136 | it("handles /TR URLs", timeout, async () => { 137 | const res = await fetchGroupsFor("https://www.w3.org/TR/gamepad/"); 138 | assert.equal(res.organization, "W3C"); 139 | assert.deepStrictEqual(res.groups, [{ 140 | name: "Web Applications Working Group", 141 | url: "https://www.w3.org/groups/wg/webapps/" 142 | }]); 143 | }); 144 | 145 | it("handles multiple /TR URLs", timeout, async () => { 146 | const specs = [ 147 | { url: "https://www.w3.org/TR/gamepad/" }, 148 | { url: "https://www.w3.org/TR/accname-1.2/" } 149 | ]; 150 | const res = await fetchGroups(specs); 151 | assert.equal(res[0].organization, "W3C"); 152 | assert.deepStrictEqual(res[0].groups, [{ 153 | name: "Web Applications Working Group", 154 | url: "https://www.w3.org/groups/wg/webapps/" 155 | }]); 156 | assert.equal(res[1].organization, "W3C"); 157 | assert.deepStrictEqual(res[1].groups, [{ 158 | name: "Accessible Rich Internet Applications Working Group", 159 | url: "https://www.w3.org/WAI/about/groups/ariawg/" 160 | }]); 161 | }); 162 | 163 | it("handles w3c.github.io URLs", timeout, async () => { 164 | const res = await fetchGroupsFor("https://w3c.github.io/web-nfc/", { githubToken }); 165 | assert.equal(res.organization, "W3C"); 166 | assert.deepStrictEqual(res.groups, [{ 167 | name: "Web NFC Community Group", 168 | url: "https://www.w3.org/community/web-nfc/" 169 | }]); 
170 | }); 171 | 172 | it("handles SVG URLs", timeout, async () => { 173 | const res = await fetchGroupsFor("https://svgwg.org/specs/animations/"); 174 | assert.equal(res.organization, "W3C"); 175 | assert.deepStrictEqual(res.groups, [{ 176 | name: "SVG Working Group", 177 | url: "https://www.w3.org/groups/wg/svg/" 178 | }]); 179 | }); 180 | 181 | it("handles CSS WG URLs", timeout, async () => { 182 | const res = await fetchGroupsFor("https://drafts.csswg.org/css-animations-2/"); 183 | assert.equal(res.organization, "W3C"); 184 | assert.deepStrictEqual(res.groups, [{ 185 | name: "Cascading Style Sheets (CSS) Working Group", 186 | url: "https://www.w3.org/groups/wg/css/" 187 | }]); 188 | }); 189 | 190 | it("handles CSS Houdini TF URLs", timeout, async () => { 191 | const res = await fetchGroupsFor("https://drafts.css-houdini.org/css-typed-om-2/"); 192 | assert.equal(res.organization, "W3C"); 193 | assert.deepStrictEqual(res.groups, [{ 194 | name: "Cascading Style Sheets (CSS) Working Group", 195 | url: "https://www.w3.org/groups/wg/css/" 196 | }]); 197 | }); 198 | 199 | it("handles CSS FXTF URLs", timeout, async () => { 200 | const res = await fetchGroupsFor("https://drafts.fxtf.org/filter-effects-2/"); 201 | assert.equal(res.organization, "W3C"); 202 | assert.deepStrictEqual(res.groups, [{ 203 | name: "Cascading Style Sheets (CSS) Working Group", 204 | url: "https://www.w3.org/groups/wg/css/" 205 | }]); 206 | }); 207 | 208 | it("uses last published info for discontinued specs", timeout, async () => { 209 | const spec = { 210 | url: "https://wicg.github.io/close-watcher/", 211 | shortname: "close-watcher", 212 | __last: { 213 | standing: "discontinued", 214 | organization: "Acme Corporation", 215 | groups: [{ 216 | name: "Road Runner", 217 | url: "beep beep" 218 | }] 219 | } 220 | }; 221 | const result = await fetchGroups([spec]); 222 | assert.equal(result[0].organization, spec.__last.organization); 223 | assert.deepStrictEqual(result[0].groups, spec.__last.groups); 
224 | }); 225 | }); 226 | }); 227 | -------------------------------------------------------------------------------- /src/fetch-groups.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Module that exports a function that takes a list of specifications as input 3 | * and computes, for each of them, the name of the organization and groups 4 | * within that organization that develop the specification. 5 | * 6 | * The function needs an authentication token for the GitHub API. 7 | */ 8 | 9 | import Octokit from "./octokit.js"; 10 | import parseSpecUrl from "./parse-spec-url.js"; 11 | import fetchJSON from "./fetch-json.js"; 12 | 13 | 14 | /** 15 | * We will very likely need to use group information from the validate-repos 16 | * project which compiles w3c.json files across repositories. 17 | */ 18 | let w3cGroups = null; 19 | 20 | 21 | /** 22 | * Exports main function that takes a list of specs (with a url property) 23 | * as input, completes entries with an "organization" property that contains the 24 | * name of the organization such as W3C, WHATWG, IETF, Khronos Group, 25 | * Ecma International, and a "groups" property that contains an array of objects 26 | * that describe the groups responsible for the spec. 27 | * 28 | * The function preserves the properties if they have already been provided in 29 | * the input array. 30 | * 31 | * The options parameter is used to specify the GitHub API 32 | * authentication token. 
33 | */ 34 | export default async function (specs, options) { 35 | // Maintain a cache of fetched resources in memory to avoid sending the 36 | // same fetch request again and again 37 | const cache = {}; 38 | 39 | for (const spec of specs) { 40 | if (spec.__last?.standing === 'discontinued' && 41 | (!spec.standing || spec.standing === 'discontinued')) { 42 | spec.organization = spec.__last.organization; 43 | spec.groups = spec.__last.groups; 44 | continue; 45 | } 46 | const info = parseSpecUrl(spec.url); 47 | if (!info) { 48 | // For IETF documents, retrieve the group info from datatracker 49 | const ietfName = 50 | spec.url.match(/rfc-editor\.org\/rfc\/([^\/]+)/) ?? 51 | spec.url.match(/datatracker\.ietf\.org\/doc\/html\/([^\/]+)/); 52 | if (ietfName) { 53 | spec.organization = spec.organization ?? "IETF"; 54 | if (spec.groups) continue; 55 | const ietfJson = await fetchJSON(`https://datatracker.ietf.org/doc/${ietfName[1]}/doc.json`, options); 56 | if (ietfJson.group?.type === "WG") { 57 | spec.groups = [{ 58 | name: `${ietfJson.group.name} Working Group`, 59 | url: `https://datatracker.ietf.org/wg/${ietfJson.group.acronym}/` 60 | }]; 61 | continue; 62 | } 63 | else if ((ietfJson.group?.type === "Individual") || 64 | (ietfJson.group?.type === "Area")) { 65 | // Document uses the "Individual Submissions" stream, linked to the 66 | // "none" group in IETF: https://datatracker.ietf.org/group/none/ 67 | // or to an IETF area, which isn't truly a group but still looks like 68 | // one. That's fine, let's reuse that info. 69 | spec.groups = [{ 70 | name: ietfJson.group.name, 71 | url: `https://datatracker.ietf.org/wg/${ietfJson.group.acronym}/` 72 | }]; 73 | continue; 74 | } 75 | else { 76 | throw new Error(`Could not derive IETF group for ${spec.url}. 
77 | Unknown group type found in https://datatracker.ietf.org/doc/${ietfName[1]}/doc.json`); 78 | } 79 | } 80 | 81 | if (!spec.groups) { 82 | throw new Error(`Cannot extract any useful info from ${spec.url}`); 83 | } 84 | } 85 | 86 | if (info && info.owner === "whatwg") { 87 | const workstreams = await fetchJSON("https://raw.githubusercontent.com/whatwg/sg/main/db.json", options); 88 | const workstream = workstreams.workstreams.find(ws => ws.standards.find(s => s.href === spec.url)); 89 | if (!workstream) { 90 | throw new Error(`No WHATWG workstream found for ${spec.url}`); 91 | } 92 | spec.organization = spec.organization ?? "WHATWG"; 93 | spec.groups = spec.groups ?? [{ 94 | name: `${workstream.name} Workstream`, 95 | url: spec.url 96 | }]; 97 | continue; 98 | } 99 | 100 | if (info && info.owner === "tc39") { 101 | spec.organization = spec.organization ?? "Ecma International"; 102 | spec.groups = spec.groups ?? [{ 103 | name: "TC39", 104 | url: "https://tc39.es/" 105 | }]; 106 | continue; 107 | } 108 | 109 | if (info && info.owner === "khronosgroup") { 110 | spec.organization = spec.organization ?? "Khronos Group"; 111 | spec.groups = spec.groups ?? [{ 112 | name: "WebGL Working Group", 113 | url: "https://www.khronos.org/webgl/" 114 | }]; 115 | continue; 116 | } 117 | 118 | if (info && info.owner === "w3ctag") { 119 | spec.groups = spec.groups ?? [{ 120 | name: "Technical Architecture Group", 121 | url: "https://www.w3.org/2001/tag/" 122 | }]; 123 | } 124 | 125 | // For the Alliance for Open Media (AOM), let's consider that the Codec WG 126 | // is the default group, noting that it is not super clear which AOM group 127 | // develops which spec in practice: https://aomedia.org/about/ 128 | if (info && info.owner === "aomediacodec") { 129 | spec.organization = spec.organization ?? "Alliance for Open Media"; 130 | spec.groups = spec.groups ?? 
[{ 131 | name: "Codec Working Group", 132 | url: "https://aomedia.org/about/#codec-working-group" 133 | }] 134 | } 135 | 136 | 137 | 138 | // All specs that remain should be developed by some W3C group. 139 | spec.organization = spec.organization ?? "W3C"; 140 | 141 | if (!spec.groups) { 142 | // Get group info from validate-repos if possible to avoid having to 143 | // send individual network requests for each spec 144 | // Note: this will not yield anything for many /TR specs because we 145 | // guess the name of the repo from the shortname. 146 | if (!w3cGroups) { 147 | const report = await fetchJSON( 148 | "https://w3c.github.io/validate-repos/report.json" 149 | ); 150 | w3cGroups = report.groups; 151 | } 152 | spec.groups = Object.values(w3cGroups) 153 | .filter(group => group.repos?.find(repo => 154 | repo.fullName?.toLowerCase() === `${info.owner}/${info.name}`.toLowerCase() 155 | )) 156 | .map(group => Object.assign({ 157 | name: group.name, 158 | url: group._links.homepage.href 159 | })); 160 | } 161 | if (spec.groups.length === 0) { 162 | let groups = []; 163 | if (info.name === "svgwg") { 164 | groups.push(19480); 165 | } 166 | else if (info.type === "tr") { 167 | // Use the W3C API to find info about /TR specs 168 | const url = `https://api.w3.org/specifications/${info.name}/versions/latest`; 169 | let resp = await fetchJSON(url, options); 170 | if (!resp?._links?.deliverers) { 171 | throw new Error(`W3C API did not return deliverers for the spec`); 172 | } 173 | resp = await fetchJSON(resp._links.deliverers.href, options); 174 | 175 | if (!resp?._links?.deliverers) { 176 | throw new Error(`W3C API did not return deliverers for the spec`); 177 | } 178 | for (const deliverer of resp._links.deliverers) { 179 | groups.push(deliverer.href); 180 | } 181 | } 182 | else { 183 | // Use info in w3c.json file, which we'll either retrieve from the 184 | // repository when one is defined or directly from the spec origin 185 | let url = null; 186 | if (info.type === 
"github") { 187 | const octokit = new Octokit({ auth: options?.githubToken }); 188 | const cacheId = info.owner + "/" + info.name; 189 | const repo = cache[cacheId] ?? 190 | await octokit.repos.get({ owner: info.owner, repo: info.name }); 191 | cache[cacheId] = repo; 192 | const branch = repo?.data?.default_branch; 193 | if (!branch) { 194 | throw new Error(`Expected GitHub repository does not exist (${spec.url})`); 195 | } 196 | url = new URL(`https://raw.githubusercontent.com/${info.owner}/${info.name}/${branch}/w3c.json`); 197 | } 198 | else { 199 | url = new URL(spec.url); 200 | url.pathname = "/w3c.json"; 201 | } 202 | const body = await fetchJSON(url.toString(), options); 203 | 204 | // Note the "group" property is either an ID or an array of IDs 205 | groups = [body?.group].flat().filter(g => !!g); 206 | } 207 | 208 | // Retrieve info about W3C groups from W3C API 209 | // (Note the "groups" array may contain numbers, strings or API URLs) 210 | for (const id of groups) { 211 | const url = ('' + id).startsWith("https://") ? id : `https://api.w3.org/groups/${id}`; 212 | const info = await fetchJSON(url, options); 213 | spec.groups.push({ 214 | name: info.name, 215 | url: info._links.homepage.href 216 | }); 217 | } 218 | } 219 | } 220 | 221 | return specs; 222 | }; 223 | -------------------------------------------------------------------------------- /src/monitor-specs.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | /** 4 | * The monitor-specs script loops through the list of open issues in the 5 | * browser-specs repository that have a "new spec" label, checks those that 6 | * have not been reviewed for a while, and adds a comment and "review" label to 7 | * those that seems worth reviewing again because an update was detected since 8 | * last review. 
9 | * 10 | * The last time that an issue was reviewed is the last time that the "review" 11 | * label was removed, which the script retrieves thanks through the GraphQL 12 | * endpoint. 13 | * 14 | * To report the list of issues that need a review (without updating the 15 | * issues), run: 16 | * node src/monitor-specs.js 17 | * 18 | * To report the list of issues that need a review **and** also update the 19 | * issues to add a comment/label, run: 20 | * node src/monitor-specs.js --update 21 | */ 22 | 23 | import sendGraphQLQuery from "./graphql.js"; 24 | import splitIssueBodyIntoSections from "./split-issue-body.js"; 25 | import loadJSON from "./load-json.js"; 26 | 27 | const config = await loadJSON("config.json"); 28 | const githubToken = config?.GITHUB_TOKEN ?? process.env.GITHUB_TOKEN; 29 | 30 | 31 | /** 32 | * The list of specs that are already known is derived from open and closed 33 | * issues in the browser-specs repository. 34 | */ 35 | const BROWSER_SPECS_REPO = { 36 | owner: "w3c", 37 | name: "browser-specs" 38 | }; 39 | 40 | 41 | /** 42 | * Script does not update GitHub issues by default 43 | */ 44 | const updateGitHubIssues = 45 | (process.argv[2] === "--update") || 46 | (process.argv[2] === "-u"); 47 | 48 | 49 | /** 50 | * Retrieve the list of specs and repositories that should not be reported 51 | * because we're already aware of them and their treatment is still pending or 52 | * we explicitly don't want to add them to browser-specs. 53 | */ 54 | async function fetchIssuesToReview() { 55 | let list = []; 56 | 57 | // Retrieve the list of open issues that have a "new spec" label and, 58 | // for each of them, the last "unlabeled" events. 59 | // Notes: 60 | // - Issues that have a "review" label get skipped for now. By definition, 61 | // a review is already pending for them. If this script is run every couple 62 | // of months, there should not be any issue in that category though... 
63 | // - The code assumes that we won't ever set more than 10 different labels on 64 | // a single issue and that we'll find a "review" label removal within the 65 | // last 5 "unlabeled" events. That seems more than enough for now. 66 | let hasNextPage = true; 67 | let endCursor = ""; 68 | while (hasNextPage) { 69 | const response = await sendGraphQLQuery(`query { 70 | organization(login: "${BROWSER_SPECS_REPO.owner}") { 71 | repository(name: "${BROWSER_SPECS_REPO.name}") { 72 | issues( 73 | states: OPEN, 74 | labels: "new spec", 75 | first: 100 76 | ${endCursor ? ', after: "' + endCursor + '"' : ''} 77 | ) { 78 | pageInfo { 79 | endCursor 80 | hasNextPage 81 | } 82 | nodes { 83 | id 84 | number 85 | title 86 | body 87 | createdAt 88 | labels(first: 10) { 89 | nodes { 90 | name 91 | } 92 | } 93 | timelineItems(last: 5, itemTypes: UNLABELED_EVENT) { 94 | nodes { 95 | ... on UnlabeledEvent { 96 | label { 97 | name 98 | } 99 | createdAt 100 | } 101 | } 102 | } 103 | } 104 | } 105 | } 106 | } 107 | }`, githubToken); 108 | if (!response?.data?.organization?.repository?.issues) { 109 | console.log(JSON.stringify(response, null, 2)); 110 | throw new Error(`GraphQL error, could not retrieve the list of issues`); 111 | } 112 | const issues = response.data.organization.repository.issues; 113 | list.push(...issues.nodes 114 | .filter(issue => !issue.labels.nodes.find(label => label.name === "review")) 115 | ); 116 | hasNextPage = issues.pageInfo.hasNextPage; 117 | endCursor = issues.pageInfo.endCursor; 118 | } 119 | 120 | return list; 121 | } 122 | 123 | 124 | /** 125 | * Set a label on a GitHub issue 126 | */ 127 | const labelIds = {}; 128 | async function setIssueLabel(issue, label) { 129 | if (!labelIds[label]) { 130 | // Retrieve the label ID from GitHub if we don't know anything about it yet 131 | const labelResponse = await sendGraphQLQuery(`query { 132 | organization(login: "${BROWSER_SPECS_REPO.owner}") { 133 | repository(name: "${BROWSER_SPECS_REPO.name}") { 134 | 
label(name: "${label}") { 135 | id 136 | } 137 | } 138 | } 139 | }`, githubToken); 140 | if (!labelResponse?.data?.organization?.repository?.label?.id) { 141 | console.log(JSON.stringify(labelResponse, null, 2)); 142 | throw new Error(`GraphQL error, could not retrieve the "${label}" label`); 143 | } 144 | labelIds[label] = labelResponse.data.organization.repository.label.id; 145 | } 146 | 147 | // Set the label on the issue 148 | const response = await sendGraphQLQuery(`mutation { 149 | addLabelsToLabelable(input: { 150 | labelableId: "${issue.id}" 151 | labelIds: ["${labelIds[label]}"] 152 | clientMutationId: "mutatis mutandis" 153 | }) { 154 | labelable { 155 | ... on Issue { 156 | id 157 | } 158 | } 159 | } 160 | }`, githubToken); 161 | if (!response?.data?.addLabelsToLabelable?.labelable?.id) { 162 | console.log(JSON.stringify(response, null, 2)); 163 | throw new Error(`GraphQL error, could not add "${label}" label to issue #${session.number}`); 164 | } 165 | } 166 | 167 | 168 | /** 169 | * Add the "review" label to the given issue, along with a comment 170 | */ 171 | let reviewLabelId = null; 172 | async function flagIssueForReview(issue, comment) { 173 | if (comment) { 174 | // Using a variable to avoid having to deal with comment escaping issues 175 | const commentResponse = await sendGraphQLQuery(` 176 | mutation($comment: AddCommentInput!) 
{ 177 | addComment(input: $comment) { 178 | subject { 179 | id 180 | } 181 | } 182 | }`, { 183 | comment: { 184 | subjectId: issue.id, 185 | body: comment, 186 | clientMutationId: "mutatis mutandis" 187 | } 188 | }, 189 | githubToken); 190 | if (!commentResponse?.data?.addComment?.subject?.id) { 191 | console.log(JSON.stringify(commentResponse, null, 2)); 192 | throw new Error(`GraphQL error, could not add comment to issue #${issue.number}`); 193 | } 194 | } 195 | 196 | await setIssueLabel(issue, "review"); 197 | } 198 | 199 | 200 | fetchIssuesToReview().then(async issues => { 201 | const issuesToReview = []; 202 | for (const issue of issues) { 203 | const lastReviewedEvent = issue.timelineItems.nodes.find(event => 204 | event.label.name === "review"); 205 | issue.lastReviewed = (new Date(lastReviewedEvent ? 206 | lastReviewedEvent.createdAt : 207 | issue.createdAt)) 208 | .toJSON() 209 | .slice(0, 10); 210 | 211 | const sections = splitIssueBodyIntoSections(issue.body); 212 | const urlSection = sections.find(section => section.title === 'URL'); 213 | if (!urlSection) { 214 | console.warn(`- ${issue.title} (#${issue.number}) does not follow the expected issue format`); 215 | if (updateGitHubIssues) { 216 | await setIssueLabel(issue, "invalid"); 217 | } 218 | continue; 219 | } 220 | 221 | // Retrieve the spec and check the last-modified HTTP header 222 | const response = await fetch(urlSection.value); 223 | const { headers } = response; 224 | 225 | // The CSS drafts use a proprietary header to expose the real last 226 | // modification date 227 | issue.lastRevised = (new Date(headers.get('Last-Revised') ? 
228 | headers.get('Last-Revised') : 229 | headers.get('Last-Modified'))) 230 | .toJSON() 231 | .slice(0, 10); 232 | if (issue.lastRevised > issue.lastReviewed) { 233 | issuesToReview.push(issue); 234 | } 235 | // We don't need the response's body, but not reading it means Node will keep 236 | // the network request in memory, which prevents the CLI from returning until 237 | // a timeout occurs. 238 | await response.arrayBuffer(); 239 | } 240 | 241 | if (issuesToReview.length === 0) { 242 | console.log('No candidate spec to review'); 243 | return; 244 | } 245 | 246 | console.log('Candidate specs to review:'); 247 | console.log(issuesToReview 248 | .map(issue => `- ${issue.title} (#${issue.number}) updated on ${issue.lastRevised} (last reviewed on ${issue.lastReviewed})`) 249 | .join('\n') 250 | ); 251 | 252 | if (!updateGitHubIssues) { 253 | return; 254 | } 255 | 256 | console.log('Mark GitHub issues as needing a review...'); 257 | for (const issue of issuesToReview) { 258 | const comment = `The specification was updated on **${issue.lastRevised}** (last reviewed on ${issue.lastReviewed}).`; 259 | await flagIssueForReview(issue, comment); 260 | } 261 | console.log('Mark GitHub issues as needing a review... done'); 262 | }); 263 | -------------------------------------------------------------------------------- /test/specs.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Make sure that the specs.json respects the JSON schema and all constraints 3 | * that cannot be automatically linted. 4 | * 5 | * Note: The specs.json file may still need to be linted, and that's all fine! 
 */

// Tests may run against a test version of the specs file
import { describe, it } from "node:test";
import assert from "node:assert";
import path from "node:path";
import { fileURLToPath } from "node:url";
import schema from "../schema/specs.json" with { type: "json" };
import dfnsSchema from "../schema/definitions.json" with { type: "json" };
import computeInfo from "../src/compute-shortname.js";
import computePrevNext from "../src/compute-prevnext.js";
import loadJSON from "../src/load-json.js";
import Ajv from "ajv";
import addFormats from "ajv-formats";
const ajv = (new Ajv()).addSchema(dfnsSchema);
addFormats(ajv);

// Location of the list under test: the real specs.json by default, or a test
// file when the "testSpecs" environment variable is set
const scriptPath = path.dirname(fileURLToPath(import.meta.url));
const specsFile = process.env.testSpecs ?? path.resolve(scriptPath, "..", "specs.json");
const specs = await loadJSON(specsFile);

// When an entry is invalid, the schema validator returns one error for each
// "oneOf" option and one error on overall "oneOf" problem. This is confusing
// for humans. The following function improves the error being returned.
function clarifyErrors(errors) {
  if (!errors) {
    return errors;
  }

  // Update instancePath to drop misleading "[object Object]"
  errors.forEach(err =>
    err.instancePath = err.instancePath.replace(/^\[object Object\]/, ''));

  if (errors.length < 2) {
    return errors;
  }

  // If first two errors are type errors for oneOf choices, item is neither
  // a string nor an object
  if ((errors[0].schemaPath === "#/items/oneOf/0/type") &&
      (errors[1].schemaPath === "#/items/oneOf/1/type")) {
    return [
      Object.assign(errors[0], { "message": "must be a string or an object" })
    ];
  }

  // Otherwise, if second error is a type error for second oneOf choice,
  // it means the item is actually a string that represents an invalid URL,
  // which the first error should capture.
  if (errors[1].schemaPath === "#/items/oneOf/1/type") {
    return [errors[0]];
  }

  // Otherwise, item is an object that does not follow the schema, drop the
  // error that says that item is not a string and the error that says that it
  // does not meet one of the "oneOf" options. What remains should be the error
  // that explains why the item does not meet the schema for the object.
  const clearerErrors = errors.filter(error =>
    (error.schemaPath !== "#/items/oneOf/0/type") &&
    (error.schemaPath !== "#/items/oneOf"));

  // Improve an additional property message to point out the property that
  // should not be there (default message does not say it)
  clearerErrors.forEach(error => {
    if ((error.keyword === "additionalProperties") &&
        error.params && error.params.additionalProperty) {
      error.message = "must not have additional property '" +
        error.params.additionalProperty + "'";
    }
  });

  // If there are no more errors left to return, roll back to the initial set
  // to make sure an error gets reported. That should never happen, but better
  // be ready for it.
  return (clearerErrors.length > 0) ? clearerErrors : errors;
}

// Comparator used to detect duplicate spec entries: compares by URL
function compareSpecs(a, b) {
  return a.url.localeCompare(b.url);
}

// Normalize entries (strings or objects) into spec objects with a canonical
// URL, and drop duplicate entries that point at the same URL.
// String entries may carry a second token ("delta", "current", "multipage")
// after the URL that maps to the corresponding spec object property.
function specs2objects(specs) {
  return specs
    .map(spec => (typeof spec === "string") ?
      {
        url: new URL(spec.split(" ")[0]).toString(),
        seriesComposition: (spec.split(' ')[1] === "delta") ? "delta" : "full",
        forceCurrent: (spec.split(' ')[1] === "current"),
        multipage: (spec.split(' ')[1] === "multipage"),
      } :
      Object.assign({}, spec, { url: new URL(spec.url).toString() }))
    .filter((spec, idx, list) =>
      !list.find((s, i) => i < idx && compareSpecs(s, spec) === 0));
}

// Expand spec objects with shortname info and previous/next links in the
// spec series, so that series-level constraints can be checked
function specs2LinkedList(specs) {
  return specs2objects(specs)
    .map(s => Object.assign({}, s, computeInfo(s.shortname || s.url, s.forkOf)))
    .map((s, _, list) => Object.assign({}, s, computePrevNext(s, list)));
}

// Validate the given list against the JSON schema and return the (clarified)
// error message text; Ajv's errorsText returns "No errors" when valid.
// Note: the validation result is read via validate.errors; the `isValid`
// boolean itself is unused.
function check(specs) {
  const validate = ajv.compile(schema);
  const isValid = validate(specs, { format: "full" });
  const msg = ajv.errorsText(clarifyErrors(validate.errors), {
    dataVar: "specs", separator: "\n"
  });
  return msg;
}


describe("The `specs.json` list", () => {
  // Sanity checks on the schema itself and on the error messages it produces
  describe("has a JSON schema which", () => {
    it("is valid", () => {
      const isSchemaValid = ajv.validateSchema(schema);
      assert.ok(isSchemaValid);
    });

    it("rejects list if it is not an array", () => {
      const specs = 0;
      assert.strictEqual(check(specs), "specs must be array");
    });

    it("rejects an empty list", () => {
      const specs = [];
      assert.strictEqual(check(specs), "specs must NOT have fewer than 1 items");
    });

    it("rejects items that have a wrong type", () => {
      const specs = [0];
      assert.strictEqual(check(specs), "specs/0 must be a string or an object");
    });

    it("rejects spec objects without URL", () => {
      const specs = [{}];
      assert.strictEqual(check(specs), "specs/0 must have required property 'url'");
    });

    it("rejects spec objects with an invalid URL", () => {
      const specs = [{ url: "invalid" }];
      assert.strictEqual(check(specs), "specs/0/url must match format \"uri\"");
    });

    it("rejects spec objects with additional properties", () => {
      const specs = [{ url: "https://example.org/", invalid: "test" }];
      assert.strictEqual(check(specs), "specs/0 must not have additional property 'invalid'");
    });
  });

  it("respects the JSON schema", () => {
    assert.strictEqual(check(specs), 'No errors');
  });

  it("only points at valid URLs", () => {
    // new URL() throws on invalid URLs, which fails the test
    specs.forEach(spec => (typeof spec === "string") ?
      new URL(spec.split(" ")[0]).toString() : null);
    assert.ok(true);
  })

  it("only contains specs for which a shortname can be generated", () => {
    // Convert entries to spec objects and compute shortname
    const specsWithoutShortname = specs2objects(specs)
      .map(spec => Object.assign({}, spec, computeInfo(spec.shortname || spec.url, spec.forkOf)))
      .filter(spec => !spec.shortname);

    // No exception thrown? That means we're good!
    // We'll just check that there aren't any spec with an empty name and report
    // the first one (That should never happen since computeInfo would throw but
    // better be safe)
    assert.strictEqual(specsWithoutShortname[0], undefined);
  });

  it("does not have a delta spec without a previous full spec", () => {
    // Walk the series backwards until a "full" spec is found (or none)
    const fullPrevious = (spec, list) => {
      const previous = list.find(s => s.shortname === spec.seriesPrevious);
      if (previous && previous.seriesComposition && previous.seriesComposition !== "full") {
        return fullPrevious(previous, list);
      }
      return previous;
    };
    const deltaWithoutFull = specs2LinkedList(specs)
      .filter((s, _, list) => s.seriesComposition === "delta" && !fullPrevious(s, list));
    assert.strictEqual(deltaWithoutFull[0], undefined);
  });

  it("does not have a delta spec flagged as 'current'", () => {
    const deltaCurrent = specs2LinkedList(specs)
      .filter(s => s.forceCurrent && s.seriesComposition === "delta");
    assert.strictEqual(deltaCurrent[0], undefined);
  });

  it("does not have a fork spec flagged as 'current'", () => {
    const forkCurrent = specs2LinkedList(specs)
      .filter(s => s.forceCurrent && s.forkOf);
    assert.strictEqual(forkCurrent[0], undefined);
  });

  it("has only one spec flagged as 'current' per series shortname", () => {
    // A "current" spec is problematic when it is not the first "current" spec
    // found for its series shortname
    const linkedList = specs2LinkedList(specs);
    const problematicCurrent = linkedList
      .filter(s => s.forceCurrent)
      .filter(s => s !== linkedList.find(p =>
        p.series.shortname === s.series.shortname && p.forceCurrent));
    assert.strictEqual(problematicCurrent[0], undefined);
  });

  it("does not have a spec with a 'fork' seriesComposition property", () => {
    const wrong = specs.find(s => s.seriesComposition === "fork");
    assert.strictEqual(wrong, undefined);
  });

  it("does not have a 'delta fork' spec", () => {
    const wrong = specs.find(s => s.forkOf && s.seriesComposition === "delta");
    assert.strictEqual(wrong, undefined);
  });

  it("only has fork specs that reference existing specs", () => {
    const linkedList = specs2LinkedList(specs);
    const forkWithoutFull = linkedList.filter((s, _, list) => s.forkOf &&
      !linkedList.find(spec => spec.shortname === s.forkOf));
    assert.strictEqual(forkWithoutFull[0], undefined);
  });
});
--------------------------------------------------------------------------------
/test/compute-shortname.js:
--------------------------------------------------------------------------------
import { describe, it } from "node:test";
import assert from "node:assert";
import computeInfo from "../src/compute-shortname.js";

describe("compute-shortname module", () => {

  describe("shortname property", () => {
    // Assert that computeInfo derives the expected shortname from the URL
    function assertName(url, name) {
      assert.equal(computeInfo(url).shortname, name);
    }

    it("handles TR URLs", () => {
      assertName("https://www.w3.org/TR/the-spec/", "the-spec");
    });

    it("handles WHATWG URLs", () => {
      assertName("https://myspec.spec.whatwg.org/whatever/", "myspec");
    });

    it("handles ECMAScript proposal URLs", () => {
      assertName("https://tc39.es/proposal-smartidea/", "tc39-smartidea");
    });

    it("handles Khronos Group WebGL extensions", () => {
      assertName("https://registry.khronos.org/webgl/extensions/EXT_wow32/", "EXT_wow32");
    });

    it("handles URLs of drafts on GitHub", () => {
      assertName("https://wicg.github.io/whataspec/", "whataspec");
    });

    it("handles URLs of WebAppSec drafts on GitHub", () => {
      assertName("https://w3c.github.io/webappsec-ultrasecret/", "ultrasecret");
    });

    it("handles extension specs defined in the same repo as the main spec (singular)", () => {
      assertName("https://w3c.github.io/specwithext/extension.html", "specwithext-extension");
    });

    it("handles extension specs defined in the same repo as the main spec (plural)", () => {
      assertName("https://w3c.github.io/specwithext/extensions.html", "specwithext-extensions");
    });

    it("handles CSS WG draft URLs", () => {
      assertName("https://drafts.csswg.org/css-is-aweso/", "css-is-aweso");
    });

    it("handles CSS FXTF draft URLs", () => {
      assertName("https://drafts.fxtf.org/megafx/", "megafx");
    });

    it("handles CSS Houdini TF draft URLs", () => {
      assertName("https://drafts.css-houdini.org/magic/", "magic");
    });

    it("handles IETF RFCs", () => {
      assertName("https://www.rfc-editor.org/rfc/rfc2397", "rfc2397");
    });

    it("handles IETF group drafts", () => {
      assertName("https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis", "rfc6265bis");
    });

    it("handles IETF group drafts from individuals", () => {
      assertName("https://datatracker.ietf.org/doc/html/draft-cutler-httpbis-partitioned-cookies", "partitioned-cookies");
    });

    it("handles (simple) IETF individual drafts", () => {
      assertName("https://datatracker.ietf.org/doc/html/draft-zern-webp/", "webp");
    });

    it("handles SVG draft URLs", () => {
      assertName("https://svgwg.org/specs/module/", "svg-module");
    });

    it("handles SVG draft URLs that have an svg prefix", () => {
      assertName("https://svgwg.org/specs/svg-module/", "svg-module");
    });

    it("returns the name when given one", () => {
      assertName("myname", "myname");
    });

    it("preserves case", () => {
      assertName("https://www.w3.org/TR/IndexedDB/", "IndexedDB");
    });

    it("includes the version number in the name (int)", () => {
      assertName("https://www.w3.org/TR/level-42/", "level-42");
    });

    it("includes the version number in the name (float)", () => {
      assertName("https://www.w3.org/TR/level-4.2/", "level-4.2");
    });

    it("handles multi-specs repositories", () => {
      assertName("https://w3c.github.io/sdw/bp/", "sdw-bp");
    });

    // Error cases: computeInfo throws on URLs it cannot derive a name from
    it("throws when URL is a dated TR one", () => {
      assert.throws(
        () => computeInfo("https://www.w3.org/TR/2017/CR-presentation-api-20170601/"),
        /^Cannot extract meaningful name from /);
    });

    it("throws when URL that does not follow a known pattern", () => {
      assert.throws(
        () => computeInfo("https://www.w3.org/2022/12/webmediaapi.html"),
        /^Cannot extract meaningful name from /);
    });

    it("throws when name contains non basic Latin characters", () => {
      assert.throws(
        () => computeInfo("https://www.w3.org/TR/thé-ou-café/"),
        /^Specification name contains unexpected characters/);
    });

    it("throws when name contains a dot outside of a level definition", () => {
      assert.throws(
        () => computeInfo("https://w3c.github.io/spec.name/"),
        /^Specification name contains unexpected characters/);
    });

    it("handles non separated fractional level", () => {
      assertName("https://www.w3.org/TR/level4.2/", "level4.2");
    });

    it("handles forks", () => {
      const url = "https://www.w3.org/TR/extension/";
      assert.equal(computeInfo(url, "source-2").shortname, "source-2-fork-extension");
    });
  });


  describe("series' shortname property", () => {
    // Assert that computeInfo derives the expected series shortname
    function assertSeries(url, shortname) {
      assert.equal(computeInfo(url).series.shortname, shortname);
    }

    it("parses form 'shortname-X'", () => {
      assertSeries("spec-4", "spec");
    });

    it("parses form 'shortname-XXX'", () => {
      assertSeries("horizon-2050", "horizon");
    });

    it("parses form 'shortname-X.Y'", () => {
      assertSeries("pi-3.1", "pi");
    });

    it("parses form 'shortnameX'", () => {
      assertSeries("loveu2", "loveu");
    });

    it("parses form 'shortnameXY'", () => {
      assertSeries("answer42", "answer");
    });

    it("parses form 'shortnameX.Y'", () => {
      assertSeries("answer4.2", "answer");
    });

    it("parses form 'rdfXY-something'", () => {
      assertSeries("rdf12-something", "rdf-something");
    });

    it("parses form 'sparqlXY-something'", () => {
      assertSeries("sparql12-something", "sparql-something");
    });

    it("parses form 'shaclXY-something'", () => {
      assertSeries("shacl12-something", "shacl-something");
    });

    it("includes final digits when they do not seem to be a level", () => {
      assertSeries("cors-rfc1918", "cors-rfc1918");
    });

    it("does not get lost with inner digits", () => {
      assertSeries("my-2-cents", "my-2-cents");
    });

    it("automatically updates CSS specs with an old 'css3-' name", () => {
      assertSeries("css3-conditional", "css-conditional");
    });

    it("preserves ECMA spec numbers", () => {
      assertSeries("ecma-402", "ecma-402");
    });

    it("preserves ISO spec numbers", () => {
      assertSeries("iso18181-2", "iso18181-2");
    });

    it("preserves digits at the end of WebGL extension names", () => {
      assertSeries("https://registry.khronos.org/webgl/extensions/EXT_wow32/", "EXT_wow32");
    });

    it("handles forks", () => {
      const url = "https://www.w3.org/TR/the-ext/";
      assert.equal(computeInfo(url, "source-2").series.shortname, "source");
    });
  });


  describe("seriesVersion property", () => {
    // Assert that computeInfo derives the expected series version
    function assertSeriesVersion(url, level) {
      assert.equal(computeInfo(url).seriesVersion, level);
    }
    // Assert that computeInfo reports no series version at all
    function assertNoSeriesVersion(url) {
      assert.equal(computeInfo(url).hasOwnProperty("seriesVersion"), false,
        "did not expect to see a seriesVersion property");
    }

    it("finds the right series version for form 'shortname-X'", () => {
      assertSeriesVersion("spec-4", "4");
    });

    it("finds the right series version for form 'shortname-XXX'", () => {
      assertSeriesVersion("horizon-2050", "2050");
    });

    it("finds the right series version for form 'shortname-X.Y'", () => {
      assertSeriesVersion("pi-3.1", "3.1");
    });

    it("finds the right series version for form 'shortnameX'", () => {
      assertSeriesVersion("loveu2", "2");
    });

    it("finds the right series version for form 'shortnameXY'", () => {
      assertSeriesVersion("answer42", "4.2");
    });

    it("finds the right series version for form 'rdfXY-something'", () => {
      assertSeriesVersion("rdf12-something", "1.2");
    });

    it("finds the right series version for form 'sparqlXY-something'", () => {
      assertSeriesVersion("sparql12-something", "1.2");
    });

    it("does not report any series version when there are none", () => {
      assertNoSeriesVersion("nolevel");
    });

    it("does not report a series version when final digits do not seem to be one", () => {
      assertNoSeriesVersion("cors-rfc1918");
    });

    it("does not get lost with inner digits", () => {
      assertNoSeriesVersion("my-2-cents");
    });

    it("does not confuse an ECMA spec number with a series version", () => {
      assertNoSeriesVersion("ecma-402");
    });

    it("does not confuse a TC39 proposal number with a series version", () => {
      assertNoSeriesVersion("tc39-arraybuffer-base64");
    });

    it("does not confuse an ISO spec number with a series version", () => {
      assertNoSeriesVersion("iso18181-2");
    });

    it("does not confuse digits at the end of a WebGL extension spec with a series version", () => {
      assertNoSeriesVersion("https://registry.khronos.org/webgl/extensions/EXT_wow32/");
    });

    it("handles forks", () => {
      const url = "https://www.w3.org/TR/the-ext/";
      assert.equal(computeInfo(url, "source-2").seriesVersion, "2");
    });
  });
});
--------------------------------------------------------------------------------