├── .gitignore ├── w3c.json ├── .github ├── dependabot.yml ├── workflows │ ├── request-pr-review.yml │ ├── monitor-specs.yml │ ├── check-base-url.yml │ ├── report-new-specs.yml │ ├── lint.yml │ ├── release-package.yml │ ├── submit-suggested-spec.yml │ ├── build.yml │ ├── build-skip-iso.yml │ └── check-suggested-spec.yml ├── incorrect-base-url.md └── ISSUE_TEMPLATE │ └── suggest-spec.yml ├── packages ├── web-specs │ ├── package.json │ └── README.md └── browser-specs │ ├── package.json │ └── README.md ├── src ├── load-json.js ├── split-issue-body.js ├── extract-pages.js ├── graphql.js ├── fetch-json.js ├── compute-standing.js ├── octokit.js ├── determine-filename.js ├── compute-prevnext.js ├── check-base-url.js ├── compute-alternate-urls.js ├── compute-currentlevel.js ├── compute-shorttitle.js ├── data │ └── multispecs-repos.json ├── request-pr-review.js ├── prepare-packages.js ├── parse-spec-url.js ├── compute-series-urls.js ├── compute-categories.js ├── lint.js ├── fetch-iso-info.js ├── bump-packages-minor.js ├── throttled-queue.js ├── load-spec.js ├── release-package.js ├── determine-testpath.js ├── compute-repository.js ├── fetch-groups.js └── monitor-specs.js ├── test ├── cli.js ├── determine-filename.js ├── data.js ├── shortname-continuity.js ├── extract-pages.js ├── compute-standing.js ├── compute-categories.js ├── compute-shorttitle.js ├── compute-repository.js ├── compute-currentlevel.js ├── compute-prevnext.js ├── fetch-iso-info.js ├── lint.js ├── compute-series-urls.js ├── fetch-groups.js ├── specs.js └── compute-shortname.js ├── schema ├── data.json ├── index.json ├── specs.json └── definitions.json ├── package.json ├── index.js └── LICENSE.md /.gitignore: -------------------------------------------------------------------------------- 1 | .buildsteps 2 | .cache 3 | node_modules/ 4 | config.json 5 | packages/**/index.json -------------------------------------------------------------------------------- /w3c.json: -------------------------------------------------------------------------------- 1 | { 2 | "contacts": ["dontcallmedom", "tidoust"], 3 | "repo-type": "tool" 4 | } 5 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: npm 4 | directory: "/" 5 | schedule: 6 | interval: daily 7 | time: '10:00' 8 | open-pull-requests-limit: 10 -------------------------------------------------------------------------------- /packages/web-specs/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "web-specs", 3 | "version": "3.75.0", 4 | "description": "Curated list of technical Web specifications", 5 | "repository": { 6 | "type": "git", 7 | "url": "https://github.com/w3c/browser-specs.git" 8 | }, 9 | "bugs": { 10 | "url": "https://github.com/w3c/browser-specs/issues" 11 | }, 12 | "license": "CC0-1.0", 13 | "files": [ 14 | "index.json" 15 | ], 16 | "main": "index.json" 17 | } -------------------------------------------------------------------------------- /packages/browser-specs/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "browser-specs", 3 | "version": "4.65.0", 4 | "description": "Curated list of technical Web specifications that are directly implemented or that will be implemented by Web browsers.", 5 | "repository": { 6 | "type": "git", 7 | "url": "https://github.com/w3c/browser-specs.git" 8 | }, 9 | "bugs": { 10 | "url": "https://github.com/w3c/browser-specs/issues" 11 | }, 12 | "license": "CC0-1.0", 13 | "files": [ 14 | "index.json" 15 | ], 16 | "main": "index.json" 17 | } -------------------------------------------------------------------------------- /src/load-json.js: -------------------------------------------------------------------------------- 1 | import { readFile } from 'node:fs/promises'; 2 | 3 | /** 4 | * Load a JSON file as JS object. 5 | * 6 | * @function 7 | * @param {String} filename The path to the file to require 8 | * @return {Object} The result of loading and parsing the file relative to the 9 | * current working directory. 10 | */ 11 | export default async function (filename) { 12 | try { 13 | const json = await readFile(filename, 'utf8'); 14 | return JSON.parse(json); 15 | } 16 | catch (err) { 17 | return null; 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /src/split-issue-body.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Helper function to split an issue body (in markdown) into sections 3 | */ 4 | export default function splitIssueBodyIntoSections(body) { 5 | return body.split(/^### /m) 6 | .filter(section => !!section) 7 | .map(section => section.split(/\r?\n/)) 8 | .map(section => { 9 | let value = section.slice(1).join('\n').trim(); 10 | if (value.replace(/^_(.*)_$/, '$1') === 'No response') { 11 | value = null; 12 | } 13 | return { 14 | title: section[0].replace(/ \(Optional\)$/, ''), 15 | value 16 | }; 17 | }); 18 | } -------------------------------------------------------------------------------- /test/cli.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Make sure that the browser-specs CLI runs as intended. 3 | */ 4 | import { describe, it } from "node:test"; 5 | import assert from "node:assert"; 6 | import path from "node:path"; 7 | import { fileURLToPath } from "node:url"; 8 | import { exec as execCb } from 'node:child_process'; 9 | import util from "node:util"; 10 | const exec = util.promisify(execCb); 11 | 12 | const scriptPath = path.dirname(fileURLToPath(import.meta.url)); 13 | const cwd = path.join(scriptPath, '..', 'src'); 14 | 15 | describe("The browser-specs CLI", () => { 16 | it("runs without errors", async () => { 17 | await exec("node cli.js --help", { cwd }); 18 | }); 19 | }); -------------------------------------------------------------------------------- /.github/workflows/request-pr-review.yml: -------------------------------------------------------------------------------- 1 | name: "NPM release: Request review of pre-release PR" 2 | 3 | on: 4 | schedule: 5 | - cron: '0 5 * * 4' 6 | workflow_dispatch: 7 | 8 | jobs: 9 | review: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Checkout latest version of release script 13 | uses: actions/checkout@v4 14 | 15 | - name: Setup node.js 16 | uses: actions/setup-node@v4 17 | with: 18 | node-version: 20 19 | cache: 'npm' 20 | 21 | - name: Install dependencies 22 | run: npm ci 23 | 24 | - name: Request review of pre-release PR 25 | run: node src/request-pr-review.js 26 | env: 27 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} -------------------------------------------------------------------------------- /.github/incorrect-base-url.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Base URL mismatch 3 | assignees: tidoust, dontcallmedom 4 | labels: bug 5 | --- 6 | [check-base-url](../blob/main/src/check-base-url.js) has detected that the base URL (i.e. the one that appears in the root `url` property in `index.json`) of the following specifications does not match the `release` URL or the `nightly` URL: 7 | 8 | {{ env.check_list }} 9 | 10 | Please review the above list. For each specification, consider updating the URL in [specs.json](../blob/main/specs.json) or fixing the info at the source (the W3C API, Specref, or the spec itself). If the discrepancy seems warranted, the specification should be hardcoded as an exception to the rule in the [check-base-url](../blob/main/src/check-base-url.js) script. -------------------------------------------------------------------------------- /.github/workflows/monitor-specs.yml: -------------------------------------------------------------------------------- 1 | name: Monitor specs 2 | 3 | on: 4 | schedule: 5 | - cron: '0 0 1 */2 *' 6 | workflow_dispatch: 7 | 8 | jobs: 9 | find-specs: 10 | name: Update the list of monitored specs and highlights those that have changed 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Checkout latest version of release script 14 | uses: actions/checkout@v4 15 | 16 | - name: Setup node.js 17 | uses: actions/setup-node@v4 18 | with: 19 | node-version: 20 20 | cache: 'npm' 21 | 22 | - name: Install dependencies 23 | run: npm ci 24 | 25 | - name: Check specs that changed since last review 26 | run: node src/monitor-specs.js --update 27 | env: 28 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 29 | -------------------------------------------------------------------------------- /.github/workflows/check-base-url.yml: -------------------------------------------------------------------------------- 1 | name: Check base URL 2 | 3 | on: 4 | schedule: 5 | - cron: '30 0 * * 1' 6 | workflow_dispatch: 7 | 8 | jobs: 9 | find-specs: 10 | name: Check base URL 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Checkout repo 14 | uses: actions/checkout@v4 15 | 16 | - name: Setup node.js 17 | uses: actions/setup-node@v4 18 | with: 19 | node-version: 20 20 | cache: 'npm' 21 | 22 | - name: Setup environment 23 | run: npm ci 24 | 25 | - name: Check base URL 26 | run: node src/check-base-url.js # sets check_list env variable 27 | 28 | - name: Report any mismatch in an issue 29 | uses: JasonEtco/create-an-issue@v2 30 | if: ${{ env.check_list }} 31 | env: 32 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 33 | with: 34 | filename: .github/incorrect-base-url.md -------------------------------------------------------------------------------- /src/extract-pages.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Module that exports a function that takes the URL of the index page of a 3 | * multi-page spec as input and that returns the list of pages referenced in 4 | * the table of contents, in document order, excluding the index page. 5 | */ 6 | 7 | import loadSpec from './load-spec.js'; 8 | 9 | export default async function (url, browser) { 10 | const page = await browser.newPage(); 11 | try { 12 | await loadSpec(url, page); 13 | const allPages = await page.evaluate(_ => 14 | [...document.querySelectorAll('.toc a[href]')] 15 | .map(link => link.href) 16 | .map(url => url.split('#')[0]) 17 | .filter(url => url !== window.location.href) 18 | ); 19 | const pageSet = new Set(allPages); 20 | return [...pageSet]; 21 | } 22 | catch (err) { 23 | throw new Error(`Could not extract pages from ${url}: ${err.message}`); 24 | } 25 | finally { 26 | await page.close(); 27 | } 28 | }; -------------------------------------------------------------------------------- /src/graphql.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Send a GraphQL request to the GitHub GraphQL endpoint, authenticating using 3 | * the provided token. 4 | */ 5 | export default async function (query, variables, graphqlToken) { 6 | if (typeof variables === 'string') { 7 | graphqlToken = variables; 8 | variables = null; 9 | } 10 | const res = await fetch("https://api.github.com/graphql", { 11 | method: "POST", 12 | headers: { 13 | "Content-Type": "application/json", 14 | "Authorization": `bearer ${graphqlToken}` 15 | }, 16 | body: JSON.stringify({ query, variables }, null, 2) 17 | }); 18 | if (res.status !== 200) { 19 | if (res.status >= 500) { 20 | throw new Error(`GraphQL server error, ${res.status} status received`); 21 | } 22 | if (res.status === 403) { 23 | throw new Error(`GraphQL server reports that the API key is invalid, ${res.status} status received`); 24 | } 25 | throw new Error(`GraphQL server returned an unexpected HTTP status ${res.status}`); 26 | } 27 | return res.json(); 28 | } 29 | -------------------------------------------------------------------------------- /schema/data.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/schema#", 3 | "$id": "https://w3c.github.io/browser-specs/schema/data.json", 4 | 5 | "type": "object", 6 | "propertyNames": { 7 | "type": "string", 8 | "pattern": "^[\\w\\-\\.]+\\/[\\w\\-\\.]+$" 9 | }, 10 | "additionalProperties": { 11 | "type": "object", 12 | "properties": { 13 | "url": { 14 | "$ref": "definitions.json#/$defs/url" 15 | }, 16 | "shortname": { 17 | "type": "object", 18 | "properties": { 19 | "pattern": { 20 | "type": "string" 21 | }, 22 | "prefix": { 23 | "type": "string" 24 | } 25 | }, 26 | "required": ["pattern"], 27 | "additionalProperties": false 28 | }, 29 | "path": { 30 | "type": "string", 31 | "pattern": "[\\w\\-]+" 32 | }, 33 | "exclude": { 34 | "type": "array", 35 | "items": { 36 | "type": "string", 37 | "pattern": "[\\w\\-]+" 38 | } 39 | } 40 | }, 41 | "required": ["url", "shortname", "exclude"], 42 | "additionalProperties": false 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /.github/workflows/report-new-specs.yml: -------------------------------------------------------------------------------- 1 | name: Report new specs 2 | 3 | on: 4 | schedule: 5 | - cron: '0 0 * * 1' 6 | workflow_dispatch: 7 | 8 | jobs: 9 | find-specs: 10 | name: Find potential new specs 11 | runs-on: ubuntu-latest 12 | steps: 13 | # Starting with Ubuntu 23+, a security feature prevents running Puppeteer 14 | # by default. It needs to be disabled. Using the "easiest" option, see: 15 | # https://chromium.googlesource.com/chromium/src/+/main/docs/security/apparmor-userns-restrictions.md 16 | # https://github.com/puppeteer/puppeteer/pull/13196/files 17 | - name: Disable AppArmor 18 | run: echo 0 | sudo tee /proc/sys/kernel/apparmor_restrict_unprivileged_userns 19 | 20 | - name: Checkout latest version of release script 21 | uses: actions/checkout@v4 22 | 23 | - name: Setup node.js 24 | uses: actions/setup-node@v4 25 | with: 26 | node-version: 20 27 | cache: 'npm' 28 | 29 | - name: Install dependencies 30 | run: npm ci 31 | 32 | - name: Find new candidate specs 33 | run: npx find-specs --github --repos 34 | env: 35 | GITHUB_TOKEN: ${{ secrets.BOT_TOKEN }} 36 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "browser-specs", 3 | "version": "2.27.0", 4 | "repository": { 5 | "type": "git", 6 | "url": "git+https://github.com/w3c/browser-specs.git" 7 | }, 8 | "files": [ 9 | "index.json" 10 | ], 11 | "license": "CC0-1.0", 12 | "main": "index.json", 13 | "scripts": { 14 | "build": "node src/build-index.js", 15 | "build-skip-iso": "node src/build-index.js --skip-fetch=iso", 16 | "lint": "node src/lint.js", 17 | "lint-fix": "node src/lint.js --fix", 18 | "test": "node --test --test-reporter=spec", 19 | "test-index": "node --test --test-reporter=spec test/index.js" 20 | }, 21 | "type": "module", 22 | "bin": { 23 | "browser-specs": "./src/cli.js", 24 | "find-specs": "./src/find-specs.js" 25 | }, 26 | "devDependencies": { 27 | "@actions/core": "^2.0.1", 28 | "@jsdevtools/npm-publish": "^4.1.1", 29 | "@octokit/plugin-throttling": "^11.0.3", 30 | "@octokit/rest": "^22.0.1", 31 | "ajv": "^8.17.1", 32 | "ajv-formats": "^3.0.1", 33 | "commander": "^14.0.2", 34 | "puppeteer": "^24.33.0", 35 | "reffy": "^20.0.1", 36 | "rimraf": "^6.1.2", 37 | "undici": "^7.16.0" 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /src/fetch-json.js: -------------------------------------------------------------------------------- 1 | import ThrottledQueue from "./throttled-queue.js"; 2 | 3 | // Make sure we remain "friendly" with servers 4 | // In particular, we're going to have to fetch a number of w3c.json files from 5 | // https://raw.githubusercontent.com which seems to restrict the total number 6 | // of allowed requests to ~5000 per hour and per IP address. 7 | const fetchQueue = new ThrottledQueue({ 8 | maxParallel: 4, 9 | sleepInterval: 1000 10 | }); 11 | 12 | // Maintain a cache of fetched JSON resources in memory to avoid sending the 13 | // same fetch request again and again 14 | const cache = {}; 15 | 16 | /** 17 | * Fetch a JSON URL 18 | */ 19 | export default async function (url, options) { 20 | if (cache[url]) { 21 | return structuredClone(cache[url]); 22 | } 23 | const res = await fetchQueue.runThrottledPerOrigin(url, fetch, url, options); 24 | if (res.status === 404) { 25 | return null; 26 | } 27 | if (res.status !== 200) { 28 | throw new Error(`Server returned an error for ${url}, status code is ${res.status}`); 29 | } 30 | 31 | try { 32 | const body = await res.json(); 33 | cache[url] = body; 34 | return structuredClone(body); 35 | } 36 | catch (err) { 37 | throw new Error(`Server returned invalid JSON for ${url}`); 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Test and lint 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | branches: 9 | - main 10 | workflow_dispatch: 11 | 12 | jobs: 13 | lint: 14 | runs-on: ubuntu-latest 15 | steps: 16 | # Starting with Ubuntu 23+, a security feature prevents running Puppeteer 17 | # by default. It needs to be disabled. Using the "easiest" option, see: 18 | # https://chromium.googlesource.com/chromium/src/+/main/docs/security/apparmor-userns-restrictions.md 19 | # https://github.com/puppeteer/puppeteer/pull/13196/files 20 | - name: Disable AppArmor 21 | run: echo 0 | sudo tee /proc/sys/kernel/apparmor_restrict_unprivileged_userns 22 | 23 | - name: Checkout latest version of release script 24 | uses: actions/checkout@v4 25 | 26 | - name: Setup node.js 27 | uses: actions/setup-node@v4 28 | with: 29 | node-version: 20 30 | cache: 'npm' 31 | 32 | - name: Install dependencies 33 | run: npm ci 34 | 35 | - name: Test 36 | env: 37 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 38 | if: ${{ env.GITHUB_TOKEN }} 39 | run: | 40 | npm run test 41 | 42 | - name: Lint 43 | run: npm run lint 44 | -------------------------------------------------------------------------------- /src/compute-standing.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Module that exports a function that takes a spec object that already has its 3 | * `nightly.status` (and `release.status` for released specs) properties set as 4 | * input, and that returns the "standing" of the spec. 5 | * 6 | * Note (2023-01-06): The definition of "standing" remains fuzzy and this 7 | * property should be regarded as unstable. 8 | */ 9 | 10 | // List of spec statuses that are not "official" ones, in the sense that the 11 | // specs have not been officially adopted by a group as a deliverable. 12 | const unofficialStatuses = [ 13 | "A Collection of Interesting Ideas", 14 | "Unofficial Proposal Draft" 15 | ]; 16 | 17 | 18 | /** 19 | * Exports main function that takes a spec object and returns the standing of 20 | * the spec. 21 | */ 22 | export default function (spec) { 23 | if (!spec) { 24 | throw "Invalid spec object passed as parameter"; 25 | } 26 | 27 | // If spec is already explicit about its standing, who are we to disagree? 28 | if (spec.standing) { 29 | return spec.standing; 30 | } 31 | 32 | const status = spec.release?.status ?? spec.nightly?.status; 33 | if (status === "Discontinued Draft") { 34 | return "discontinued"; 35 | } 36 | else { 37 | return unofficialStatuses.includes(status) ? "pending" : "good"; 38 | } 39 | } -------------------------------------------------------------------------------- /src/octokit.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Wrapper around Octokit to add throttling and avoid hitting rate limits 3 | */ 4 | 5 | import { throttling } from "@octokit/plugin-throttling"; 6 | import { Octokit as OctokitRest } from "@octokit/rest"; 7 | const Octokit = OctokitRest.plugin(throttling); 8 | 9 | const MAX_RETRIES = 3; 10 | 11 | export default function (params) { 12 | params = params || {}; 13 | 14 | const octoParams = Object.assign({ 15 | throttle: { 16 | onRateLimit: (retryAfter, options) => { 17 | if (options.request.retryCount < MAX_RETRIES) { 18 | console.warn(`Rate limit exceeded, retrying after ${retryAfter} seconds`) 19 | return true; 20 | } else { 21 | console.error(`Rate limit exceeded, giving up after ${MAX_RETRIES} retries`); 22 | return false; 23 | } 24 | }, 25 | onSecondaryRateLimit: (retryAfter, options) => { 26 | if (options.request.retryCount < MAX_RETRIES) { 27 | console.warn(`Abuse detection triggered, retrying after ${retryAfter} seconds`) 28 | return true; 29 | } else { 30 | console.error(`Abuse detection triggered, giving up after ${MAX_RETRIES} retries`); 31 | return false; 32 | } 33 | } 34 | } 35 | }, params); 36 | 37 | return new Octokit(octoParams); 38 | } 39 | -------------------------------------------------------------------------------- /test/determine-filename.js: -------------------------------------------------------------------------------- 1 | import { describe, it } from "node:test"; 2 | import assert from "node:assert"; 3 | import determineFilename from "../src/determine-filename.js"; 4 | 5 | describe("determine-filename module", function () { 6 | // Long timeout since tests need to send network requests 7 | const timeout = { 8 | timeout: 30000 9 | }; 10 | 11 | it("extracts filename from URL (.html)", timeout, async () => { 12 | const url = "https://example.org/spec/filename.html"; 13 | const filename = await determineFilename(url); 14 | assert.equal(filename, "filename.html"); 15 | }); 16 | 17 | it("extracts filename from URL (.pdf)", timeout, async () => { 18 | const url = "https://example.org/spec/filename.pdf"; 19 | const filename = await determineFilename(url); 20 | assert.equal(filename, "filename.pdf"); 21 | }); 22 | 23 | it("finds index.html filenames", timeout, async () => { 24 | const url = "https://w3c.github.io/presentation-api/"; 25 | const filename = await determineFilename(url); 26 | assert.equal(filename, "index.html"); 27 | }); 28 | 29 | it("finds Overview.html filenames", timeout, async () => { 30 | const url = "https://www.w3.org/TR/presentation-api/"; 31 | const filename = await determineFilename(url); 32 | assert.equal(filename, "Overview.html"); 33 | }); 34 | }); 35 | -------------------------------------------------------------------------------- /test/data.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Make sure that the src/data/*.json files respect the right JSON schema 3 | */ 4 | 5 | import { describe, it } from "node:test"; 6 | import assert from "node:assert"; 7 | import path from "node:path"; 8 | import { fileURLToPath } from "node:url"; 9 | import schema from "../schema/data.json" with { type: "json" }; 10 | import dfnsSchema from "../schema/definitions.json" with { type: "json" }; 11 | import loadJSON from "../src/load-json.js"; 12 | import Ajv from "ajv"; 13 | import addFormats from "ajv-formats"; 14 | const ajv = (new Ajv()).addSchema(dfnsSchema); 15 | addFormats(ajv); 16 | 17 | const scriptPath = path.dirname(fileURLToPath(import.meta.url)); 18 | const multiReposFile = path.resolve(scriptPath, "..", "src", "data", "multispecs-repos.json"); 19 | 20 | describe("Data files", () => { 21 | describe("The JSON schema", () => { 22 | it("is valid", () => { 23 | const isSchemaValid = ajv.validateSchema(schema); 24 | assert.ok(isSchemaValid); 25 | }); 26 | }); 27 | 28 | describe("The multispecs-repos.json list", () => { 29 | it("respects the JSON schema", async () => { 30 | const list = await loadJSON(multiReposFile); 31 | const validate = ajv.compile(schema); 32 | const isValid = validate(list, { format: "full" }); 33 | assert.strictEqual(validate.errors, null); 34 | }); 35 | }); 36 | }); 37 | -------------------------------------------------------------------------------- /test/shortname-continuity.js: -------------------------------------------------------------------------------- 1 | // Tests may run against a test version of the index file 2 | import { describe, it, before } from "node:test"; 3 | import assert from "node:assert"; 4 | import os from "node:os"; 5 | import fs from "node:fs"; 6 | import path from "node:path"; 7 | import util from "node:util"; 8 | import { exec as execCb } from "node:child_process"; 9 | import { fileURLToPath } from "node:url"; 10 | const exec = util.promisify(execCb); 11 | import loadJSON from "../src/load-json.js"; 12 | 13 | const scriptPath = path.dirname(fileURLToPath(import.meta.url)); 14 | const specsFile = process.env.testIndex ?? path.resolve(scriptPath, "..", "index.json"); 15 | const specs = await loadJSON(specsFile); 16 | 17 | describe("The build", {timeout: 60000}, function () { 18 | let tmpdir; 19 | 20 | before(async () => { 21 | tmpdir = await fs.promises.mkdtemp(path.join(os.tmpdir(), "web-specs-")); 22 | await exec("npm install web-specs", { cwd: tmpdir }); 23 | }); 24 | 25 | it("preserves shortnames", async () => { 26 | const lastPublishedSpecs = await loadJSON(path.join( 27 | tmpdir, "node_modules", "web-specs", "index.json")); 28 | 29 | const shortnames = lastPublishedSpecs.map(spec => spec.shortname); 30 | const wrong = shortnames.filter(shortname => !specs.find(spec => 31 | spec.shortname === shortname || 32 | spec.formerNames?.includes(shortname)) 33 | ); 34 | assert.deepStrictEqual(wrong, []); 35 | }); 36 | }); 37 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | import specs from "./index.json" with { type: "json" }; 4 | import { fileURLToPath } from 'node:url'; 5 | import process from 'node:process'; 6 | 7 | 8 | /** 9 | * Return the list of specs that match the specified filter. 10 | * 11 | * - If the filter is an integer, return the spec at that index in the list 12 | * - If the filter is full or delta, return specs with same level composition 13 | * - If the filter is empty, return the whole list 14 | * - return specs that have the same URL, name, shortname, or source otherwise 15 | */ 16 | function getSpecs(filter) { 17 | if (filter) { 18 | const res = filter.match(/^\d+$/) ? 19 | [specs[parseInt(filter, 10)]] : 20 | specs.filter(s => 21 | s.url === filter || 22 | s.name === filter || 23 | s.seriesComposition === filter || 24 | s.source === filter || 25 | s.title === filter || 26 | (s.series && s.series.shortname === filter) || 27 | (s.release && s.release.url === filter) || 28 | (s.nightly && s.nightly.url === filter)); 29 | return res; 30 | } 31 | else { 32 | return specs; 33 | } 34 | } 35 | 36 | export { getSpecs }; 37 | 38 | if (process.argv[1] === fileURLToPath(import.meta.url)) { 39 | // Code used as command-line interface (CLI), output info about known specs. 40 | const res = getSpecs(process.argv[2]); 41 | console.log(JSON.stringify(res.length === 1 ? res[0] : res, null, 2)); 42 | } 43 | -------------------------------------------------------------------------------- /.github/workflows/release-package.yml: -------------------------------------------------------------------------------- 1 | # Publish a new package when a pre-release PR is merged. 2 | # 3 | # Job does nothing if PR that was merged is not a pre-release PR. 4 | 5 | name: "Publish NPM package if needed" 6 | 7 | permissions: 8 | # Required to create/update references (release tags), 9 | # includes "read", which is needed to retrieve a PR: 10 | # https://docs.github.com/en/rest/git/refs#create-a-reference--fine-grained-access-tokens 11 | # https://docs.github.com/en/rest/pulls/pulls#get-a-pull-request--fine-grained-access-tokens 12 | contents: write 13 | 14 | # Required for Open ID Connect (OIDC) authentication for npm publication: 15 | # https://docs.npmjs.com/trusted-publishers#github-actions-configuration 16 | id-token: write 17 | 18 | on: 19 | pull_request: 20 | branches: 21 | - main 22 | types: 23 | - closed 24 | 25 | jobs: 26 | release: 27 | if: startsWith(github.head_ref, 'release-') && github.event.pull_request.merged == true 28 | runs-on: ubuntu-latest 29 | steps: 30 | - name: Checkout latest version of release script 31 | uses: actions/checkout@v6 32 | with: 33 | ref: main 34 | 35 | - name: Setup node.js 36 | uses: actions/setup-node@v6 37 | with: 38 | node-version: 24 39 | cache: 'npm' 40 | 41 | - name: Install dependencies 42 | run: npm ci 43 | 44 | - name: Release package if needed 45 | run: node src/release-package.js ${{ github.event.pull_request.number }} 46 | env: 47 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 48 | -------------------------------------------------------------------------------- /src/determine-filename.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Module that takes the URL of the index page of a spec as input, possibly 3 | * without a filename, and that tries to determine the underlying filename. 4 | * 5 | * For instance: 6 | * - given "https://w3c.github.io/webrtc-identity/identity.html", the function 7 | * would return "identity.html" 8 | * - given "https://compat.spec.whatwg.org/", the function would determine that 9 | * the filename is "index.html". 10 | */ 11 | 12 | export default async function (url) { 13 | // Extract filename directly from the URL when possible 14 | const match = url.match(/\/([^/]+\.(html|pdf|txt))$/); 15 | if (match) { 16 | return match[1]; 17 | } 18 | 19 | // RFC-editor HTML rendering 20 | const rfcMatch = url.match(/\/rfc\/(rfc[0-9]+)$/); 21 | if (rfcMatch) { 22 | return rfcMatch[1] + '.html'; 23 | } 24 | 25 | // Make sure that url ends with a "/" 26 | const urlWithSlash = url.endsWith("/") ? url : url + "/"; 27 | 28 | // Check common candidates 29 | const candidates = [ 30 | "index.html", 31 | "Overview.html" 32 | ]; 33 | 34 | for (const candidate of candidates) { 35 | const res = await fetch(urlWithSlash + candidate, { method: "HEAD" }); 36 | if (res.status >= 200 && res.status < 300) { 37 | return candidate; 38 | } 39 | else if (res.status !== 404) { 40 | console.warn(`[warning] fetching "${urlWithSlash + candidate}" returned unexpected HTTP status ${res.status}`); 41 | } 42 | } 43 | 44 | // Not found? Look at Content-Location header 45 | const res = await fetch(url, { method: "HEAD" }); 46 | const filename = res.headers.get("Content-Location"); 47 | return filename; 48 | } 49 | -------------------------------------------------------------------------------- /src/compute-prevnext.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Module that exports a function that takes a spec object that already has a 3 | * "shortname", "series" and "level" properties (if needed) as input along with 4 | * a list of specs with the same info for each spec, and that returns an object 5 | * with "seriesPrevious" and "seriesNext" properties as needed, that point 6 | * to the "shortname" of the spec object that describes the previous and next 7 | * level for the spec in the list. 8 | */ 9 | 10 | /** 11 | * Exports main function that takes a spec object and a list of specs (which 12 | * may contain the spec object itself) and returns an object with properties 13 | * "seriesPrevious" and/or "seriesNext" set. Function only sets the 14 | * properties when needed, so returned object may be empty. 15 | */ 16 | export default function (spec, list) { 17 | if (!spec || !spec.shortname || !spec.series || !spec.series.shortname) { 18 | throw "Invalid spec object passed as parameter"; 19 | } 20 | 21 | list = list || []; 22 | const level = spec.seriesVersion || "0"; 23 | 24 | return list 25 | .filter(s => s.series.shortname === spec.series.shortname && s.seriesComposition !== "fork") 26 | .sort((a, b) => (a.seriesVersion || "0").localeCompare(b.seriesVersion || "0")) 27 | .reduce((res, s) => { 28 | if ((s.seriesVersion || "0") < level) { 29 | // Previous level is the last spec with a lower level 30 | res.seriesPrevious = s.shortname; 31 | } 32 | else if ((s.seriesVersion || "0") > level) { 33 | // Next level is the first spec with a greater level 34 | if (!res.seriesNext) { 35 | res.seriesNext = s.shortname; 36 | } 37 | } 38 | return res; 39 | }, {}); 40 | } -------------------------------------------------------------------------------- /src/check-base-url.js: -------------------------------------------------------------------------------- 1 | /** 2 | * CLI tool that parses the generated index of specifications to make sure that 3 | * the base URL either matches the release URL if there is one, or the nightly 4 | * URL otherwise. 5 | * 6 | * The CLI tool returns Markdown that can typically be used to create an issue. 7 | * It also sets a check_list environment variable that can be used in GitHub 8 | * actions. 9 | * 10 | * No content is returned when everything looks good. 11 | */ 12 | 13 | import core from "@actions/core"; 14 | import specs from "../index.json" with { type: "json" }; 15 | 16 | const problems = specs 17 | // A subset of the IETF RFCs are crawled from their httpwg.org rendering 18 | // see https://github.com/tobie/specref/issues/672 and 19 | // https://github.com/w3c/browser-specs/issues/280 20 | // Also, the revision for CSS2 is ignored on purpose to squash CSS 2.1 and 21 | // CSS 2.2 into a single entry 22 | .filter(s => s.nightly && 23 | !s.nightly.url.startsWith('https://httpwg.org') && 24 | !s.nightly.url.startsWith('https://www.ietf.org/') && 25 | !s.nightly.url.startsWith('https://explainers-by-googlers.github.io/CHIPS-spec/')) 26 | .filter(s => (s.release && s.url !== s.release.url) || (!s.release && s.url !== s.nightly.url)) 27 | .filter(s => s.shortname !== 'CSS2') 28 | .map(s => { 29 | const expected = s.release ? "release" : "nightly"; 30 | const expectedUrl = s.release ? s.release.url : s.nightly.url; 31 | return `- [ ] [${s.title}](${s.url}): expected ${expected} URL ${expectedUrl} to match base URL ${s.url}`; 32 | }); 33 | 34 | if (problems.length > 0) { 35 | const res = problems.join("\n"); 36 | core.exportVariable("check_list", res); 37 | console.log(res); 38 | } 39 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/suggest-spec.yml: -------------------------------------------------------------------------------- 1 | name: New spec 2 | description: Use this issue template to suggest that a new spec be added to the list. 3 | labels: ["new spec", "review"] 4 | title: "Add new spec:
ISO/IEC 10918-5:2013 specifies the JPEG File Interchange Format (JFIF).
\\n"}} 18 | {"id":61292,"deliverableType":"IS","supplementType":null,"reference":"ISO 18074:2015","title":{"en":"Textiles — Identification of some animal fibres by DNA analysis method — Cashmere, wool, yak and their blends","fr":"Textiles — Identification de certaines fibres animales par la méthode d'analyse de l'ADN — Cachemire, laine, yack et leurs mélanges"},"publicationDate":"2015-11-19","edition":1,"icsCode":["59.080.01"],"ownerCommittee":"ISO/TC 38","currentStage":9093,"replaces":null,"replacedBy":null,"languages":["en","fr"],"pages":{"en":22},"scope":{"en":"ISO 18074:2015 specifies a testing method for DNA analysis of some animal fibres to identify cashmere, wool, yak, and their blends by using extraction, amplification by the polymerase chain reaction (PCR) method and DNA detection processes.
\\nISO 18084:2015 is applicable to cashmere, yak, and wool and their blends as a qualitative method.
\\n\\n"}} 19 | `; 20 | 21 | describe("The ISO catalog module", async function () { 22 | // Long time out since tests need to send network requests 23 | const timeout = { 24 | timeout: 60000 25 | }; 26 | 27 | let defaultDispatcher = getGlobalDispatcher(); 28 | let mockAgent = new MockAgent(); 29 | 30 | function initIntercepts() { 31 | const mockPool = mockAgent 32 | .get("https://isopublicstorageprod.blob.core.windows.net"); 33 | mockPool 34 | .intercept({ path: "/opendata/_latest/iso_technical_committees/json/iso_technical_committees.jsonl" }) 35 | .reply(200, tcResponse); 36 | mockPool 37 | .intercept({ path: "/opendata/_latest/iso_deliverables_metadata/json/iso_deliverables_metadata.jsonl" }) 38 | .reply(200, catalogResponse); 39 | } 40 | 41 | before(() => { 42 | setGlobalDispatcher(mockAgent); 43 | mockAgent.disableNetConnect(); 44 | }); 45 | 46 | after(() => { 47 | setGlobalDispatcher(defaultDispatcher); 48 | }) 49 | 50 | it("extracts spec info for an ISO spec", timeout, async () => { 51 | initIntercepts(); 52 | const spec = { url: "https://www.iso.org/standard/61292.html" }; 53 | const specs = await fetchInfoFromISO([spec]); 54 | assert.ok(specs[0]); 55 | assert.equal(specs[0].shortname, "iso18074"); 56 | assert.equal(specs[0].series?.shortname, "iso18074"); 57 | assert.equal(specs[0].series?.currentSpecification, "iso18074"); 58 | assert.equal(specs[0].source, "iso"); 59 | assert.equal(specs[0].title, "Textiles — Identification of some animal fibres by DNA analysis method — Cashmere, wool, yak and their blends"); 60 | assert.equal(specs[0].organization, "ISO"); 61 | assert.equal(specs[0].groups[0].url, "https://www.iso.org/committee/48148.html"); 62 | assert.equal(specs[0].groups[0].name, "ISO/TC 38"); 63 | assert.equal(specs[0].nightly, undefined); 64 | }); 65 | 66 | it("extracts spec info for an ISO/IEC spec", timeout, async () => { 67 | initIntercepts(); 68 | const spec = { url: "https://www.iso.org/standard/54989.html" }; 69 | const specs = await fetchInfoFromISO([spec]); 70 | assert.ok(specs[0]); 71 | assert.equal(specs[0].shortname, "iso10918-5"); 72 | assert.equal(specs[0].series?.shortname, "iso10918-5"); 73 | assert.equal(specs[0].series?.currentSpecification, "iso10918-5"); 74 | assert.equal(specs[0].source, "iso"); 75 | assert.equal(specs[0].title, "Information technology — Digital compression and coding of continuous-tone still images: JPEG File Interchange Format (JFIF) — Part 5:"); 76 | assert.equal(specs[0].organization, "ISO/IEC"); 77 | assert.equal(specs[0].groups[0].url, "https://www.iso.org/committee/45316.html"); 78 | assert.equal(specs[0].groups[0].name, "ISO/IEC JTC 1/SC 29"); 79 | assert.equal(specs[0].nightly, undefined); 80 | }); 81 | 82 | it("skips fetch in the absence of specs from ISO", timeout, async () => { 83 | // Note: as we don't call initIntercepts(), mock agent will throw if 84 | // code attempts to fetch something from the network 85 | const spec = { url: "https://www.w3.org/TR/from-w3c-with-love/" }; 86 | const specs = await fetchInfoFromISO([spec]); 87 | assert.ok(specs[0]); 88 | }); 89 | 90 | it("skips fetch when asked", timeout, async () => { 91 | // Note: as we don't call initIntercepts(), mock agent will throw if 92 | // code attempts to fetch something from the network. 93 | const spec = { url: "https://www.iso.org/standard/54989.html" }; 94 | const specs = await fetchInfoFromISO([spec], { skipFetch: 'iso' }); 95 | assert.ok(specs[0]); 96 | }); 97 | }); 98 | -------------------------------------------------------------------------------- /src/release-package.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Publish an NPM package when pre-release PR is merged, using the commit on 3 | * which the pre-release PR is based as source of data. 4 | */ 5 | 6 | import Octokit from "./octokit.js"; 7 | import fs from "node:fs"; 8 | import path from "node:path"; 9 | import os from "node:os"; 10 | import { execSync } from "node:child_process"; 11 | import { rimraf } from "rimraf"; 12 | import { npmPublish } from "@jsdevtools/npm-publish"; 13 | import loadJSON from "./load-json.js"; 14 | 15 | const owner = "w3c"; 16 | const repo = "browser-specs"; 17 | 18 | 19 | /** 20 | * Release package at the requested version. 21 | * 22 | * @function 23 | * @param {Number} prNumber Pre-release PR number 24 | */ 25 | async function releasePackage(prNumber) { 26 | console.log(`Retrieve pre-release PR`); 27 | const prResponse = await octokit.pulls.get({ 28 | owner, repo, 29 | pull_number: prNumber 30 | }); 31 | const preReleasePR = prResponse?.data; 32 | if (!preReleasePR) { 33 | console.log("- Given PR does not seem to exist, nothing to release"); 34 | return; 35 | } 36 | 37 | // Extract type from PR title 38 | console.log(`- Given PR title: ${preReleasePR.title}`); 39 | const match = preReleasePR.title.match(/^📦 Release (.*)@(.*)$/); 40 | if (!match) { 41 | console.log("- Given PR is not a pre-release PR, nothing to release"); 42 | return; 43 | } 44 | const type = match[1]; 45 | 46 | if (!["web-specs", "browser-specs"].includes(type)) { 47 | console.log(`- Unknown package type "${type}", nothing to release`); 48 | return; 49 | } 50 | 51 | // Extract commit to release from PR 52 | const preReleaseSha = preReleasePR.base.sha; 53 | console.log(`- Found commit to release: ${preReleaseSha}`); 54 | 55 | console.log(); 56 | console.log("Publish package to npm"); 57 | console.log("- Checkout repo at right commit in temporary folder"); 58 | const tmpFolder = fs.mkdtempSync(path.join(os.tmpdir(), `${repo}-`)); 59 | 60 | try { 61 | execSync(`git clone https://github.com/${owner}/${repo}`, { 62 | cwd: tmpFolder 63 | }); 64 | const installFolder = path.join(tmpFolder, repo); 65 | execSync(`git reset --hard ${preReleaseSha}`, { 66 | cwd: installFolder 67 | }); 68 | console.log(`- Installation folder: ${installFolder}`); 69 | 70 | console.log("- Prepare package files"); 71 | execSync("npm ci", { cwd: installFolder }); 72 | execSync("node src/prepare-packages.js", { cwd: installFolder }); 73 | 74 | console.log(`- Publish packages/${type} folder to npm`); 75 | const packageFolder = path.join(installFolder, "packages", type, "package.json"); 76 | const pubOptions = { 77 | package: packageFolder 78 | //, debug: console.debug 79 | }; 80 | if (NPM_TOKEN) { 81 | pubOptions.token = NPM_TOKEN; 82 | } 83 | const pubResult = await npmPublish(pubOptions); 84 | console.log(`- Published version was ${pubResult.oldVersion}`); 85 | console.log(`- Version bump: ${pubResult.type}`); 86 | console.log(`- Published version is ${pubResult.version}`); 87 | 88 | console.log(); 89 | console.log("Add release tag to commit"); 90 | if (pubResult.version === pubResult.oldVersion) { 91 | console.log("- Skip, no actual package released"); 92 | } 93 | else { 94 | const rawTag = `${type}@${pubResult.version}`; 95 | await octokit.git.createRef({ 96 | owner, repo, 97 | ref: `refs/tags/${rawTag}`, 98 | sha: preReleaseSha 99 | }); 100 | console.log(`- Tagged released commit ${preReleaseSha} with tag "${rawTag}"`); 101 | 102 | await octokit.git.updateRef({ 103 | owner, repo, 104 | ref: `heads/${type}@latest`, 105 | sha: preReleaseSha 106 | }); 107 | console.log(`- Updated ${type}-latest to point to released commit ${preReleaseSha}`); 108 | } 109 | } 110 | finally { 111 | console.log("Clean temporary folder"); 112 | try { 113 | rimraf.sync(tmpFolder); 114 | console.log("- done"); 115 | } 116 | catch { 117 | } 118 | } 119 | } 120 | 121 | 122 | /******************************************************************************* 123 | Retrieve tokens from environment, prepare Octokit and kick things off 124 | *******************************************************************************/ 125 | const config = await loadJSON("config.json"); 126 | const GITHUB_TOKEN = config?.GITHUB_TOKEN ?? process.env.GITHUB_TOKEN; 127 | if (!GITHUB_TOKEN) { 128 | console.error("GITHUB_TOKEN must be set to some personal access token as an env variable or in a config.json file"); 129 | process.exit(1); 130 | } 131 | 132 | // An NPM token is needed to run the script from a local machine. 133 | // Authentication from a GitHub workflow rather relies on OpenID Connect 134 | // and the release workflow must be added as a trusted publisher for each 135 | // npm package that can be released, see: 136 | // https://docs.npmjs.com/trusted-publishers 137 | const NPM_TOKEN = config?.NPM_TOKEN ?? process.env.NPM_TOKEN; 138 | 139 | // Note: npm-publish has a bug and needs an "INPUT_TOKEN" env variable: 140 | // https://github.com/JS-DevTools/npm-publish/issues/15 141 | // (we're passing the token to the function directly, no need to set it here) 142 | process.env.INPUT_TOKEN = ""; 143 | 144 | const octokit = new Octokit({ 145 | auth: GITHUB_TOKEN 146 | //, log: console 147 | }); 148 | 149 | const prereleasePR = parseInt(process.argv[2], 10); 150 | 151 | releasePackage(prereleasePR) 152 | .then(() => { 153 | console.log(); 154 | console.log("== The end =="); 155 | }) 156 | .catch(err => { 157 | console.error(err); 158 | process.exit(1); 159 | }); 160 | -------------------------------------------------------------------------------- /test/lint.js: -------------------------------------------------------------------------------- 1 | import { describe, it } from "node:test"; 2 | import assert from "node:assert"; 3 | import { lintStr } from "../src/lint.js"; 4 | 5 | describe("Linter", () => { 6 | describe("lintStr()", () => { 7 | function toStr(specs) { 8 | return JSON.stringify(specs, null, 2) + "\n"; 9 | } 10 | 11 | it("passes if specs contains a valid URL", () => { 12 | const specs = ["https://www.w3.org/TR/spec/"]; 13 | assert.equal(lintStr(toStr(specs)), null); 14 | }); 15 | 16 | it("passes if specs contains multiple sorted URLs", () => { 17 | const specs = [ 18 | "https://www.w3.org/TR/spec1/", 19 | "https://www.w3.org/TR/spec2/" 20 | ]; 21 | assert.equal(lintStr(toStr(specs)), null); 22 | }); 23 | 24 | it("passes if specs contains a URL with a delta spec", () => { 25 | const specs = [ 26 | "https://www.w3.org/TR/spec-1/", 27 | "https://www.w3.org/TR/spec-2/ delta" 28 | ]; 29 | assert.equal(lintStr(toStr(specs)), null); 30 | }); 31 | 32 | it("passes if specs contains a URL with a spec flagged as current", () => { 33 | const specs = [ 34 | "https://www.w3.org/TR/spec-1/ current", 35 | "https://www.w3.org/TR/spec-2/" 36 | ]; 37 | assert.equal(lintStr(toStr(specs)), null); 38 | }); 39 | 40 | it("passes if specs contains a URL with a spec flagged as multipage", () => { 41 | const specs = [ 42 | "https://www.w3.org/TR/spec-1/ multipage" 43 | ]; 44 | assert.equal(lintStr(toStr(specs)), null); 45 | }); 46 | 47 | it("sorts URLs", () => { 48 | const specs = [ 49 | "https://www.w3.org/TR/spec2/", 50 | "https://www.w3.org/TR/spec1/" 51 | ]; 52 | assert.equal( 53 | lintStr(toStr(specs)), 54 | toStr([ 55 | "https://www.w3.org/TR/spec1/", 56 | "https://www.w3.org/TR/spec2/" 57 | ])); 58 | }); 59 | 60 | it("lints a URL", () => { 61 | const specs = [ 62 | { "url": "https://example.org", "shortname": "test" } 63 | ]; 64 | assert.equal(lintStr(toStr(specs)), toStr([ 65 | { "url": "https://example.org/", "shortname": "test" } 66 | ])); 67 | }); 68 | 69 | it("lints an object with only a URL to a URL", () => { 70 | const specs = [ 71 | { "url": "https://www.w3.org/TR/spec/" } 72 | ]; 73 | assert.equal(lintStr(toStr(specs)), toStr([ 74 | "https://www.w3.org/TR/spec/" 75 | ])); 76 | }); 77 | 78 | it("lints an object with only a URL and a delta flag to a string", () => { 79 | const specs = [ 80 | "https://www.w3.org/TR/spec-1/", 81 | { "url": "https://www.w3.org/TR/spec-2/", seriesComposition: "delta" } 82 | ]; 83 | assert.equal(lintStr(toStr(specs)), toStr([ 84 | "https://www.w3.org/TR/spec-1/", 85 | "https://www.w3.org/TR/spec-2/ delta" 86 | ])); 87 | }); 88 | 89 | it("lints an object with only a URL and a current flag to a string", () => { 90 | const specs = [ 91 | { "url": "https://www.w3.org/TR/spec-1/", "forceCurrent": true }, 92 | "https://www.w3.org/TR/spec-2/" 93 | ]; 94 | assert.equal(lintStr(toStr(specs)), toStr([ 95 | "https://www.w3.org/TR/spec-1/ current", 96 | "https://www.w3.org/TR/spec-2/" 97 | ])); 98 | }); 99 | 100 | it("lints an object with only a URL and a multipage flag to a string", () => { 101 | const specs = [ 102 | { "url": "https://www.w3.org/TR/spec-1/", "multipage": "all" } 103 | ]; 104 | assert.equal(lintStr(toStr(specs)), toStr([ 105 | "https://www.w3.org/TR/spec-1/ multipage" 106 | ])); 107 | }); 108 | 109 | it("lints an object with a 'full' flag", () => { 110 | const specs = [ 111 | { "url": "https://www.w3.org/TR/spec/", "seriesComposition": "full" } 112 | ]; 113 | assert.equal(lintStr(toStr(specs)), toStr([ 114 | "https://www.w3.org/TR/spec/" 115 | ])); 116 | }); 117 | 118 | it("lints an object with a current flag set to false", () => { 119 | const specs = [ 120 | { "url": "https://www.w3.org/TR/spec/", "forceCurrent": false } 121 | ]; 122 | assert.equal(lintStr(toStr(specs)), toStr([ 123 | "https://www.w3.org/TR/spec/" 124 | ])); 125 | }); 126 | 127 | it("lints an object with a multipage flag set to null", () => { 128 | const specs = [ 129 | { "url": "https://www.w3.org/TR/spec/", "multipage": null } 130 | ]; 131 | assert.equal(lintStr(toStr(specs)), toStr([ 132 | "https://www.w3.org/TR/spec/" 133 | ])); 134 | }); 135 | 136 | it("drops duplicate URLs", () => { 137 | const specs = [ 138 | "https://www.w3.org/TR/duplicate/", 139 | "https://www.w3.org/TR/duplicate/" 140 | ]; 141 | assert.equal( 142 | lintStr(toStr(specs)), 143 | toStr(["https://www.w3.org/TR/duplicate/"])); 144 | }); 145 | 146 | it("drops duplicate URLs defined as string and object", () => { 147 | const specs = [ 148 | { "url": "https://www.w3.org/TR/duplicate/" }, 149 | "https://www.w3.org/TR/duplicate/" 150 | ]; 151 | assert.equal( 152 | lintStr(toStr(specs)), 153 | toStr(["https://www.w3.org/TR/duplicate/"])); 154 | }); 155 | 156 | it("lints an object with a forkOf and a seriesComposition property", () => { 157 | const specs = [ 158 | "https://www.w3.org/TR/spec-1/", 159 | { "url": "https://www.w3.org/TR/spec-2/", seriesComposition: "fork", forkOf: "spec-1" } 160 | ]; 161 | assert.equal(lintStr(toStr(specs)), toStr([ 162 | "https://www.w3.org/TR/spec-1/", 163 | { "url": "https://www.w3.org/TR/spec-2/", forkOf: "spec-1" } 164 | ])); 165 | }); 166 | }); 167 | }); 168 | -------------------------------------------------------------------------------- /schema/definitions.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/schema#", 3 | "$id": "https://w3c.github.io/browser-specs/schema/definitions.json", 4 | 5 | "$defs": { 6 | "url": { 7 | "type": "string", 8 | "format": "uri" 9 | }, 10 | 11 | "filename": { 12 | "type": "string", 13 | "pattern": "^[\\w\\-\\.]+\\.(html|pdf|txt)$" 14 | }, 15 | 16 | "relativePath": { 17 | "type": "string", 18 | "pattern": "^[\\w\\-\\.]+(\\/[\\w\\-\\.]+)*$" 19 | }, 20 | 21 | "shortname": { 22 | "type": "string", 23 | "pattern": "^[\\w\\-]+((?<=v?\\d+)\\.\\d+)?$" 24 | }, 25 | 26 | "series": { 27 | "type": "object", 28 | "properties": { 29 | "shortname": { 30 | "type": "string", 31 | "pattern": "^[\\w\\-]+$" 32 | }, 33 | "title": { "$ref": "#/$defs/title" }, 34 | "shortTitle": { "$ref": "#/$defs/title" }, 35 | "currentSpecification": { "$ref": "#/$defs/shortname" }, 36 | "releaseUrl": { "$ref": "#/$defs/url" }, 37 | "nightlyUrl": { "$ref": "#/$defs/url" } 38 | }, 39 | "required": ["shortname"], 40 | "additionalProperties": false 41 | }, 42 | 43 | "seriesVersion": { 44 | "type": "string", 45 | "pattern": "^\\d+(\\.\\d+){0,2}$" 46 | }, 47 | 48 | "seriesComposition": { 49 | "type": "string", 50 | "enum": ["full", "delta", "fork"] 51 | }, 52 | 53 | "forceCurrent": { 54 | "type": "boolean" 55 | }, 56 | 57 | "title": { 58 | "type": "string" 59 | }, 60 | 61 | "source": { 62 | "type": "string", 63 | "enum": ["w3c", "spec", "ietf", "whatwg", "iso"] 64 | }, 65 | 66 | "nightly": { 67 | "type": "object", 68 | "properties": { 69 | "url": { "$ref": "#/$defs/url" }, 70 | "status": { 71 | "type": "string", 72 | "enum": [ 73 | "A Collection of Interesting Ideas", 74 | "Draft Community Group Report", 75 | "Draft Deliverable", 76 | "Draft Finding", 77 | "Draft Registry", 78 | "Editor's Draft", 79 | "Experimental", 80 | "Final Deliverable", 81 | "Informational", 82 | "Internet Standard", 83 | "Living Standard", 84 | "Proposed Standard", 85 | "TAG Finding", 86 | "Unofficial Proposal Draft", 87 | "Working Group Approved Draft" 88 | ] 89 | }, 90 | "alternateUrls": { 91 | "type": "array", 92 | "items": { "$ref": "#/$defs/url" } 93 | }, 94 | "filename": { "$ref": "#/$defs/filename" }, 95 | "sourcePath": { "$ref": "#/$defs/relativePath" }, 96 | "pages": { 97 | "type": "array", 98 | "items": { "$ref": "#/$defs/url" } 99 | }, 100 | "repository": { "$ref": "#/$defs/url" } 101 | }, 102 | "additionalProperties": false 103 | }, 104 | 105 | "tests": { 106 | "type": "object", 107 | "properties": { 108 | "repository": { "$ref": "#/$defs/url" }, 109 | "testPaths": { 110 | "type": "array", 111 | "items": { "$ref": "#/$defs/relativePath" }, 112 | "minItems": 1 113 | }, 114 | "excludePaths": { 115 | "type": "array", 116 | "items": { "$ref": "#/$defs/relativePath" }, 117 | "minItems": 1 118 | } 119 | }, 120 | "required": ["repository"], 121 | "additionalProperties": false 122 | }, 123 | 124 | "groups": { 125 | "type": "array", 126 | "items": { 127 | "type": "object", 128 | "properties": { 129 | "name": { "type": "string" }, 130 | "url": { "$ref": "#/$defs/url" } 131 | }, 132 | "required": ["name", "url"], 133 | "additionalProperties": false 134 | } 135 | }, 136 | 137 | "organization": { 138 | "type": "string" 139 | }, 140 | 141 | "categories": { 142 | "type": "array", 143 | "items": { 144 | "type": "string", 145 | "enum": ["browser"] 146 | } 147 | }, 148 | 149 | "categories-specs": { 150 | "oneOf": [ 151 | { 152 | "type": "string", 153 | "enum": ["reset", "+browser", "-browser"] 154 | }, 155 | { 156 | "type": "array", 157 | "items": { 158 | "type": "string", 159 | "enum": ["reset", "+browser", "-browser"] 160 | }, 161 | "minItems": 1 162 | } 163 | ] 164 | }, 165 | 166 | "forks": { 167 | "type": "array", 168 | "items": { "$ref": "#/$defs/shortname" } 169 | }, 170 | 171 | "standing": { 172 | "type": "string", 173 | "enum": ["good", "pending", "discontinued"] 174 | }, 175 | 176 | "obsoletedBy": { 177 | "type": "array", 178 | "items": { "$ref": "#/$defs/shortname" }, 179 | "minItems": 1 180 | }, 181 | 182 | "formerNames": { 183 | "type": "array", 184 | "items": { "$ref": "#/$defs/shortname" }, 185 | "minItems": 1 186 | }, 187 | 188 | "specsfile": { 189 | "release": { 190 | "type": "object", 191 | "properties": { 192 | "url": { "$ref": "#/$defs/url" }, 193 | "status": { 194 | "type": "string", 195 | "enum": [ 196 | "Candidate Recommendation Draft", 197 | "Candidate Recommendation Snapshot", 198 | "Discontinued Draft", 199 | "Draft Note", 200 | "Draft Registry", 201 | "Final Deliverable", 202 | "First Public Working Draft", 203 | "Note", 204 | "Proposed Recommendation", 205 | "Recommendation", 206 | "Statement", 207 | "Working Draft" 208 | ] 209 | }, 210 | "filename": { "$ref": "#/$defs/filename" }, 211 | "pages": { 212 | "type": "array", 213 | "items": { "$ref": "#/$defs/url" } 214 | } 215 | }, 216 | "additionalProperties": false 217 | } 218 | }, 219 | 220 | "indexfile": { 221 | "release": { 222 | "$ref": "#/$defs/specsfile/release", 223 | "required": ["url", "status"] 224 | } 225 | } 226 | } 227 | } -------------------------------------------------------------------------------- /src/determine-testpath.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Module that takes a list of spec objects as input and returns, for each spec, 3 | * the URL of the repository that contains the test suite of the spec along with 4 | * the path under which the tests are to be found in that repository. 5 | * 6 | * The function will run git commands on the command-line and populate the local 7 | * ".cache" folder. 8 | */ 9 | 10 | import fs from "node:fs"; 11 | import path from "node:path"; 12 | import { execSync } from "node:child_process"; 13 | import { fileURLToPath } from "node:url"; 14 | 15 | // Cache folder under which the WPT repository will be cloned 16 | const scriptPath = path.dirname(fileURLToPath(import.meta.url)); 17 | const cacheFolder = path.resolve(scriptPath, "..", ".cache"); 18 | const wptFolder = path.resolve(cacheFolder, "wpt"); 19 | 20 | /** 21 | * Helper function to setup the cache folder 22 | */ 23 | function setupCacheFolder() { 24 | try { 25 | fs.mkdirSync(cacheFolder); 26 | } 27 | catch (err) { 28 | if (err.code !== 'EEXIST') { 29 | throw err; 30 | } 31 | } 32 | } 33 | 34 | /** 35 | * Helper function that returns true when the WPT folder already exists 36 | * (which is taken to mean that the repository has already been cloned) 37 | */ 38 | function wptFolderExists() { 39 | try { 40 | fs.accessSync(wptFolder); 41 | return true; 42 | } 43 | catch (err) { 44 | if (err.code !== "ENOENT") { 45 | throw err; 46 | } 47 | return false; 48 | } 49 | } 50 | 51 | /** 52 | * Helper function that fetches the latest version of the WPT repository, 53 | * restricting the checkout to META.yml files 54 | */ 55 | function fetchWPT() { 56 | setupCacheFolder(); 57 | if (wptFolderExists()) { 58 | // Pull latest commit from master branch 59 | execSync("git pull origin master", { cwd: wptFolder }); 60 | } 61 | else { 62 | // Clone repo using sparse mode: the repo is huge and we're only interested 63 | // in META.yml files 64 | execSync("git clone https://github.com/web-platform-tests/wpt.git --depth 1 --sparse", { cwd: cacheFolder }); 65 | execSync("git sparse-checkout set --no-cone", { cwd: wptFolder }); 66 | execSync("git sparse-checkout add **/META.yml", { cwd: wptFolder }); 67 | } 68 | } 69 | 70 | /** 71 | * Helper function that reads "spec" entries in all META.yml files of the WPT 72 | * repository. 73 | * 74 | * Note the function parses META.yml files as regular text files. That works 75 | * well but a proper YAML parser would be needed if we need to handle things 76 | * such as comments. 77 | */ 78 | async function readWptMetaFiles() { 79 | async function readFolder(folder) { 80 | let res = []; 81 | const contents = await fs.promises.readdir(folder); 82 | for (const name of contents) { 83 | const filename = path.resolve(folder, name); 84 | const stat = await fs.promises.stat(filename); 85 | if (stat.isDirectory()) { 86 | const nestedFiles = await readFolder(filename); 87 | res = res.concat(nestedFiles); 88 | } 89 | else if (name === "META.yml") { 90 | const file = await fs.promises.readFile(filename, "utf8"); 91 | const match = file.match(/(?:^|\n)spec: (.*)$/m); 92 | if (match) { 93 | res.push({ 94 | folder: folder.substring(wptFolder.length + 1).replace(/\\/g, "/"), 95 | spec: match[1] 96 | }); 97 | } 98 | } 99 | } 100 | return res; 101 | } 102 | 103 | fetchWPT(); 104 | return await readFolder(wptFolder); 105 | } 106 | 107 | 108 | /** 109 | * Returns the first item in the list found in the array, or null if none of 110 | * the items exists in the array. 111 | */ 112 | function getFirstFoundInArray(paths, ...items) { 113 | for (const item of items) { 114 | const path = paths.find(p => p === item); 115 | if (path) { 116 | return path; 117 | } 118 | } 119 | return null; 120 | } 121 | 122 | 123 | /** 124 | * Exports main function that takes a list of specs as input, completes entries 125 | * with a tests property when possible and returns the list. 126 | * 127 | * The options parameter is used to specify the GitHub API authentication token. 128 | */ 129 | export default async function (specs, options) { 130 | if (!specs || specs.find(spec => !spec.shortname || !spec.series || !spec.series.shortname)) { 131 | throw "Invalid list of specifications passed as parameter"; 132 | } 133 | options = options || {}; 134 | 135 | const wptFolders = await readWptMetaFiles(); 136 | 137 | function determineTestInfo(spec) { 138 | const info = { 139 | repository: "https://github.com/web-platform-tests/wpt" 140 | }; 141 | 142 | if (spec.tests) { 143 | return Object.assign(info, spec.tests); 144 | } 145 | 146 | if (spec.url.startsWith("https://registry.khronos.org/webgl/")) { 147 | info.repository = "https://github.com/KhronosGroup/WebGL"; 148 | info.testPaths = ["conformance-suites"]; 149 | // TODO: Be more specific, tests for extensions should one of the files in: 150 | // https://github.com/KhronosGroup/WebGL/tree/master/conformance-suites/2.0.0/conformance2/extensions 151 | // https://github.com/KhronosGroup/WebGL/tree/master/conformance-suites/2.0.0/conformance/extensions 152 | // https://github.com/KhronosGroup/WebGL/tree/master/conformance-suites/1.0.3/conformance/extensions 153 | return info; 154 | } 155 | 156 | if (spec.url.startsWith("https://tc39.es/proposal-") || !spec.nightly) { 157 | // TODO: proposals may or may not have tests under tc39/test262, it would 158 | // be good to have that info here. However, that seems hard to assess 159 | // automatically and tedious to handle as exceptions in specs.json. 160 | return null; 161 | } 162 | 163 | // Note the use of startsWith below, needed to cover cases where a META.yml 164 | // file targets a specific page in a multipage spec (for HTML, typically), 165 | // or a fragment within a spec. 166 | const folders = wptFolders 167 | .filter(item => 168 | item.spec.startsWith(spec.nightly.url) || 169 | item.spec.startsWith(spec.nightly.url.replace(/-\d+\/$/, "/"))) 170 | .map(item => item.folder); 171 | if (folders.length > 0) { 172 | // Don't list subfolders when parent folder is already in the list 173 | info.testPaths = folders.filter(p1 => !folders.find(p2 => (p1 !== p2) && p1.startsWith(p2))); 174 | 175 | // Exclude subfolders of listed folders when they map to another spec 176 | const excludePaths = folders 177 | .map(path => wptFolders.filter(item => 178 | (item.folder !== path) && 179 | item.folder.startsWith(path + "/") && 180 | !item.spec.startsWith(spec.nightly.url) && 181 | !item.spec.startsWith(spec.nightly.url.replace(/-\d+\/$/, "/")))) 182 | .flat() 183 | .map(item => item.folder); 184 | if (excludePaths.length > 0) { 185 | info.excludePaths = excludePaths; 186 | } 187 | 188 | return info; 189 | } 190 | return null; 191 | } 192 | 193 | const testInfos = specs.map(determineTestInfo); 194 | for (const spec of specs) { 195 | const testInfo = testInfos.shift(); 196 | if (testInfo) { 197 | spec.tests = testInfo; 198 | } 199 | } 200 | 201 | return specs; 202 | }; 203 | -------------------------------------------------------------------------------- /test/compute-series-urls.js: -------------------------------------------------------------------------------- 1 | import { describe, it } from "node:test"; 2 | import assert from "node:assert"; 3 | import computeSeriesUrls from "../src/compute-series-urls.js"; 4 | 5 | describe("compute-series-urls module", () => { 6 | it("returns spec URLs when spec has no level", () => { 7 | const spec = { 8 | url: "https://www.w3.org/TR/preload/", 9 | shortname: "preload", 10 | series: { shortname: "preload" }, 11 | release: { url: "https://www.w3.org/TR/preload/" }, 12 | nightly: { url: "https://w3c.github.io/preload/" } 13 | }; 14 | assert.deepStrictEqual(computeSeriesUrls(spec), 15 | { releaseUrl: "https://www.w3.org/TR/preload/", 16 | nightlyUrl: "https://w3c.github.io/preload/" }); 17 | }); 18 | 19 | 20 | it("does not return a release URL if spec has none", () => { 21 | const spec = { 22 | url: "https://compat.spec.whatwg.org/", 23 | shortname: "compat", 24 | series: { shortname: "compat" }, 25 | nightly: { url: "https://compat.spec.whatwg.org/" } 26 | }; 27 | assert.deepStrictEqual(computeSeriesUrls(spec), 28 | { nightlyUrl: "https://compat.spec.whatwg.org/" }); 29 | }); 30 | 31 | 32 | it("does not return a nightly URL if spec has none", () => { 33 | const spec = { 34 | url: "https://compat.spec.whatwg.org/", 35 | shortname: "compat", 36 | series: { shortname: "compat" }, 37 | }; 38 | assert.deepStrictEqual(computeSeriesUrls(spec), {}); 39 | }); 40 | 41 | 42 | it("returns series URLs for Houdini specs", () => { 43 | const spec = { 44 | url: "https://www.w3.org/TR/css-paint-api-1/", 45 | shortname: "css-paint-api-1", 46 | series: { shortname: "css-paint-api" }, 47 | release: { url: "https://www.w3.org/TR/css-paint-api-1/" }, 48 | nightly: { url: "https://drafts.css-houdini.org/css-paint-api-1/" } 49 | }; 50 | assert.deepStrictEqual(computeSeriesUrls(spec), 51 | { releaseUrl: "https://www.w3.org/TR/css-paint-api/", 52 | nightlyUrl: "https://drafts.css-houdini.org/css-paint-api/" }); 53 | }); 54 | 55 | 56 | it("returns series URLs for CSS specs", () => { 57 | const spec = { 58 | url: "https://www.w3.org/TR/css-fonts-4/", 59 | shortname: "css-fonts-4", 60 | series: { shortname: "css-fonts" }, 61 | release: { url: "https://www.w3.org/TR/css-fonts-4/" }, 62 | nightly: { url: "https://drafts.csswg.org/css-fonts-4/" } 63 | }; 64 | assert.deepStrictEqual(computeSeriesUrls(spec), 65 | { releaseUrl: "https://www.w3.org/TR/css-fonts/", 66 | nightlyUrl: "https://drafts.csswg.org/css-fonts/" }); 67 | }); 68 | 69 | 70 | it("handles CSS2 correctly", () => { 71 | const spec = { 72 | url: "https://www.w3.org/TR/CSS2/", 73 | shortname: "CSS2", 74 | series: { shortname: "CSS" }, 75 | release: { url: "https://www.w3.org/TR/CSS2/" }, 76 | nightly: { url: "https://drafts.csswg.org/css2/" } 77 | }; 78 | assert.deepStrictEqual(computeSeriesUrls(spec), 79 | { releaseUrl: "https://www.w3.org/TR/CSS2/", 80 | nightlyUrl: "https://drafts.csswg.org/css2/" }); 81 | }); 82 | 83 | 84 | it("returns right nightly URL for series when spec's nightly has no level", () => { 85 | const spec = { 86 | url: "https://www.w3.org/TR/pointerlock-2/", 87 | shortname: "pointerlock-2", 88 | series: { shortname: "pointerlock" }, 89 | release: { url: "https://www.w3.org/TR/pointerlock-2/" }, 90 | nightly: { url: "https://w3c.github.io/pointerlock/" } 91 | }; 92 | assert.deepStrictEqual(computeSeriesUrls(spec), 93 | { releaseUrl: "https://www.w3.org/TR/pointerlock/", 94 | nightlyUrl: "https://w3c.github.io/pointerlock/" }); 95 | }); 96 | 97 | 98 | it("does not invent an unversioned nightly URL for SVG 2", () => { 99 | const spec = { 100 | url: "https://www.w3.org/TR/SVG2/", 101 | shortname: "SVG2", 102 | series: { shortname: "SVG" }, 103 | release: { url: "https://www.w3.org/TR/SVG2/" }, 104 | nightly: { url: "https://svgwg.org/svg2-draft/" } 105 | }; 106 | assert.deepStrictEqual(computeSeriesUrls(spec), 107 | { releaseUrl: "https://www.w3.org/TR/SVG/", 108 | nightlyUrl: "https://svgwg.org/svg2-draft/" }); 109 | }); 110 | 111 | 112 | it("looks for a release URL in previous versions", () => { 113 | const spec = { 114 | url: "https://drafts.csswg.org/css-fonts-5/", 115 | shortname: "css-fonts-5", 116 | series: { shortname: "css-fonts" }, 117 | seriesPrevious: "css-fonts-4", 118 | nightly: { url: "https://drafts.csswg.org/css-fonts-5/" } 119 | }; 120 | 121 | const list = [ 122 | spec, 123 | { 124 | url: "https://drafts.csswg.org/css-fonts-4/", 125 | shortname: "css-fonts-4", 126 | series: { shortname: "css-fonts" }, 127 | seriesPrevious: "css-fonts-3", 128 | nightly: { url: "https://drafts.csswg.org/css-fonts-4/" } 129 | }, 130 | { 131 | url: "https://drafts.csswg.org/css-fonts-3/", 132 | shortname: "css-fonts-3", 133 | series: { shortname: "css-fonts" }, 134 | release: { url: "https://www.w3.org/TR/css-fonts-3/" }, 135 | nightly: { url: "https://drafts.csswg.org/css-fonts-3/" } 136 | } 137 | ]; 138 | 139 | assert.deepStrictEqual(computeSeriesUrls(spec, list), 140 | { releaseUrl: "https://www.w3.org/TR/css-fonts/", 141 | nightlyUrl: "https://drafts.csswg.org/css-fonts/" }); 142 | }); 143 | 144 | 145 | it("looks for a release URL in the provided spec if not the current one", () => { 146 | const spec = { 147 | url: "https://drafts.fxtf.org/compositing-1/", 148 | shortname: "compositing-1", 149 | series: { shortname: "compositing", currentSpecification: "compositing-2" }, 150 | nightly: { url: "https://drafts.fxtf.org/compositing-1/" }, 151 | release: { url: "https://www.w3.org/TR/compositing-1/" } 152 | }; 153 | 154 | const list = [ 155 | spec, 156 | { 157 | url: "https://drafts.fxtf.org/compositing-2/", 158 | shortname: "compositing-2", 159 | series: { shortname: "compositing", currentSpecification: "compositing-2" }, 160 | seriesPrevious: "compositing-1", 161 | nightly: { url: "https://drafts.fxtf.org/compositing-2/" } 162 | } 163 | ]; 164 | 165 | assert.deepStrictEqual(computeSeriesUrls(spec, list), 166 | { releaseUrl: "https://www.w3.org/TR/compositing/", 167 | nightlyUrl: "https://drafts.fxtf.org/compositing/" }); 168 | }); 169 | 170 | 171 | it("computes info based on current specification", () => { 172 | const spec = { 173 | url: "https://www.w3.org/TR/SVG11/", 174 | seriesComposition: "full", 175 | shortname: "SVG11", 176 | series: { shortname: "SVG", currentSpecification: "SVG2" }, 177 | release: { url: "https://www.w3.org/TR/SVG11/" }, 178 | nightly: { url: "https://www.w3.org/TR/SVG11/" } 179 | }; 180 | 181 | const list = [ 182 | spec, 183 | { 184 | url: "https://www.w3.org/TR/SVG2/", 185 | seriesComposition: "full", 186 | shortname: "SVG2", 187 | series: { shortname: "SVG", currentSpecification: "SVG2" }, 188 | release: { url: "https://www.w3.org/TR/SVG2/" }, 189 | nightly: { url: "https://svgwg.org/svg2-draft/" } 190 | } 191 | ]; 192 | 193 | assert.deepStrictEqual(computeSeriesUrls(spec, list), 194 | { releaseUrl: "https://www.w3.org/TR/SVG/", 195 | nightlyUrl: "https://svgwg.org/svg2-draft/" }); 196 | }); 197 | }); 198 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | ## Applicable licenses 2 | 3 | This software and associated documentation files (the "Software") are licensed under the terms of the [MIT License](#mit-license). 4 | 5 | Additionally, the list of technical Web specifications (the [index.json](index.json) file) is published under the terms of the [CC0 license](#cc0-license). 6 | 7 | 8 | ## MIT License 9 | 10 | Copyright (c) 2020 World Wide Web Consortium 11 | 12 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 13 | 14 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | 18 | 19 | ## CC0 License 20 | 21 | ### Statement of Purpose 22 | 23 | The laws of most jurisdictions throughout the world automatically confer exclusive Copyright and Related Rights (defined below) upon the creator and subsequent owner(s) (each and all, an "owner") of an original work of authorship and/or a database (each, a "Work"). 24 | 25 | Certain owners wish to permanently relinquish those rights to a Work for the purpose of contributing to a commons of creative, cultural and scientific works ("Commons") that the public can reliably and without fear of later claims of infringement build upon, modify, incorporate in other works, reuse and redistribute as freely as possible in any form whatsoever and for any purposes, including without limitation commercial purposes. These owners may contribute to the Commons to promote the ideal of a free culture and the further production of creative, cultural and scientific works, or to gain reputation or greater distribution for their Work in part through the use and efforts of others. 26 | 27 | For these and/or other purposes and motivations, and without any expectation of additional consideration or compensation, the person associating CC0 with a Work (the "Affirmer"), to the extent that he or she is an owner of Copyright and Related Rights in the Work, voluntarily elects to apply CC0 to the Work and publicly distribute the Work under its terms, with knowledge of his or her Copyright and Related Rights in the Work and the meaning and intended legal effect of CC0 on those rights. 28 | 29 | ### Copyright and Related Rights 30 | 31 | A Work made available under CC0 may be protected by copyright and related or neighboring rights ("Copyright and Related Rights"). Copyright and Related Rights include, but are not limited to, the following: 32 | 33 | i. the right to reproduce, adapt, distribute, perform, display, communicate, and translate a Work; 34 | ii. moral rights retained by the original author(s) and/or performer(s); 35 | iii. publicity and privacy rights pertaining to a person's image or likeness depicted in a Work; 36 | iv. rights protecting against unfair competition in regards to a Work, subject to the limitations in paragraph 4(a), below; 37 | v. rights protecting the extraction, dissemination, use and reuse of data in a Work; 38 | vi. database rights (such as those arising under Directive 96/9/EC of the European Parliament and of the Council of 11 March 1996 on the legal protection of databases, and under any national implementation thereof, including any amended or successor version of such directive); and 39 | vii. other similar, equivalent or corresponding rights throughout the world based on applicable law or treaty, and any national implementations thereof. 40 | 41 | ### Waiver 42 | 43 | To the greatest extent permitted by, but not in contravention of, applicable law, Affirmer hereby overtly, fully, permanently, irrevocably and unconditionally waives, abandons, and surrenders all of Affirmer's Copyright and Related Rights and associated claims and causes of action, whether now known or unknown (including existing as well as future claims and causes of action), in the Work (i) in all territories worldwide, (ii) for the maximum duration provided by applicable law or treaty (including future time extensions), (iii) in any current or future medium and for any number of copies, and (iv) for any purpose whatsoever, including without limitation commercial, advertising or promotional purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each member of the public at large and to the detriment of Affirmer's heirs and successors, fully intending that such Waiver shall not be subject to revocation, rescission, cancellation, termination, or any other legal or equitable action to disrupt the quiet enjoyment of the Work by the public as contemplated by Affirmer's express Statement of Purpose. 44 | 45 | ### Public License Fallback 46 | 47 | Should any part of the Waiver for any reason be judged legally invalid or ineffective under applicable law, then the Waiver shall be preserved to the maximum extent permitted taking into account Affirmer's express Statement of Purpose. In addition, to the extent the Waiver is so judged Affirmer hereby grants to each affected person a royalty-free, non transferable, non sublicensable, non exclusive, irrevocable and unconditional license to exercise Affirmer's Copyright and Related Rights in the Work (i) in all territories worldwide, (ii) for the maximum duration provided by applicable law or treaty (including future time extensions), (iii) in any current or future medium and for any number of copies, and (iv) for any purpose whatsoever, including without limitation commercial, advertising or promotional purposes (the "License"). The License shall be deemed effective as of the date CC0 was applied by Affirmer to the Work. Should any part of the License for any reason be judged legally invalid or ineffective under applicable law, such partial invalidity or ineffectiveness shall not invalidate the remainder of the License, and in such case Affirmer hereby affirms that he or she will not (i) exercise any of his or her remaining Copyright and Related Rights in the Work or (ii) assert any associated claims and causes of action with respect to the Work, in either case contrary to Affirmer's express Statement of Purpose. 48 | 49 | ### Limitations and Disclaimers 50 | 51 | a. No trademark or patent rights held by Affirmer are waived, abandoned, surrendered, licensed or otherwise affected by this document. 52 | b. Affirmer offers the Work as-is and makes no representations or warranties of any kind concerning the Work, express, implied, statutory or otherwise, including without limitation warranties of title, merchantability, fitness for a particular purpose, non infringement, or the absence of latent or other defects, accuracy, or the present or absence of errors, whether or not discoverable, all to the greatest extent permissible under applicable law. 53 | c. Affirmer disclaims responsibility for clearing rights of other persons that may apply to the Work or any use thereof, including without limitation any person's Copyright and Related Rights in the Work. Further, Affirmer disclaims responsibility for obtaining any necessary consents, permissions or other rights required for any use of the Work. 54 | d. Affirmer understands and acknowledges that Creative Commons is not a party to this document and has no duty or obligation with respect to this CC0 or use of the Work. 55 | -------------------------------------------------------------------------------- /src/compute-repository.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Module that exports a function that takes a list of specifications as input 3 | * and computes, for each of them, the URL of the repository that contains the 4 | * source code for this, as well as the source file of the specification at the 5 | * HEAD of default branch in the repository. 6 | * 7 | * The function needs an authentication token for the GitHub API. 8 | */ 9 | 10 | import Octokit from "./octokit.js"; 11 | import parseSpecUrl from "./parse-spec-url.js"; 12 | 13 | 14 | /** 15 | * Returns the first item in the list found in the Git tree, or null if none of 16 | * the items exists in the array. 17 | */ 18 | function getFirstFoundInTree(paths, ...items) { 19 | for (const item of items) { 20 | const path = paths.find(p => p.path === item); 21 | if (path) { 22 | return path; 23 | } 24 | } 25 | return null; 26 | } 27 | 28 | 29 | /** 30 | * Exports main function that takes a list of specs (with a nighly.url property) 31 | * as input, completes entries with a nightly.repository property when possible 32 | * and returns the list. 33 | * 34 | * The options parameter is used to specify the GitHub API authentication token. 35 | * In the absence of it, the function does not go through the GitHub API and 36 | * thus cannot set most of the information. This is useful to run tests without 37 | * an authentication token (but obviously means that the owner name returned 38 | * by the function will remain the lowercased version, and that the returned 39 | * info won't include the source file). 40 | */ 41 | export default async function (specs, options) { 42 | if (!specs) { 43 | throw "Invalid list of specifications passed as parameter"; 44 | } 45 | options = options || {}; 46 | 47 | const octokit = new Octokit({ auth: options.githubToken }); 48 | const repoCache = new Map(); 49 | const repoPathCache = new Map(); 50 | const userCache = new Map(); 51 | 52 | /** 53 | * Take a GitHub repo owner name (lowercase version) and retrieve the real 54 | * owner name (with possible uppercase characters) from the GitHub API. 55 | */ 56 | async function fetchRealGitHubOwnerName(username) { 57 | if (!userCache.has(username)) { 58 | const { data } = await octokit.users.getByUsername({ username }); 59 | if (data.message) { 60 | // Alert when user does not exist 61 | throw res.message; 62 | } 63 | userCache.set(username, data.login); 64 | } 65 | return userCache.get(username); 66 | } 67 | 68 | /** 69 | * Determine the name of the file that contains the source of the spec in the 70 | * default branch of the GitHub repository associated with the specification. 71 | */ 72 | async function determineSourcePath(spec, repo) { 73 | // Retrieve all paths of the GitHub repository 74 | const cacheKey = `${repo.owner}/${repo.name}`; 75 | if (!repoPathCache.has(cacheKey)) { 76 | const { data } = await octokit.git.getTree({ 77 | owner: repo.owner, 78 | repo: repo.name, 79 | tree_sha: "HEAD", 80 | recursive: true 81 | }); 82 | const paths = data.tree; 83 | repoPathCache.set(cacheKey, paths); 84 | } 85 | const paths = repoPathCache.get(cacheKey); 86 | 87 | // Extract filename from nightly URL when there is one 88 | const match = spec.nightly.url.match(/\/([\w\-]+)\.html$/); 89 | const nightlyFilename = match ? match[1] : ""; 90 | 91 | const sourcePath = getFirstFoundInTree(paths, 92 | // Common paths for CSS specs 93 | `${spec.shortname}.bs`, 94 | `${spec.shortname}/Overview.bs`, 95 | `${spec.shortname}/Overview.src.html`, 96 | `${spec.series.shortname}/Overview.bs`, 97 | `${spec.series.shortname}/Overview.src.html`, 98 | 99 | // Used for SHACL specs 100 | `${spec.shortname}/index.html`, 101 | 102 | // Used for ARIA specs 103 | `${spec.series.shortname}/index.html`, 104 | 105 | // Named after the nightly filename 106 | `${nightlyFilename}.bs`, 107 | `${nightlyFilename}.html`, 108 | `${nightlyFilename}.src.html`, 109 | `${nightlyFilename}.md`, 110 | 111 | // WebGL extensions 112 | `extensions/${spec.shortname}/extension.xml`, 113 | 114 | // WebAssembly specs 115 | `document/${spec.series.shortname.replace(/^wasm-/, '')}/index.bs`, 116 | 117 | // SVG specs 118 | `specs/${spec.shortname.replace(/^svg-/, '')}/master/Overview.html`, 119 | `master/Overview.html`, 120 | 121 | // HTTPWG specs 122 | `specs/${spec.shortname}.xml`, 123 | 124 | // Following patterns are used in a small number of cases, but could 125 | // perhaps appear again in the future, so worth handling here. 126 | "spec/index.bs", 127 | "spec/index.html", // Only one TC39 spec 128 | "spec/Overview.html", // Only WebCrypto 129 | "docs/index.bs", // Only ServiceWorker 130 | "spec.html", // Most TC39 specs 131 | "spec.emu", // Some TC39 specs 132 | `${spec.shortname}/Overview.html`, // css-color-3, mediaqueries-3 133 | 134 | // Most common patterns, checking on "index.html" last as some repos 135 | // include such a file to store the generated spec from the source. 136 | "index.src.html", 137 | "index.bs", 138 | "spec.bs", 139 | "index.md", 140 | "index.html" 141 | ); 142 | 143 | if (!sourcePath) { 144 | return null; 145 | } 146 | 147 | // Fetch target file for symlinks 148 | if (sourcePath.mode === "120000") { 149 | const { data } = await octokit.git.getBlob({ 150 | owner: repo.owner, 151 | repo: repo.name, 152 | file_sha: sourcePath.sha 153 | }); 154 | return Buffer.from(data.content, "base64").toString("utf8"); 155 | } 156 | return sourcePath.path; 157 | } 158 | 159 | async function isRealRepo(repo) { 160 | if (!options.githubToken) { 161 | // Assume the repo exists if we can't check 162 | return true; 163 | } 164 | const cacheKey = `${repo.owner}/${repo.name}`; 165 | if (!repoCache.has(cacheKey)) { 166 | try { 167 | await octokit.repos.get({ 168 | owner: repo.owner, 169 | repo: repo.name 170 | }); 171 | repoCache.set(cacheKey, true); 172 | } 173 | catch (err) { 174 | if (err.status === 404) { 175 | repoCache.set(cacheKey, false); 176 | } 177 | else { 178 | throw err; 179 | } 180 | } 181 | } 182 | return repoCache.get(cacheKey); 183 | } 184 | 185 | // Compute GitHub repositories with lowercase owner names 186 | const repos = specs.map(spec => spec.nightly ? 187 | parseSpecUrl(spec.nightly.repository ?? spec.nightly.url) : 188 | null); 189 | 190 | if (options.githubToken) { 191 | // Fetch the real name of repository owners (preserving case) 192 | for (const repo of repos) { 193 | if (repo) { 194 | repo.owner = await fetchRealGitHubOwnerName(repo.owner); 195 | } 196 | } 197 | } 198 | 199 | // Compute final repo URL and add source file if possible 200 | for (const spec of specs) { 201 | const repo = repos.shift(); 202 | if (repo && await isRealRepo(repo)) { 203 | spec.nightly.repository = `https://github.com/${repo.owner}/${repo.name}`; 204 | 205 | if (options.githubToken && !spec.nightly.sourcePath) { 206 | const sourcePath = await determineSourcePath(spec, repo); 207 | if (sourcePath) { 208 | spec.nightly.sourcePath = sourcePath; 209 | } 210 | } 211 | } 212 | else if (spec.nightly?.url.match(/\/httpwg\.org\//)) { 213 | const draftName = spec.nightly.url.match(/\/(draft-ietf-(.+))\.html$/); 214 | spec.nightly.repository = 'https://github.com/httpwg/http-extensions'; 215 | spec.nightly.sourcePath = `${draftName[1]}.md`; 216 | } 217 | } 218 | 219 | return specs; 220 | }; 221 | -------------------------------------------------------------------------------- /test/fetch-groups.js: -------------------------------------------------------------------------------- 1 | import { describe, it } from "node:test"; 2 | import assert from "node:assert"; 3 | import fetchGroups from "../src/fetch-groups.js"; 4 | 5 | const githubToken = (function () { 6 | try { 7 | return require("../config.json").GITHUB_TOKEN; 8 | } 9 | catch (err) { 10 | return null; 11 | } 12 | })() ?? process.env.GITHUB_TOKEN; 13 | 14 | describe("fetch-groups module (without API keys)", function () { 15 | // Long timeout since tests may need to send network requests 16 | const timeout = { timeout: 30000 }; 17 | 18 | async function fetchGroupsFor(url, options) { 19 | const spec = { url }; 20 | const result = await fetchGroups([spec], options); 21 | return result[0]; 22 | }; 23 | 24 | it("handles WHATWG URLs", timeout, async () => { 25 | const res = await fetchGroupsFor("https://url.spec.whatwg.org/"); 26 | assert.equal(res.organization, "WHATWG"); 27 | assert.deepStrictEqual(res.groups, [{ 28 | name: "URL Workstream", 29 | url: "https://url.spec.whatwg.org/" 30 | }]); 31 | }); 32 | 33 | it("handles TC39 URLs", timeout, async () => { 34 | const res = await fetchGroupsFor("https://tc39.es/proposal-relative-indexing-method/"); 35 | assert.equal(res.organization, "Ecma International"); 36 | assert.deepStrictEqual(res.groups, [{ 37 | name: "TC39", 38 | url: "https://tc39.es/" 39 | }]); 40 | }); 41 | 42 | it("handles W3C TAG URLs", timeout, async () => { 43 | const res = await fetchGroupsFor("https://www.w3.org/2001/tag/doc/promises-guide"); 44 | assert.equal(res.organization, "W3C"); 45 | assert.deepStrictEqual(res.groups, [{ 46 | name: "Technical Architecture Group", 47 | url: "https://www.w3.org/2001/tag/" 48 | }]); 49 | }); 50 | 51 | it("handles WebGL URLs", timeout, async () => { 52 | const res = await fetchGroupsFor("https://registry.khronos.org/webgl/extensions/EXT_clip_cull_distance/"); 53 | assert.equal(res.organization, "Khronos Group"); 54 | assert.deepStrictEqual(res.groups, [{ 55 | name: "WebGL Working Group", 56 | url: "https://www.khronos.org/webgl/" 57 | }]); 58 | }); 59 | 60 | it("handles IETF RFCs", timeout, async () => { 61 | const res = await fetchGroupsFor("https://www.rfc-editor.org/rfc/rfc9110"); 62 | assert.equal(res.organization, "IETF"); 63 | assert.deepStrictEqual(res.groups, [{ 64 | name: "HTTP Working Group", 65 | url: "https://datatracker.ietf.org/wg/httpbis/" 66 | }]); 67 | }); 68 | 69 | it("handles IETF group drafts", timeout, async () => { 70 | const res = await fetchGroupsFor("https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-digest-headers"); 71 | assert.equal(res.organization, "IETF"); 72 | assert.deepStrictEqual(res.groups, [{ 73 | name: "HTTP Working Group", 74 | url: "https://datatracker.ietf.org/wg/httpbis/" 75 | }]); 76 | }); 77 | 78 | it("handles IETF individual drafts", timeout, async () => { 79 | const res = await fetchGroupsFor("https://datatracker.ietf.org/doc/html/draft-cutler-httpbis-partitioned-cookies"); 80 | assert.equal(res.organization, "IETF"); 81 | assert.deepStrictEqual(res.groups, [{ 82 | name: "Individual Submissions", 83 | url: "https://datatracker.ietf.org/wg/none/" 84 | }]); 85 | }); 86 | 87 | it("handles IETF area drafts", timeout, async () => { 88 | const res = await fetchGroupsFor("https://datatracker.ietf.org/doc/html/draft-zern-webp"); 89 | assert.equal(res.organization, "IETF"); 90 | assert.deepStrictEqual(res.groups, [{ 91 | name: "Applications and Real-Time Area", 92 | url: "https://datatracker.ietf.org/wg/art/" 93 | }]); 94 | }); 95 | 96 | it("handles AOM specs", timeout, async () => { 97 | const res = await fetchGroupsFor("https://aomediacodec.github.io/afgs1-spec/"); 98 | assert.equal(res.organization, "Alliance for Open Media"); 99 | assert.deepStrictEqual(res.groups, [{ 100 | name: "Codec Working Group", 101 | url: "https://aomedia.org/about/#codec-working-group" 102 | }]); 103 | }); 104 | 105 | it("preserves provided info", timeout, async () => { 106 | const spec = { 107 | url: "https://url.spec.whatwg.org/", 108 | organization: "Acme Corporation", 109 | groups: [{ 110 | name: "Road Runner Group", 111 | url: "https://en.wikipedia.org/wiki/Wile_E._Coyote_and_the_Road_Runner" 112 | }] 113 | }; 114 | const res = await fetchGroups([spec]); 115 | assert.equal(res[0].organization, spec.organization); 116 | assert.deepStrictEqual(res[0].groups, spec.groups); 117 | }); 118 | 119 | it("preserves provided info for Patent Policy", timeout, async () => { 120 | const spec = { 121 | "url": "https://www.w3.org/Consortium/Patent-Policy/", 122 | "shortname": "w3c-patent-policy", 123 | "groups": [ 124 | { 125 | "name": "Patents and Standards Interest Group", 126 | "url": "https://www.w3.org/2004/pp/psig/" 127 | } 128 | ] 129 | }; 130 | const res = await fetchGroups([spec]); 131 | assert.equal(res[0].organization, "W3C"); 132 | assert.deepStrictEqual(res[0].groups, spec.groups); 133 | }); 134 | 135 | describe("fetch from W3C API", () => { 136 | it("handles /TR URLs", timeout, async () => { 137 | const res = await fetchGroupsFor("https://www.w3.org/TR/gamepad/"); 138 | assert.equal(res.organization, "W3C"); 139 | assert.deepStrictEqual(res.groups, [{ 140 | name: "Web Applications Working Group", 141 | url: "https://www.w3.org/groups/wg/webapps/" 142 | }]); 143 | }); 144 | 145 | it("handles multiple /TR URLs", timeout, async () => { 146 | const specs = [ 147 | { url: "https://www.w3.org/TR/gamepad/" }, 148 | { url: "https://www.w3.org/TR/accname-1.2/" } 149 | ]; 150 | const res = await fetchGroups(specs); 151 | assert.equal(res[0].organization, "W3C"); 152 | assert.deepStrictEqual(res[0].groups, [{ 153 | name: "Web Applications Working Group", 154 | url: "https://www.w3.org/groups/wg/webapps/" 155 | }]); 156 | assert.equal(res[1].organization, "W3C"); 157 | assert.deepStrictEqual(res[1].groups, [{ 158 | name: "Accessible Rich Internet Applications Working Group", 159 | url: "https://www.w3.org/WAI/about/groups/ariawg/" 160 | }]); 161 | }); 162 | 163 | it("handles w3c.github.io URLs", timeout, async () => { 164 | const res = await fetchGroupsFor("https://w3c.github.io/web-nfc/", { githubToken }); 165 | assert.equal(res.organization, "W3C"); 166 | assert.deepStrictEqual(res.groups, [{ 167 | name: "Web NFC Community Group", 168 | url: "https://www.w3.org/community/web-nfc/" 169 | }]); 170 | }); 171 | 172 | it("handles SVG URLs", timeout, async () => { 173 | const res = await fetchGroupsFor("https://svgwg.org/specs/animations/"); 174 | assert.equal(res.organization, "W3C"); 175 | assert.deepStrictEqual(res.groups, [{ 176 | name: "SVG Working Group", 177 | url: "https://www.w3.org/groups/wg/svg/" 178 | }]); 179 | }); 180 | 181 | it("handles CSS WG URLs", timeout, async () => { 182 | const res = await fetchGroupsFor("https://drafts.csswg.org/css-animations-2/"); 183 | assert.equal(res.organization, "W3C"); 184 | assert.deepStrictEqual(res.groups, [{ 185 | name: "Cascading Style Sheets (CSS) Working Group", 186 | url: "https://www.w3.org/groups/wg/css/" 187 | }]); 188 | }); 189 | 190 | it("handles CSS Houdini TF URLs", timeout, async () => { 191 | const res = await fetchGroupsFor("https://drafts.css-houdini.org/css-typed-om-2/"); 192 | assert.equal(res.organization, "W3C"); 193 | assert.deepStrictEqual(res.groups, [{ 194 | name: "Cascading Style Sheets (CSS) Working Group", 195 | url: "https://www.w3.org/groups/wg/css/" 196 | }]); 197 | }); 198 | 199 | it("handles CSS FXTF URLs", timeout, async () => { 200 | const res = await fetchGroupsFor("https://drafts.fxtf.org/filter-effects-2/"); 201 | assert.equal(res.organization, "W3C"); 202 | assert.deepStrictEqual(res.groups, [{ 203 | name: "Cascading Style Sheets (CSS) Working Group", 204 | url: "https://www.w3.org/groups/wg/css/" 205 | }]); 206 | }); 207 | 208 | it("uses last published info for discontinued specs", timeout, async () => { 209 | const spec = { 210 | url: "https://wicg.github.io/close-watcher/", 211 | shortname: "close-watcher", 212 | __last: { 213 | standing: "discontinued", 214 | organization: "Acme Corporation", 215 | groups: [{ 216 | name: "Road Runner", 217 | url: "beep beep" 218 | }] 219 | } 220 | }; 221 | const result = await fetchGroups([spec]); 222 | assert.equal(result[0].organization, spec.__last.organization); 223 | assert.deepStrictEqual(result[0].groups, spec.__last.groups); 224 | }); 225 | }); 226 | }); 227 | -------------------------------------------------------------------------------- /src/fetch-groups.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Module that exports a function that takes a list of specifications as input 3 | * and computes, for each of them, the name of the organization and groups 4 | * within that organization that develop the specification. 5 | * 6 | * The function needs an authentication token for the GitHub API. 7 | */ 8 | 9 | import Octokit from "./octokit.js"; 10 | import parseSpecUrl from "./parse-spec-url.js"; 11 | import fetchJSON from "./fetch-json.js"; 12 | 13 | 14 | /** 15 | * We will very likely need to use group information from the validate-repos 16 | * project which compiles w3c.json files across repositories. 17 | */ 18 | let w3cGroups = null; 19 | 20 | 21 | /** 22 | * Exports main function that takes a list of specs (with a url property) 23 | * as input, completes entries with an "organization" property that contains the 24 | * name of the organization such as W3C, WHATWG, IETF, Khronos Group, 25 | * Ecma International, and a "groups" property that contains an array of objects 26 | * that describe the groups responsible for the spec. 27 | * 28 | * The function preserves the properties if they have already been provided in 29 | * the input array. 30 | * 31 | * The options parameter is used to specify the GitHub API 32 | * authentication token. 33 | */ 34 | export default async function (specs, options) { 35 | // Maintain a cache of fetched resources in memory to avoid sending the 36 | // same fetch request again and again 37 | const cache = {}; 38 | 39 | for (const spec of specs) { 40 | if (spec.__last?.standing === 'discontinued' && 41 | (!spec.standing || spec.standing === 'discontinued')) { 42 | spec.organization = spec.__last.organization; 43 | spec.groups = spec.__last.groups; 44 | continue; 45 | } 46 | const info = parseSpecUrl(spec.url); 47 | if (!info) { 48 | // For IETF documents, retrieve the group info from datatracker 49 | const ietfName = 50 | spec.url.match(/rfc-editor\.org\/rfc\/([^\/]+)/) ?? 51 | spec.url.match(/datatracker\.ietf\.org\/doc\/html\/([^\/]+)/); 52 | if (ietfName) { 53 | spec.organization = spec.organization ?? "IETF"; 54 | if (spec.groups) continue; 55 | const ietfJson = await fetchJSON(`https://datatracker.ietf.org/doc/${ietfName[1]}/doc.json`, options); 56 | if (ietfJson.group?.type === "WG") { 57 | spec.groups = [{ 58 | name: `${ietfJson.group.name} Working Group`, 59 | url: `https://datatracker.ietf.org/wg/${ietfJson.group.acronym}/` 60 | }]; 61 | continue; 62 | } 63 | else if ((ietfJson.group?.type === "Individual") || 64 | (ietfJson.group?.type === "Area")) { 65 | // Document uses the "Individual Submissions" stream, linked to the 66 | // "none" group in IETF: https://datatracker.ietf.org/group/none/ 67 | // or to an IETF area, which isn't truly a group but still looks like 68 | // one. That's fine, let's reuse that info. 69 | spec.groups = [{ 70 | name: ietfJson.group.name, 71 | url: `https://datatracker.ietf.org/wg/${ietfJson.group.acronym}/` 72 | }]; 73 | continue; 74 | } 75 | else { 76 | throw new Error(`Could not derive IETF group for ${spec.url}. 77 | Unknown group type found in https://datatracker.ietf.org/doc/${ietfName[1]}/doc.json`); 78 | } 79 | } 80 | 81 | if (!spec.groups) { 82 | throw new Error(`Cannot extract any useful info from ${spec.url}`); 83 | } 84 | } 85 | 86 | if (info && info.owner === "whatwg") { 87 | const workstreams = await fetchJSON("https://raw.githubusercontent.com/whatwg/sg/main/db.json", options); 88 | const workstream = workstreams.workstreams.find(ws => ws.standards.find(s => s.href === spec.url)); 89 | if (!workstream) { 90 | throw new Error(`No WHATWG workstream found for ${spec.url}`); 91 | } 92 | spec.organization = spec.organization ?? "WHATWG"; 93 | spec.groups = spec.groups ?? [{ 94 | name: `${workstream.name} Workstream`, 95 | url: spec.url 96 | }]; 97 | continue; 98 | } 99 | 100 | if (info && info.owner === "tc39") { 101 | spec.organization = spec.organization ?? "Ecma International"; 102 | spec.groups = spec.groups ?? [{ 103 | name: "TC39", 104 | url: "https://tc39.es/" 105 | }]; 106 | continue; 107 | } 108 | 109 | if (info && info.owner === "khronosgroup") { 110 | spec.organization = spec.organization ?? "Khronos Group"; 111 | spec.groups = spec.groups ?? [{ 112 | name: "WebGL Working Group", 113 | url: "https://www.khronos.org/webgl/" 114 | }]; 115 | continue; 116 | } 117 | 118 | if (info && info.owner === "w3ctag") { 119 | spec.groups = spec.groups ?? [{ 120 | name: "Technical Architecture Group", 121 | url: "https://www.w3.org/2001/tag/" 122 | }]; 123 | } 124 | 125 | // For the Alliance for Open Media (AOM), let's consider that the Codec WG 126 | // is the default group, noting that it is not super clear which AOM group 127 | // develops which spec in practice: https://aomedia.org/about/ 128 | if (info && info.owner === "aomediacodec") { 129 | spec.organization = spec.organization ?? "Alliance for Open Media"; 130 | spec.groups = spec.groups ?? [{ 131 | name: "Codec Working Group", 132 | url: "https://aomedia.org/about/#codec-working-group" 133 | }] 134 | } 135 | 136 | 137 | 138 | // All specs that remain should be developed by some W3C group. 139 | spec.organization = spec.organization ?? "W3C"; 140 | 141 | if (!spec.groups) { 142 | // Get group info from validate-repos if possible to avoid having to 143 | // send individual network requests for each spec 144 | // Note: this will not yield anything for many /TR specs because we 145 | // guess the name of the repo from the shortname. 146 | if (!w3cGroups) { 147 | const report = await fetchJSON( 148 | "https://w3c.github.io/validate-repos/report.json" 149 | ); 150 | w3cGroups = report.groups; 151 | } 152 | spec.groups = Object.values(w3cGroups) 153 | .filter(group => group.repos?.find(repo => 154 | repo.fullName?.toLowerCase() === `${info.owner}/${info.name}`.toLowerCase() 155 | )) 156 | .map(group => Object.assign({ 157 | name: group.name, 158 | url: group._links.homepage.href 159 | })); 160 | } 161 | if (spec.groups.length === 0) { 162 | let groups = []; 163 | if (info.name === "svgwg") { 164 | groups.push(19480); 165 | } 166 | else if (info.type === "tr") { 167 | // Use the W3C API to find info about /TR specs 168 | const url = `https://api.w3.org/specifications/${info.name}/versions/latest`; 169 | let resp = await fetchJSON(url, options); 170 | if (!resp?._links?.deliverers) { 171 | throw new Error(`W3C API did not return deliverers for the spec`); 172 | } 173 | resp = await fetchJSON(resp._links.deliverers.href, options); 174 | 175 | if (!resp?._links?.deliverers) { 176 | throw new Error(`W3C API did not return deliverers for the spec`); 177 | } 178 | for (const deliverer of resp._links.deliverers) { 179 | groups.push(deliverer.href); 180 | } 181 | } 182 | else { 183 | // Use info in w3c.json file, which we'll either retrieve from the 184 | // repository when one is defined or directly from the spec origin 185 | let url = null; 186 | if (info.type === "github") { 187 | const octokit = new Octokit({ auth: options?.githubToken }); 188 | const cacheId = info.owner + "/" + info.name; 189 | const repo = cache[cacheId] ?? 190 | await octokit.repos.get({ owner: info.owner, repo: info.name }); 191 | cache[cacheId] = repo; 192 | const branch = repo?.data?.default_branch; 193 | if (!branch) { 194 | throw new Error(`Expected GitHub repository does not exist (${spec.url})`); 195 | } 196 | url = new URL(`https://raw.githubusercontent.com/${info.owner}/${info.name}/${branch}/w3c.json`); 197 | } 198 | else { 199 | url = new URL(spec.url); 200 | url.pathname = "/w3c.json"; 201 | } 202 | const body = await fetchJSON(url.toString(), options); 203 | 204 | // Note the "group" property is either an ID or an array of IDs 205 | groups = [body?.group].flat().filter(g => !!g); 206 | } 207 | 208 | // Retrieve info about W3C groups from W3C API 209 | // (Note the "groups" array may contain numbers, strings or API URLs) 210 | for (const id of groups) { 211 | const url = ('' + id).startsWith("https://") ? id : `https://api.w3.org/groups/${id}`; 212 | const info = await fetchJSON(url, options); 213 | spec.groups.push({ 214 | name: info.name, 215 | url: info._links.homepage.href 216 | }); 217 | } 218 | } 219 | } 220 | 221 | return specs; 222 | }; 223 | -------------------------------------------------------------------------------- /src/monitor-specs.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | /** 4 | * The monitor-specs script loops through the list of open issues in the 5 | * browser-specs repository that have a "new spec" label, checks those that 6 | * have not been reviewed for a while, and adds a comment and "review" label to 7 | * those that seems worth reviewing again because an update was detected since 8 | * last review. 9 | * 10 | * The last time that an issue was reviewed is the last time that the "review" 11 | * label was removed, which the script retrieves thanks through the GraphQL 12 | * endpoint. 13 | * 14 | * To report the list of issues that need a review (without updating the 15 | * issues), run: 16 | * node src/monitor-specs.js 17 | * 18 | * To report the list of issues that need a review **and** also update the 19 | * issues to add a comment/label, run: 20 | * node src/monitor-specs.js --update 21 | */ 22 | 23 | import sendGraphQLQuery from "./graphql.js"; 24 | import splitIssueBodyIntoSections from "./split-issue-body.js"; 25 | import loadJSON from "./load-json.js"; 26 | 27 | const config = await loadJSON("config.json"); 28 | const githubToken = config?.GITHUB_TOKEN ?? process.env.GITHUB_TOKEN; 29 | 30 | 31 | /** 32 | * The list of specs that are already known is derived from open and closed 33 | * issues in the browser-specs repository. 34 | */ 35 | const BROWSER_SPECS_REPO = { 36 | owner: "w3c", 37 | name: "browser-specs" 38 | }; 39 | 40 | 41 | /** 42 | * Script does not update GitHub issues by default 43 | */ 44 | const updateGitHubIssues = 45 | (process.argv[2] === "--update") || 46 | (process.argv[2] === "-u"); 47 | 48 | 49 | /** 50 | * Retrieve the list of specs and repositories that should not be reported 51 | * because we're already aware of them and their treatment is still pending or 52 | * we explicitly don't want to add them to browser-specs. 53 | */ 54 | async function fetchIssuesToReview() { 55 | let list = []; 56 | 57 | // Retrieve the list of open issues that have a "new spec" label and, 58 | // for each of them, the last "unlabeled" events. 59 | // Notes: 60 | // - Issues that have a "review" label get skipped for now. By definition, 61 | // a review is already pending for them. If this script is run every couple 62 | // of months, there should not be any issue in that category though... 63 | // - The code assumes that we won't ever set more than 10 different labels on 64 | // a single issue and that we'll find a "review" label removal within the 65 | // last 5 "unlabeled" events. That seems more than enough for now. 66 | let hasNextPage = true; 67 | let endCursor = ""; 68 | while (hasNextPage) { 69 | const response = await sendGraphQLQuery(`query { 70 | organization(login: "${BROWSER_SPECS_REPO.owner}") { 71 | repository(name: "${BROWSER_SPECS_REPO.name}") { 72 | issues( 73 | states: OPEN, 74 | labels: "new spec", 75 | first: 100 76 | ${endCursor ? ', after: "' + endCursor + '"' : ''} 77 | ) { 78 | pageInfo { 79 | endCursor 80 | hasNextPage 81 | } 82 | nodes { 83 | id 84 | number 85 | title 86 | body 87 | createdAt 88 | labels(first: 10) { 89 | nodes { 90 | name 91 | } 92 | } 93 | timelineItems(last: 5, itemTypes: UNLABELED_EVENT) { 94 | nodes { 95 | ... on UnlabeledEvent { 96 | label { 97 | name 98 | } 99 | createdAt 100 | } 101 | } 102 | } 103 | } 104 | } 105 | } 106 | } 107 | }`, githubToken); 108 | if (!response?.data?.organization?.repository?.issues) { 109 | console.log(JSON.stringify(response, null, 2)); 110 | throw new Error(`GraphQL error, could not retrieve the list of issues`); 111 | } 112 | const issues = response.data.organization.repository.issues; 113 | list.push(...issues.nodes 114 | .filter(issue => !issue.labels.nodes.find(label => label.name === "review")) 115 | ); 116 | hasNextPage = issues.pageInfo.hasNextPage; 117 | endCursor = issues.pageInfo.endCursor; 118 | } 119 | 120 | return list; 121 | } 122 | 123 | 124 | /** 125 | * Set a label on a GitHub issue 126 | */ 127 | const labelIds = {}; 128 | async function setIssueLabel(issue, label) { 129 | if (!labelIds[label]) { 130 | // Retrieve the label ID from GitHub if we don't know anything about it yet 131 | const labelResponse = await sendGraphQLQuery(`query { 132 | organization(login: "${BROWSER_SPECS_REPO.owner}") { 133 | repository(name: "${BROWSER_SPECS_REPO.name}") { 134 | label(name: "${label}") { 135 | id 136 | } 137 | } 138 | } 139 | }`, githubToken); 140 | if (!labelResponse?.data?.organization?.repository?.label?.id) { 141 | console.log(JSON.stringify(labelResponse, null, 2)); 142 | throw new Error(`GraphQL error, could not retrieve the "${label}" label`); 143 | } 144 | labelIds[label] = labelResponse.data.organization.repository.label.id; 145 | } 146 | 147 | // Set the label on the issue 148 | const response = await sendGraphQLQuery(`mutation { 149 | addLabelsToLabelable(input: { 150 | labelableId: "${issue.id}" 151 | labelIds: ["${labelIds[label]}"] 152 | clientMutationId: "mutatis mutandis" 153 | }) { 154 | labelable { 155 | ... on Issue { 156 | id 157 | } 158 | } 159 | } 160 | }`, githubToken); 161 | if (!response?.data?.addLabelsToLabelable?.labelable?.id) { 162 | console.log(JSON.stringify(response, null, 2)); 163 | throw new Error(`GraphQL error, could not add "${label}" label to issue #${session.number}`); 164 | } 165 | } 166 | 167 | 168 | /** 169 | * Add the "review" label to the given issue, along with a comment 170 | */ 171 | let reviewLabelId = null; 172 | async function flagIssueForReview(issue, comment) { 173 | if (comment) { 174 | // Using a variable to avoid having to deal with comment escaping issues 175 | const commentResponse = await sendGraphQLQuery(` 176 | mutation($comment: AddCommentInput!) { 177 | addComment(input: $comment) { 178 | subject { 179 | id 180 | } 181 | } 182 | }`, { 183 | comment: { 184 | subjectId: issue.id, 185 | body: comment, 186 | clientMutationId: "mutatis mutandis" 187 | } 188 | }, 189 | githubToken); 190 | if (!commentResponse?.data?.addComment?.subject?.id) { 191 | console.log(JSON.stringify(commentResponse, null, 2)); 192 | throw new Error(`GraphQL error, could not add comment to issue #${issue.number}`); 193 | } 194 | } 195 | 196 | await setIssueLabel(issue, "review"); 197 | } 198 | 199 | 200 | fetchIssuesToReview().then(async issues => { 201 | const issuesToReview = []; 202 | for (const issue of issues) { 203 | const lastReviewedEvent = issue.timelineItems.nodes.find(event => 204 | event.label.name === "review"); 205 | issue.lastReviewed = (new Date(lastReviewedEvent ? 206 | lastReviewedEvent.createdAt : 207 | issue.createdAt)) 208 | .toJSON() 209 | .slice(0, 10); 210 | 211 | const sections = splitIssueBodyIntoSections(issue.body); 212 | const urlSection = sections.find(section => section.title === 'URL'); 213 | if (!urlSection) { 214 | console.warn(`- ${issue.title} (#${issue.number}) does not follow the expected issue format`); 215 | if (updateGitHubIssues) { 216 | await setIssueLabel(issue, "invalid"); 217 | } 218 | continue; 219 | } 220 | 221 | // Retrieve the spec and check the last-modified HTTP header 222 | const response = await fetch(urlSection.value); 223 | const { headers } = response; 224 | 225 | // The CSS drafts use a proprietary header to expose the real last 226 | // modification date 227 | issue.lastRevised = (new Date(headers.get('Last-Revised') ? 228 | headers.get('Last-Revised') : 229 | headers.get('Last-Modified'))) 230 | .toJSON() 231 | .slice(0, 10); 232 | if (issue.lastRevised > issue.lastReviewed) { 233 | issuesToReview.push(issue); 234 | } 235 | // We don't need the response's body, but not reading it means Node will keep 236 | // the network request in memory, which prevents the CLI from returning until 237 | // a timeout occurs. 238 | await response.arrayBuffer(); 239 | } 240 | 241 | if (issuesToReview.length === 0) { 242 | console.log('No candidate spec to review'); 243 | return; 244 | } 245 | 246 | console.log('Candidate specs to review:'); 247 | console.log(issuesToReview 248 | .map(issue => `- ${issue.title} (#${issue.number}) updated on ${issue.lastRevised} (last reviewed on ${issue.lastReviewed})`) 249 | .join('\n') 250 | ); 251 | 252 | if (!updateGitHubIssues) { 253 | return; 254 | } 255 | 256 | console.log('Mark GitHub issues as needing a review...'); 257 | for (const issue of issuesToReview) { 258 | const comment = `The specification was updated on **${issue.lastRevised}** (last reviewed on ${issue.lastReviewed}).`; 259 | await flagIssueForReview(issue, comment); 260 | } 261 | console.log('Mark GitHub issues as needing a review... done'); 262 | }); 263 | -------------------------------------------------------------------------------- /test/specs.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Make sure that the specs.json respects the JSON schema and all constraints 3 | * that cannot be automatically linted. 4 | * 5 | * Note: The specs.json file may still need to be linted, and that's all fine! 6 | */ 7 | 8 | // Tests may run against a test version of the specs file 9 | import { describe, it } from "node:test"; 10 | import assert from "node:assert"; 11 | import path from "node:path"; 12 | import { fileURLToPath } from "node:url"; 13 | import schema from "../schema/specs.json" with { type: "json" }; 14 | import dfnsSchema from "../schema/definitions.json" with { type: "json" }; 15 | import computeInfo from "../src/compute-shortname.js"; 16 | import computePrevNext from "../src/compute-prevnext.js"; 17 | import loadJSON from "../src/load-json.js"; 18 | import Ajv from "ajv"; 19 | import addFormats from "ajv-formats"; 20 | const ajv = (new Ajv()).addSchema(dfnsSchema); 21 | addFormats(ajv); 22 | 23 | const scriptPath = path.dirname(fileURLToPath(import.meta.url)); 24 | const specsFile = process.env.testSpecs ?? path.resolve(scriptPath, "..", "specs.json"); 25 | const specs = await loadJSON(specsFile); 26 | 27 | // When an entry is invalid, the schema validator returns one error for each 28 | // "oneOf" option and one error on overall "oneOf" problem. This is confusing 29 | // for humans. The following function improves the error being returned. 30 | function clarifyErrors(errors) { 31 | if (!errors) { 32 | return errors; 33 | } 34 | 35 | // Update instancePath to drop misleading "[object Object]" 36 | errors.forEach(err => 37 | err.instancePath = err.instancePath.replace(/^\[object Object\]/, '')); 38 | 39 | if (errors.length < 2) { 40 | return errors; 41 | } 42 | 43 | // If first two errors are type errors for oneOf choices, item is neither 44 | // a string nor an object 45 | if ((errors[0].schemaPath === "#/items/oneOf/0/type") && 46 | (errors[1].schemaPath === "#/items/oneOf/1/type")) { 47 | return [ 48 | Object.assign(errors[0], { "message": "must be a string or an object" }) 49 | ]; 50 | } 51 | 52 | // Otherwise, if second error is a type error for second oneOf choice, 53 | // it means the item is actually a string that represents an invalid URL, 54 | // which the first error should capture. 55 | if (errors[1].schemaPath === "#/items/oneOf/1/type") { 56 | return [errors[0]]; 57 | } 58 | 59 | // Otherwise, item is an object that does not follow the schema, drop the 60 | // error that says that item is not a string and the error that says that it 61 | // does not meet one of the "oneOf" options. What remains should be the error 62 | // that explains why the item does not meet the schema for the object. 63 | const clearerErrors = errors.filter(error => 64 | (error.schemaPath !== "#/items/oneOf/0/type") && 65 | (error.schemaPath !== "#/items/oneOf")); 66 | 67 | // Improve an additional property message to point out the property that 68 | // should not be there (default message does not say it) 69 | clearerErrors.forEach(error => { 70 | if ((error.keyword === "additionalProperties") && 71 | error.params && error.params.additionalProperty) { 72 | error.message = "must not have additional property '" + 73 | error.params.additionalProperty + "'"; 74 | } 75 | }); 76 | 77 | // If there are no more errors left to return, roll back to the initial set 78 | // to make sure an error gets reported. That should never happen, but better 79 | // be ready for it. 80 | return (clearerErrors.length > 0) ? clearerErrors : errors; 81 | } 82 | 83 | function compareSpecs(a, b) { 84 | return a.url.localeCompare(b.url); 85 | } 86 | 87 | function specs2objects(specs) { 88 | return specs 89 | .map(spec => (typeof spec === "string") ? 90 | { 91 | url: new URL(spec.split(" ")[0]).toString(), 92 | seriesComposition: (spec.split(' ')[1] === "delta") ? "delta" : "full", 93 | forceCurrent: (spec.split(' ')[1] === "current"), 94 | multipage: (spec.split(' ')[1] === "multipage"), 95 | } : 96 | Object.assign({}, spec, { url: new URL(spec.url).toString() })) 97 | .filter((spec, idx, list) => 98 | !list.find((s, i) => i < idx && compareSpecs(s, spec) === 0)); 99 | } 100 | 101 | function specs2LinkedList(specs) { 102 | return specs2objects(specs) 103 | .map(s => Object.assign({}, s, computeInfo(s.shortname || s.url, s.forkOf))) 104 | .map((s, _, list) => Object.assign({}, s, computePrevNext(s, list))); 105 | } 106 | 107 | function check(specs) { 108 | const validate = ajv.compile(schema); 109 | const isValid = validate(specs, { format: "full" }); 110 | const msg = ajv.errorsText(clarifyErrors(validate.errors), { 111 | dataVar: "specs", separator: "\n" 112 | }); 113 | return msg; 114 | } 115 | 116 | 117 | describe("The `specs.json` list", () => { 118 | describe("has a JSON schema which", () => { 119 | it("is valid", () => { 120 | const isSchemaValid = ajv.validateSchema(schema); 121 | assert.ok(isSchemaValid); 122 | }); 123 | 124 | it("rejects list if it is not an array", () => { 125 | const specs = 0; 126 | assert.strictEqual(check(specs), "specs must be array"); 127 | }); 128 | 129 | it("rejects an empty list", () => { 130 | const specs = []; 131 | assert.strictEqual(check(specs), "specs must NOT have fewer than 1 items"); 132 | }); 133 | 134 | it("rejects items that have a wrong type", () => { 135 | const specs = [0]; 136 | assert.strictEqual(check(specs), "specs/0 must be a string or an object"); 137 | }); 138 | 139 | it("rejects spec objects without URL", () => { 140 | const specs = [{}]; 141 | assert.strictEqual(check(specs), "specs/0 must have required property 'url'"); 142 | }); 143 | 144 | it("rejects spec objects with an invalid URL", () => { 145 | const specs = [{ url: "invalid" }]; 146 | assert.strictEqual(check(specs), "specs/0/url must match format \"uri\""); 147 | }); 148 | 149 | it("rejects spec objects with additional properties", () => { 150 | const specs = [{ url: "https://example.org/", invalid: "test" }]; 151 | assert.strictEqual(check(specs), "specs/0 must not have additional property 'invalid'"); 152 | }); 153 | }); 154 | 155 | it("respects the JSON schema", () => { 156 | assert.strictEqual(check(specs), 'No errors'); 157 | }); 158 | 159 | it("only points at valid URLs", () => { 160 | specs.forEach(spec => (typeof spec === "string") ? 161 | new URL(spec.split(" ")[0]).toString() : null); 162 | assert.ok(true); 163 | }) 164 | 165 | it("only contains specs for which a shortname can be generated", () => { 166 | // Convert entries to spec objects and compute shortname 167 | const specsWithoutShortname = specs2objects(specs) 168 | .map(spec => Object.assign({}, spec, computeInfo(spec.shortname || spec.url, spec.forkOf))) 169 | .filter(spec => !spec.shortname); 170 | 171 | // No exception thrown? That means we're good! 172 | // We'll just check that there aren't any spec with an empty name and report 173 | // the first one (That should never happen since computeInfo would throw but 174 | // better be safe) 175 | assert.strictEqual(specsWithoutShortname[0], undefined); 176 | }); 177 | 178 | it("does not have a delta spec without a previous full spec", () => { 179 | const fullPrevious = (spec, list) => { 180 | const previous = list.find(s => s.shortname === spec.seriesPrevious); 181 | if (previous && previous.seriesComposition && previous.seriesComposition !== "full") { 182 | return fullPrevious(previous, list); 183 | } 184 | return previous; 185 | }; 186 | const deltaWithoutFull = specs2LinkedList(specs) 187 | .filter((s, _, list) => s.seriesComposition === "delta" && !fullPrevious(s, list)); 188 | assert.strictEqual(deltaWithoutFull[0], undefined); 189 | }); 190 | 191 | it("does not have a delta spec flagged as 'current'", () => { 192 | const deltaCurrent = specs2LinkedList(specs) 193 | .filter(s => s.forceCurrent && s.seriesComposition === "delta"); 194 | assert.strictEqual(deltaCurrent[0], undefined); 195 | }); 196 | 197 | it("does not have a fork spec flagged as 'current'", () => { 198 | const forkCurrent = specs2LinkedList(specs) 199 | .filter(s => s.forceCurrent && s.forkOf); 200 | assert.strictEqual(forkCurrent[0], undefined); 201 | }); 202 | 203 | it("has only one spec flagged as 'current' per series shortname", () => { 204 | const linkedList = specs2LinkedList(specs); 205 | const problematicCurrent = linkedList 206 | .filter(s => s.forceCurrent) 207 | .filter(s => s !== linkedList.find(p => 208 | p.series.shortname === s.series.shortname && p.forceCurrent)); 209 | assert.strictEqual(problematicCurrent[0], undefined); 210 | }); 211 | 212 | it("does not have a spec with a 'fork' seriesComposition property", () => { 213 | const wrong = specs.find(s => s.seriesComposition === "fork"); 214 | assert.strictEqual(wrong, undefined); 215 | }); 216 | 217 | it("does not have a 'delta fork' spec", () => { 218 | const wrong = specs.find(s => s.forkOf && s.seriesComposition === "delta"); 219 | assert.strictEqual(wrong, undefined); 220 | }); 221 | 222 | it("only has fork specs that reference existing specs", () => { 223 | const linkedList = specs2LinkedList(specs); 224 | const forkWithoutFull = linkedList.filter((s, _, list) => s.forkOf && 225 | !linkedList.find(spec => spec.shortname === s.forkOf)); 226 | assert.strictEqual(forkWithoutFull[0], undefined); 227 | }); 228 | }); 229 | -------------------------------------------------------------------------------- /test/compute-shortname.js: -------------------------------------------------------------------------------- 1 | import { describe, it } from "node:test"; 2 | import assert from "node:assert"; 3 | import computeInfo from "../src/compute-shortname.js"; 4 | 5 | describe("compute-shortname module", () => { 6 | 7 | describe("shortname property", () => { 8 | function assertName(url, name) { 9 | assert.equal(computeInfo(url).shortname, name); 10 | } 11 | 12 | it("handles TR URLs", () => { 13 | assertName("https://www.w3.org/TR/the-spec/", "the-spec"); 14 | }); 15 | 16 | it("handles WHATWG URLs", () => { 17 | assertName("https://myspec.spec.whatwg.org/whatever/", "myspec"); 18 | }); 19 | 20 | it("handles ECMAScript proposal URLs", () => { 21 | assertName("https://tc39.es/proposal-smartidea/", "tc39-smartidea"); 22 | }); 23 | 24 | it("handles Khronos Group WebGL extensions", () => { 25 | assertName("https://registry.khronos.org/webgl/extensions/EXT_wow32/", "EXT_wow32"); 26 | }); 27 | 28 | it("handles URLs of drafts on GitHub", () => { 29 | assertName("https://wicg.github.io/whataspec/", "whataspec"); 30 | }); 31 | 32 | it("handles URLs of WebAppSec drafts on GitHub", () => { 33 | assertName("https://w3c.github.io/webappsec-ultrasecret/", "ultrasecret"); 34 | }); 35 | 36 | it("handles extension specs defined in the same repo as the main spec (singular)", () => { 37 | assertName("https://w3c.github.io/specwithext/extension.html", "specwithext-extension"); 38 | }); 39 | 40 | it("handles extension specs defined in the same repo as the main spec (plural)", () => { 41 | assertName("https://w3c.github.io/specwithext/extensions.html", "specwithext-extensions"); 42 | }); 43 | 44 | it("handles CSS WG draft URLs", () => { 45 | assertName("https://drafts.csswg.org/css-is-aweso/", "css-is-aweso"); 46 | }); 47 | 48 | it("handles CSS FXTF draft URLs", () => { 49 | assertName("https://drafts.fxtf.org/megafx/", "megafx"); 50 | }); 51 | 52 | it("handles CSS Houdini TF draft URLs", () => { 53 | assertName("https://drafts.css-houdini.org/magic/", "magic"); 54 | }); 55 | 56 | it("handles IETF RFCs", () => { 57 | assertName("https://www.rfc-editor.org/rfc/rfc2397", "rfc2397"); 58 | }); 59 | 60 | it("handles IETF group drafts", () => { 61 | assertName("https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis", "rfc6265bis"); 62 | }); 63 | 64 | it("handles IETF group drafts from individuals", () => { 65 | assertName("https://datatracker.ietf.org/doc/html/draft-cutler-httpbis-partitioned-cookies", "partitioned-cookies"); 66 | }); 67 | 68 | it("handles (simple) IETF individual drafts", () => { 69 | assertName("https://datatracker.ietf.org/doc/html/draft-zern-webp/", "webp"); 70 | }); 71 | 72 | it("handles SVG draft URLs", () => { 73 | assertName("https://svgwg.org/specs/module/", "svg-module"); 74 | }); 75 | 76 | it("handles SVG draft URLs that have an svg prefix", () => { 77 | assertName("https://svgwg.org/specs/svg-module/", "svg-module"); 78 | }); 79 | 80 | it("returns the name when given one", () => { 81 | assertName("myname", "myname"); 82 | }); 83 | 84 | it("preserves case", () => { 85 | assertName("https://www.w3.org/TR/IndexedDB/", "IndexedDB"); 86 | }); 87 | 88 | it("includes the version number in the name (int)", () => { 89 | assertName("https://www.w3.org/TR/level-42/", "level-42"); 90 | }); 91 | 92 | it("includes the version number in the name (float)", () => { 93 | assertName("https://www.w3.org/TR/level-4.2/", "level-4.2"); 94 | }); 95 | 96 | it("handles multi-specs repositories", () => { 97 | assertName("https://w3c.github.io/sdw/bp/", "sdw-bp"); 98 | }); 99 | 100 | it("throws when URL is a dated TR one", () => { 101 | assert.throws( 102 | () => computeInfo("https://www.w3.org/TR/2017/CR-presentation-api-20170601/"), 103 | /^Cannot extract meaningful name from /); 104 | }); 105 | 106 | it("throws when URL that does not follow a known pattern", () => { 107 | assert.throws( 108 | () => computeInfo("https://www.w3.org/2022/12/webmediaapi.html"), 109 | /^Cannot extract meaningful name from /); 110 | }); 111 | 112 | it("throws when name contains non basic Latin characters", () => { 113 | assert.throws( 114 | () => computeInfo("https://www.w3.org/TR/thé-ou-café/"), 115 | /^Specification name contains unexpected characters/); 116 | }); 117 | 118 | it("throws when name contains a dot outside of a level definition", () => { 119 | assert.throws( 120 | () => computeInfo("https://w3c.github.io/spec.name/"), 121 | /^Specification name contains unexpected characters/); 122 | }); 123 | 124 | it("handles non separated fractional level", () => { 125 | assertName("https://www.w3.org/TR/level4.2/", "level4.2"); 126 | }); 127 | 128 | it("handles forks", () => { 129 | const url = "https://www.w3.org/TR/extension/"; 130 | assert.equal(computeInfo(url, "source-2").shortname, "source-2-fork-extension"); 131 | }); 132 | }); 133 | 134 | 135 | describe("series' shortname property", () => { 136 | function assertSeries(url, shortname) { 137 | assert.equal(computeInfo(url).series.shortname, shortname); 138 | } 139 | 140 | it("parses form 'shortname-X'", () => { 141 | assertSeries("spec-4", "spec"); 142 | }); 143 | 144 | it("parses form 'shortname-XXX'", () => { 145 | assertSeries("horizon-2050", "horizon"); 146 | }); 147 | 148 | it("parses form 'shortname-X.Y'", () => { 149 | assertSeries("pi-3.1", "pi"); 150 | }); 151 | 152 | it("parses form 'shortnameX'", () => { 153 | assertSeries("loveu2", "loveu"); 154 | }); 155 | 156 | it("parses form 'shortnameXY'", () => { 157 | assertSeries("answer42", "answer"); 158 | }); 159 | 160 | it("parses form 'shortnameX.Y'", () => { 161 | assertSeries("answer4.2", "answer"); 162 | }); 163 | 164 | it("parses form 'rdfXY-something'", () => { 165 | assertSeries("rdf12-something", "rdf-something"); 166 | }); 167 | 168 | it("parses form 'sparqlXY-something'", () => { 169 | assertSeries("sparql12-something", "sparql-something"); 170 | }); 171 | 172 | it("parses form 'shaclXY-something'", () => { 173 | assertSeries("shacl12-something", "shacl-something"); 174 | }); 175 | 176 | it("includes final digits when they do not seem to be a level", () => { 177 | assertSeries("cors-rfc1918", "cors-rfc1918"); 178 | }); 179 | 180 | it("does not get lost with inner digits", () => { 181 | assertSeries("my-2-cents", "my-2-cents"); 182 | }); 183 | 184 | it("automatically updates CSS specs with an old 'css3-' name", () => { 185 | assertSeries("css3-conditional", "css-conditional"); 186 | }); 187 | 188 | it("preserves ECMA spec numbers", () => { 189 | assertSeries("ecma-402", "ecma-402"); 190 | }); 191 | 192 | it("preserves ISO spec numbers", () => { 193 | assertSeries("iso18181-2", "iso18181-2"); 194 | }); 195 | 196 | it("preserves digits at the end of WebGL extension names", () => { 197 | assertSeries("https://registry.khronos.org/webgl/extensions/EXT_wow32/", "EXT_wow32"); 198 | }); 199 | 200 | it("handles forks", () => { 201 | const url = "https://www.w3.org/TR/the-ext/"; 202 | assert.equal(computeInfo(url, "source-2").series.shortname, "source"); 203 | }); 204 | }); 205 | 206 | 207 | describe("seriesVersion property", () => { 208 | function assertSeriesVersion(url, level) { 209 | assert.equal(computeInfo(url).seriesVersion, level); 210 | } 211 | function assertNoSeriesVersion(url) { 212 | assert.equal(computeInfo(url).hasOwnProperty("seriesVersion"), false, 213 | "did not expect to see a seriesVersion property"); 214 | } 215 | 216 | it("finds the right series version for form 'shortname-X'", () => { 217 | assertSeriesVersion("spec-4", "4"); 218 | }); 219 | 220 | it("finds the right series version for form 'shortname-XXX'", () => { 221 | assertSeriesVersion("horizon-2050", "2050"); 222 | }); 223 | 224 | it("finds the right series version for form 'shortname-X.Y'", () => { 225 | assertSeriesVersion("pi-3.1", "3.1"); 226 | }); 227 | 228 | it("finds the right series version for form 'shortnameX'", () => { 229 | assertSeriesVersion("loveu2", "2"); 230 | }); 231 | 232 | it("finds the right series version for form 'shortnameXY'", () => { 233 | assertSeriesVersion("answer42", "4.2"); 234 | }); 235 | 236 | it("finds the right series version for form 'rdfXY-something'", () => { 237 | assertSeriesVersion("rdf12-something", "1.2"); 238 | }); 239 | 240 | it("finds the right series version for form 'sparqlXY-something'", () => { 241 | assertSeriesVersion("sparql12-something", "1.2"); 242 | }); 243 | 244 | it("does not report any series version when there are none", () => { 245 | assertNoSeriesVersion("nolevel"); 246 | }); 247 | 248 | it("does not report a series version when final digits do not seem to be one", () => { 249 | assertNoSeriesVersion("cors-rfc1918"); 250 | }); 251 | 252 | it("does not get lost with inner digits", () => { 253 | assertNoSeriesVersion("my-2-cents"); 254 | }); 255 | 256 | it("does not confuse an ECMA spec number with a series version", () => { 257 | assertNoSeriesVersion("ecma-402"); 258 | }); 259 | 260 | it("does not confuse a TC39 proposal number with a series version", () => { 261 | assertNoSeriesVersion("tc39-arraybuffer-base64"); 262 | }); 263 | 264 | it("does not confuse an ISO spec number with a series version", () => { 265 | assertNoSeriesVersion("iso18181-2"); 266 | }); 267 | 268 | it("does not confuse digits at the end of a WebGL extension spec with a series version", () => { 269 | assertNoSeriesVersion("https://registry.khronos.org/webgl/extensions/EXT_wow32/"); 270 | }); 271 | 272 | it("handles forks", () => { 273 | const url = "https://www.w3.org/TR/the-ext/"; 274 | assert.equal(computeInfo(url, "source-2").seriesVersion, "2"); 275 | }); 276 | }); 277 | }); 278 | --------------------------------------------------------------------------------