├── .babelrc
├── .eslintrc
├── .gitignore
├── .prettierignore
├── .prettierrc.json
├── CODE_OF_CONDUCT.md
├── LICENSE
├── README.md
├── data
│   └── states.geojson
├── foreign
│   └── project.sh
├── index.html
├── package-lock.json
├── package.json
├── project.js
├── src
│   ├── RandomPoints.ts
│   ├── TriFeather.ts
│   ├── TriMap.ts
│   ├── geo-albers-usa-LICENSE
│   ├── geo-albers-usa-territories.js
│   └── index.js
├── tests
│   └── states.json
└── vite.config.cjs
/.babelrc:
--------------------------------------------------------------------------------
1 | {
2 | "presets": ["@babel/preset-env"],
3 |
4 | "plugins": [
5 | [
6 | "@babel/plugin-transform-runtime",
7 | {
8 | "absoluteRuntime": false,
9 | "corejs": false,
10 | "helpers": true,
11 | "regenerator": true,
12 | "useESModules": false,
13 | "version": "7.0.0-beta.0"
14 | }
15 | ]
16 | ]
17 | }
18 |
--------------------------------------------------------------------------------
/.eslintrc:
--------------------------------------------------------------------------------
1 | {
2 | "parser": "babel-eslint",
3 | "globals": {
4 | "define": false
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | lerna-debug.log*
8 |
9 | # Diagnostic reports (https://nodejs.org/api/report.html)
10 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
11 |
12 | # Runtime data
13 | pids
14 | *.pid
15 | *.seed
16 | *.pid.lock
17 |
18 | # Directory for instrumented libs generated by jscoverage/JSCover
19 | lib-cov
20 |
21 | # Coverage directory used by tools like istanbul
22 | coverage
23 | *.lcov
24 |
25 | # nyc test coverage
26 | .nyc_output
27 |
28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
29 | .grunt
30 |
31 | # Bower dependency directory (https://bower.io/)
32 | bower_components
33 |
34 | # node-waf configuration
35 | .lock-wscript
36 |
37 | # Compiled binary addons (https://nodejs.org/api/addons.html)
38 | build/Release
39 |
40 | # Dependency directories
41 | node_modules/
42 | jspm_packages/
43 |
44 | # Snowpack dependency directory (https://snowpack.dev/)
45 | web_modules/
46 |
47 | # TypeScript cache
48 | *.tsbuildinfo
49 |
50 | # Optional npm cache directory
51 | .npm
52 |
53 | # Optional eslint cache
54 | .eslintcache
55 |
56 | # Microbundle cache
57 | .rpt2_cache/
58 | .rts2_cache_cjs/
59 | .rts2_cache_es/
60 | .rts2_cache_umd/
61 |
62 | # Optional REPL history
63 | .node_repl_history
64 |
65 | # Output of 'npm pack'
66 | *.tgz
67 |
68 | # Yarn Integrity file
69 | .yarn-integrity
70 |
71 | # dotenv environment variables file
72 | .env
73 | .env.test
74 |
75 | # parcel-bundler cache (https://parceljs.org/)
76 | .cache
77 | .parcel-cache
78 |
79 | # Next.js build output
80 | .next
81 | out
82 |
83 | # Nuxt.js build / generate output
84 | .nuxt
85 | dist
86 |
87 | # Gatsby files
88 | .cache/
89 | # Comment in the public line if your project uses Gatsby and not Next.js
90 | # https://nextjs.org/blog/next-9-1#public-directory-support
91 | # public
92 |
93 | # vuepress build output
94 | .vuepress/dist
95 |
96 | # Serverless directories
97 | .serverless/
98 |
99 | # FuseBox cache
100 | .fusebox/
101 |
102 | # DynamoDB Local files
103 | .dynamodb/
104 |
105 | # TernJS port file
106 | .tern-port
107 |
108 | # Stores VSCode versions used for testing VSCode extensions
109 | .vscode-test
110 |
111 | # yarn v2
112 | .yarn/cache
113 | .yarn/unplugged
114 | .yarn/build-state.yml
115 | .yarn/install-state.gz
116 | .pnp.*
117 | data
118 | index2.html
119 | tests/states.feather
120 | tests/states.trifeather
121 |
--------------------------------------------------------------------------------
/.prettierignore:
--------------------------------------------------------------------------------
1 | # Ignore artifacts:
2 | build
3 | coverage
4 | dist
5 | node_modules
6 |
--------------------------------------------------------------------------------
/.prettierrc.json:
--------------------------------------------------------------------------------
1 | {}
2 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | We as members, contributors, and leaders pledge to make participation in our
6 | community a harassment-free experience for everyone, regardless of age, body
7 | size, visible or invisible disability, ethnicity, sex characteristics, gender
8 | identity and expression, level of experience, education, socio-economic status,
9 | nationality, personal appearance, race, religion, or sexual identity
10 | and orientation.
11 |
12 | We pledge to act and interact in ways that contribute to an open, welcoming,
13 | diverse, inclusive, and healthy community.
14 |
15 | ## Our Standards
16 |
17 | Examples of behavior that contributes to a positive environment for our
18 | community include:
19 |
20 | - Demonstrating empathy and kindness toward other people
21 | - Being respectful of differing opinions, viewpoints, and experiences
22 | - Giving and gracefully accepting constructive feedback
23 | - Accepting responsibility and apologizing to those affected by our mistakes,
24 | and learning from the experience
25 | - Focusing on what is best not just for us as individuals, but for the
26 | overall community
27 |
28 | Examples of unacceptable behavior include:
29 |
30 | - The use of sexualized language or imagery, and sexual attention or
31 | advances of any kind
32 | - Trolling, insulting or derogatory comments, and personal or political attacks
33 | - Public or private harassment
34 | - Publishing others' private information, such as a physical or email
35 | address, without their explicit permission
36 | - Other conduct which could reasonably be considered inappropriate in a
37 | professional setting
38 |
39 | ## Enforcement Responsibilities
40 |
41 | Community leaders are responsible for clarifying and enforcing our standards of
42 | acceptable behavior and will take appropriate and fair corrective action in
43 | response to any behavior that they deem inappropriate, threatening, offensive,
44 | or harmful.
45 |
46 | Community leaders have the right and responsibility to remove, edit, or reject
47 | comments, commits, code, wiki edits, issues, and other contributions that are
48 | not aligned to this Code of Conduct, and will communicate reasons for moderation
49 | decisions when appropriate.
50 |
51 | ## Scope
52 |
53 | This Code of Conduct applies within all community spaces, and also applies when
54 | an individual is officially representing the community in public spaces.
55 | Examples of representing our community include using an official e-mail address,
56 | posting via an official social media account, or acting as an appointed
57 | representative at an online or offline event.
58 |
59 | ## Enforcement
60 |
61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
62 | reported to the community leaders responsible for enforcement at
63 | bmschmidt@gmail.com.
64 | All complaints will be reviewed and investigated promptly and fairly.
65 |
66 | All community leaders are obligated to respect the privacy and security of the
67 | reporter of any incident.
68 |
69 | ## Enforcement Guidelines
70 |
71 | Community leaders will follow these Community Impact Guidelines in determining
72 | the consequences for any action they deem in violation of this Code of Conduct:
73 |
74 | ### 1. Correction
75 |
76 | **Community Impact**: Use of inappropriate language or other behavior deemed
77 | unprofessional or unwelcome in the community.
78 |
79 | **Consequence**: A private, written warning from community leaders, providing
80 | clarity around the nature of the violation and an explanation of why the
81 | behavior was inappropriate. A public apology may be requested.
82 |
83 | ### 2. Warning
84 |
85 | **Community Impact**: A violation through a single incident or series
86 | of actions.
87 |
88 | **Consequence**: A warning with consequences for continued behavior. No
89 | interaction with the people involved, including unsolicited interaction with
90 | those enforcing the Code of Conduct, for a specified period of time. This
91 | includes avoiding interactions in community spaces as well as external channels
92 | like social media. Violating these terms may lead to a temporary or
93 | permanent ban.
94 |
95 | ### 3. Temporary Ban
96 |
97 | **Community Impact**: A serious violation of community standards, including
98 | sustained inappropriate behavior.
99 |
100 | **Consequence**: A temporary ban from any sort of interaction or public
101 | communication with the community for a specified period of time. No public or
102 | private interaction with the people involved, including unsolicited interaction
103 | with those enforcing the Code of Conduct, is allowed during this period.
104 | Violating these terms may lead to a permanent ban.
105 |
106 | ### 4. Permanent Ban
107 |
108 | **Community Impact**: Demonstrating a pattern of violation of community
109 | standards, including sustained inappropriate behavior, harassment of an
110 | individual, or aggression toward or disparagement of classes of individuals.
111 |
112 | **Consequence**: A permanent ban from any sort of public interaction within
113 | the community.
114 |
115 | ## Attribution
116 |
117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage],
118 | version 2.0, available at
119 | [https://www.contributor-covenant.org/version/2/0/code_of_conduct.html][v2.0].
120 |
121 | Community Impact Guidelines were inspired by
122 | [Mozilla's code of conduct enforcement ladder][mozilla coc].
123 |
124 | For answers to common questions about this code of conduct, see the FAQ at
125 | [https://www.contributor-covenant.org/faq][faq]. Translations are available
126 | at [https://www.contributor-covenant.org/translations][translations].
127 |
128 | [homepage]: https://www.contributor-covenant.org
129 | [v2.0]: https://www.contributor-covenant.org/version/2/0/code_of_conduct.html
130 | [mozilla coc]: https://github.com/mozilla/diversity
131 | [faq]: https://www.contributor-covenant.org/faq
132 | [translations]: https://www.contributor-covenant.org/translations
133 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright 2022 Benjamin Schmidt
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
4 |
5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
6 |
7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
8 |
9 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Pre-triangulated shapefiles
2 |
3 | This library defines a binary file format for collections of projected
4 | polygon map data derived from geojson feature collections.
5 |
6 | It builds on the Apache Arrow project's `feather` format; each feature from
7 | a feature collection is stored as a single row, and all keys are stored as columns.
8 | It attempts to be clever about coercing strings to dictionaries, etc.
9 |
10 | Rather than store coordinates, it uses the mapbox [earcut library](https://github.com/mapbox/earcut)
11 | to triangulate polygons, and stores those triangles directly. The combination
12 | of this strategy and Apache Arrow means that the binary data can be pushed
13 | straight to a GPU for plotting without any JavaScript-side parsing, and without
14 | an extraordinary size penalty.
15 |
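16 | For instance (a minimal sketch, not a documented API; the `trifeather` and
17 | `regl` objects here are assumed), a feature's `vertices` column can be handed
18 | to regl as a triangle index buffer with no intermediate parsing:
19 |
20 | ```js
21 | // Row 0 is a dummy row holding the shared coordinate buffer; features start at 1.
22 | const feature = trifeather.t.get(1);
23 | const elements = regl.elements({
24 |   primitive: "triangles",
25 |   data: feature.vertices,
26 |   type: "uint" + feature.coord_resolution, // 8, 16, or 32 bits per index
27 |   count: (feature.vertices.length / feature.coord_resolution) * 8,
28 | });
29 | ```
30 |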
31 | A TriFeather object can be instantiated from _either_ the binary file format
32 |
33 | ```js
34 | new TriFeather(
35 |   new Uint8Array(await (await fetch("file.feather")).arrayBuffer())
36 | );
37 | ```
38 |
39 | or from a parsed feature collection plus a d3 projection (the projection
40 | argument is required; `from_feature_collection` throws without one):
41 |
42 | ```js
43 | TriFeather.from_feature_collection(
44 |   await fetch("file.geojson").then((d) => d.json()),
45 |   projection
46 | );
47 | ```
48 |
49 | Storing as triangles also happens to allow
50 | much faster generation of random points in polygons than traditional methods.
51 |
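52 | The core trick, as implemented in `src/RandomPoints.ts`: choose a triangle with
53 | probability proportional to its area, then sample uniformly inside it by
54 | folding a random point in the spanned parallelogram back into the triangle:
55 |
56 | ```js
57 | // Uniform sample inside the triangle (a, b, c).
58 | function random_point([ax, ay], [bx, by], [cx, cy]) {
59 |   let [u1, u2] = [Math.random(), Math.random()];
60 |   if (u1 + u2 > 1) {
61 |     // The point fell in the far half of the parallelogram; reflect it back.
62 |     u1 = 1 - u1;
63 |     u2 = 1 - u2;
64 |   }
65 |   return [ax + u1 * (bx - ax) + u2 * (cx - ax), ay + u1 * (by - ay) + u2 * (cy - ay)];
66 | }
67 | ```
68 |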
69 | # Maps
70 |
71 | I've built some mapping functions into the library as `TriMap.ts`; a sketch of its use follows.
72 |
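73 | ```js
74 | // A hypothetical setup (the import paths, canvas wiring, and `trifeather`
75 | // layer are assumptions, not a documented API): TriMap takes a container, an
76 | // array of TriFeather layers, and a regl context.
77 | import createREGL from "regl";
78 | import TriMap from "./src/TriMap";
79 |
80 | const regl = createREGL(document.querySelector("canvas"));
81 | const map = new TriMap(document.querySelector("#map"), [trifeather], regl);
82 | map.tick(); // draw the polygon layers with shaded edges
83 | ```
84 |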
85 | # Node triangulation
86 |
87 | The `project.js` program converts geojson to the trifeather format.
88 |
89 | It currently only accepts feature collections in which every constituent
90 | element is a polygon or multipolygon.
91 |
92 | To triangulate geojson files, pass them as arguments. Multiple files can be converted at once.
93 |
94 | ```sh
95 | node project.js --files tests/states.json
96 | ```
97 |
98 | To instead generate random points inside the polygons based on count fields in the geojson properties, pass those field names with `--counts`.
99 |
100 | ```sh
101 | node project.js --files tests/states.json --counts counts
102 | ```
103 |
--------------------------------------------------------------------------------
/foreign/project.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | NAME=$1
3 |
4 | # Round-trip through topojson to simplify the shapes before triangulating.
5 | cat "$NAME.geojson" | geo2topo | toposimplify -s .1 | topo2geo - > "${NAME}2.geojson"
6 |
7 | node project.js --files "${NAME}2.geojson"
--------------------------------------------------------------------------------
/index.html:
--------------------------------------------------------------------------------
1 |
15 |
16 |
17 | FOO
18 |
19 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "private": false,
3 | "name": "trifeather",
4 | "version": "1.3.0",
5 | "description": "Pre-triangulated feather files",
6 | "unpkg": "dist/trifeather.umd.js",
7 | "files": [
8 | "dist"
9 | ],
10 | "bin": "project.js",
11 | "main": "./dist/trifeather.umd.js",
12 | "module": "./dist/trifeather.es.js",
13 | "exports": {
14 | ".": {
15 | "import": "./dist/trifeather.es.js",
16 | "require": "./dist/trifeather.umd.js"
17 | }
18 | },
19 | "type": "module",
20 | "scripts": {
21 | "build": "vite build",
22 | "dev": "vite",
23 | "prettier": "prettier --write .",
24 | "test": "echo \"Error: no test specified\" && exit 1"
25 | },
26 | "keywords": [
27 | "Cartography",
28 | "WebGL"
29 | ],
30 | "author": "bmschmidt@gmail.com",
31 | "license": "MIT",
32 | "dependencies": {
33 | "commander": "^6.2.1",
34 | "d3-array": "^3.0.2",
35 | "d3-color": "^3.0.1",
36 | "d3-geo-projection": "^4.0.0",
37 | "d3-scale": "^4.0.0",
38 | "earcut": "^2.2.3",
39 | "geo-albers-usa-territories": "^0.1.0",
40 | "polygon-clipping": "^0.15.3"
41 | },
42 | "devDependencies": {
43 | "@types/geojson": "^7946.0.10",
44 | "apache-arrow": "^10.0.1",
45 | "d3-geo": "^3.0.1",
46 | "d3-random": "^3.0.1",
47 | "d3-zoom": "^3.0.0",
48 | "eslint": "^4.13.1",
49 | "eslint-loader": "^1.9.0",
50 | "JSONStream": "^1.3.5",
51 | "lodash": "^4.17.14",
52 | "prettier": "2.8.1",
53 | "regl": "^2.1.0",
54 | "vite": "^2.5.2",
55 | "yargs": "^13.2.4"
56 | }
57 | }
58 |
--------------------------------------------------------------------------------
/project.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | import TriFeather from "./src/TriFeather.js";
4 | import { random_points } from "./src/RandomPoints.js";
5 | import JSONStream from "JSONStream";
6 | import * as d3geo from "d3-geo";
7 | import * as geoproj from "d3-geo-projection";
8 | import * as geoAlbersUsaTerritories from "./src/geo-albers-usa-territories.js";
9 | import pkg from "commander";
10 | import { tableToIPC } from "apache-arrow";
11 |
12 | const { program } = pkg;
13 |
14 | const projections = {
15 |   albersUsa: geoAlbersUsaTerritories.geoAlbersUsaTerritories,
16 |   mollweide: geoproj.geoMollweide,
17 | mercator: d3geo.geoMercator,
18 | };
19 |
20 | program.version("1.1.1");
21 |
22 | program
23 | .option(
24 | "-c, --counts [counts...]",
25 | "Count fields to use for dot-density; must be keys in the geojson properties"
26 | )
27 | .option(
28 | "-n, --names [names...]",
29 | `Column names to split count fields into using delimiter. E.g., {'white_non-hispanic': 31}
30 | with names ['race', 'ethnicity'] will add 31 points with "race" of "white" and "ethnicity" of "non-hispanic." If left blank,
31 | the column will be named "category" and no splitting will occur.`
32 | )
33 | .option(
34 |     "-d, --delimiter <delimiter>",
35 |     `Delimiter character, used in conjunction with names. Defaults to "_" when more than one name is passed.`
36 | )
37 | .option(
38 | "-k, --keep [keep...]",
39 | "Geojson properties to pass into derived points without alteration. (Saved as utf8 regardless of type.)"
40 | )
41 | .option(
42 |     "-p, --projection <projection>",
43 | "Projection. String representing d3-geo or d3-geo-projection factory function. Will be scaled up by a factor of 1e12 to support zoom.",
44 | "geoAlbersUsaTerritories"
45 | )
46 |   .requiredOption("-f, --files <files...>", "geojson files to parse");
47 |
48 | program.parse(process.argv);
49 | // Look in several places.
50 | const proj =
51 | d3geo[program.projection] ||
52 | geoAlbersUsaTerritories[program.projection] ||
53 | geoproj[program.projection];
54 |
55 | console.log(proj);
56 | const projection = proj().scale(1);
57 |
58 | import fs from "fs";
59 |
60 | const fnames = program.opts()["files"];
61 | const counts = program.opts()["counts"];
62 | const names = program.opts()["names"] || ["category"];
63 | const delimiter =
64 |   program.opts()["delimiter"] ||
65 |   // Parenthesized so that an explicitly passed delimiter is never ignored.
66 |   (names.length == 1 ? "ARBITRARY STRING_THAT_WONT_OCCUR IN TEXT" : "_");
67 | const keep = program.opts()["keep"] || [];
68 |
69 | for (let fname of fnames) {
70 | console.log(fname);
71 | if (!(fname.endsWith(".json") || fname.endsWith(".geojson"))) {
72 | throw "Suffix should be 'json' or 'geojson', but " + fname + " found";
73 | }
74 |
75 | const destname = fname
76 | .replace(".geojson", counts ? ".feather" : ".trifeather")
77 | .replace(".json", counts ? ".feather" : ".trifeather");
78 |
79 | if (fs.existsSync(destname)) {
80 | console.log("Skipping " + destname + " because it already exists");
81 | continue;
82 | }
83 |
84 | /*var data = fs.readFileSync(fname, 'utf-8');
85 | console.log(fname, "loaded")
86 | const feature_collection = JSON.parse(data)
87 | */
88 |
89 | let stream = fs.createReadStream(fname, { encoding: "utf8" });
90 | let parser = JSONStream.parse("$*");
91 | stream.pipe(parser);
92 |
93 | let feature_collection_promise = new Promise((resolve, reject) => {
94 | let d = {};
95 | parser
96 | .on("data", (data) => {
97 | d[data.key] = data.value;
98 | })
99 | .on("end", () => {
100 | // I don't really get streaming in node, so I'm wrapping
101 | // it in a promise.
102 | resolve(d);
103 | });
104 | });
105 |
106 | const feature_collection = await feature_collection_promise;
107 |
108 | console.log(fname, "parsed... Creating triangulation");
109 |
110 | let trifeather = TriFeather.from_feature_collection(
111 | feature_collection,
112 | projection
113 | );
114 | console.log(fname, "triangulated");
115 |
116 | let t;
117 | if (!counts) {
118 | t = trifeather.t;
119 | } else {
120 |     t = random_points(trifeather, counts, 1, keep, names, delimiter);
121 | }
122 | let b = Buffer.from(tableToIPC(t, "file"));
123 | const fd = fs.openSync(destname, "w");
124 | fs.writeSync(fd, b);
125 | }
126 |
--------------------------------------------------------------------------------
/src/RandomPoints.ts:
--------------------------------------------------------------------------------
1 | import {
2 | Table,
3 | Uint8,
4 | Dictionary,
5 | Float32,
6 | Utf8,
7 | Int16,
8 | Int8,
9 | vectorFromArray,
10 | tableFromArrays,
11 | makeVector,
12 | } from "apache-arrow";
13 | import { shuffle, sum, range, extent, min } from "d3-array";
14 |
15 | /*
16 | frame: a Trifeather object.
17 | fields: Fields in the original data that contain integer counts to be produced here.
18 | n_represented: How many points to generate per attested point.
19 | keep: fields to retain.
20 | names: The names for the columns created.
21 |  delim: A delimiter inside column names: so if splitting by '_' and a field is
22 |  'White_Female', then the delim would be '_' and the names ['race', 'gender'].
23 | */
24 | export function random_points(
25 | frame,
26 | fields,
27 | n_represented = 1,
28 | keep = [],
29 | names = ["category"],
30 | delim = "_"
31 | ) {
32 | // Usually this can just be a number.
33 | let targets = fields.map((f) => []);
34 | let total_counts = 0;
35 | let ix = 0;
36 | for (let field of fields) {
37 | // NB we are one-indexed here.
38 | for (let i of range(1, frame.t.numRows)) {
39 | const feature = frame.t.get(i);
40 | if (feature.coord_resolution === null) {
41 | continue;
42 | }
43 | const target = randround(feature[field] / n_represented);
44 | total_counts += target || 0;
45 | // un one-index
46 | targets[ix][i - 1] = target || 0;
47 | }
48 | ix++;
49 | }
50 | console.log(`Preparing to generate ${total_counts} points`);
51 | const x_array = new Float32Array(total_counts);
52 | const y_array = new Float32Array(total_counts);
53 | const field_array =
54 | fields.length > 127
55 | ? new Int16Array(total_counts)
56 | : new Int8Array(total_counts);
57 | const keepers = keep.map((key) => new Array(total_counts).fill(""));
58 | const ix_array = range(total_counts);
59 |
60 | // We are going to place these points randomly.
61 | // Important for overplotting.
62 |
63 | shuffle(ix_array);
64 |
65 | let overall_position = 0;
66 | for (let i_ of range(1, frame.t.numRows)) {
67 | const feature = frame.t.get(i_);
68 | const keep_values = keep.map((key) => feature[key]);
69 | const i = i_ - 1; // Because the other thing is one-indexed;
70 | const vert_buffer = new DataView(
71 | feature.vertices.buffer,
72 | feature.vertices.byteOffset,
73 | feature.vertices.byteLength
74 | );
75 | let local_targets = targets.map((d) => d[i]);
76 | let offset = feature.coord_buffer_offset;
77 | // earcut seems to always return triangles in a form where the absolute
78 | // value isn't necessary.
79 | const stride = feature.coord_resolution / 8; // Bytes, not bits.
80 |
81 | const triangles = [];
82 | for (
83 | let tri_number = 0;
84 | tri_number < feature.vertices.byteLength;
85 | tri_number += stride * 3
86 | ) {
87 | let a, b, c;
88 | try {
89 | [a, b, c] = [0, 1, 2]
90 | .map((ix) =>
91 | vert_buffer[`getUint${feature.coord_resolution}`](
92 | tri_number + ix * stride,
93 | true
94 | )
95 | )
96 | .map((n) => frame.coord(n + offset));
97 | } catch {
98 | console.log({
99 | feature,
100 | stride,
101 | i,
102 | byte_length: feature.vertices.byteLength,
103 | });
104 | throw new Error("Yikes--hit some observable debugging code here.");
105 | }
106 | const double_area = Math.abs(
107 | a[0] * (b[1] - c[1]) + b[0] * (c[1] - a[1]) + c[0] * (a[1] - b[1])
108 | );
109 | triangles.push({ a, b, c, double_area });
110 | }
111 |
112 | let double_areas = sum(triangles, (d) => d.double_area);
113 |
114 | for (let { a, b, c, double_area } of triangles) {
115 | if (double_area == 0) {
116 | continue;
117 | }
118 | const share_of_remaining = double_area / double_areas;
119 | double_areas -= double_area;
120 | if (share_of_remaining < 0) {
121 | if (local_targets.every((d) => d == 0)) {
122 | continue;
123 | }
124 | }
125 | for (let f_num of range(local_targets.length)) {
126 | let how_many_points_do_i_get = randround(
127 | local_targets[f_num] * share_of_remaining
128 | );
129 | how_many_points_do_i_get = min([
130 | how_many_points_do_i_get,
131 | local_targets[f_num],
132 | ]);
133 | if (how_many_points_do_i_get <= 0) {
134 | continue;
135 | }
136 | for (let i = 0; i < how_many_points_do_i_get; i++) {
137 | const [x, y] = random_point(a, b, c);
138 | // console.log({x, y})
139 | const writing_to = ix_array[overall_position++];
140 | x_array[writing_to] = x;
141 | y_array[writing_to] = y;
142 | for (let i = 0; i < keep.length; i++) {
143 | keepers[i][writing_to] = keep_values[i];
144 | }
145 | field_array[writing_to] = f_num;
146 | local_targets[f_num] -= 1;
147 | }
148 | }
149 | }
150 | }
151 | // Hard to imagine someone needing more than 2**16 entries here...
152 |
153 |   const dictionaries = [];
154 | // Split the names by the delimiter and turn each
155 | // into a dictionary column.
156 | names.forEach((column_name, column_number) => {
157 | const codes = [];
158 | const strings = [];
159 | fields.forEach((multi_field, multi_field_number) => {
160 | const field = multi_field.split(delim)[column_number];
161 | if (strings.indexOf(field) == -1) {
162 | strings.push(field);
163 | }
164 | codes[multi_field_number] = strings.indexOf(field);
165 | });
166 | let dict_type;
167 | let subset_array;
168 | if (strings.length <= 127) {
169 | dict_type = new Int8();
170 | subset_array = new Int8Array(field_array.length);
171 | } else {
172 | dict_type = new Int16();
173 | subset_array = new Int16Array(field_array.length);
174 | }
175 | for (let i = 0; i < field_array.length; i++) {
176 | subset_array[i] = codes[field_array[i]];
177 | }
178 | const classes = vectorFromArray(strings, new Utf8());
179 | const dictionaryVector2 = makeVector({
180 | data: subset_array, // indexes into the dictionary
181 | dictionary: classes,
182 |       type: new Dictionary(new Utf8(), dict_type),
183 | });
184 | dictionaries.push(dictionaryVector2);
185 | });
186 |
187 |   const my_table2 = tableFromArrays({
188 | x: vectorFromArray(x_array, new Float32()),
189 | y: vectorFromArray(y_array, new Float32()),
190 | ...keep.reduce(
191 | (acc, d, i) => ({ ...acc, [d]: vectorFromArray(keepers[i], new Utf8()) }),
192 | {}
193 | ),
194 | ...names.reduce((acc, d, i) => ({ ...acc, [d]: dictionaries[i] }), {}),
195 | });
196 | return my_table2;
197 | }
198 |
199 | function randround(how_many_points_do_i_get) {
200 | const leftover = how_many_points_do_i_get % 1;
201 | // Random round to decide if you get a fractional point.
202 | if (Math.random() > leftover) {
203 | return how_many_points_do_i_get - leftover;
204 | } else {
205 | return how_many_points_do_i_get + (1 - leftover);
206 | }
207 | }
208 |
209 | function random_point([ax, ay], [bx, by], [cx, cy]) {
210 | const a = [bx - ax, by - ay];
211 | const b = [cx - ax, cy - ay];
212 | let [u1, u2] = [Math.random(), Math.random()];
213 | if (u1 + u2 > 1) {
214 | u1 = 1 - u1;
215 | u2 = 1 - u2;
216 | }
217 | const w = [u1 * a[0] + u2 * b[0], u1 * a[1] + u2 * b[1]];
218 | return [w[0] + ax, w[1] + ay];
219 | }
220 |
--------------------------------------------------------------------------------
/src/TriFeather.ts:
--------------------------------------------------------------------------------
1 | import {
2 | Int32,
3 | Int64,
4 | Int16,
5 | Int8,
6 | Float32,
7 | Float64,
8 | Dictionary,
9 | Binary,
10 | Utf8,
11 | Uint8,
12 | Uint32,
13 | makeBuilder,
14 | vectorFromArray,
15 | tableToIPC,
16 | tableFromIPC,
17 | Table,
18 | } from "apache-arrow";
19 |
20 | import earcut from "earcut";
21 |
22 | import { geoPath, geoProjection } from "d3-geo";
23 |
24 | import { geoProject } from "d3-geo-projection";
25 | import { extent, range } from "d3-array";
26 |
27 | import clip from "polygon-clipping";
28 | import { Feature, FeatureCollection, Geometry } from "geojson";
29 |
30 | export default class TriFeather {
31 | public bytes?: Uint8Array;
32 | public t: Table;
33 | public _n_coords?: number;
34 | _coord_buffer: DataView;
35 |
36 | constructor(bytes: Uint8Array) {
37 | this.bytes = bytes;
38 | this.t = tableFromIPC(bytes);
39 | }
40 |
41 | get n_coords() {
42 | // Trigger creation.
43 | this.coord_buffer;
44 | return this._n_coords;
45 | }
46 |
47 | get coord_buffer() {
48 | if (this._coord_buffer) {
49 | return this._coord_buffer;
50 | }
51 | const d = this.t.get(0).vertices;
52 | this._coord_bytes = d.byteOffset;
53 | this._n_coords = d.byteLength / 4 / 2;
54 | this._coord_buffer = new DataView(d.buffer, d.byteOffset, d.byteLength);
55 | return this._coord_buffer;
56 | }
57 |
58 |   static polygon_to_triangles(polygon) {
59 |     // Actually perform the earcut work on a polygon.
60 |     const coords = polygon.flat(2);
61 |     const { vertices: flat_coords, holes, dimensions } = earcut.flatten(polygon);
62 |     const vertices = earcut(flat_coords, holes, dimensions);
63 |     return { coords, vertices };
64 |   }
65 |
66 | /*
67 | Creates a Trifeather object from a geojson feature collection and a d3
68 | projection.
69 | */
70 | static from_feature_collection(
71 | feature_collection: FeatureCollection,
72 | projection: typeof geoProjection,
73 | options = { dictionary_threshold: 0.75, clip_to_sphere: false }
74 | ) {
75 | if (projection === undefined) {
76 | throw new Error("Must define a projection");
77 | }
78 | // feature_collections: a (parsed) geoJSON object.
79 | // projection: a d3.geoProjection instance;
80 | // eg, d3.geoMollweide().translate([10, 20])
81 | // options:
82 |
83 | const properties = new Map();
84 | // Stores the number of bytes used for the coordinates.
85 | const coord_resolutions = [null];
86 | const coord_buffer_offset = [null];
87 | // centroids let you have fun with shapes. Store x and y separately.
88 | const centroids = [[null], [null]];
89 | const bounds = [null];
90 | // Storing areas makes it possible to weight centroids.
91 | const areas = [null];
92 | let i = -1;
93 |
94 | const path = geoPath();
95 | let clip_shape;
96 |
97 | let projected = geoProject(
98 | feature_collection,
99 | projection
100 | ) as FeatureCollection;
101 |     if (options.clip_to_sphere) {
102 |       clip_shape = geoProject({ type: "Sphere" }, projection);
103 |       for (let feature of projected.features) {
104 |         const new_coords = clip.intersection(
105 |           feature.geometry.coordinates,
106 |           clip_shape.coordinates
107 |         );
108 |         if (
109 |           feature.geometry.type == "Polygon" &&
110 |           typeof new_coords[0][0][0] != "number"
111 |         ) {
112 |           feature.geometry.type = "MultiPolygon";
113 |         }
114 |         feature.geometry.coordinates = new_coords;
115 |       }
116 |     }
117 | const { indices, points } = this.lookup_map_and_coord_buffer(projected);
118 | const coord_indices = indices;
119 | const coord_codes = points;
120 |
121 | // Stash the vertices in the first item of the array.
122 | const vertices = [new Uint8Array(coord_codes.buffer)];
123 | properties.set("id", ["Dummy feather row"]);
124 |
125 | i = 0;
126 | for (let feature of projected.features) {
127 | // start at one; the first slot is reserved for caching the full
128 | // feature list
129 | i++;
130 | properties.get("id")[i] = feature.id || `Feature_no_${i}`;
131 |
132 | for (let [k, v] of Object.entries(feature.properties)) {
133 | if (!properties.get(k)) {
134 | properties.set(k, []);
135 | }
136 | if (typeof v === "object") {
137 | properties.get(k)[i] = JSON.stringify(v);
138 | continue;
139 | }
140 | properties.get(k)[i] = v;
141 | }
142 |
143 |       const projected = feature.geometry;
144 |       if (projected === null) {
145 |         // Catch null geometries before path.centroid, which throws on null.
146 |         console.warn("Null geometry on feature", i);
147 |         coord_resolutions[i] = null;
148 |         vertices[i] = null;
149 |         continue;
150 |       }
151 |       const [x, y] = path.centroid(projected);
152 |       const bbox = vectorFromArray(path.bounds(projected).flat());
153 |
154 |       centroids[0][i] = x;
155 |       centroids[1][i] = y;
156 |       areas[i] = path.area(projected);
157 |       bounds[i] = bbox;
158 |       let loc_coordinates;
159 |       if (projected.type == "Polygon") {
160 |         loc_coordinates = [projected.coordinates];
161 |       } else if (projected.type == "MultiPolygon") {
162 |         loc_coordinates = projected.coordinates;
163 |       } else throw "All elements must be polygons or multipolygons.";
164 | let all_coords = [];
165 | let all_vertices = [];
166 | for (let polygon of loc_coordinates) {
167 | const { coords, vertices } = TriFeather.polygon_to_triangles(polygon);
168 | // Allow coordinate lookups by treating them as a single 64-bit int.
169 | const r = new Float32Array(coords.flat(3));
170 | // console.log({r})
171 | const bigint_coords = new Float64Array(r.buffer);
172 | // Reduce to the indices of the master lookup table.
173 | for (let vertex of vertices) {
174 | all_vertices[all_vertices.length] = coord_indices.get(
175 | bigint_coords[vertex]
176 | );
177 | // console.log(bigint_coords[vertex], all_vertices[all_vertices.length])
178 | }
179 | // const lookup_points = vertices.map(vx => coord_indices.get(bigint_coords[vx]))
180 | // all_vertices.push(...lookup_points)
181 | }
182 | const [start, end] = extent(all_vertices);
183 | const diff = end - start;
184 |
185 | coord_buffer_offset[i] = start;
186 |
187 | // Normalize the vertices around the lowest element.
188 | // Allows some vertices to be stored at a lower resolution.
189 | for (let j = 0; j < all_vertices.length; j++) {
190 | all_vertices[j] = all_vertices[j] - start;
191 | }
192 |
193 | // Determine the type based on the offset.
194 | let MyArray;
195 | if (diff < 2 ** 8) {
196 | coord_resolutions[i] = 8;
197 | MyArray = Uint8Array;
198 | } else if (diff < 2 ** 16) {
199 | coord_resolutions[i] = 16;
200 | MyArray = Uint16Array;
201 | } else {
202 | // Will not allow more than 4 billion points on a single feature,
203 | // should be fine.
204 | coord_resolutions[i] = 32;
205 | MyArray = Uint32Array;
206 | }
207 | vertices[i] = MyArray.from(all_vertices);
208 | }
209 | const cols = {
210 | vertices: this.pack_binary(vertices),
211 | bounds: this.pack_binary(bounds),
212 | coord_resolution: vectorFromArray(coord_resolutions, new Uint8()),
213 | coord_buffer_offset: vectorFromArray(coord_buffer_offset, new Uint32()),
214 | pixel_area: vectorFromArray(areas, new Float64()),
215 | centroid_x: vectorFromArray(centroids[0], new Float32()),
216 | centroid_y: vectorFromArray(centroids[1], new Float32()),
217 | };
218 | for (const [k, v] of properties.entries()) {
219 | if (k in cols) {
220 |         // Silently skip properties that collide with computed columns.
221 |         continue;
222 | }
223 | const builder = makeBuilder({
224 | type: this.infer_type(v, options.dictionary_threshold),
225 | nullValues: [null, undefined],
226 | highWaterMark: 2 ** 16,
227 | });
228 | for (let el of v) {
229 | builder.append(el);
230 | }
231 |
232 | cols[k] = builder.finish().toVector();
233 | }
234 | const tab = new Table(cols);
235 |
236 | const afresh = tableToIPC(tab);
237 | return new TriFeather(afresh);
238 | }
239 |
240 | static infer_type(array, dictionary_threshold = 0.75) {
241 | // Certainly reinventing the wheel here--
242 | // determine the most likely type of something based on a number of examples.
243 |
244 | // Dictionary threshold: a number between 0 and one. Character strings will be cast
245 | // as a dictionary if the unique values of the array are less than dictionary_threshold
246 | // times as long as the length of all (not null) values.
247 | const seen = new Set();
248 | let strings = 0;
249 | let floats = 0;
250 | let max_int = 0;
251 |
252 | for (let el of array) {
253 | if (Math.random() > 200 / array.length) {
254 | continue;
255 | } // Only check a subsample for speed. Try
256 | // to get about 200 instances for each row.
257 | if (el === undefined || el === null) {
258 | continue;
259 | }
260 | if (typeof el === "object") {
261 | strings += 1;
262 | seen.add(Math.random());
263 |         // Chain with else-if so objects don't fall through to the unknown-type throw.
264 |       } else if (typeof el === "string") {
265 | strings += 1;
266 | seen.add(el);
267 | } else if (typeof el === "number") {
268 | if (el % 1 > 0) {
269 | floats += 1;
270 | } else if (isFinite(el)) {
271 | max_int = Math.max(Math.abs(el), max_int);
272 | } else {
273 | }
274 | } else if (typeof el === "boolean") {
275 | } else {
276 | console.warn(el);
277 | throw `Can't convert ${el} to arrow: no behavior defined for type ${typeof el}`;
278 | }
279 | }
280 | if (strings > 0) {
281 | // moderate overlap
282 |     if (seen.size < strings * dictionary_threshold) {
283 | return new Dictionary(new Utf8(), new Int32());
284 | } else {
285 | return new Utf8();
286 | }
287 | }
288 | if (floats > 0) {
289 | return new Float32();
290 | }
291 |     if (Math.abs(max_int) < 2 ** 7) {
292 |       return new Int8();
293 |     }
294 |     if (Math.abs(max_int) < 2 ** 15) {
295 |       return new Int16();
296 |     }
297 |     if (Math.abs(max_int) < 2 ** 31) {
298 |       return new Int32();
299 |     } else {
300 |       return new Int64();
301 |     }
302 | }
303 |
304 | coord(ix) {
305 | // NB this manually specifies little-endian, although
306 | // Arrow can potentially support big-endian frames under
307 | // certain (future?) circumstances.
308 | return [
309 | this.coord_buffer.getFloat32(ix * 4 * 2, true),
310 | this.coord_buffer.getFloat32(ix * 2 * 4 + 4, true),
311 | ];
312 | }
313 | static pack_binary(els) {
314 | const binaryBuilder = makeBuilder({
315 | type: new Binary(),
316 | nullValues: [null, undefined],
317 | highWaterMark: 2 ** 16,
318 | });
319 | for (let el of els) {
320 | binaryBuilder.append(el);
321 | }
322 | return binaryBuilder.finish().toVector();
323 | }
324 |
325 | bind_to_regl(regl) {
326 | this.regl = regl;
327 | this.element_handler = new Map();
328 | // Elements can't share buffers (?) so just use a map.
329 | this.regl_coord_buffer = regl.buffer({
330 | data: this.t.get(0).vertices,
331 | type: "float",
332 | usage: "static",
333 | });
334 | this.prepare_features_for_regl();
335 | }
336 |
337 | prepare_features_for_regl() {
338 | this.features = [];
339 | const { t, features, regl, element_handler, regl_coord_buffer } = this;
340 | // Start at 1, not zero, to avoid the dummy.
341 | for (let ix = 1; ix < this.t.numRows; ix++) {
342 | const feature = this.t.get(ix);
343 | element_handler.set(
344 | ix,
345 | this.regl.elements({
346 | primitive: "triangles",
347 | usage: "static",
348 | data: feature.vertices,
349 | type: "uint" + feature.coord_resolution,
350 | length: feature.vertices.length, // in bytes
351 | count: (feature.vertices.length / feature.coord_resolution) * 8,
352 | })
353 | );
354 | const f = {
355 | ix,
356 | vertices: element_handler.get(ix),
357 | coords: {
358 | buffer: this.regl_coord_buffer,
359 | stride: 8,
360 | offset: feature.coord_buffer_offset * 8,
361 | },
362 | properties: feature,
363 | }; // Other data can be bound to this object if desired, which makes programming easier than
364 | // working off the static feather frame.
365 | features.push(f);
366 | }
367 | }
368 |
369 | get bbox() {
370 | if (this._bbox) {
371 | return this._bbox;
372 | }
373 | this._bbox = {
374 | x: extent(range(this.n_coords).map((i) => this.coord(i)[0])),
375 | y: extent(range(this.n_coords).map((i) => this.coord(i)[1])),
376 | };
377 | return this._bbox;
378 | }
379 | *[Symbol.iterator]() {
380 | for (let feature of this.features) {
381 | yield feature;
382 | }
383 | }
384 |
385 | static lookup_map_and_coord_buffer(geojson) {
386 | const all_coordinates = new Float32Array(
387 | geojson.features
388 | .filter((d) => d.geometry)
389 | .map((d) => d.geometry.coordinates)
390 | .flat(4)
391 | );
392 | const feature_collection = geojson;
393 | const codes = new Float64Array(all_coordinates.buffer);
394 | const indices = new Map();
395 | for (let code of codes) {
396 | if (!indices.has(code)) {
397 | indices.set(code, indices.size);
398 | }
399 | }
400 | const points = new Float64Array(indices.size);
401 | for (let [k, v] of indices.entries()) {
402 | points[v] = k;
403 | }
404 | return { indices, points };
405 | }
406 | }
407 |
--------------------------------------------------------------------------------
/src/TriMap.ts:
--------------------------------------------------------------------------------
1 | import { rgb } from "d3-color";
2 | import { range, extent as d3extent, mean } from "d3-array";
3 | import { scaleSqrt, scaleLinear, scaleOrdinal } from "d3-scale";
4 | import { select } from "d3-selection";
5 | import { zoom } from "d3-zoom";
6 | import { random_points } from "./RandomPoints";
7 | const greys = (function () {
8 | const out = [];
9 | for (let i of range(10)) {
10 | for (let j of range(10)) {
11 | for (let k of range(10)) {
12 | out.push([118 + i * 2, 118 + j * 2, 118 + k * 2, 255]);
13 | }
14 | }
15 | }
16 | return out;
17 | })();
18 |
19 | const greyscale = scaleOrdinal().range(greys);
20 |
21 | const copy_shader = `
22 | precision mediump float;
23 | varying vec2 uv;
24 | uniform sampler2D tex;
25 | uniform float wRcp, hRcp;
26 | void main() {
27 | vec4 c = texture2D(tex, uv);
28 | if (c.a > 0.) {
29 | gl_FragColor = c;
30 | } else {
31 | discard;
32 | }
33 | }
34 | `;
35 |
36 | const alpha_color_merge = `
37 | // Takes the alpha channel from one buffer, and the rgb colors
38 | // from the other.
39 | precision mediump float;
40 | varying vec2 uv;
41 | uniform sampler2D color;
42 | uniform sampler2D alpha;
43 | uniform float wRcp, hRcp;
44 | void main() {
45 | vec4 col = texture2D(color, uv);
46 | vec4 alph = texture2D(alpha, uv);
47 | float a = alph.a;
48 | if (a < 1./255.) {
49 | discard;
50 | } else if (col.a == 0.) {
51 | discard;
52 | } else if (a < .99) {
53 | a = .25;
54 | } else {
55 | a = 0.75;
56 | // col = vec4(.5, .5, .5, 1.);
57 | }
58 | gl_FragColor = vec4(col.rgb * a, a);
59 | }
60 | `;
61 |
62 | const edge_propagation = `precision mediump float;
63 | varying vec2 uv;
64 | uniform sampler2D tex;
65 | uniform float wRcp, hRcp;
66 | uniform float u_decay;
67 | void main() {
68 | // The immediate neighbors
69 | vec4 maxlr = max(texture2D(tex, uv + vec2(wRcp, 0.)), texture2D(tex, uv + vec2(-wRcp, 0.)));
70 | vec4 maxud = max(texture2D(tex, uv + vec2(0., hRcp)), texture2D(tex, uv + vec2(0., -hRcp)));
71 | vec4 max_neighbor1 = max(maxlr, maxud) * u_decay;
72 | // Corners
73 | vec4 maxulur = max(texture2D(tex, uv + vec2(wRcp, hRcp)), texture2D(tex, uv + vec2(-wRcp, hRcp)));
74 | vec4 maxlllr = max(texture2D(tex, uv + vec2(wRcp, -hRcp)), texture2D(tex, uv + vec2(-wRcp, -hRcp)));
75 | vec4 max_neighbor2 = max(maxulur, maxlllr) * pow(u_decay, 1.414); // i.e., sqrt(2)
76 |
77 | vec4 max_neighbor = max(max_neighbor1, max_neighbor2);
78 |
79 | vec4 current = texture2D(tex, uv);
80 | gl_FragColor = max(max_neighbor, current);
81 | }
82 | `;
83 |
84 | const edge_detection = `
85 | precision mediump float;
86 | varying vec2 uv;
87 | uniform sampler2D tex;
88 | uniform float wRcp, hRcp;
89 | void main() {
90 | // 4 adjacent pixels; left, right, up down.
91 | vec4 l = texture2D(tex, uv + vec2(-wRcp, 0.));
92 | vec4 r = texture2D(tex, uv + vec2(wRcp, 0.));
93 | vec4 u = texture2D(tex, uv + vec2(0., hRcp));
94 | vec4 d = texture2D(tex, uv + vec2(0., -hRcp));
95 | vec4 around = (l + r + u + d) / 4.;
96 | vec4 current = texture2D(tex, uv);
97 | if (distance(around, current) < 0.00001) {
98 | gl_FragColor = vec4(0., 0., 0., 0.);
99 | } else {
100 | gl_FragColor = vec4(0., 0., 0., 1.);
101 | }
102 | }
103 | `;
104 |
105 | function rgb2glcolor(col) {
106 | const { r, g, b } = rgb(col);
107 | return [r, g, b, 255];
108 | }
109 | export default class TriMap {
110 | constructor(div, layers, regl) {
111 | this.div = div;
112 | this.regl = regl;
113 | for (let layer of layers) {
114 | layer.bind_to_regl(this.regl);
115 | }
116 | this.layers = layers;
117 | const { width, height } = div;
118 | this.width = width || window.innerWidth;
119 | this.height = height || window.innerHeight;
120 | this.set_magic_numbers();
121 | this.prepare_div(width, height);
122 | this.color_map = this.regl.texture({
123 | width: 128,
124 | format: "rgba",
125 | height: 1,
126 | data: range(128 * 4),
127 | });
128 |
129 | this.set_renderer();
130 |
131 | this.random_points = [];
132 | }
133 |
134 | add_layer(layer) {
135 | layer.bind_to_regl(this.regl);
136 | this.layers.push(layer);
137 | }
138 |
139 | reglize_frag(regl, frag_shader = edge_detection, blend = false) {
140 | // Turn a frag shader into a regl call.
141 | return regl({
142 | blend: {
143 | enable: blend,
144 | func: {
145 | srcRGB: "one",
146 | srcAlpha: "one",
147 | dstRGB: "one minus src alpha",
148 | dstAlpha: "one minus src alpha",
149 | },
150 | },
151 | frag: frag_shader,
152 | vert: `
153 | precision mediump float;
154 | attribute vec2 position;
155 | varying vec2 uv;
156 | void main() {
157 | uv = 0.5 * (position + 1.0);
158 | gl_Position = vec4(position, 0, 1);
159 | }
160 | `,
161 | attributes: {
162 | position: this.fill_buffer,
163 | },
164 | depth: { enable: false },
165 | count: 3,
166 | uniforms: {
167 | u_decay: (_, { decay }) => decay,
168 | tex: (_, { layer }) => layer,
169 | color: (_, { color }) => color,
170 | alpha: (_, { alpha }) => alpha,
171 | wRcp: ({ viewportWidth }) => {
172 | return 1.0 / viewportWidth;
173 | },
174 | hRcp: ({ viewportHeight }) => 1.0 / viewportHeight,
175 | },
176 | });
177 | }
178 |
179 |   get fill_buffer() {
180 |     // A single triangle that covers the whole viewport in clip space.
181 |     if (!this._fill_buffer) {
182 |       const { regl } = this;
183 |       this._fill_buffer = regl.buffer({ data: [-4, -4, 4, -4, 0, 4] });
184 |     }
185 |
186 | return this._fill_buffer;
187 | }
188 | get filter() {
189 | return this._filter
190 | ? this._filter
191 | : function (d) {
192 | return true;
193 | };
194 | }
195 |
196 | set filter(f) {
197 | this._filter = f;
198 | }
199 |
200 | cleanup() {
201 | this.cleanup_point_buffers();
202 | this.cleanup_frame_buffers();
203 | this.cleanup_poly_buffers();
204 | }
205 |
206 | cleanup_poly_buffers() {
207 | // pass
208 | }
209 |
210 | cleanup_frame_buffers() {
211 | if (this.buffers) {
212 | for (let buffer of this.buffers.values()) {
213 | buffer.destroy();
214 | }
215 | }
216 | }
217 |
218 | fbo(name) {
219 | this.buffers = this.buffers || new Map();
220 | if (this.buffers.get(name)) {
221 | return this.buffers.get(name);
222 | }
223 | const fbo = this.regl.framebuffer({
224 | width: this.width,
225 | height: this.height,
226 | stencil: false,
227 | depth: false,
228 | });
229 | this.buffers.set(name, fbo);
230 | return this.buffers.get(name);
231 | }
232 |
233 | set_magic_numbers() {
234 | // It's a major pain to align regl with d3 scales.
235 |
236 | const { layers, width, height } = this;
237 |
238 | const extent = JSON.parse(JSON.stringify(layers[0].bbox));
239 | for (let layer of layers) {
240 | if (layer.t.get(0).get("holc_id")) {
241 | continue;
242 | }
243 | const { bbox } = layer;
244 | extent.x = d3extent([...extent.x, ...bbox.x]);
245 | extent.y = d3extent([...extent.y, ...bbox.y]);
246 | }
247 | const scales = {};
248 |
249 | const scale_dat = { x: {}, y: {} };
250 |
251 | for (let [name, dim] of [
252 | ["x", width],
253 | ["y", height],
254 | ]) {
255 | const limits = extent[name];
256 | scale_dat[name].limits = limits;
257 | scale_dat[name].size_range = limits[1] - limits[0];
258 | scale_dat[name].pixels_per_unit = dim / scale_dat[name].size_range;
259 | }
260 |
261 | const data_aspect_ratio =
262 | scale_dat.x.pixels_per_unit / scale_dat.y.pixels_per_unit;
263 |
264 | let x_buffer_size = 0,
265 | y_buffer_size = 0,
266 | x_target_size = width,
267 | y_target_size = height;
268 | if (data_aspect_ratio > 1) {
269 | // There are more pixels in the x dimension, so we need a buffer
270 | // around it.
271 | x_target_size = width / data_aspect_ratio;
272 | x_buffer_size = (width - x_target_size) / 2;
273 | } else {
274 | y_target_size = height * data_aspect_ratio;
275 | y_buffer_size = (height - y_target_size) / 2;
276 | }
277 |
278 | scales.x = scaleLinear()
279 | .domain(scale_dat.x.limits)
280 | .range([x_buffer_size, width - x_buffer_size]);
281 |
282 | scales.y = scaleLinear()
283 | .domain(scale_dat.y.limits)
284 | .range([y_buffer_size, height - y_buffer_size]);
285 |
286 | this.magic_numbers = window_transform(
287 | scales.x,
288 | scales.y,
289 | width,
290 | height
291 | ).map((d) => d.flat());
292 | }
293 |
294 | prepare_div(width, height) {
295 | this.zoom = { transform: { k: 1, x: 0, y: 0 } };
296 | select(this.div).call(
297 | zoom()
298 | .extent([
299 | [0, 0],
300 | [width, height],
301 | ])
302 | .on("zoom", (event, g) => {
303 | this.zoom.transform = event.transform;
304 | })
305 | );
306 | return this.div;
307 | }
308 |
309 | get size_func() {
310 | return this._size_function ? this._size_function : () => 1;
311 | }
312 |
313 | set size_func(f) {
314 | this._size_function = f;
315 | }
316 |
317 | set color_func(f) {
318 | this._color_function = f;
319 | }
320 |
321 | get index_color() {
322 | return function (f) {
323 | if (f._index_color) {
324 | return f._index_color;
325 | }
326 | f._index_color = [0, 1, 2].map(
327 | (d) => (1 / 255) * Math.floor(Math.random() * 255)
328 | );
329 | return f._index_color;
330 | };
331 | }
332 |
333 | get color_func() {
334 | //return d => [Math.random() * 255, Math.random() * 255, Math.random() * 255];
335 | if (!this._color_function) {
336 | // The bootstrap equivalents of the HOLC colors.
337 | const HOLC_bootstrap = {
338 | A: [40, 167, 69],
339 | B: [23, 162, 184],
340 | C: [255, 193, 7],
341 | D: [220, 53, 69],
342 | };
343 | this._color_function = function (d) {
344 | // Do the defaults include a custom coloring scheme for redline maps? Yes,
345 | // they do.
346 | if (d.properties.holc_grade) {
347 | if (HOLC_bootstrap[d.properties.holc_grade]) {
348 | return HOLC_bootstrap[d.properties.holc_grade].map((c) => c / 255);
349 | }
350 | }
351 | return greyscale(d.ix)
352 | .slice(0, 3)
353 | .map((c) => c / 255);
354 | };
355 | }
356 | return this._color_function;
357 | }
358 |
359 | draw_edges_1(layer) {
360 | const { regl } = this;
361 | const colors = this.fbo("colors");
362 | const edges = this.fbo("edges");
363 |
364 | colors.use((d) => {
365 | this.regl.clear({ color: [0, 0, 0, 0] });
366 | this.poly_tick(layer);
367 | });
368 |
369 | edges.use(() => {
370 | this.regl.clear({ color: [1, 1, 1, 1] });
371 | const shader = this.edge_detect_call;
372 | shader({ layer: colors });
373 | });
374 | }
375 |
376 | instantiate_shader(regl, shader, name) {
377 | this.calls = this.calls || new Map();
378 | if (this.calls.get(name)) {
379 | return this.calls.get(name);
380 | }
381 | const call = this.reglize_frag(regl, shader);
382 | this.calls.set(name, call);
383 | return call;
384 | }
385 |
386 | get edge_detect_call() {
387 | return this.instantiate_shader(
388 | this.regl,
389 | edge_detection,
390 | "edge_detect_call"
391 | );
392 | }
393 |
394 | get copy_call() {
395 |     return this.instantiate_shader(this.regl, copy_shader, "copy_call");
396 | }
397 |
398 | get edge_propagation_call() {
399 | return this.instantiate_shader(
400 | this.regl,
401 | edge_propagation,
402 | "edge_propagation_call"
403 | );
404 | }
405 | get alpha_color_merge_call() {
406 | return this.instantiate_shader(
407 | this.regl,
408 | alpha_color_merge,
409 | "alpha_color_merge"
410 | );
411 | }
412 | draw_edges_2() {
413 | const { regl } = this;
414 | // Copy the edges to a ping-pong shader to be blurred.
415 | const edges = this.fbo("edges");
416 |
417 | const pingpong = [this.fbo("ping"), this.fbo("pong")];
418 | const copier = this.copy_call;
419 | const { decay } = this;
420 | pingpong[0].use(() => {
421 | regl.clear({ color: [0, 0, 0, 0] });
422 | copier({ layer: edges });
423 | });
424 |
425 | const edge_propagator = this.edge_propagation_call;
426 | let alpha = 1;
427 | while (alpha > 1 / 255) {
428 | pingpong[1].use(() => {
429 | regl.clear({ color: [0, 0, 0, 0] });
430 | edge_propagator({ layer: pingpong[0], decay: decay });
431 | });
432 | alpha *= decay;
433 | // swap the buffers.
434 | pingpong.reverse();
435 | }
436 | }
437 |
438 | draw_edges(layer) {
439 | this.draw_edges_1(layer);
440 | this.draw_edges_2();
441 | const pingpong = [this.fbo("ping"), this.fbo("pong")];
442 | const final_shade = this.alpha_color_merge_call;
443 | // First copy the blur
444 | final_shade({ alpha: pingpong[0], color: this.fbo("colors") });
445 | // copier({layer: colors})
446 | }
447 |
448 | get decay() {
449 | const pixels = 8;
450 | return Math.exp(Math.log(1 / 255) / pixels);
451 | }
452 |
453 | cleanup_point_buffers() {
454 | this.random_points.map((d) => {
455 | d.x.destroy();
456 | d.y.destroy();
457 | d.f_num.destroy();
458 | d.ix.destroy();
459 | });
460 | }
461 |
462 | generate_random_points(
463 | fields,
464 | represented = 1,
465 | layers,
466 | clear = true,
467 | index_function
468 | ) {
469 | if (clear) {
470 | this.cleanup_point_buffers();
471 | this._number_of_points = 0;
472 | this.random_points = [];
473 | }
474 |
475 | for (let layer of layers) {
476 | const { regl } = this;
477 | const { x_array, y_array, f_num_array } = random_points(
478 | layer,
479 | fields,
480 | represented,
481 | index_function
482 | );
483 | this._number_of_points += x_array.length;
484 | let this_item = {
485 | x: regl.buffer(x_array),
486 | y: regl.buffer(y_array),
487 | f_num: regl.buffer(f_num_array),
488 | ix: regl.buffer(range(x_array.length)),
489 | count: x_array.length,
490 | };
491 | this.random_points.push(this_item);
492 | }
493 | }
494 |
495 | point_tick() {
496 | const { regl } = this;
497 | const calls = [];
498 | // multiple interleaved tranches prevent Trump or Biden from always being on top. This is
499 | // an issue with Williamson's maps, which over-represent the Hispanic population of DC because it
500 | // gets plotted last.
501 |
502 | const alpha_scale = scaleSqrt().domain([0, 500]).range([0, 1]);
503 | for (let pointset of this.random_points) {
504 | calls.push({
505 | x: pointset.x,
506 | y: pointset.y,
507 | ix: pointset.ix,
508 | f_num: pointset.f_num,
509 | transform: this.zoom.transform,
510 |       // Drops the last point in each tranche--needs a modulo operation to know how
511 | // many to expect.
512 | count: pointset.count,
513 | centroid: [0, 0],
514 | size: this.point_size ? this.point_size : 1,
515 | alpha: this.point_opacity > 1 / 255 ? this.point_opacity : 1 / 255,
516 | });
517 | }
518 | this.render_points(calls);
519 | }
520 |
521 | tick(wut) {
522 | const { regl } = this;
523 | regl.clear({
524 | color: [0, 0, 0, 0],
525 | });
526 | const alpha = 1;
527 | if (wut === "points") {
528 | this.point_tick();
529 | } else {
530 | for (let layer of this.layers) {
531 | // console.log(layer)
532 | this.draw_edges(layer);
533 | return;
534 | }
535 | this.fbo("points").use((d) => {
536 | regl.clear({
537 | color: [0, 0, 0, 0],
538 | });
539 | this.point_tick();
540 | });
541 |
542 | const copier = this.copy_call;
543 | copier({ layer: this.fbo("points") });
544 | }
545 | }
546 |
547 | get copy_call() {
548 | return this.instantiate_shader(this.regl, copy_shader, "copy_call");
549 | }
550 |
551 | poly_tick(layer) {
552 | const calls = [];
553 | let i = 0;
554 | for (let feature of layer) {
555 | //if (feature.properties['2020_tot'] === null) {continue}
556 |
557 | const { vertices, coords } = feature;
558 | if (!vertices) {
559 | continue;
560 | }
561 | calls.push({
562 | transform: this.zoom.transform,
563 | color: this.color_func(feature),
564 | u_blob: this.blob_func(feature),
565 | centroid: [
566 | feature.properties.centroid_x,
567 | feature.properties.centroid_y,
568 | ],
569 | size: this.size_func(feature),
570 | alpha: 1,
571 | vertices: vertices,
572 | coords: coords,
573 | });
574 | }
575 | this.render_polygons(calls);
576 | }
577 | get blob_func() {
578 | return (d) => [1, 1, 0.0];
579 | }
580 | get point_vertex_shader() {
581 | return `
582 | precision mediump float;
583 | attribute float a_x;
584 | attribute float a_y;
585 | attribute float a_ix;
586 | attribute float a_f_num;
587 | uniform sampler2D u_color_map;
588 |
589 | uniform float u_discard_prob;
590 | uniform float u_size;
591 | uniform vec2 u_centroid;
592 | varying vec4 fragColor;
593 | uniform float u_k;
594 | uniform float u_time;
595 | varying vec4 fill;
596 |
597 | // Transform from data space to the open window.
598 | uniform mat3 u_window_scale;
599 | // Transform from the open window to the d3-zoom.
600 | uniform mat3 u_zoom;
601 | uniform mat3 u_untransform;
602 | uniform float u_scale_factor;
603 |
604 | float distortion_factor = exp(log(u_k)*u_scale_factor);
605 |
606 | vec4 discard_me = vec4(-100., -100., 0., 1.);
607 |
608 | float tau = 3.14159265358 * 2.;
609 |
610 | highp float ix_to_random(in float ix, in float seed) {
611 | // For high numbers, taking the log avoids coincidence.
612 | highp float seed2 = log(ix) + 1.;
613 | vec2 co = vec2(seed2, seed);
614 | highp float a = 12.9898;
615 | highp float b = 78.233;
616 | highp float c = 43758.5453;
617 | highp float dt = dot(co.xy, vec2(a, b));
618 | highp float sn = mod(dt, 3.14);
619 | return fract(sin(sn) * c);
620 | }
621 |
622 | vec2 box_muller(in float ix, in float seed) {
623 | // Box-Muller transform gives you two gaussian randoms for two uniforms.
624 | highp float U = ix_to_random(ix, seed);
625 | highp float V = ix_to_random(ix, seed + 17.123123);
626 | return vec2(sqrt(-2. * log(U)) * cos(tau * V),
627 | sqrt(-2. * log(U)) * sin(tau * V));
628 | }
629 |
630 |
631 |
632 | // From another project
633 | vec2 circle_jitter(in float ix, in float aspect_ratio, in float time,
634 | in float radius, in float speed) {
635 | float rand1 = ix_to_random(ix, 3.0);
636 | float rand2 = ix_to_random(ix, 4.0);
637 |
638 | float stagger_time = rand1 * tau;
639 |
640 | // How long does a circuit take?
641 |
642 |     float time_period = tau / speed;
647 | if (time_period > 1e4) {
648 | return vec2(0., 0.);
649 | }
650 |
651 | // Adjust time from the clock to our current spot.
652 | float varying_time = time + stagger_time * time_period;
653 | // Where are we from 0 to 1 relative to the time period
654 |
655 | float relative_time = 1. - mod(varying_time, time_period) / time_period;
656 |
657 | float theta = relative_time * tau;
658 |
659 | return vec2(cos(theta), aspect_ratio * sin(theta)) *
660 | radius * rand2;
661 | }
662 |
663 |
664 | vec2 jitter(in float ix, in float radius) {
665 | return circle_jitter(ix, 1.2, u_time, radius, .5);
666 | }
667 |
668 | // We can bundle the three matrices together here for all shaders.
669 | mat3 from_coord_to_gl = u_window_scale * u_zoom * u_untransform;
670 | void main () {
671 |
672 | vec2 position = vec2(a_x, a_y);
673 |
674 |
675 |
676 | vec3 p = vec3(position, 1.) * from_coord_to_gl;
677 |
678 | // vec2 jittered = jitter(a_ix, .0004 * distortion_factor) * distortion_factor;
679 | // p = p + vec3(jittered.xy, 0.);
680 |
681 | float my_offset = ix_to_random(a_ix, 3.2);
682 | float keep_prob = (1. - u_discard_prob);
683 |       // Scale the period so points stay visible the same length of time regardless of the discard probability.
684 | float time_period = 10./(keep_prob);
685 | float fraction_of_time = fract(u_time / time_period);
686 | float size_dilate = 0.;
687 | float my_fract = fract(fraction_of_time + my_offset);
688 | if (my_fract >= keep_prob) {
689 | gl_Position = discard_me;
690 | gl_PointSize = 0.;
691 | return;
692 | } else {
693 | float fraction_within = my_fract / keep_prob;
694 | size_dilate = abs(1. - 4.*pow((.5 - fraction_within), 2.));
695 | size_dilate = clamp(size_dilate, 0., 1.);
696 | }
697 | gl_Position = vec4(p, 1.0);
698 |
699 | gl_PointSize = u_size * distortion_factor * size_dilate;
700 |
701 | //gl_PointSize += exp(sin(u_time / 2. + a_f_num/6. * 2. * 3.1415));
702 |
703 | fragColor = texture2D(u_color_map, vec2(a_f_num / 128., .5));
704 |
705 | }
706 | `;
707 | }
708 |
709 | get vertex_shader() {
710 | return `
711 | precision mediump float;
712 | attribute vec2 position;
713 | uniform float u_size;
714 | uniform vec2 u_centroid;
715 | varying vec4 fragColor;
716 | uniform float u_k;
717 | uniform float u_time;
718 | uniform vec3 u_color;
719 | varying vec4 fill;
720 |
721 | // Transform from data space to the open window.
722 | uniform mat3 u_window_scale;
723 | // Transform from the open window to the d3-zoom.
724 | uniform mat3 u_zoom;
725 | uniform mat3 u_untransform;
726 | uniform float u_scale_factor;
727 | // rate, grittiness, blobbiness
728 | uniform vec3 u_blob;
729 | // We can bundle the three matrices together here for all shaders.
730 | mat3 from_coord_to_gl = u_window_scale * u_zoom * u_untransform;
731 |
732 |
733 |
734 |
735 | void main () {
736 | // scale to normalized device coordinates
737 | // gl_Position is a special variable that holds the position
738 | // of a vertex
739 |
740 | vec2 from_center = position-u_centroid;
741 | float angle = atan(from_center.x, from_center.y);
742 | from_center *= (1. + u_blob.b * sin(angle * u_blob.g + u_blob.r * u_time));
743 |
744 | vec3 p = vec3(from_center * u_size + u_centroid, 1.) * from_coord_to_gl;
745 | gl_Position = vec4(p, 1.0);
746 |
747 | //gl_PointSize = u_size * (exp(log(u_k)*u_scale_factor));
748 |
749 | fragColor = vec4(u_color.rgb, 1.);
750 | //gl_Position = vec4(position / vec2(1., u_aspect), 1., 1.);
751 | }
752 | `;
753 | }
754 |
755 | set_renderer() {
756 | this.render_polygons = this.regl(this.renderer());
757 | this.render_points = this.regl(this.renderer("points"));
758 | }
759 |
760 | get point_frag() {
761 | return `
762 | precision highp float;
763 | uniform float u_alpha;
764 | varying vec4 fragColor;
765 |
766 | void main() {
767 | vec2 coord = gl_PointCoord;
768 | vec2 cxy = 2.0 * coord - 1.0;
769 | float r_sq = dot(cxy, cxy);
770 | if (r_sq > 1.0) {discard;}
771 |
772 | gl_FragColor = fragColor * u_alpha;
773 | }`;
774 | }
775 |
776 | get triangle_frag() {
777 | return `
778 | precision highp float;
779 | uniform float u_alpha;
780 | varying vec4 fragColor;
781 |
782 | void main() {
783 | gl_FragColor = fragColor * u_alpha;
784 | }`;
785 | }
786 |
787 | renderer(wut = "polygons") {
788 | const { regl, magic_numbers } = this;
789 | const definition = {
790 | depth: {
791 | enable: false,
792 | },
793 | blend: {
794 | enable: true,
795 | func: {
796 | srcRGB: "one",
797 | srcAlpha: "one",
798 | dstRGB: "one minus src alpha",
799 | dstAlpha: "one minus src alpha",
800 | },
801 | },
802 |       vert: wut === "polygons" ? this.vertex_shader : this.point_vertex_shader,
803 |       frag: wut === "polygons" ? this.triangle_frag : this.point_frag,
804 | attributes: {
805 | a_x: regl.prop("x"),
806 | a_y: regl.prop("y"),
807 | a_ix: regl.prop("ix"),
808 | a_f_num: regl.prop("f_num"),
809 |
810 | position:
811 |             wut === "polygons"
812 | ? (_, { coords }) => coords
813 | : (_, { position, stride, offset }) => {
814 | return { buffer: position, offset, stride };
815 | },
816 | },
817 | count: regl.prop("count"),
818 |       elements: wut === "polygons" ? (_, { vertices }) => vertices : undefined,
819 | uniforms: {
820 | u_time: (context, _) => performance.now() / 500,
821 | u_scale_factor: () => (this.scale_factor ? this.scale_factor : 0.5),
822 | u_color_map: () => this.color_map,
823 | u_k: function (context, props) {
824 | return props.transform.k;
825 | },
826 | u_discard_prob: () => this.discard_share,
827 | u_centroid: propd("centroid", [0, 0]),
828 | u_color: (_, { color }) => (color ? color : [0.8, 0.9, 0.2]),
829 | u_blob: (_, { u_blob }) => u_blob,
830 | u_window_scale: magic_numbers[0].flat(),
831 | u_untransform: magic_numbers[1].flat(),
832 | u_zoom: function (context, props) {
833 | const g = [
834 | // This is how you build a transform matrix from d3 zoom.
835 | [props.transform.k, 0, props.transform.x],
836 | [0, props.transform.k, props.transform.y],
837 | [0, 0, 1],
838 | ].flat();
839 | return g;
840 | },
841 | u_alpha: (_, { alpha }) => (alpha ? alpha : 1),
842 | u_size: (_, { size }) => size || 1,
843 | },
844 |     primitive: wut === "polygons" ? "triangles" : "points",
845 | };
846 | if (wut === "polygons") {
847 | delete definition["count"];
848 | }
849 | return definition;
850 | }
851 | }
852 |
853 | function window_transform(x_scale, y_scale, width, height) {
854 |   // Creates the two matrices a webgl shader needs, in addition to the zoom state,
855 |   // to stay aligned with the canvas and d3-zoom.
856 |
857 |   // width and height are the canvas dimensions; the x and y scales project from
858 |   // the data x and y into pixel space.
859 |
860 |   // Given two d3 scales in coordinate space, create two matrices that project from
861 |   // the original space into [-1, 1] webgl space.
862 |
863 | function gap(array) {
864 | // Return the magnitude of a scale.
865 | return array[1] - array[0];
866 | }
867 |
868 | let x_mid = mean(x_scale.domain());
869 | let y_mid = mean(y_scale.domain());
870 |
871 | const xmulti = gap(x_scale.range()) / gap(x_scale.domain());
872 | const ymulti = gap(y_scale.range()) / gap(y_scale.domain());
873 |
874 |   // Note that the x_scale and y_scale ranges may not span the full width or height.
875 |
878 | // translates from data space to scaled space.
879 | const m1 = [
880 | // transform by the scale;
881 | [xmulti, 0, -xmulti * x_mid + mean(x_scale.range())],
882 | [0, ymulti, -ymulti * y_mid + mean(y_scale.range())],
883 | [0, 0, 1],
884 | ];
885 |
886 | // translate from scaled space to webgl space.
887 | // The '2' here is because webgl space runs from -1 to 1; the shift at the end is to
888 | // shift from [0, 2] to [-1, 1]
889 | const m2 = [
890 | [2 / width, 0, -1],
891 | [0, -2 / height, 1],
892 | [0, 0, 1],
893 | ];
894 |
895 | return [m1, m2];
896 | }
897 |
898 | function propd(string, def) {
899 | return (_, props) => {
900 | if (props[string] !== undefined) {
901 | return props[string];
902 | }
903 | return def;
904 | };
905 | }
906 |
--------------------------------------------------------------------------------
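How the matrices above fit together: window_transform's m1 takes data coordinates to pixel space, the d3-zoom transform pans and scales within pixel space, and m2 takes pixels to [-1, 1] webgl space; the shaders multiply the three into a single from_coord_to_gl. A minimal CPU-side sketch of the same pipeline, assuming d3-scale is installed and window_transform is in scope; apply3 is a hypothetical helper, not part of the library:

import { scaleLinear } from "d3-scale";

// Multiply a row-major 3x3 matrix by a homogeneous 2-D point.
function apply3(m, [x, y]) {
  return [
    m[0][0] * x + m[0][1] * y + m[0][2],
    m[1][0] * x + m[1][1] * y + m[1][2],
  ];
}

const width = 600;
const height = 400;
const x_scale = scaleLinear().domain([-125, -66]).range([0, width]);
const y_scale = scaleLinear().domain([24, 50]).range([height, 0]);
const [m1, m2] = window_transform(x_scale, y_scale, width, height);

// A d3-zoom transform {k, x, y} written as a matrix, exactly as the
// u_zoom uniform above builds it.
const t = { k: 2, x: -100, y: -50 };
const zoom = [
  [t.k, 0, t.x],
  [0, t.k, t.y],
  [0, 0, 1],
];

const pixels = apply3(m1, [-98.5, 39.8]); // data space -> pixel space
const panned = apply3(zoom, pixels);      // pixel space -> zoomed pixel space
const ndc = apply3(m2, panned);           // zoomed pixels -> [-1, 1] clip space

Because the flattened row-major arrays are read as column-major GLSL mat3s, each uniform arrives transposed, and the shader's right-multiplication vec3(position, 1.) * from_coord_to_gl applies exactly this m2 * zoom * m1 order.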
/src/geo-albers-usa-LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 Stamen Design
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/src/geo-albers-usa-territories.js:
--------------------------------------------------------------------------------
1 | import { geoAlbers, geoConicEqualArea } from "d3-geo";
2 |
3 | var epsilon = 0.000001;
4 |
5 | function multiplex(streams) {
6 | return {
7 | point(x, y) {
8 | for (const s of streams) s.point(x, y);
9 | },
10 | sphere() {
11 | for (const s of streams) s.sphere();
12 | },
13 | lineStart() {
14 | for (const s of streams) s.lineStart();
15 | },
16 | lineEnd() {
17 | for (const s of streams) s.lineEnd();
18 | },
19 | polygonStart() {
20 | for (const s of streams) s.polygonStart();
21 | },
22 | polygonEnd() {
23 | for (const s of streams) s.polygonEnd();
24 | },
25 | };
26 | }
27 |
28 | export function geoAlbersUsaTerritories() {
29 | var cache,
30 | cacheStream,
31 | lower48 = geoAlbers(),
32 | lower48Point,
33 | alaska = geoConicEqualArea()
34 | .rotate([154, 0])
35 | .center([-2, 58.5])
36 | .parallels([55, 65]),
37 | alaskaPoint,
38 | hawaii = geoConicEqualArea()
39 | .rotate([157, 0])
40 | .center([-3, 19.9])
41 | .parallels([8, 18]),
42 | hawaiiPoint,
43 | puertoRico = geoConicEqualArea()
44 | .rotate([66, 0])
45 | .center([0, 18])
46 | .parallels([8, 18]),
47 | puertoRicoPoint,
48 | guamMariana = geoConicEqualArea()
49 | .rotate([-145, 0])
50 | .center([0, 16])
51 | .parallels([10, 20]),
52 | guamMarianaPoint,
53 | americanSamoa = geoConicEqualArea()
54 | .rotate([170, 0])
55 | .center([0, -14])
56 | .parallels([-14, 0]),
57 | americanSamoaPoint,
58 | point,
59 | pointStream = {
60 | point: function (x, y) {
61 | point = [x, y];
62 | },
63 | };
64 |
65 | function albersUsaTerritories(coordinates) {
66 | var x = coordinates[0],
67 | y = coordinates[1];
68 | return (
69 | (point = null),
70 | (lower48Point.point(x, y), point) ||
71 | (alaskaPoint.point(x, y), point) ||
72 | (hawaiiPoint.point(x, y), point) ||
73 | (puertoRicoPoint.point(x, y), point) ||
74 | (guamMarianaPoint.point(x, y), point) ||
75 | (americanSamoaPoint.point(x, y), point)
76 | );
77 | }
78 |
79 | albersUsaTerritories.invert = function (coordinates) {
80 | var k = lower48.scale(),
81 | t = lower48.translate(),
82 | x = (coordinates[0] - t[0]) / k,
83 | y = (coordinates[1] - t[1]) / k;
84 | return (
85 | y >= 0.12 && y < 0.234 && x >= -0.225 && x < -0.185
86 | ? alaska
87 | : y >= 0.166 && y < 0.234 && x >= -0.185 && x < -0.08
88 | ? hawaii
89 | : y >= 0.204 && y < 0.234 && x >= 0.3 && x < 0.38
90 | ? puertoRico
91 | : y >= 0.05 && y < 0.204 && x >= -0.415 && x < -0.225
92 | ? guamMariana
93 | : y >= 0.18 && y < 0.234 && x >= -0.415 && x < -0.225
94 | ? americanSamoa
95 | : lower48
96 | ).invert(coordinates);
97 | };
98 |
99 | albersUsaTerritories.stream = function (stream) {
100 | return cache && cacheStream === stream
101 | ? cache
102 | : (cache = multiplex([
103 | lower48.stream((cacheStream = stream)),
104 | alaska.stream(stream),
105 | hawaii.stream(stream),
106 | puertoRico.stream(stream),
107 | guamMariana.stream(stream),
108 | americanSamoa.stream(stream),
109 | ]));
110 | };
111 |
112 | albersUsaTerritories.precision = function (_) {
113 | if (!arguments.length) return lower48.precision();
114 | lower48.precision(_);
115 | alaska.precision(_);
116 | hawaii.precision(_);
117 | puertoRico.precision(_);
118 | guamMariana.precision(_);
119 | americanSamoa.precision(_);
120 | return reset();
121 | };
122 |
123 | albersUsaTerritories.scale = function (_) {
124 | if (!arguments.length) return lower48.scale();
125 | lower48.scale(_);
126 | alaska.scale(_ * 0.35);
127 | hawaii.scale(_);
128 | puertoRico.scale(_);
129 | guamMariana.scale(_);
130 | americanSamoa.scale(_);
131 | return albersUsaTerritories.translate(lower48.translate());
132 | };
133 |
134 | albersUsaTerritories.translate = function (_) {
135 | if (!arguments.length) return lower48.translate();
136 | var k = lower48.scale(),
137 | x = +_[0],
138 | y = +_[1];
139 |
140 | lower48Point = lower48
141 | .translate(_)
142 | .clipExtent([
143 | [x - 0.455 * k, y - 0.238 * k],
144 | [x + 0.455 * k, y + 0.238 * k],
145 | ])
146 | .stream(pointStream);
147 |
148 | alaskaPoint = alaska
149 | .translate([x - 0.275 * k, y + 0.201 * k])
150 | .clipExtent([
151 | [x - 0.425 * k + epsilon, y + 0.12 * k + epsilon],
152 | [x - 0.185 * k - epsilon, y + 0.234 * k - epsilon],
153 | ])
154 | .stream(pointStream);
155 |
156 | hawaiiPoint = hawaii
157 | .translate([x - 0.18 * k, y + 0.212 * k])
158 | .clipExtent([
159 | [x - 0.185 * k + epsilon, y + 0.166 * k + epsilon],
160 | [x - 0.08 * k - epsilon, y + 0.234 * k - epsilon],
161 | ])
162 | .stream(pointStream);
163 |
164 | puertoRicoPoint = puertoRico
165 | .translate([x + 0.335 * k, y + 0.224 * k])
166 | .clipExtent([
167 | [x + 0.3 * k, y + 0.204 * k],
168 | [x + 0.38 * k, y + 0.234 * k],
169 | ])
170 | .stream(pointStream);
171 |
172 | guamMarianaPoint = guamMariana
173 | .translate([x - 0.415 * k, y + 0.14 * k])
174 | .clipExtent([
175 | [x - 0.45 * k, y + 0.05 * k],
176 | [x - 0.39 * k, y + 0.21 * k],
177 | ])
178 | .stream(pointStream);
179 |
180 | americanSamoaPoint = americanSamoa
181 | .translate([x - 0.415 * k, y + 0.215 * k])
182 | .clipExtent([
183 | [x - 0.45 * k, y + 0.21 * k],
184 | [x - 0.39 * k, y + 0.234 * k],
185 | ])
186 | .stream(pointStream);
187 | return reset();
188 | };
189 |
190 | function reset() {
191 | cache = cacheStream = null;
192 | return albersUsaTerritories;
193 | }
194 |
195 | return albersUsaTerritories.scale(1070);
196 | }
197 |
--------------------------------------------------------------------------------
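The composite projection above implements the standard d3-geo projection contract (callable for forward projection, plus invert, stream, scale, translate, and precision), so it drops in anywhere geoAlbersUsa would; its default scale of 1070 matches d3's. A small usage sketch with illustrative coordinates:

import { geoAlbersUsaTerritories } from "./geo-albers-usa-territories.js";

const projection = geoAlbersUsaTerritories()
  .scale(1300)
  .translate([487.5, 305]);

// Forward projection: [longitude, latitude] -> [x, y] in pixels.
projection([-98.5, 39.8]);   // lower 48, near the center of the map
projection([-149.9, 61.2]);  // Anchorage, routed through the Alaska inset
projection([-66.1, 18.45]);  // San Juan, routed through the Puerto Rico inset

// Inversion picks the right component projection from the clip extents.
projection.invert([487.5, 305]); // -> approximate [longitude, latitude]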
/src/index.js:
--------------------------------------------------------------------------------
1 | export { default as TriFeather } from "./TriFeather";
2 | export { default as TriMap } from "./TriMap";
3 | export { random_points as randomPoints } from "./RandomPoints";
4 |
--------------------------------------------------------------------------------
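These three exports are the package's whole public surface, with random_points renamed to camelCase on the way out. A hypothetical consumer, assuming the published package name is trifeather, matching the library name in the Vite config below:

// Hypothetical import; constructor arguments for TriFeather and TriMap are
// defined in the source files above and omitted here.
import { TriFeather, TriMap, randomPoints } from "trifeather";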
/vite.config.cjs:
--------------------------------------------------------------------------------
1 | // vite.config.cjs
2 |
3 | export default {
4 | build: {
5 | target: "es2020",
6 | lib: {
7 | entry: __dirname + "/src/index.js",
8 | name: "trifeather",
9 | formats: ["es", "umd"],
10 | fileName: (format) => `trifeather.${format}.js`,
11 | },
12 | rollupOptions: {
13 | // make sure to externalize deps that shouldn't be bundled
14 | // into your library
15 | external: ["regl"],
16 | output: {
17 | // Provide global variables to use in the UMD build
18 | // for externalized deps
19 | globals: {
20 | regl: "regl",
21 | },
22 | },
23 | },
24 | },
25 | };
26 |
--------------------------------------------------------------------------------
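Given the formats and fileName settings above, a Vite build (npx vite build) should emit dist/trifeather.es.js and dist/trifeather.umd.js, with regl excluded from both bundles. A sketch of consuming the ES build under those assumptions:

// regl is declared external in rollupOptions, so the host application
// installs and imports it itself.
import regl from "regl";
import { TriFeather, TriMap, randomPoints } from "./dist/trifeather.es.js";

The UMD file instead attaches a trifeather global and, per the output.globals mapping, expects a regl global to already exist when loaded with a script tag.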