this.ref = ref} className={`idyll-step ${className || ''}`} {...props} />
13 | );
14 | }
15 | }
16 |
17 | export default Step;
--------------------------------------------------------------------------------
/components/default/stepper-control.js:
--------------------------------------------------------------------------------
1 | const React = require('react');
2 |
// Idyll component: clickable controls (next/previous) for a parent Stepper.
// The Stepper injects `next`/`previous` callbacks via React.cloneElement.
3 | class StepperControl extends React.Component {
4 |
5 | componentDidMount() {
6 | }
7 | render() {
// Strip Idyll-injected `idyll` prop so it is not forwarded to the DOM element.
8 | const { idyll, ...props } = this.props;
// NOTE(review): the JSX element tags below were stripped by the dump that
// produced this file — `return` is followed by an attribute fragment only.
// Verify the full markup against the original source repository.
9 | return
this.ref = ref} className={`idyll-step ${className || ''}`} style={{margin: '10vh 0 60vh 0'}} {...props} />
21 | // );
22 | }
23 | }
24 |
25 | export default StepperControl;
--------------------------------------------------------------------------------
/components/default/stepper.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | const { filterChildren, mapChildren } = require('idyll-component-children');
3 |
// Idyll component that shows one [Step] child at a time, plus any [Graphic]
// and [StepperControl] children. Step selection is driven by the `currentStep`
// (numeric index) or `currentState` (matches a step's `state` prop) props.
4 | class Stepper extends React.PureComponent {
5 |
6 | constructor(props) {
7 | super(props);
// Registries filled by child steps via registerStep(); keyed by element.
8 | this.SCROLL_STEP_MAP = {};
9 | this.SCROLL_NAME_MAP = {};
10 | }
11 |
12 |
// Called by child Step components to register themselves.
13 | registerStep(elt, name, val) {
14 | this.SCROLL_STEP_MAP[elt] = val;
15 | this.SCROLL_NAME_MAP[elt] = name;
16 | }
17 |
// Returns all children whose component type name is "step" (case-insensitive).
// Always returns an array (empty when there are no matching children).
18 | getSteps() {
19 | return filterChildren(
20 | this.props.children || [],
21 | (c) => {
22 | return c.type.name && c.type.name.toLowerCase() === 'step';
23 | }
24 | ) || []
25 | }
26 |
// Advance to the next step, wrapping around past the last step.
27 | next() {
28 | this.props.updateProps({ currentStep: (this.props.currentStep + 1) % (this.getSteps().length) });
29 | }
// Go back one step, wrapping around to the last step from the first.
30 | previous() {
31 | let newStep = this.props.currentStep - 1;
32 | if (newStep < 0) {
33 | newStep = (this.getSteps().length) + newStep;
34 | }
35 |
36 | this.props.updateProps({ currentStep: newStep });
37 | }
38 |
// Pick the step to display: match by `currentState` when provided,
// otherwise index by `currentStep` modulo the number of steps.
39 | getSelectedStep() {
40 | const { currentState, currentStep } = this.props;
41 | const steps = this.getSteps();
42 | if (currentState) {
43 | return filterChildren(
44 | steps,
45 | (c) => {
46 | return c.props.state === currentState
47 | }
48 | )[0];
49 | }
50 | return steps[currentStep % steps.length];
51 | }
52 |
// NOTE(review): the wrapping JSX tags in this render() were stripped by the
// dump that produced this file; only the embedded expressions remain.
53 | render() {
54 | const { children, height, ...props } = this.props;
55 | return (
56 |
57 |
58 | {filterChildren(
59 | children,
60 | (c) => {
61 | return c.type.name && c.type.name.toLowerCase() === 'graphic';
62 | }
63 | )}
64 |
65 |
66 | {
67 | mapChildren(this.getSelectedStep(), (c) => {
68 | return React.cloneElement(c, {
69 | registerStep: this.registerStep.bind(this)
70 | })
71 | })
72 | }
73 |
// Inject next/previous callbacks into any StepperControl children.
74 | {mapChildren(filterChildren(
75 | children,
76 | (c) => {
77 | return c.type.name && c.type.name.toLowerCase() === 'steppercontrol';
78 | }
79 | ), (c) => {
80 | return React.cloneElement(c, {
81 | next: this.next.bind(this),
82 | previous: this.previous.bind(this)
83 | })
84 | })}
85 |
86 | );
87 | }
88 | }
89 |
90 |
91 | Stepper.defaultProps = {
92 | currentStep: 0,
93 | height: 500
94 | };
95 |
// Idyll editor metadata: tag shape and example children/props.
96 | Stepper._idyll = {
97 | name: "Stepper",
98 | tagType: "open",
99 | children: [`
100 | [Step]This is the content for step 1[/Step]
101 | [Step]This is the content for step 2[/Step]
102 | [Step]This is the content for step 3[/Step]`],
103 | props: [{
104 | name: "currentStep",
105 | type: "number",
106 | example: '0'
107 | }]
108 | }
109 | export default Stepper;
110 |
--------------------------------------------------------------------------------
/components/default/svg.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import InlineSVG from 'react-inlinesvg';
3 |
// Thin Idyll wrapper around react-inlinesvg: renders the SVG at `src` inline.
// NOTE(review): the JSX element inside render() was stripped by the dump that
// produced this file — presumably an <InlineSVG src={...} /> element; verify
// against the original source repository.
4 | class SVG extends React.PureComponent {
5 | render() {
6 | return (
7 |
8 | );
9 | }
10 | }
11 |
12 | SVG.defaultProps = {
13 | src: ''
14 | }
15 |
// Idyll editor metadata for the [SVG /] tag.
16 | SVG._idyll = {
17 | name: "SVG",
18 | tagType: "closed",
19 | props: [{
20 | name: "src",
21 | type: "string",
22 | example: '"https://upload.wikimedia.org/wikipedia/commons/f/fd/Ghostscript_Tiger.svg"'
23 | }]
24 | }
25 |
26 | export default SVG;
27 |
28 |
--------------------------------------------------------------------------------
/components/default/text-container.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 |
// Idyll layout component: wraps article text, applying the theme/layout
// styles provided by the Idyll runtime via the injected `idyll` prop.
3 | class TextContainer extends React.PureComponent {
4 | render() {
// Pull off Idyll-injected props so only user props are spread onto the DOM.
5 | const { idyll, children, className, hasError, updateProps, ...props } = this.props;
// Drop the `styles` key from both layout and theme; merge the remainder
// into a single inline-style object (theme keys win on conflict).
6 | const { styles, ...layout } = idyll.layout;
7 | const { styles: _, ...theme } = idyll.theme;
8 | const style = { ...layout, ...theme };
9 | const cn = (className || '') + ' idyll-text-container';
// NOTE(review): the wrapping JSX tag was stripped by the dump that produced
// this file; only `{children}` remains. Verify against the original source.
10 | return (
11 |
{children}
12 | );
13 | }
14 | }
15 |
16 | export default TextContainer;
17 |
--------------------------------------------------------------------------------
/components/default/text-input.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | const ReactDOM = require('react-dom');
3 |
// Idyll component: a controlled text input bound to the Idyll `value` variable
// via updateProps.
4 | class TextInput extends React.PureComponent {
5 | constructor(props) {
6 | super(props);
// Bind once so the handler identity is stable across renders.
7 | this.onChange = this.onChange.bind(this);
8 | }
9 |
// Push the new input value back into the Idyll variable store.
10 | onChange(e) {
11 | this.props.updateProps({ value: e.target.value });
12 | }
13 |
14 | render() {
15 | const { idyll, hasError, updateProps, ...props } = this.props;
// NOTE(review): the <input .../> JSX element was stripped by the dump that
// produced this file. Verify against the original source repository.
16 | return (
17 |
18 | );
19 | }
20 | }
21 |
// Idyll editor metadata for the [TextInput /] tag.
22 | TextInput._idyll = {
23 | name: "TextInput",
24 | tagType: "closed",
25 | props: [{
26 | name: "value",
27 | type: "string",
28 | example: '"Hello"'
29 | }]
30 | }
31 |
32 | export default TextInput;
33 |
--------------------------------------------------------------------------------
/components/dr-component.js:
--------------------------------------------------------------------------------
1 | const React = require('react');
2 | const D3Component = require('idyll-d3-component');
3 | const d3 = require('d3');
4 | const H = require('hilbert');
5 | const Path = require('svg-path-generator');
6 |
// CDN base for the Met artwork images and thumbnails.
7 | const IMAGE_BASE = 'https://d1qh62yyj9qkpe.cloudfront.net'
8 |
// Add uniform random noise of width `j` around `d`, to reduce overlap of
// plotted images (used when positioning artworks on the Hilbert curve).
9 | const jitter = (d, j = 40) => {
10 | // return d;
11 | return d + j * (Math.random() - 0.5);
12 | }
13 |
// Feature used for the brightness-based 1D/2D embeddings.
14 | const brightnessKey = 'brightness_avg_perceived';
15 |
// Transition timing constants (ms) for the scroll-driven animations.
16 | const ANIMATION_DURATION = 500;
17 | const DELAY_FACTOR = 100;
18 | const DELAY_LOG_FACTOR = 100;
19 | const DELAY_BASE = 150;
20 |
21 | const smallImageSize = 20;
22 | const largeImageSize = 160;
// Module-level flag: hover interaction is enabled only after the 'reveal'
// scroll state has been reached.
23 | let revealed = false;
24 |
// NOTE(review): extends the d3 selection prototype globally — re-appending a
// node moves it to the end of its parent, i.e. paints it on top in SVG.
25 | d3.selection.prototype.moveToFront = function() {
26 | return this.each(function(){
27 | this.parentNode.appendChild(this);
28 | });
29 | };
30 |
31 |
32 |
// Main scroll-driven D3 visualization: positions ~800 Met artworks according
// to the current scroll state (random scatter, 1D brightness line, Hilbert
// curve embedding, or a precomputed PCA/t-SNE/UMAP projection).
33 | class DRComponent extends D3Component {
34 |
35 | initialize(node, props) {
36 | this.width = node.getBoundingClientRect().width;
37 | this.height = window.innerHeight;
38 |
// Randomly subsample roughly half the artworks on each page load.
39 | const images = props.images.filter((d) => Math.random() > 0.5);
40 |
41 | const svg = this.svg = d3.select(node).append('svg');
42 | svg.attr('viewBox', `0 0 ${this.width} ${this.height}`)
43 | .style('width', '100%')
44 | .style('height', 'auto')
45 | .style('overflow', 'visible')
46 | .style('cursor', 'crosshair')
47 | .style('box-shadow', '0px 0px 10000px transparent') // hack for overflow on chrome
48 | // .style('background', 'white')
49 | // .style('max-height', '100vh');
50 |
// Build a normalizing [0,1] scale per feature (weights keys plus the
// precomputed algorithm coordinates). NOTE(review): _scaleCache is declared
// as an array but indexed with string keys — it works, but a plain object
// would express the intent better.
51 | this.weightKeys = Object.keys(props.weights);
52 | const _scaleCache = [];
53 | this.weightKeys.concat(['X_pca_x', 'X_pca_y', 'X_mctsne_x', 'X_mctsne_y', 'X_umap_x', 'X_umap_y']).forEach((key) => {
54 | _scaleCache[key] = d3.scaleLinear().domain(d3.extent(images, (d) => d[key]));
55 | })
56 |
// Normalize datum `d`'s value for `key` to [0,1]; falls back to 1 when the
// key has no cached scale (the try/catch silently absorbs missing keys).
57 | this.normalizeVar = (d, key) => {
58 | // console.log(key);
59 | try {
60 | return _scaleCache[key](d[key]);
61 | } catch(e) {
62 | // console.log(e);
63 | return 1;
64 | }
65 | }
66 |
67 |
// Vertical scale for the 1D brightness projection state.
68 | this.brightness = d3.scaleLinear().domain(d3.extent(images, (d) => d[brightnessKey])).range([0, this.height - this.height / 30]);
69 |
70 |
// Build an order-4 Hilbert space-filling curve as an (invisible) SVG path;
// scalar values in [0,1] are later mapped to 2D points along this path.
71 | const scale = 0.9;
72 | const HILBERT_SIZE = 4;
73 | const _hilbertNormalize = d3.scaleLinear().domain([0, 1]).range([0, Math.pow(HILBERT_SIZE, 4)]);
74 | const _hilbert = new H.Hilbert2d(HILBERT_SIZE);
75 |
76 | const hilbertG = svg.append('g').attr('transform', `translate(${scale / Math.pow(HILBERT_SIZE, 2) / 2 * this.width}, ${scale / Math.pow(HILBERT_SIZE, 2) / 2 * this.height})`)
77 |
78 | let hilbertPath = Path();
79 | d3.range(Math.pow(HILBERT_SIZE, 4)).forEach((i) => {
80 | const hilbertOut = _hilbert.xy(i);
81 | const cx = (hilbertOut.x / Math.pow(2, HILBERT_SIZE)) * scale * this.width + (1 - scale) / 2 * this.width;
82 | const cy = (hilbertOut.y / Math.pow(2, HILBERT_SIZE)) * scale * this.height + (1 - scale) / 2 * this.height;
83 | if (i === 0) {
84 | hilbertPath.moveTo(cx, cy);
85 | } else {
86 | hilbertPath.lineTo(cx, cy);
87 | }
88 | })
89 |
90 | const $hPath = hilbertG.append('path').attr('d', hilbertPath).attr('fill', 'none').attr('stroke', 'none').attr('stroke-width', 3);
91 | const hPath = $hPath.node();
92 |
93 | this.$hPath = $hPath;
94 |
// Map a scalar in [0,1] to an {x, y} point along the Hilbert curve by
// walking a proportional arc length along the rendered path.
95 | const _pathLength = hPath.getTotalLength();
96 | this.hilbert = (d) => {
97 |
98 | const hilbertOut = hPath.getPointAtLength(_pathLength * d);
99 |
// NOTE(review): this inner `scale` (0.66) shadows the outer 0.9 used to
// build the path — presumably intentional to inset the points; confirm.
100 | const scale = 0.66;
101 | const xOffset = scale / Math.pow(HILBERT_SIZE, 2) / 2 * this.width;
102 | const yOffset = scale / Math.pow(HILBERT_SIZE, 2) / 2 * this.height;
103 | return {
104 | x: xOffset + hilbertOut.x,
105 | y: yOffset + hilbertOut.y
106 | }
107 | }
108 |
109 |
// One <g class="element"> per artwork, initially scattered at random.
110 | const $elements = svg.selectAll('.element')
111 | .data(images.map((d) => {
112 | return Object.assign({_seed: Math.random()}, d);
113 | }))
114 |
115 | const $el = $elements.enter()
116 | .append('g')
117 | .attr('transform', () => `translate(${Math.random() * this.width}, ${Math.random() * this.height})`)
118 | .classed('element', true);
119 |
120 |
121 | this.$el = $el;
122 | this.$elements = $elements;
123 |
124 |
// Thumbnail image per artwork; hidden (opacity 0) until the 'reveal' state.
// Hover grows the thumbnail 1.5x and reports the artwork via updateProps.
125 | this.$images = this.$el.append("svg:image")
126 | .attr('x', (d) => {
127 | return - smallImageSize / 2;
128 | })
129 | .attr('y', (d) => {
130 | return - smallImageSize / 2;
131 | })
132 | .attr('width', (d) => {
133 | return smallImageSize;
134 | })
135 | .attr('height', (d) => {
136 | return smallImageSize;
137 | })
138 | .on('mouseenter', (d, i, nodes) => {
139 |
140 | if (!revealed) {
141 | return;
142 | }
143 | // console.log('mouseenter');
144 | // TODO - move node's parent to front
145 | this.props.updateProps({
146 | selectedArtwork: d
147 | });
148 |
149 | d3.select(nodes[i].parentNode).moveToFront();
150 |
151 | const size = smallImageSize * 1.5;
152 | d3.select(nodes[i])
153 | .attr('x', (d) => {
154 | return - size / 2;
155 | })
156 | .attr('y', (d) => {
157 | return - size / 2;
158 | })
159 | .attr('width', (d) => {
160 | return size;
161 | })
162 | .attr('height', (d) => {
163 | return size;
164 | })
165 | // .attr("xlink:href", (d) => `${IMAGE_BASE}/met/${d['Object ID']}.jpg`);
166 | })
167 | .on('mouseleave', (d, i, nodes) => {
168 | // console.log('mouseleave');
169 | this.props.updateProps({
170 | selectedArtwork: null
171 | });
172 | d3.select(nodes[i])
173 | .attr('x', (d) => {
174 | return - smallImageSize / 2;
175 | })
176 | .attr('y', (d) => {
177 | return - smallImageSize / 2;
178 | })
179 | .attr('width', (d) => {
180 | return smallImageSize;
181 | })
182 | .attr('height', (d) => {
183 | return smallImageSize;
184 | })
185 | // .attr("xlink:href", (d) => `${IMAGE_BASE}/thumbnails/met/${d['Object ID']}.jpg`);
186 | })
187 | .style('opacity', 0)
188 |
189 |
190 |
191 | // d3.range(0, 1, 0.001).forEach((d) => {
192 | // const h = this.hilbert(d);
193 | // svg.append('circle')
194 | // .attr('cx', h.x)
195 | // .attr('cy', h.y)
196 | // .attr('r', 5)
197 | // })
198 | }
199 |
// Reposition every artwork on the Hilbert curve using the weighted linear
// combination of normalized features in props.weights (min-max rescaled
// across the current artworks, then jittered).
200 | _updateHilbert(props) {
201 | let max = Number.NEGATIVE_INFINITY;
202 | let min = Number.POSITIVE_INFINITY;
203 | let weights = [];
204 | // const scale = this.weightKeys.reduce((memo, key, i) => {
205 | // return memo + props.weights[key];
206 | // }, 0);
207 | // if (scale === 0) {
208 | // return;
209 | // }
// First pass: compute each artwork's weighted score and track min/max.
210 | this.$el
211 | .each((d) => {
212 |
213 | const _weighted = this.weightKeys.reduce((memo, key, i) => {
214 | // console.log('props.weights[' + key + ']', props.weightKeys)
215 | return memo + props.weights[key] * this.normalizeVar(d, key);
216 | }, 0);
217 |
218 | if (_weighted > max) {
219 | max = _weighted;
220 | }
221 | if (_weighted < min) {
222 | min = _weighted;
223 | }
224 | weights.push(_weighted);
225 | });
226 |
// Second pass: map each normalized score onto the Hilbert curve.
// NOTE(review): relies on .each() visiting elements in the same order as
// the index `i` of the attr callback — true for a stable d3 selection.
227 | this.$el
228 | // .transition()
229 | // .duration(1000)
230 | .attr('transform', (d, i) => {
231 | const { x, y } = this.hilbert((weights[i] - min) / (max - min));
232 | return `translate(${jitter(x)}, ${jitter(y)})`;
233 | })
234 | }
235 |
// Scroll-state machine. Each named state triggers a transition:
// 'initial' — draw placeholder rects and start fetching thumbnails;
// 'reveal' — show images (or re-scatter when coming back from '1d');
// '1d' — sort on a vertical brightness line;
// 'reset' — random scatter; 'hilbert-*' — Hilbert embeddings;
// 'algorithms' — switch to precomputed PCA/t-SNE/UMAP coordinates.
236 | update(props) {
237 | if (props.state !== this.props.state) {
238 | switch(props.state) {
239 | case 'initial':
240 |
241 | this.$rects = this.$el.append('rect')
242 | .attr('x', 0)
243 | .attr('y', 0)
244 | .attr('width', 0)
245 | .attr('height', 0)
246 | .style('fill', '#FFD5C7');
247 |
248 |
// Staggered grow-in: random base delay plus a log ramp over index.
249 | this.$rects
250 | .transition()
251 | .duration(ANIMATION_DURATION)
252 | // .delay(100)
253 | // .delay((d, i) => 100 + (i + 10) * 30 + (Math.random() - 0.5) * 30)
254 | .delay((d, i) => DELAY_BASE * Math.random() + DELAY_LOG_FACTOR * Math.log(DELAY_FACTOR * i + 1))
255 | .ease(d3.easeQuadIn)
256 | .attr('x', (d) => {
257 | return -10;
258 | })
259 | .attr('y', (d) => {
260 | return -10;
261 | })
262 | .attr('width', (d) => {
263 | return 20;
264 | })
265 | .attr('height', (d) => {
266 | return 20;
267 | })
268 | // .on('end', () => {
269 | // // `./static/images/${d.AccessionNumber}.jpg`)
270 | // });
271 |
// Kick off the thumbnail downloads after the rect animation has started.
272 | setTimeout(() => {
273 | this
274 | .$images
275 | .attr("xlink:href", (d) => `${IMAGE_BASE}/thumbnails/met/${d['Object ID']}.jpg`)
276 | }, 2000)
277 |
278 | break;
279 |
280 | case 'reveal':
281 | revealed = true;
// Scrolling back up from '1d' re-scatters; scrolling down from
// 'initial' fades the placeholder rects out and shows the images.
282 | if (this.props.state === '1d') {
283 | this.$el
284 | .transition()
285 | .duration(ANIMATION_DURATION)
286 | .attr('transform', () => `translate(${Math.random() * this.width}, ${Math.random() * this.height})`)
287 | } else {
288 | this
289 | .$images
290 | .style('opacity', 1);
291 | this.$rects
292 | .transition()
293 | .delay((d, i) => DELAY_BASE * Math.random() + DELAY_LOG_FACTOR * Math.log(DELAY_FACTOR * i + 1))
294 | .duration(ANIMATION_DURATION)
295 | .style('opacity', 0)
296 | .on('end', function() {
297 | d3.select(this).remove();
298 | })
299 | }
300 | break;
301 | case '1d':
// Project onto a vertical line positioned by brightness; x is jittered
// around the horizontal center.
302 | this.$el
303 | .transition()
304 | .duration(ANIMATION_DURATION)
305 | .attr('transform', (d) => `translate(${jitter(this.width / 2, this.width / 2)}, ${jitter(this.brightness(d[brightnessKey]))})` );
306 | break;
307 | case 'reset':
308 | props.updateProps({ showHilbert: false });
309 | this.$el
310 | .transition()
311 | .duration(ANIMATION_DURATION)
312 | .attr('transform', () => `translate(${Math.random() * this.width}, ${Math.random() * this.height})`)
313 | break;
314 | case 'hilbert-brightness':
315 | this.$el
316 | .transition()
317 | .duration(ANIMATION_DURATION)
318 | .attr('transform', (d) => {
319 | const { x, y } = this.hilbert(this.normalizeVar(d, brightnessKey));
320 | return `translate(${jitter(x)}, ${jitter(y)})`;
321 | })
322 | break;
323 | case 'hilbert-custom':
324 | this._updateHilbert(props);
325 | props.updateProps({ algorithm: '' });
326 | break;
327 | case 'algorithms':
328 | props.updateProps({ showHilbert: false, algorithm: 'pca' });
329 | break;
330 | default:
331 | break;
332 | }
// Same state, but slider weights changed (and not just a hover): recompute.
333 | } else if (props.state.indexOf('hilbert') > -1 && props.selectedArtwork === this.props.selectedArtwork) {
334 | this._updateHilbert(props);
// Same 'algorithms' state, new algorithm chosen: move to its coordinates.
335 | } else if (props.state === 'algorithms' && props.algorithm !== this.props.algorithm) {
336 | this.$el
337 | .transition()
338 | .duration(ANIMATION_DURATION)
339 | .attr('transform', (d) => {
340 | const x = this.width * this.normalizeVar(d, `X_${props.algorithm}_x`);
341 | const y = this.height * this.normalizeVar(d, `X_${props.algorithm}_y`);
342 | return `translate(${x}, ${y})`;
343 | })
344 | }
345 |
// Toggle the Hilbert-curve overlay (dims artworks while shown).
346 | if (props.showHilbert !== this.props.showHilbert) {
347 | this.$el.style('opacity', props.showHilbert ? 0.7 : 1);
348 | this.$hPath.attr('stroke', props.showHilbert ? '#81daf3' : 'none');
349 | }
350 | }
351 | }
352 |
353 | module.exports = DRComponent;
354 |
--------------------------------------------------------------------------------
/components/projection.js:
--------------------------------------------------------------------------------
1 | const React = require('react');
2 | import SVG from 'react-inlinesvg';
3 | import * as d3 from 'd3';
4 |
5 | const imageSize = 40;
6 | const IMAGE_BASE = 'https://d1qh62yyj9qkpe.cloudfront.net'
7 |
// Animated diagram of a 1D projection: once the static SVG loads, a cycling
// animation slides artwork thumbnails along an axis positioned by brightness.
8 | class Projection extends React.Component {
9 |
10 | constructor(props) {
11 | super(props);
12 | this.state = {
13 | }
14 | }
// Called when the inline SVG finishes loading; selects elements of the
// loaded diagram by class and starts an endless 2-second animation cycle.
15 | onLoad(src) {
16 | const svg = d3.select(this.ref).select('.projection-svg-el svg .container');
17 | const circle = svg.select('.output-circle');
18 | const line = svg.select('.output-line');
// Pixel bounds of the diagram's output axis within the SVG artwork.
19 | const minX = 77.5;
20 | const maxX = 402.35;
21 |
// Domain constants are the observed min/max of the brightness feature.
// NOTE(review): hard-coded from the dataset — confirm they stay in sync.
22 | const x = d3.scaleLinear().domain([4.807568008913199, 235.72559024683454]).range([minX, maxX]);
23 | const color = d3.scaleLinear().domain([4.807568008913199, 235.72559024683454]).range([0, 255]);
24 |
25 | const cy = +circle.attr('cy');
26 |
27 | const attributeLabels = svg.selectAll('.attribute-labels tspan');
28 |
29 |
30 | const img = svg.append("svg:image").attr('y', cy - imageSize / 2).attr('width', imageSize).attr('height', imageSize);
31 |
// Cycle through the artworks forever, one every 2 seconds.
// NOTE(review): the setTimeout chain is never cancelled; it keeps running
// after unmount — consider clearing it in componentWillUnmount.
32 | let count = 0;
33 | const images = this.props.images;
34 | const transition = () => {
35 | const image = images[count % images.length];
36 | const r = image['brightness_avg_perceived'];
37 |
// Flash the attribute labels, then fade back to white.
38 | attributeLabels
39 | .style('fill', '#81daf3')
40 | .transition()
41 | .delay(250)
42 | .duration(1000)
43 | .style('fill', '#fff');
44 |
45 | img
46 | .attr("xlink:href", `${IMAGE_BASE}/thumbnails/met/${image['Object ID']}.jpg`)
47 | .transition().duration(1000)
48 | .attr('x', x(r) - imageSize / 2);
49 |
// Slide the output marker and connector line; gray level tracks brightness.
50 | circle.transition().duration(1000).attr('cx', x(r)).attr('fill', `rgb(${color(r)}, ${color(r)}, ${color(r)})`);
51 | line.transition().duration(1000).attr('d', `M${x(r)},325.5 L221.5,222.5`);
52 |
53 | count += 1;
54 | setTimeout(transition, 2000);
55 | }
56 |
57 | transition();
58 | }
59 |
60 |
61 | render() {
62 | const { hasError, updateProps, ...props } = this.props;
63 |
// NOTE(review): the JSX tags below were stripped by the dump that produced
// this file — only the ref-callback fragment survives. Verify against the
// original source repository.
64 | return (
65 |
this.ref = ref}>
66 |
73 | );
74 | }
75 | }
76 |
77 | module.exports = Projection;
--------------------------------------------------------------------------------
/components/references.js:
--------------------------------------------------------------------------------
1 | const React = require('react');
2 | const parse = require('bibtex-parser')
3 | import bibliography from './bib.js'
4 |
// Map from UPPERCASED bibtex key to its 1-based citation number; filled once
// at module load below, used by the exported `cite` helper.
5 | const citationCache = {};
6 | const cite = (label) => {
7 | return citationCache[label.toUpperCase()];
8 | }
9 |
10 | const parsedBib = parse(bibliography);
11 |
// Number references in the order the parser returns the bibtex entries.
12 | Object.keys(parsedBib).forEach((key, i) => {
13 | citationCache[key.toUpperCase()] = i + 1;
14 | });
15 |
// Renders the numbered bibliography at the end of the article.
16 | class References extends React.Component {
17 |
// Render a single reference entry (title / author / journal, year).
// NOTE(review): the wrapping JSX tags were stripped by the dump that
// produced this file; only the interpolated fields remain.
18 | createReference(reference, i) {
19 | return (
20 |
21 | {reference.TITLE}.
22 |
23 | {reference.AUTHOR}.
24 |
25 | {reference.JOURNAL}, {reference.YEAR}.
26 |
27 | )
28 | }
29 |
30 | createReferences() {
31 | return Object.keys(parsedBib)
32 | .map((key, i) => {
33 | const reference = parsedBib[key];
34 | return this.createReference(reference, i);
35 | });
36 | };
37 |
38 | render() {
39 | const { hasError, idyll, updateProps, ...props } = this.props;
40 | return (
41 |
42 |
References
43 |
44 | {this.createReferences()}
45 |
46 |
47 | );
48 | }
49 | }
50 |
// Expose the citation-number lookup for [Cite /] components elsewhere.
51 | References.cite = cite;
52 |
53 | module.exports = References;
54 |
--------------------------------------------------------------------------------
/components/start-button.js:
--------------------------------------------------------------------------------
1 | const React = require('react');
2 | const d3 = require('d3');
3 |
// Landing-page button: starts the article by unlocking scrolling, moving the
// scroll state machine to 'initial', and simulating a 2-second data load.
4 | class StartButton extends React.Component {
5 |
6 | constructor(props) {
7 | super(props);
// Force the page back to the top on reload so the scroll-driven story
// always starts from the beginning (browser-only guard for SSR).
8 | if (typeof window !== 'undefined') {
9 | window.onbeforeunload = function(){
10 | window.scrollTo(0,0);
11 | };
12 | }
13 | }
14 |
15 | onClick() {
// Body overflow was presumably locked elsewhere; re-enable scrolling here.
16 | d3.select('body').style('overflow', 'auto');
17 | this.props.updateProps({
18 | state: 'initial'
19 | });
20 | this.props.updateProps({ status: 'loading' })
// Fake loading indicator: flips to 'loaded' after 2 seconds.
21 | setTimeout(() => {
22 | this.props.updateProps({ status: 'loaded' })
23 | }, 2000);
24 | }
25 |
26 | render() {
27 | const { hasError, idyll, status, updateProps, ...props } = this.props;
// NOTE(review): the button JSX was stripped by the dump that produced this
// file. Renders the start button only in the 'initial' status; otherwise
// renders nothing.
28 | if (status === 'initial') {
29 | return (
30 |
33 | );
34 | }
35 | // else if (status === 'loading') {
36 | // return
Data fetch initiated...
;
37 | // }
38 |
39 | return null;
40 | // return (
41 | //
42 | //
43 | // Data loading.
Scroll to Continue...
44 | //
45 | //
46 | // );
47 | }
48 | }
49 |
50 | module.exports = StartButton;
51 |
--------------------------------------------------------------------------------
/components/table.js:
--------------------------------------------------------------------------------
1 | const React = require('react');
2 | const Table = require('react-table').default;
3 |
// Idyll wrapper around react-table: renders `data` with columns either given
// explicitly, derived from a list of column-name strings, or inferred from
// the keys of the first data row.
4 | class TableComponent extends React.PureComponent {
// Resolve the react-table column config in order of precedence:
// 1. explicit `columns` prop (strings become {Header, accessor} pairs),
// 2. keys of the first data row, 3. empty list.
5 | getColumns() {
6 | if (this.props.columns) {
7 | if (this.props.columns.length && typeof this.props.columns[0] === 'string') {
8 | return this.props.columns.map((d) => {
9 | return {
10 | Header: d,
11 | accessor: d
12 | };
13 | })
14 | }
15 |
16 | return this.props.columns;
17 | }
18 | if ((this.props.data || []).length) {
19 | return Object.keys(this.props.data[0]).map((d) => {
20 | return {
21 | Header: d,
22 | accessor: d
23 | }
24 | })
25 | }
26 |
27 | return [];
28 | }
29 | render() {
// Strip Idyll-injected props so only table options are forwarded.
30 | const { idyll, hasError, updateProps, children, ...props} = this.props;
// NOTE(review): the <Table ...> opening tag and some attributes were
// stripped by the dump that produced this file; the surviving fragment
// hides pagination when the data fits on one page.
31 | return (
32 |
this.props.defaultPageSize}
35 | minRows={this.props.data.length <= this.props.defaultPageSize ? this.props.data.length : undefined}
36 | data={this.props.data || []}
37 | children={undefined}
38 | columns={this.getColumns()}
39 | />
40 | );
41 | }
42 | }
43 |
44 | TableComponent.defaultProps = {
45 | data: [],
46 | showPageSizeOptions: false,
47 | showPageJump: false,
48 | defaultPageSize: 20
49 | }
50 |
// Idyll editor metadata for the [Table /] tag.
51 | TableComponent._idyll = {
52 | name: "Table",
53 | tagType: "closed",
54 | props: [{
55 | name: "data",
56 | type: "array",
57 | example: 'x'
58 | }, {
59 | name: "showPagination",
60 | type: "boolean",
61 | example: 'false'
62 | }, {
63 | name: "showPageSizeOptions",
64 | type: "boolean",
65 | example: 'false'
66 | }, {
67 | name: "showPageJump",
68 | type: "boolean",
69 | example: 'false'
70 | }]
71 | }
72 |
73 | export default TableComponent;
74 |
--------------------------------------------------------------------------------
/data/example-data.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "x": 0,
4 | "y": 0
5 | }, {
6 | "x": 1,
7 | "y": 1
8 | }
9 | ]
10 |
--------------------------------------------------------------------------------
/index.idyll:
--------------------------------------------------------------------------------
1 | [meta
2 | title:"The Beginner's Guide to Dimensionality Reduction"
3 | description:"Explore the methods that data scientists use to visualize high-dimensional data in this interactive article."
4 | shareImageUrl:"https://idyll.pub/post/dimensionality-reduction-293e465c2a3443e8941b016d/static/images/share.png"
5 | shareImageWidth:"1600"
6 | shareImageHeight:"800" /]
7 |
8 |
9 | [data name:"images" source:"met-with-coordinates.csv" /]
10 | [var name:"scrollState" value:"loading" /]
11 |
12 | [var name:"widthWeight" value:0 /]
13 | [var name:"heightWeight" value:0 /]
14 | [var name:"weightWeight" value:0 /]
15 | [var name:"endDateWeight" value:0 /]
16 | [var name:"brightnessWeight" value:1 /]
17 | [var name:"showHilbert" value:false /]
18 | [var name:"algorithm" value:"" /]
19 | [var name:"selectedArtwork" value:`null ` /]
20 |
21 | [Fixed]
22 | [DRComponent
23 | images:images
24 | state:scrollState
25 | showHilbert:showHilbert
26 | algorithm:algorithm
27 | selectedArtwork:selectedArtwork
28 | weights:`{
29 | // 'Width (cm)': widthWeight,
30 | // 'Height (cm)': heightWeight,
31 | 'Object End Date': 1 - brightnessWeight,
32 | 'brightness_avg_perceived': brightnessWeight
33 | }`
34 | /]
35 |
36 | [/Fixed]
37 |
38 | [Scroller currentState:scrollState]
39 |
40 |
41 | [Step state:"loading"]
42 |
43 |
44 | [Header
45 | title:"The Beginner's Guide to Dimensionality Reduction"
46 | subtitle:"Explore the methods that data scientists use to visualize high-dimensional data."
47 | date:"July 16, 2018"
48 | authors:`[
49 | { name: "Matthew Conlen", link: "https://twitter.com/mathisonian" },
50 | { name: "Fred Hohman", link: "https://twitter.com/fredhohman" }
51 | ]` /]
52 |
53 | [var name:"loadStatus" value:"initial" /]
54 | [StartButton state:scrollState status:loadStatus /]
55 |
56 | [Conditional if:`loadStatus === 'loaded'`]
57 | Dimensionality reduction is a powerful
58 | technique used by data scientists to look for hidden structure in data. The method
59 | is useful in a number of domains, for example document categorization, protein disorder prediction, and machine learning model debugging[Cite reference:"maaten2008visualizing"/].
60 |
61 | https://idyll-lang.org
62 |
63 | ```
64 | https://idyll-lang.org
65 | ```
66 |
67 | The results of a dimensionality reduction algorithm can be visualized to reveal patterns and clusters of similar or dissimilar data. Even though the data is displayed in only two or three dimensions, structures present in higher dimensions are maintained, at least roughly[Cite reference:"olah2014visualizing"/].
68 |
69 | The technique is available in many applications, for example Google's [Embedding Projector](https://projector.tensorflow.org/)[Cite reference:"smilkov2016embedding"/] lets you view high-dimensional datasets embedded in two or three dimensions under a variety of different projections.
70 |
71 | // Dimensionality reduction algorithms work by inferring a function to describe the structure of *unlabeled* data, that is, data that has no preexisting categorization. Many of the commonly used algorithms produce complex embeddings and, since all considered data are unlabelled, it is difficult to evaluate the results of the structures produced by such algorithms.
72 |
73 | This guide will teach you how to think about these embeddings, and provide a comparison of some of the most popular dimensionality reduction algorithms used today.
74 |
75 | [/Conditional]
76 |
77 | [/Step]
78 |
79 |
80 | [Step state:"reveal"]
81 |
82 | # Art or science?
83 |
84 | Your browser has just loaded information about
85 | roughly 800 artworks from the collection at the [Metropolitan Museum of Art](https://www.metmuseum.org/). The museum has publicly released a large dataset about their collection[Cite reference:"met"/], just a small fraction are displayed here. They are positioned randomly.
86 |
87 | *Hover over an artwork to see its details.*
88 |
89 | [br/]
90 |
91 | Each artwork includes basic metadata, such as its title, artist, date made, medium, and dimensions.
92 | Data scientists like to call metadata for each data point (artwork) *features*.
93 | Below are some of the features of 10 artworks in the dataset.
94 |
95 | [br/]
96 |
97 | [Table
98 | data:`images.slice(0, 10)`
99 | columns:`[
100 | { Header: 'Year', accessor: 'Object End Date' },
101 | { Header: 'Title', accessor: 'Title'},
102 | { Header: 'Artist', accessor: 'Artist Display Name'}]` /]
103 |
104 | [br/]
105 |
106 | [/Step]
107 |
108 |
109 | [Step state:"1d"]
110 |
111 | # Projecting onto a line
112 |
113 | These features can be thought of as vectors existing in a high-dimensional space. Visualizing the vectors would reveal a lot about the distribution of the data, however humans can't see so many dimensions all at once.
114 |
115 | Instead the data can be projected onto a lower dimension, one that can be visualized directly. This kind of projection is called an *embedding*.
116 |
117 | // *Remember that points have 0 dimensions, lines are 1 dimensional, planes are 2 dimensional, etc.*
118 | // [br/]
119 |
120 | Computing a 1-dimensional embedding requires taking each artwork and computing a single number to describe it.
121 | A benefit of reducing to 1D is that the numbers, and the artworks, can be sorted on a line.
122 |
123 | [Projection images:images src:"./static/images/projection.svg" /]
124 |
125 | On the right you see the artwork positioned according to their *average pixel brightness*. Notice that the images are sorted, with the darkest images appearing at the top and the brightest images on the bottom!
126 | [/Step]
127 |
128 |
129 | [Step state:"reset"]
130 |
131 | # For the mathematically inclined
132 |
133 | Dimensionality reduction can be formulated mathematically in the context of a given dataset. Consider a dataset represented as a matrix
134 | [Equation display:false]X[/Equation], where
135 | [Equation display:false]X[/Equation]
136 | is of size
137 | [Equation display:false]m \times n[/Equation], where
138 | [Equation display:false]m[/Equation] represents the number of rows of
139 | [Equation display:false]X[/Equation], and
140 | [Equation display:false]n[/Equation] represents the number of columns.
141 |
142 | Typically, the rows of the matrix are *data points* and the columns are *features*.
143 | Dimensionality reduction will reduce the number of features of each data point, turning
144 | [Equation display:false]X[/Equation] into a new matrix,
145 | [Equation display:false]X'[/Equation], of size
146 | [Equation display:false]m \times d[/Equation], where
147 | [Equation display:false]d < n[/Equation].
148 | For visualizations we typically set
149 | [Equation display:false]d[/Equation] to be 1, 2 or 3.
150 |
151 | Say [Equation display:false]m=n[/Equation], that is
152 | [Equation display:false]X[/Equation] is a square matrix.
153 | Performing dimensionality reduction on
154 | [Equation display:false]X[/Equation] and setting
155 | [Equation display:false]d=2[/Equation] will change it from a square matrix to a tall, rectangular matrix.
156 |
157 | [Equation display:true]
158 | X =
159 | \begin{bmatrix}
160 | x & x & x \\
161 | x & x & x \\
162 | x & x & x
163 | \end{bmatrix}
164 | \implies
165 | \begin{bmatrix}
166 | x' & x' \\
167 | x' & x' \\
168 | x' & x'
169 | \end{bmatrix}
170 | = X'
171 | [/Equation]
172 |
173 | // Here,
174 | // [Equation display:false]x[/Equation] and
175 | // [Equation display:false]x'[/Equation] just represent nonzero values of a matrix.
176 |
177 | Each data point only has two features now, i.e., each data point has been reduced from a 3 dimensional vector to a 2 dimensional vector.
178 |
179 | [/Step]
180 |
181 |
182 | [Step state:"hilbert-brightness"]
183 |
184 | # Embedding data in two dimensions
185 |
186 | The same brightness feature can be used to position the artworks in 2D space instead of 1D.
187 | The pieces have more room to spread out.
188 |
189 | On the right you see a simple 2-dimensional embedding based on image brightness,
190 | but this isn't the only way to position the artworks.
191 | In fact, there are many, and some projections are more useful than others.
192 |
193 | *Use the slider to vary the influence that the brightness and artwork age have in determining the embedding positions.*
194 |
195 | As you move the slider from brightness to artwork age, the embedding changes from highlighting bright and dark images,
196 | and starts to cluster recent modern-day images in the bottom left corner whereas older artworks are moved farther away (*hover over images to see their date*).
197 |
198 |
199 | [div style:`{display: 'flex', flexAlign: 'row', textAlign: 'center'}`]
200 |
201 | [div style:`{margin: '0 auto'}`]
202 | *Artwork Age*
203 | [Range value:brightnessWeight min:0 max:1 step:0.005 /]
204 | *Brightness*
205 | [/div]
206 |
207 | [/div]
208 |
209 | [var name:"showHilbertDetails" value:false /]
210 |
211 | [conditional if:`!showHilbertDetails ` ]
212 | [button onClick:` showHilbertDetails = true; showHilbert = true; ` ]
213 | Show technical details
214 | [/button]
215 | [/conditional]
216 |
217 | [conditional if:showHilbertDetails ]
218 |
219 | The embedding you see here is actually a linear 1D embedding, whose resulting scalar is then
220 | mapped on a space-filling Hilbert curve[Cite reference:"hilbert1890stetige"/] to give the illusion of a 2D embedding,
221 | since space-filling curves preserve locality fairly well[Cite reference:"moon2001analysis"/].
222 |
223 | Each artwork's 1D reduced projection is computed by a linear combination of the three features above.
224 |
225 | Let [Equation display:false]a[/Equation] be a given artwork, and let each slider's value be a weight
226 | [Equation display:false]w_{i}[/Equation].
227 | We will compute
228 | [Equation display:false]a'[/Equation], the scalar projection of
229 | [Equation display:false]a[/Equation] into
230 | [Equation display:false]\mathbb{R}[/Equation].
231 |
232 | [Equation display:true]
233 | a' = (a_{\text{brightness}} \times w_{\text{brightness}}) + (a_{\text{age}} \times w_{\text{age}})
234 | [/Equation]
235 |
236 | Each artwork's final position is randomly jittered to prevent excessive overlap.
237 |
238 | [button onClick:`showHilbert = false; showHilbertDetails = false; `]Hide technical details[/button]
239 |
240 | [/conditional]
241 | [/Step]
242 |
243 | [Step state:"algorithms"]
244 |
245 | # Real-world algorithms
246 |
247 | The previous section showed an example of a user-driven embedding, where the exact influence of each feature is known.
248 | However, you may have noticed that it's hard to find meaningful combinations of feature weights.
249 |
250 |
251 | State-of-the-art algorithms can find an optimal combination of features so that distances in
252 | the high dimensional space are preserved in the embedding. Use the tool below to project the artworks using three commonly used algorithms.
253 |
254 |
255 | In this example the reduction is performed on the pixels of each image: each image is flattened into a single vector, where each pixel represents one feature.
256 | The vector records are then reduced to two dimensions.
257 |
258 | [div className:"panel" ]
259 |
260 | [div style:`{display: 'flex', flexAlign: 'row', textAlign: 'center'}`]
261 |
262 | [button className:`(!algorithm || algorithm === 'pca') ? 'selected' : ''` onClick:`algorithm = "pca" `]
263 | PCA
264 | [/button]
265 |
266 | [button className:`algorithm === 'mctsne' ? 'selected' : ''` onClick:`algorithm = "mctsne" `]
267 | t-SNE
268 | [/button]
269 |
270 | [button className:`algorithm === 'umap' ? 'selected' : ''` onClick:`algorithm = "umap" `]
271 | UMAP
272 | [/button]
273 | [/div]
274 |
275 | [conditional if:`!algorithm || algorithm === 'pca' ` ]
276 | ## Principal component analysis
277 |
278 | Pros:
279 | * Relatively computationally cheap.
280 | * Can save embedding model to then project new data points into the reduced space.
281 |
282 | Cons:
283 | * Linear reduction limits information that can be captured; not as distinctly clustered as other algorithms.
284 |
285 | [/conditional]
286 | [conditional if:`algorithm === 'mctsne' ` ]
287 | ## t-Distributed stochastic neighbor embedding
288 |
289 | Pros:
290 | * Produces highly clustered, visually striking embeddings.
291 | * Non-linear reduction, captures local structure well.
292 |
293 | Cons:
294 | * Global structure may be lost in favor of preserving local distances.
295 | * More computationally expensive.
296 | * Requires setting hyperparameters that influence quality of the embedding.
297 | * Non-deterministic algorithm.
298 |
299 | [/conditional]
300 | [conditional if:`algorithm === 'umap' ` ]
301 | ## Uniform manifold approximation and projection
302 |
303 | Pros:
304 | * Non-linear reduction that is computationally faster than t-SNE.
305 | * User defined parameter for preserving local or global structure.
306 | * Solid theoretical foundations in manifold learning.
307 |
308 | Cons:
309 | * New, less prevalent algorithm.
310 | * Requires setting hyperparameters that influence quality of the embedding.
311 | * Non-deterministic algorithm.
312 |
313 | [/conditional]
314 | [/div]
315 |
316 |
317 | There are many algorithms that compute a dimensionality reduction of a dataset.
318 | Simpler algorithms such as principal component analysis (PCA) maximize the variance in the data to produce the best possible embedding.
319 | More complicated algorithms, such as t-distributed stochastic neighbor embedding (t-SNE)[Cite reference:"maaten2008visualizing"/],
320 | iteratively produce highly clustered embeddings.
321 | Unfortunately, whereas before the influence of each feature was explicitly known,
322 | one must relinquish control to the algorithm to determine the best embedding—
323 | that means that it is not clear what features of the data are used to compute the embedding.
324 | This can lead to misinterpreting what an embedding is showing[Cite reference:"wattenberg2016how"/].
325 |
326 | Dimensionality reduction, and more broadly the field of unsupervised learning, is an active area of research where researchers are
327 | developing new techniques to create better embeddings.
328 | A new technique, uniform manifold approximation and projection (UMAP)[Cite reference:"mcinnes2018umap"/],
329 | is a non-linear reduction that aims to create visually striking embeddings fast, scaling to larger datasets.
330 |
331 |
332 | [/Step]
333 |
334 | [Step]
335 | # Try it for yourself
336 |
337 | Dimensionality reduction is a powerful tool to better understand high-dimensional data.
338 | If you have your own dataset and wish to visualize it using dimensionality reduction, there are a number of different algorithms [Cite reference:"van2009dimensionality"/] and
339 | implementations available.
340 | In Python, the scikit-learn package [Cite reference:`["scikit-learn", "sklearn_api"]`/] provides APIs for many
341 | [unsupervised dimensionality reduction](http://scikit-learn.org/stable/modules/unsupervised_reduction.html) algorithms,
342 | as well as [manifold learning](http://scikit-learn.org/stable/modules/manifold.html): an approach to non-linear dimensionality reduction.
343 |
344 | Regarding the three algorithms discussed above, you can find the open-source Python implementations that we used here:
345 | [PCA](http://scikit-learn.org/stable/modules/generated/sklearn.decomposition.PCA.html),
346 | [t-SNE](http://scikit-learn.org/stable/modules/generated/sklearn.manifold.TSNE.html) [Display value:" "/] [Cite reference:"maaten2008visualizing"/],
347 | and [UMAP](https://github.com/lmcinnes/umap)[Cite reference:"mcinnes2018umap"/].
348 |
349 | ### Acknowledgments
350 | * This article was created using [Idyll](https://idyll-lang.org).
351 | * The source code is available on [Github](https://github.com/mathisonian/dimensionality-reduction).
352 | * Thanks to everyone who gave feedback on this article, including Jeffrey Heer, Polo Chau, Caleb Robinson, Nicky Case, and Hamish Todd, and support from a NASA Space Technology Research Fellowship.
353 |
354 |
355 |
356 | https://idyll-lang.org
357 |
358 | ```
359 | https://idyll-lang.org
360 | ```
361 |
362 | https://idyll.pub/
363 |
364 | [References /]
365 |
366 | [/Step]
367 |
368 | [/Scroller]
369 |
370 |
371 |
372 | [ArtworkDetails artwork:selectedArtwork /]
373 |
374 |
375 | [analytics google:"UA-108267630-1" tag:"dimensionality-reduction" /]
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "visxai-dimensionality-reduction",
3 | "version": "1.0.0",
4 | "license": "MIT",
5 | "scripts": {
6 | "make-thumbnails": "thumb -w 20 -s '' static/images/met/ static/images/thumbnails/met/"
7 | },
8 | "idyll": {
9 | "layout": "blog",
10 | "template": "_index.html"
11 | },
12 | "dependencies": {
13 | "bibtex-parser": "0.0.0",
14 | "d3": "^4.13.0",
15 | "firebase": "^5.2.0",
16 | "hilbert": "^2.0.0",
17 | "idyll": "^3.8.5",
18 | "idyll-components": "^3.0.6",
19 | "idyll-d3-component": "^2.0.0",
20 | "svg-path-generator": "^1.1.0"
21 | },
22 | "devDependencies": {
23 | "gh-pages": "^0.12.0",
24 | "node-thumbnail": "^0.14.0",
25 | "papaparse": "^4.5.0",
26 | "puppeteer": "^1.5.0"
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/scripts/download-met-images.js:
--------------------------------------------------------------------------------
1 | /**
2 | * This script will download the images
3 | * associated with the MET dataset
4 | * collection listed in `../data/met.csv`.
5 | *
6 | * It uses puppeteer to open the details page
7 | * for each artwork and download the image
8 | * file locally. The data should be filtered
9 | * beforehand to select only images which are
10 | * released in the public domain.
11 | *
12 | * - mathisonian
13 | */
14 | async function asyncForEach(array, callback) {
15 | for (let index = 0; index < array.length; index++) {
16 | await callback(array[index], index, array)
17 | }
18 | }
19 |
20 |
21 | const fs = require('fs');
22 | const puppeteer = require('puppeteer');
23 | const Papa = require('papaparse');
24 |
25 | const promisify = require('util').promisify;
26 |
27 | const readFile = promisify(fs.readFile);
28 | const writeFile = promisify(fs.writeFile);
29 |
(async () => {

  // Read the curated MET metadata and parse it with headers so each
  // row becomes an object keyed by column name.
  const csvString = await readFile(__dirname + '/../data/met.csv', 'utf-8');
  const artworks = Papa.parse(csvString, { header: true }).data;

  const browser = await puppeteer.launch();
  try {
    const page = await browser.newPage();
    await asyncForEach(artworks, async (artwork) => {
      console.log(artwork['Link Resource']);
      const dest = `${__dirname}/../static/images/met/${artwork['Object ID']}.jpg`;

      // Skip artworks that were already downloaded on a previous run.
      if (fs.existsSync(dest)) {
        return;
      }

      try {
        await page.goto(artwork['Link Resource']);
        // page.$ returns null when the selector matches nothing;
        // guard so one missing image doesn't crash the whole crawl.
        const img = await page.$('#artwork__image');
        if (!img) {
          console.warn(`No #artwork__image element for object ${artwork['Object ID']}, skipping`);
          return;
        }
        await img.screenshot({
          path: dest,
          omitBackground: true,
        });
      } catch (err) {
        // Best-effort crawl: log the failure and move on to the next artwork.
        console.error(`Failed to download ${artwork['Link Resource']}:`, err.message);
      }
    });
  } finally {
    // Always release the Chromium process, even if the crawl throws.
    await browser.close();
  }
})();
57 |
58 |
59 |
--------------------------------------------------------------------------------
/static/images/projection.svg:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/static/images/quill.svg:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/static/images/share.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mathisonian/dimensionality-reduction/79902ca715b9ef1c5746dc217e71497d18cac52c/static/images/share.png
--------------------------------------------------------------------------------
/styles.css:
--------------------------------------------------------------------------------
/*
Colors:
*/

/* Deep-blue page; the Idyll scroller manages its own scrolling,
   so the body itself never scrolls. */
body {
  overflow: hidden;
  background: #1E467E;
}

h1, h2, h3, h4, h5, h6 {
  border: none;
}

.idyll-root {
  padding: 0;
}

/* --- Article header / title block --- */

.article-header {
  text-align: center;
  font-family: 'Open Sans';
  width: 100%;
}

/* Citations use the theme's pale-yellow accent color. */
.citation {
  color: hsl(49, 98%, 84%);
}
.citation a {
  color: hsl(49, 98%, 84%);
  border-bottom: none;
  text-decoration: none;
}
.citation a:hover {
  border-bottom: none;
  text-decoration: none;
}

/* Title deliberately overflows its column (120% wide, shifted left)
   so it spans beyond the text measure. */
.article-header h1 {
  font-size: 2.5rem;
  width: 120%;
  margin-left: -10%;
  font-family: 'Arvo';
  color: white !important;
}

/* Second declaration wins: subtitle renders in light blue. */
.article-header h2 {
  color: hsl(49, 98%, 84%);
  color: #81daf3;
  font-family: 'Open Sans';
  font-weight: normal;
}

/* "Start" call-to-action button at the top of the article. */
.start-button {
  display: block;
  text-align: center;
  margin: 0 auto;
  background: hsl(49, 98%, 84%);
  /* background: hsla(15, 100%, 89%, 1); */
  /* border-color: hsla(15, 100%, 89%, 1);
  border-color: black;
  border-width: 10px; */
  margin-top: 75px;
  box-shadow: 0 5px 20px #222222;
  border-top: none;
  border-left: none;
  padding: 10px;
  cursor: pointer;
  width: 200px;
  color: #1E467E;
  /* color: white; */
  font-weight: bold;
  border-radius: 20px;
  transition: all 1s;
}

.start-button:hover {
  transform: rotate(-2.5deg);
  box-shadow: 5px 15px 10px #222222;
}

/* --- Byline and general link styling --- */

.byline {
  margin-bottom: 10px;
}

/* Later `color: white` overrides the earlier accent colors. */
.byline a, a {
  color: hsl(49, 98%, 84%);
  /* color: hsla(193, 81%, 73%, 1); */
  color: white;
  border-bottom: solid 2px hsl(49, 98%, 84%);
  transition: all 0.25s;
}

.byline a:hover, a:hover {
  /* color: hsl(49, 98%, 84%); */
  /* color: hsla(193, 81%, 73%, 1); */
  color: hsl(49, 98%, 84%);
  border-bottom: solid 2px hsl(49, 98%, 84%);
  text-decoration: none;
}

/* --- Idyll scroller layout ---
   The scroll container ignores pointer events so the visualization
   underneath stays interactive; the text column re-enables them. */
.idyll-scroll {
  margin-top: 0;
  pointer-events: none;
}

.idyll-scroll-text {
  pointer-events: all;
}

.idyll-scroll-text:first-of-type {
  padding-top: 15vh;
}

.idyll-scroll-text .idyll-step:first-of-type {
  margin-bottom: 25vh;
}

.idyll-scroll-text .idyll-step {
  margin-top: 0;
  background: none;
  color: hsla(51, 100%, 98%, 1);
}

.idyll-root {
  color: #fff;
}

h1, h2, h3, h4, h5, h6 {
  color: hsl(49, 98%, 84%);
  /* color: #ffd5c7; */
  /* color: #81daf3; */
  font-family: 'Arvo';
}

h1 {
  font-size: 28px;
}

p, li {
  font-family: 'Open Sans';
  font-size: 18px;
}

svg text {
  fill: white;
}

/* --- ReactTable (rt-*) overrides --- */

.rt-td {
  background: #ffffff;
}

.rt-tbody {
  color: #1E467E;
}

.rt-thead {
  background: #ffd5c7;
  color: #1E467E;
  font-weight: 700;
}

.idyll-pub-date {
  /* color: hsl(49, 98%, 84%); */
}

/* --- Buttons --- */

/* Default (dark, outlined) button used outside panels. */
button {
  background: none;
  border: 3px solid hsla(15, 100%, 89%, 1);
  padding: 15px;
  color: hsla(15, 100%, 89%, 1);
  text-transform: uppercase;
  margin: 0 auto;
  text-align: center;
  display: block;
  cursor: pointer;
  background: #1e467d;
}

/* Light, pill-shaped variant inside white .panel cards. */
.panel button {
  border: 3px solid #1E467E;
  padding: 10px;
  background: #ffffff;
  color: #1E467E;
  font-weight: bold;
  border-radius: 20px;
  transition: all 0.25s;
  width: 100px;
  text-transform: none;
}

.panel button:hover {
  /* transform: rotate(-2.5deg); */
  background: #eee;
  /* box-shadow: 5px 15px 10px #000; */
}

/* White card used for the algorithm-selection UI. */
.panel {
  background: #fff;
  color: #1E467E;
  padding: 15px;
  margin: 30px 0;
  box-shadow: 7px 7px 10px #222222;
  border-radius: 10px;
}

.ReactTable {
  box-shadow: 7px 7px 10px #222222;
}

.panel h2 {
  color: #1E467E;
  width: 76%;
}

/* Active-state styling for the PCA / t-SNE / UMAP toggle buttons. */
button.selected {
  border-color: hsl(49, 98%, 84%);
  color: hsl(49, 98%, 84%);
}

.panel button.selected {
  background: #1E467E;
  border-color: #1E467E;
  color: #fff;
}

.panel button.selected:hover {
  background: #1E467E;
  border-color: #1E467E;
  color: #fff;
}

/* --- Small-screen adjustments --- */
@media all and (max-width: 1000px) {

  .article-header {
    /* background: rgba(55, 55, 55, 0.5); */
  }


  /* Outline text so it stays legible over the visualization. */
  .article-header, h1 {
    text-shadow:
      -1px -1px 3px #000,
      1px -1px 3px #000,
      -1px 1px 3px #000,
      1px 1px 3px #000;
  }

  /* Dark translucent backing behind body text on small screens. */
  p, li {
    font-size: 14px;
    padding: 5px 10px;
    background: rgba(22, 22, 22, 0.9);
  }

  .panel button {
    background: #fff;

  }

  .panel p, .panel li {
    background: none;
  }
  .fixed {
    background: none;
    border-top: none;
  }

  .idyll-text-container {
    margin-left: 0 !important;
  }

  .d3-component svg {
    max-height: 200px;
  }

  .idyll-scroll-text:first-of-type {
    padding-top: 0;
  }

  h1 {
    font-size: 22px;
  }

  .article-header h1 {
    font-size: 1.8rem;
    /* width: 100%; */
  }

  .article-header h2 {
    font-size: 1.5rem;
  }

  /* Wide equations scroll horizontally instead of overflowing. */
  .katex-display {
    overflow-x: auto;
  }

  .byline {
    font-size: .8rem;
  }
}

/* KaTeX math rendered in the accent color. */
.katex-html {
  /* color: hsla(51,100%,98%,1); */
  color: hsl(49, 98%, 84%);
}

.reference {
  font-size: 14px;
}

hr {
  height: 0.15em;
}

/* --- Custom range-slider styling (WebKit only) --- */
input[type="range"] {
  -webkit-appearance: none;
  width: 160px;
  height: 10px;
  margin: 10px 15px;
  background: linear-gradient(to right, #fff 0%, #fff 100%);
  background-size: 150px 2px;
  background-position: center;
  background-repeat: no-repeat;
  overflow: visible;
  outline: none;
}

input[type="range"]::-webkit-slider-thumb {
  -webkit-appearance: none;
  width: 15px;
  height: 15px;
  border-radius: 10px;
  background: #81daf3;
  border: solid 1px #fff;
  position: relative;
  z-index: 3;
  /* box-shadow:0 0 5px 0 rgba(0,0,0,0.3); */
}

input[type="range"]::-webkit-slider-thumb:after {
  content: " ";
  width: 160px;
  height: 10px;
  position: absolute;
  z-index: 1;
  right: 20px;
  top: 5px;
  background: #000;
}

em {
  /* color: #81daf3; */
}

textarea:focus, input:focus, button:focus {
  outline: none;
}

.reference-title {
  font-weight: 700;
}