├── data ├── to_label │ ├── .gitkeep │ └── scale.json ├── movies.csv ├── training │ ├── labeler.json │ ├── schema.json │ └── manual.json ├── compassql_examples │ ├── input │ │ ├── cql_4.json │ │ ├── 1d-N.json │ │ ├── 1d-O.json │ │ ├── 1d-N-mark.json │ │ ├── 1d-Q-mark.json │ │ ├── 1d-T.json │ │ ├── scale-type.json │ │ ├── bin-maxbins.json │ │ ├── 1d-Q.json │ │ ├── scatter.json │ │ ├── cql_2.json │ │ ├── cql_3.json │ │ ├── 2d-NxN.json │ │ ├── 2d-NxQ.json │ │ ├── showme_automatic-mark.json │ │ ├── cql_1.json │ │ ├── voyager_exact-match.json │ │ ├── 2d-OxQ.json │ │ ├── 2d-QxT.json │ │ ├── rank-by-feature_histogram.json │ │ ├── 2d-QxQ.json │ │ ├── showme_add-to-sheet.json │ │ ├── 3d-NxOxQ.json │ │ ├── 3d-OxQxQ.json │ │ └── 3d-NxQxQ.json │ ├── output │ │ ├── 1d-T.json │ │ ├── cql_4.json │ │ ├── 1d-Q-mark.json │ │ ├── 1d-Q.json │ │ ├── scale-type.json │ │ ├── 1d-N.json │ │ ├── 2d-QxQ.json │ │ ├── 1d-N-mark.json │ │ ├── 1d-O.json │ │ ├── 2d-NxQ.json │ │ ├── 2d-OxQ.json │ │ ├── cql_1.json │ │ ├── cql_3.json │ │ ├── rank-by-feature_histogram.json │ │ ├── voyager_exact-match.json │ │ ├── showme_automatic-mark.json │ │ ├── 2d-QxT.json │ │ ├── cql_2.json │ │ ├── scatter.json │ │ ├── bin-maxbins.json │ │ ├── 3d-NxOxQ.json │ │ ├── 3d-OxQxQ.json │ │ ├── 3d-NxQxQ.json │ │ ├── 2d-NxN.json │ │ └── showme_add-to-sheet.json │ └── run_compassql.js ├── spec_pairs │ ├── data.json │ └── draco_cql.json ├── driving.json └── weights.json ├── setup.cfg ├── asp ├── hard-integrity.lp ├── cost.lp ├── _validate.lp ├── optimize.lp ├── _enumerate.lp ├── _violations.lp ├── examples │ ├── histogram.lp │ ├── strip.lp │ ├── invalid.lp │ ├── scatter.lp │ ├── valid.lp │ ├── data.lp │ ├── saket2018.lp │ ├── benchmark.lp │ ├── kim2018.lp │ └── apt.lp ├── output.lp ├── _kim2018.lp ├── _saket2018.lp ├── _cost.lp ├── _all.lp ├── saket2018.lp ├── kim2018.lp ├── generate.lp ├── process_soft.py ├── _apt.lp ├── topk-py.lp ├── topk-lua.lp ├── define.lp ├── weights.lp ├── weights_learned.lp ├── assign_weights.lp ├── 
tests.yaml └── hard.lp ├── js ├── .gitignore ├── typings │ └── datalib.d.ts ├── .npmignore ├── .prettierrc.json ├── rollup.config.js ├── test │ ├── index.test.ts │ ├── asp2vl.test.ts │ ├── vl2asp.test.ts │ ├── constraints2json.test.ts │ ├── cql2asp.test.ts │ └── json2constraints.test.ts ├── tslint.json ├── bin │ ├── asp2vl │ ├── constraints2json │ ├── vl2asp │ ├── cql2asp │ ├── data2schema │ └── schema2asp ├── tsconfig.json ├── src │ ├── index.ts │ ├── data2schema.ts │ ├── schema2asp.ts │ ├── json2constraints.ts │ ├── asp2vl.ts │ ├── vl2asp.ts │ ├── cql2asp.ts │ └── constraints2json.ts ├── concat_lp.sh ├── package.json └── Readme.md ├── logos ├── Draco Logo.sketch ├── dark │ ├── logo-dark.pdf │ ├── logo-dark.png │ └── logo-dark.svg └── light │ ├── logo-light.pdf │ ├── logo-light.png │ └── logo-light.svg ├── examples ├── data │ ├── ab.csv │ └── census2000.csv ├── strip.json ├── histogram.json ├── scatter.json ├── ab.json ├── assignment.json ├── scatter.lp └── scatter.vl.json ├── draco ├── __init__.py ├── helper.py ├── js.py ├── cli.py └── run.py ├── requirements.txt ├── generate_parallel.sh ├── MANIFEST.in ├── package.json ├── tests ├── test_cli.py ├── test_run.py ├── test_recommendation.py └── test_valid_specs.py ├── .gitignore ├── .editorconfig ├── run_pipeline.sh ├── .github └── workflows │ └── test.yml ├── LICENSE ├── setup.py └── README.md /data/to_label/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bdist_wheel] 2 | universal=1 3 | -------------------------------------------------------------------------------- /asp/hard-integrity.lp: -------------------------------------------------------------------------------- 1 | :- hard(_). 2 | :- hard(_,_). 3 | :- hard(_,_,_). 
4 | -------------------------------------------------------------------------------- /data/movies.csv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uwdata/draco/master/data/movies.csv -------------------------------------------------------------------------------- /js/.gitignore: -------------------------------------------------------------------------------- 1 | build 2 | node_modules 3 | src/constraints.ts 4 | yarn-error.log 5 | -------------------------------------------------------------------------------- /logos/Draco Logo.sketch: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uwdata/draco/master/logos/Draco Logo.sketch -------------------------------------------------------------------------------- /logos/dark/logo-dark.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uwdata/draco/master/logos/dark/logo-dark.pdf -------------------------------------------------------------------------------- /logos/dark/logo-dark.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uwdata/draco/master/logos/dark/logo-dark.png -------------------------------------------------------------------------------- /logos/light/logo-light.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uwdata/draco/master/logos/light/logo-light.pdf -------------------------------------------------------------------------------- /logos/light/logo-light.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/uwdata/draco/master/logos/light/logo-light.png -------------------------------------------------------------------------------- /examples/data/ab.csv: 
-------------------------------------------------------------------------------- 1 | a,b 2 | C,2 3 | C,7 4 | C,4 5 | D,1 6 | D,2 7 | D,6 8 | E,8 9 | E,4 10 | E,7 -------------------------------------------------------------------------------- /js/typings/datalib.d.ts: -------------------------------------------------------------------------------- 1 | declare module 'datalib/src/stats'; 2 | declare module 'datalib/src/import/read'; 3 | -------------------------------------------------------------------------------- /data/training/labeler.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "./schema.json", 3 | "source": "labeler", 4 | "data": [ 5 | ] 6 | } 7 | -------------------------------------------------------------------------------- /draco/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = "0.0.9" 2 | 3 | from .helper import * 4 | from .js import asp2vl, vl2asp 5 | from .run import run, run_clingo 6 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | ansunit 2 | black 3 | clyngor 4 | jsonschema 5 | mypy 6 | pandas 7 | pytest 8 | pytest-cov 9 | scipy 10 | pyyaml==5.4.1 11 | -------------------------------------------------------------------------------- /examples/strip.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.csv" 4 | }, 5 | "encodings": [ 6 | {"channel": "x", "field": "horsepower"} 7 | ] 8 | } 9 | -------------------------------------------------------------------------------- /examples/histogram.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.csv" 4 | }, 5 | "encoding": [ 6 | {"channel": "x", "field": "horsepower", "bin": true} 7 | ] 8 | } 9 | 
-------------------------------------------------------------------------------- /asp/cost.lp: -------------------------------------------------------------------------------- 1 | % !! This is super slow and should only be used for experiments !! 2 | 3 | cost(C) :- C = #sum { W,Q,F: violation_weight(F,W), violation(F,Q) }. 4 | 5 | #show cost/1. 6 | -------------------------------------------------------------------------------- /generate_parallel.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | cat draco/generation/define/interactions.json | jq -r '.[] .name' | parallel --eta python draco/generation/run.py --interaction {} 4 | -------------------------------------------------------------------------------- /js/.npmignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | .gitignore 3 | .prettierrc.json 4 | .vscode 5 | concat_lp.sh 6 | node_modules 7 | src 8 | test 9 | tsconfig.json 10 | tslint.json 11 | yarn-error.log 12 | -------------------------------------------------------------------------------- /examples/scatter.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.csv" 4 | }, 5 | "encodings": [ 6 | {"field": "acceleration"}, 7 | {"field": "horsepower"} 8 | ] 9 | } 10 | -------------------------------------------------------------------------------- /asp/_validate.lp: -------------------------------------------------------------------------------- 1 | % Collection of ASP files that can be used to quickly run an experiment. 2 | % E.g. clingo asp/_validate.lp test.lp 3 | 4 | #include "asp/define.lp". 5 | #include "asp/hard.lp". 
6 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include asp/*.lp 2 | include js/bin/* 3 | include js/build/draco.js* 4 | include LICENSE 5 | include README.md 6 | 7 | exclude *.json 8 | exclude *.lp 9 | exclude *.sh 10 | exclude yarn.lock 11 | -------------------------------------------------------------------------------- /asp/optimize.lp: -------------------------------------------------------------------------------- 1 | % Minimize the feature weight 2 | 3 | #minimize { W,F,Q: soft_weight(F,W), soft(F,Q); #inf,F,Q: soft(F,Q), not soft_weight(F,_); #inf,F: hard(F); #inf,F,Q: hard(F,Q); #inf,F,Q1,Q2: hard(F,Q1,Q2) }. 4 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "dependencies": { 3 | "vega-cli": "^5.22.1", 4 | "vega-lite": "^5.3.0" 5 | }, 6 | "scripts": { 7 | "vl2png": "vl2png", 8 | "build": "yarn --cwd ./js build" 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /data/compassql_examples/input/cql_4.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "?", 6 | "encodings": [ 7 | { 8 | "channel": "?", 9 | "field": "Horsepower" 10 | } 11 | ] 12 | } 13 | -------------------------------------------------------------------------------- /data/compassql_examples/input/1d-N.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": {"url": "data/cars.json"}, 3 | "mark": "?", 4 | "encodings": [ 5 | { 6 | "channel": "?", 7 | "field": "Origin", 8 | "type": "nominal" 9 | } 10 | ] 11 | } -------------------------------------------------------------------------------- /data/compassql_examples/input/1d-O.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "data": {"url": "data/cars.json"}, 3 | "mark": "?", 4 | "encodings": [ 5 | { 6 | "channel": "?", 7 | "field": "Cylinders", 8 | "type": "ordinal" 9 | } 10 | ] 11 | } -------------------------------------------------------------------------------- /asp/_enumerate.lp: -------------------------------------------------------------------------------- 1 | % Collection of ASP files that can be used to enumerate a space. 2 | % E.g. clingo asp/_enumerate.lp test.lp 3 | 4 | #include "asp/define.lp". 5 | #include "asp/generate.lp". 6 | #include "asp/hard.lp". 7 | #include "asp/output.lp". 8 | -------------------------------------------------------------------------------- /asp/_violations.lp: -------------------------------------------------------------------------------- 1 | % Collection of ASP files that can be used to quickly run an experiment. 2 | % E.g. clingo asp/_violations.lp test.lp 3 | 4 | #include "asp/define.lp". 5 | #include "asp/hard.lp". 6 | #include "asp/soft.lp". 7 | 8 | #show soft/2. 
9 | -------------------------------------------------------------------------------- /data/compassql_examples/input/1d-N-mark.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": {"url": "data/cars.json"}, 3 | "mark": "?", 4 | "encodings": [ 5 | { 6 | "channel": "x", 7 | "field": "Origin", 8 | "type": "nominal" 9 | } 10 | ] 11 | } -------------------------------------------------------------------------------- /data/compassql_examples/input/1d-Q-mark.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": {"url": "data/cars.json"}, 3 | "mark": "?", 4 | "encodings": [ 5 | { 6 | "channel": "x", 7 | "field": "Horsepower", 8 | "type": "quantitative" 9 | } 10 | ] 11 | } -------------------------------------------------------------------------------- /asp/examples/histogram.lp: -------------------------------------------------------------------------------- 1 | % ====== Data definitions ====== 2 | num_rows(142). 3 | 4 | fieldtype(horsepower,number). 5 | cardinality(horsepower,94). 6 | 7 | % ====== Query constraints ====== 8 | encoding(e0). 9 | :- not field(e0,horsepower). 10 | :- not bin(e0,_). 11 | -------------------------------------------------------------------------------- /examples/ab.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/ab.csv" 4 | }, 5 | "mark": "bar", 6 | "encodings": [ 7 | {"channel": "x", "field": "?", "type": "?"}, 8 | {"channel": "y", "aggregate": "max", "field": "?", "type": "quantitative"} 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /asp/examples/strip.lp: -------------------------------------------------------------------------------- 1 | % ====== Data definitions ====== 2 | num_rows(142). 3 | 4 | fieldtype(horsepower,number). 5 | cardinality(horsepower,94). 
6 | 7 | % ====== Query constraints ====== 8 | encoding(e0). 9 | :- not type(e0,quantitative). 10 | :- not field(e0,horsepower). 11 | -------------------------------------------------------------------------------- /data/compassql_examples/input/1d-T.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": {"url": "data/movies.json"}, 3 | "mark": "?", 4 | "encodings": [ 5 | { 6 | "channel": "?", 7 | "timeUnit": "?", 8 | "field": "Release_Date", 9 | "type": "temporal" 10 | } 11 | ] 12 | } -------------------------------------------------------------------------------- /asp/output.lp: -------------------------------------------------------------------------------- 1 | % ====== Output ====== 2 | 3 | #show data/1. 4 | 5 | #show mark/1. 6 | 7 | #show type/2. 8 | #show channel/2. 9 | #show field/2. 10 | #show aggregate/2. 11 | #show bin/2. 12 | #show stack/2. 13 | 14 | #show log/1. 15 | #show zero/1. 16 | 17 | #show soft/2. 18 | -------------------------------------------------------------------------------- /js/.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "overrides": [ 3 | { 4 | "files": "*.ts", 5 | "options": { 6 | "printWidth": 120, 7 | "parser": "typescript", 8 | "singleQuote": true, 9 | "trailingComma": "es5" 10 | } 11 | } 12 | ] 13 | } 14 | -------------------------------------------------------------------------------- /asp/examples/invalid.lp: -------------------------------------------------------------------------------- 1 | % ====== Data definitions ====== 2 | fieldtype(name,string). 3 | 4 | % ====== Visualization specification ====== 5 | 6 | mark(point). 7 | 8 | encoding(e0). 9 | field(e0,name). 10 | channel(e0,x). 11 | type(e0,quantitative). 
% canot use string as quantitative 12 | -------------------------------------------------------------------------------- /data/compassql_examples/input/scale-type.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": {"url": "data/cars.json"}, 3 | "mark": "tick", 4 | "encodings": [ 5 | { 6 | "channel": "x", 7 | "scale": {"type": "?"}, 8 | "field": "Miles_per_Gallon", 9 | "type": "quantitative" 10 | } 11 | ] 12 | } -------------------------------------------------------------------------------- /data/compassql_examples/input/bin-maxbins.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": {"url": "data/cars.json"}, 3 | "mark": "bar", 4 | "encodings": [ 5 | { 6 | "channel": "x", 7 | "bin": {"maxbins": "?"}, 8 | "field": "Miles_per_Gallon", 9 | "type": "quantitative" 10 | } 11 | ] 12 | } -------------------------------------------------------------------------------- /data/compassql_examples/input/1d-Q.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": {"url": "data/cars.json"}, 3 | "mark": "?", 4 | "encodings": [ 5 | { 6 | "channel": "?", 7 | "bin": "?", 8 | "aggregate": "?", 9 | "field": "Miles_per_Gallon", 10 | "type": "quantitative" 11 | } 12 | ] 13 | } -------------------------------------------------------------------------------- /data/compassql_examples/input/scatter.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "?", 6 | "encodings": [ 7 | {"field": "Acceleration", "channel": "?", "type": "?"}, 8 | {"field": "Horsepower", "channel": "?", "type": "?"} 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /data/compassql_examples/input/cql_2.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": 
"data/cars.json" 4 | }, 5 | "mark": "?", 6 | "encodings": [ 7 | { 8 | "channel": "?", 9 | "field": "Horsepower" 10 | }, 11 | { 12 | "channel": "?", 13 | "field": "Acceleration" 14 | } 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /examples/assignment.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/census2000.csv" 4 | }, 5 | "encoding": [ 6 | {"field": "sex", "type": "nominal"}, 7 | {"field": "year", "type": "ordinal"}, 8 | {"field": "age", "type": "ordinal"}, 9 | {"field": "people"} 10 | ] 11 | } 12 | -------------------------------------------------------------------------------- /js/rollup.config.js: -------------------------------------------------------------------------------- 1 | import nodeResolve from "@rollup/plugin-node-resolve"; 2 | import commonjs from "@rollup/plugin-commonjs"; 3 | 4 | export default { 5 | input: "build/index.js", 6 | output: { 7 | file: "build/draco.js", 8 | format: "cjs", 9 | sourcemap: true 10 | }, 11 | plugins: [nodeResolve(), commonjs()] 12 | }; 13 | -------------------------------------------------------------------------------- /tests/test_cli.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from draco.cli import create_parser 4 | 5 | 6 | class TestCli: 7 | @classmethod 8 | def setup_class(cls): 9 | cls.parser = create_parser() 10 | 11 | def test_with_unknown_args(self): 12 | with pytest.raises(SystemExit): 13 | self.parser.parse_args(["--foo"]) 14 | -------------------------------------------------------------------------------- /data/compassql_examples/input/cql_3.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "?", 6 | "encodings": [ 7 | { 8 | "channel": "?", 9 | "aggregate": "mean", 10 | "field": "Horsepower" 11 | }, 12 | { 13 | 
"channel": "?", 14 | "field": "Cylinders" 15 | } 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /data/compassql_examples/input/2d-NxN.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": {"url": "data/movies.json"}, 3 | "mark": "?", 4 | "encodings": [ 5 | { 6 | "channel": "?", 7 | "field": "Major_Genre", 8 | "type": "nominal" 9 | },{ 10 | "channel": "?", 11 | "field": "Creative_Type", 12 | "type": "nominal" 13 | } 14 | ] 15 | } -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | __tmp__ 3 | .cache 4 | .coverage 5 | .eggs 6 | .ipynb_checkpoints 7 | .mypy_cache 8 | .pytest_cache 9 | .vscode 10 | *.css 11 | *.db 12 | *.DS_Store 13 | *.egg-info 14 | **/__init__.pyc 15 | dist 16 | experiments 17 | ILASP 18 | node_modules 19 | package-lock.json 20 | test*.lp 21 | yarn-error.log 22 | build/ 23 | coverage.xml 24 | venv 25 | -------------------------------------------------------------------------------- /asp/examples/scatter.lp: -------------------------------------------------------------------------------- 1 | % ====== Data definitions ====== 2 | num_rows(142). 3 | 4 | fieldtype(horsepower,number). 5 | cardinality(horsepower,94). 6 | 7 | fieldtype(acceleration,number). 8 | cardinality(acceleration,96). 9 | 10 | % ====== Query constraints ====== 11 | encoding(e0). 12 | :- not field(e0,acceleration). 13 | 14 | encoding(e1). 15 | :- not field(e1,horsepower). 16 | -------------------------------------------------------------------------------- /asp/_kim2018.lp: -------------------------------------------------------------------------------- 1 | % restrict to subset that is supported by Kim et al 2 | 3 | #include "asp/define.lp". 4 | #include "asp/generate.lp". 5 | #include "asp/hard.lp". 6 | #include "asp/soft.lp". 
7 | #include "asp/weights_learned.lp". 8 | #include "asp/assign_weights.lp". 9 | #include "asp/optimize.lp". 10 | 11 | #include "asp/kim2018.lp". 12 | 13 | #show mark/1. 14 | #show channel/2. 15 | -------------------------------------------------------------------------------- /js/test/index.test.ts: -------------------------------------------------------------------------------- 1 | import { asp2vl, vl2asp } from '../src'; 2 | import { aspSpecs, vlSpecs } from './specs'; 3 | 4 | test('asp2vl and vl2asp work', () => { 5 | for (let i = 0; i < vlSpecs.length; i++) { 6 | const aspSpec = aspSpecs[i]; 7 | const vlSpec = vlSpecs[i]; 8 | expect([asp2vl(aspSpec), vl2asp(vlSpec).sort()]).toEqual([vlSpec, aspSpec.sort()]); 9 | } 10 | }); 11 | -------------------------------------------------------------------------------- /asp/_saket2018.lp: -------------------------------------------------------------------------------- 1 | % restrict to subset that is supported by Saket et al 2 | 3 | #include "asp/define.lp". 4 | #include "asp/generate.lp". 5 | #include "asp/hard.lp". 6 | #include "asp/soft.lp". 7 | #include "asp/weights_learned.lp". 8 | #include "asp/assign_weights.lp". 9 | #include "asp/optimize.lp". 10 | 11 | #include "asp/saket2018.lp". 12 | 13 | #show mark/1. 14 | #show channel/2. 
15 | -------------------------------------------------------------------------------- /data/compassql_examples/input/2d-NxQ.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": {"url": "data/cars.json"}, 3 | "mark": "?", 4 | "encodings": [ 5 | { 6 | "channel": "?", 7 | "field": "Origin", 8 | "type": "nominal" 9 | },{ 10 | "channel": "?", 11 | "bin": "?", 12 | "aggregate": "?", 13 | "field": "Horsepower", 14 | "type": "quantitative" 15 | } 16 | ] 17 | } -------------------------------------------------------------------------------- /data/compassql_examples/input/showme_automatic-mark.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": {"url": "data/cars.json"}, 3 | "mark": "?", 4 | "encodings": [ 5 | { 6 | "channel": "x", 7 | "aggregate": "mean", 8 | "field": "Horsepower", 9 | "type": "quantitative" 10 | },{ 11 | "channel": "y", 12 | "field": "Cylinders", 13 | "type": "ordinal" 14 | } 15 | ] 16 | } -------------------------------------------------------------------------------- /asp/_cost.lp: -------------------------------------------------------------------------------- 1 | % Instead of running optimization, just count the cost of violations 2 | % !! This is super slow and should only be used for experiments !! 3 | 4 | #include "asp/define.lp". 5 | #include "asp/generate.lp". 6 | #include "asp/hard.lp". 7 | #include "asp/soft.lp". 8 | #include "asp/weights.lp". 9 | #include "asp/assign_weights.lp". 10 | #include "asp/output.lp". 11 | #include "asp/cost.lp". 
12 | 13 | -------------------------------------------------------------------------------- /data/compassql_examples/input/cql_1.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "?", 6 | "encodings": [ 7 | { 8 | "channel": "x", 9 | "aggregate": "mean", 10 | "field": "Horsepower", 11 | "type": "quantitative" 12 | }, 13 | { 14 | "channel": "y", 15 | "field": "Cylinders", 16 | "type": "ordinal" 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /data/compassql_examples/input/voyager_exact-match.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": {"url": "data/cars.json"}, 3 | "mark": "?", 4 | "encodings": [ 5 | { 6 | "channel": "?", 7 | "field": "Cylinders", 8 | "type": "ordinal" 9 | },{ 10 | "channel": "?", 11 | "bin": "?", 12 | "aggregate": "?", 13 | "field": "Horsepower", 14 | "type": "quantitative" 15 | } 16 | ] 17 | } -------------------------------------------------------------------------------- /data/compassql_examples/output/1d-T.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/movies.json" 4 | }, 5 | "mark": "point", 6 | "encoding": { 7 | "x": { 8 | "field": "Release_Date", 9 | "type": "temporal", 10 | "scale": {} 11 | } 12 | }, 13 | "config": { 14 | "overlay": { 15 | "line": true 16 | }, 17 | "scale": { 18 | "useUnaggregatedDomain": true 19 | } 20 | } 21 | } -------------------------------------------------------------------------------- /data/compassql_examples/output/cql_4.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "tick", 6 | "encoding": { 7 | "x": { 8 | "field": "Horsepower", 9 | "type": "quantitative", 10 | "scale": {} 11 | } 12 | }, 13 | "config": { 14 | 
"overlay": { 15 | "line": true 16 | }, 17 | "scale": { 18 | "useUnaggregatedDomain": true 19 | } 20 | } 21 | } -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # top-most EditorConfig file 2 | root = true 3 | 4 | # Unix-style newlines with a newline ending every file 5 | [*] 6 | end_of_line = lf 7 | charset = utf-8 8 | trim_trailing_whitespace = true 9 | insert_final_newline = true 10 | indent_style = space 11 | [*.py] 12 | indent_size = 4 13 | [*.js] 14 | indent_size = 2 15 | [*.ts] 16 | indent_size = 2 17 | [*.tsx] 18 | indent_size = 2 19 | [*.json] 20 | indent_size = 2 21 | -------------------------------------------------------------------------------- /data/compassql_examples/input/2d-OxQ.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": {"url": "data/cars.json"}, 3 | "mark": "?", 4 | "encodings": [ 5 | { 6 | "channel": "?", 7 | "field": "Cylinders", 8 | "type": "ordinal" 9 | },{ 10 | "channel": "?", 11 | "bin": "?", 12 | "aggregate": "?", 13 | "field": "Horsepower", 14 | "type": "quantitative" 15 | } 16 | ] 17 | } -------------------------------------------------------------------------------- /data/compassql_examples/output/1d-Q-mark.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "tick", 6 | "encoding": { 7 | "x": { 8 | "field": "Horsepower", 9 | "type": "quantitative", 10 | "scale": {} 11 | } 12 | }, 13 | "config": { 14 | "overlay": { 15 | "line": true 16 | }, 17 | "scale": { 18 | "useUnaggregatedDomain": true 19 | } 20 | } 21 | } -------------------------------------------------------------------------------- /data/compassql_examples/output/1d-Q.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": 
"data/cars.json" 4 | }, 5 | "mark": "tick", 6 | "encoding": { 7 | "y": { 8 | "field": "Miles_per_Gallon", 9 | "type": "quantitative", 10 | "scale": {} 11 | } 12 | }, 13 | "config": { 14 | "overlay": { 15 | "line": true 16 | }, 17 | "scale": { 18 | "useUnaggregatedDomain": true 19 | } 20 | } 21 | } -------------------------------------------------------------------------------- /data/compassql_examples/input/2d-QxT.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": {"url": "data/movies.json"}, 3 | "mark": "?", 4 | "encodings": [ 5 | { 6 | "channel": "?", 7 | "bin": "?", 8 | "aggregate": "?", 9 | "field": "IMDB_Rating", 10 | "type": "quantitative" 11 | },{ 12 | "channel": "?", 13 | "timeUnit": "?", 14 | "field": "Release_Date", 15 | "type": "temporal" 16 | } 17 | ] 18 | } -------------------------------------------------------------------------------- /data/compassql_examples/input/rank-by-feature_histogram.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": {"url": "data/cars.json"}, 3 | "mark": "?", 4 | "encodings": [ 5 | { 6 | "channel": "?", 7 | "bin": "?", 8 | "timeUnit": "?", 9 | "field": "?", 10 | "type": "?" 
11 | }, 12 | { 13 | "channel": "?", 14 | "field": "*", 15 | "aggregate": "count", 16 | "type": "quantitative" 17 | } 18 | ] 19 | } -------------------------------------------------------------------------------- /data/compassql_examples/output/scale-type.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "tick", 6 | "encoding": { 7 | "x": { 8 | "field": "Miles_per_Gallon", 9 | "type": "quantitative", 10 | "scale": {} 11 | } 12 | }, 13 | "config": { 14 | "overlay": { 15 | "line": true 16 | }, 17 | "scale": { 18 | "useUnaggregatedDomain": true 19 | } 20 | } 21 | } -------------------------------------------------------------------------------- /js/tslint.json: -------------------------------------------------------------------------------- 1 | { 2 | "tslint.options": { 3 | "project": "tsconfig.json", 4 | "typeCheck": true 5 | }, 6 | "defaultSeverity": "error", 7 | "extends": [ 8 | "tslint:recommended", 9 | "tslint-config-prettier" 10 | ], 11 | "rules": { 12 | "arrow-parens": [true, "ban-single-arg-parens"], 13 | "no-console": false, 14 | "object-literal-sort-keys": false, 15 | "interface-name": false 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /examples/scatter.lp: -------------------------------------------------------------------------------- 1 | % dataset for generated VL 2 | data("data/cars.csv"). 3 | 4 | % ====== Data definitions ====== 5 | num_rows(142). 6 | 7 | fieldtype(horsepower,number). 8 | cardinality(horsepower,94). 9 | 10 | fieldtype(acceleration,number). 11 | cardinality(acceleration,96). 12 | 13 | % ====== Query constraints ====== 14 | encoding(e0). 15 | :- not field(e0,acceleration). 16 | 17 | encoding(e1). 18 | :- not field(e1,horsepower). 
19 | -------------------------------------------------------------------------------- /asp/_all.lp: -------------------------------------------------------------------------------- 1 | % Collection of ASP files that can be used to quickly run an experiment. 2 | % E.g. clingo asp/_all.lp test.lp 3 | 4 | #include "asp/define.lp". 5 | #include "asp/generate.lp". 6 | #include "asp/hard.lp". 7 | #include "asp/soft.lp". 8 | #include "asp/weights.lp". 9 | #include "asp/assign_weights.lp". 10 | #include "asp/optimize.lp". 11 | #include "asp/output.lp". 12 | #include "asp/hard-integrity.lp". 13 | % #include "asp/topk-lua.lp". 14 | -------------------------------------------------------------------------------- /data/compassql_examples/input/2d-QxQ.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": {"url": "data/cars.json"}, 3 | "mark": "?", 4 | "encodings": [ 5 | { 6 | "channel": "?", 7 | "bin": "?", 8 | "aggregate": "?", 9 | "field": "Miles_per_Gallon", 10 | "type": "quantitative" 11 | },{ 12 | "channel": "?", 13 | "bin": "?", 14 | "aggregate": "?", 15 | "field": "Horsepower", 16 | "type": "quantitative" 17 | } 18 | ] 19 | } -------------------------------------------------------------------------------- /js/bin/asp2vl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const asp2vl = require('../build/draco').asp2vl; 4 | 5 | const stdin = process.stdin; 6 | const stdout = process.stdout; 7 | const inputChunks = []; 8 | 9 | stdin.on('data', chunk => { 10 | inputChunks.push(chunk); 11 | }); 12 | 13 | stdin.on('end', () => { 14 | const lines = inputChunks.join('').split('\n') 15 | const result = asp2vl(lines); 16 | stdout.write(JSON.stringify(result)); 17 | stdout.write('\n'); 18 | }); 19 | -------------------------------------------------------------------------------- /data/compassql_examples/input/showme_add-to-sheet.json:
-------------------------------------------------------------------------------- 1 | { 2 | "data": {"url": "data/cars.json"}, 3 | "mark": "?", 4 | "encodings": [ 5 | { 6 | "channel": "x", 7 | "aggregate": "mean", 8 | "field": "Horsepower", 9 | "type": "quantitative" 10 | },{ 11 | "channel": "y", 12 | "field": "Cylinders", 13 | "type": "ordinal" 14 | },{ 15 | "channel": "?", 16 | "field": "Origin", 17 | "type": "nominal" 18 | } 19 | ] 20 | } 21 | -------------------------------------------------------------------------------- /data/compassql_examples/input/3d-NxOxQ.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": {"url": "data/cars.json"}, 3 | "mark": "?", 4 | "encodings": [ 5 | { 6 | "channel": "?", 7 | "field": "Cylinders", 8 | "type": "ordinal" 9 | },{ 10 | "channel": "?", 11 | "field": "Origin", 12 | "type": "nominal" 13 | },{ 14 | "channel": "?", 15 | "bin": "?", 16 | "aggregate": "?", 17 | "field": "Acceleration", 18 | "type": "quantitative" 19 | } 20 | ] 21 | } 22 | -------------------------------------------------------------------------------- /js/bin/constraints2json: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const constraints2json = require('../build/draco').constraints2json; 4 | 5 | const stdin = process.stdin; 6 | const stdout = process.stdout; 7 | const inputChunks = []; 8 | 9 | stdin.on('data', chunk => { 10 | inputChunks.push(chunk); 11 | }); 12 | 13 | stdin.on('end', () => { 14 | const lines = inputChunks.join('') 15 | const result = constraints2json(lines); 16 | stdout.write(JSON.stringify(result)); 17 | stdout.write('\n'); 18 | }); 19 | -------------------------------------------------------------------------------- /data/compassql_examples/output/1d-N.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "bar", 6 | 
"encoding": { 7 | "y": { 8 | "field": "Origin", 9 | "type": "nominal" 10 | }, 11 | "x": { 12 | "aggregate": "count", 13 | "field": "*", 14 | "type": "quantitative" 15 | } 16 | }, 17 | "config": { 18 | "overlay": { 19 | "line": true 20 | }, 21 | "scale": { 22 | "useUnaggregatedDomain": true 23 | } 24 | } 25 | } -------------------------------------------------------------------------------- /data/compassql_examples/output/2d-QxQ.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "point", 6 | "encoding": { 7 | "x": { 8 | "field": "Miles_per_Gallon", 9 | "type": "quantitative" 10 | }, 11 | "y": { 12 | "field": "Horsepower", 13 | "type": "quantitative" 14 | } 15 | }, 16 | "config": { 17 | "overlay": { 18 | "line": true 19 | }, 20 | "scale": { 21 | "useUnaggregatedDomain": true 22 | } 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /js/bin/vl2asp: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const vl2asp = require('../build/draco').vl2asp; 4 | 5 | const stdin = process.stdin; 6 | const stdout = process.stdout; 7 | const inputChunks = []; 8 | 9 | stdin.on('data', chunk => { 10 | inputChunks.push(chunk); 11 | }); 12 | 13 | stdin.on('end', () => { 14 | const inputJSON = JSON.parse(inputChunks.join('')) 15 | const result = vl2asp(inputJSON); 16 | for (const res of result) { 17 | stdout.write(res); 18 | stdout.write('\n'); 19 | } 20 | }); 21 | -------------------------------------------------------------------------------- /asp/saket2018.lp: -------------------------------------------------------------------------------- 1 | 2 | % allowed marktypes 3 | :- not mark(bar), not mark(line), not mark(point). 4 | 5 | % two encodings 6 | 2 = { encoding(E): encoding(E) }. 7 | 8 | % only use x and y 9 | :- not channel(_,(x;y)). 
10 | 11 | % y has to be aggregated and quantitative 12 | :- channel(E,y), not type(E,quantitative). 13 | :- channel(E,y), not aggregate(E,mean). 14 | 15 | % no binning 16 | :- bin(_,_). 17 | 18 | % no log scale 19 | :- log(_). 20 | 21 | % no aggregate on x 22 | :- aggregate(E,_), channel(E,x). 23 | -------------------------------------------------------------------------------- /data/compassql_examples/output/1d-N-mark.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "bar", 6 | "encoding": { 7 | "x": { 8 | "field": "Origin", 9 | "type": "nominal" 10 | }, 11 | "y": { 12 | "aggregate": "count", 13 | "field": "*", 14 | "type": "quantitative" 15 | } 16 | }, 17 | "config": { 18 | "overlay": { 19 | "line": true 20 | }, 21 | "scale": { 22 | "useUnaggregatedDomain": true 23 | } 24 | } 25 | } -------------------------------------------------------------------------------- /data/compassql_examples/output/1d-O.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "bar", 6 | "encoding": { 7 | "y": { 8 | "field": "Cylinders", 9 | "type": "ordinal" 10 | }, 11 | "x": { 12 | "aggregate": "count", 13 | "field": "*", 14 | "type": "quantitative" 15 | } 16 | }, 17 | "config": { 18 | "overlay": { 19 | "line": true 20 | }, 21 | "scale": { 22 | "useUnaggregatedDomain": true 23 | } 24 | } 25 | } -------------------------------------------------------------------------------- /data/compassql_examples/output/2d-NxQ.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "tick", 6 | "encoding": { 7 | "y": { 8 | "field": "Origin", 9 | "type": "nominal" 10 | }, 11 | "x": { 12 | "field": "Horsepower", 13 | "type": "quantitative", 14 | "scale": {} 15 | } 16 | }, 17 | "config": { 18 | "overlay": { 
19 | "line": true 20 | }, 21 | "scale": { 22 | "useUnaggregatedDomain": true 23 | } 24 | } 25 | } -------------------------------------------------------------------------------- /data/compassql_examples/output/2d-OxQ.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "tick", 6 | "encoding": { 7 | "y": { 8 | "field": "Cylinders", 9 | "type": "ordinal" 10 | }, 11 | "x": { 12 | "field": "Horsepower", 13 | "type": "quantitative", 14 | "scale": {} 15 | } 16 | }, 17 | "config": { 18 | "overlay": { 19 | "line": true 20 | }, 21 | "scale": { 22 | "useUnaggregatedDomain": true 23 | } 24 | } 25 | } -------------------------------------------------------------------------------- /js/bin/cql2asp: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const cql2asp = require('../build/draco').cql2asp; 4 | 5 | const stdin = process.stdin; 6 | const stdout = process.stdout; 7 | const inputChunks = []; 8 | 9 | stdin.on('data', chunk => { 10 | inputChunks.push(chunk); 11 | }); 12 | 13 | stdin.on('end', () => { 14 | const inputJSON = JSON.parse(inputChunks.join('')); 15 | const result = cql2asp(inputJSON); 16 | for (const res of result) { 17 | stdout.write(res); 18 | stdout.write('\n'); 19 | } 20 | }); 21 | -------------------------------------------------------------------------------- /js/bin/data2schema: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const data2schema = require('../build/draco').data2schema; 4 | 5 | const stdin = process.stdin; 6 | const stdout = process.stdout; 7 | const inputChunks = []; 8 | 9 | stdin.on('data', chunk => { 10 | inputChunks.push(chunk); 11 | }); 12 | 13 | stdin.on('end', () => { 14 | const jsonString = inputChunks.join(''); 15 | const inputJSON = JSON.parse(jsonString); 16 | const result = data2schema(inputJSON); 17 | 
stdout.write(JSON.stringify(result)); 18 | }); 19 | -------------------------------------------------------------------------------- /data/compassql_examples/output/cql_1.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "bar", 6 | "encoding": { 7 | "x": { 8 | "aggregate": "mean", 9 | "field": "Horsepower", 10 | "type": "quantitative" 11 | }, 12 | "y": { 13 | "field": "Cylinders", 14 | "type": "ordinal" 15 | } 16 | }, 17 | "config": { 18 | "overlay": { 19 | "line": true 20 | }, 21 | "scale": { 22 | "useUnaggregatedDomain": true 23 | } 24 | } 25 | } -------------------------------------------------------------------------------- /data/compassql_examples/output/cql_3.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "bar", 6 | "encoding": { 7 | "x": { 8 | "aggregate": "mean", 9 | "field": "Horsepower", 10 | "type": "quantitative" 11 | }, 12 | "y": { 13 | "field": "Cylinders", 14 | "type": "nominal" 15 | } 16 | }, 17 | "config": { 18 | "overlay": { 19 | "line": true 20 | }, 21 | "scale": { 22 | "useUnaggregatedDomain": true 23 | } 24 | } 25 | } -------------------------------------------------------------------------------- /data/compassql_examples/output/rank-by-feature_histogram.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "bar", 6 | "encoding": { 7 | "y": { 8 | "field": "Origin", 9 | "type": "nominal" 10 | }, 11 | "x": { 12 | "aggregate": "count", 13 | "field": "*", 14 | "type": "quantitative" 15 | } 16 | }, 17 | "config": { 18 | "overlay": { 19 | "line": true 20 | }, 21 | "scale": { 22 | "useUnaggregatedDomain": true 23 | } 24 | } 25 | } -------------------------------------------------------------------------------- 
/data/compassql_examples/output/voyager_exact-match.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "tick", 6 | "encoding": { 7 | "y": { 8 | "field": "Cylinders", 9 | "type": "ordinal" 10 | }, 11 | "x": { 12 | "field": "Horsepower", 13 | "type": "quantitative", 14 | "scale": {} 15 | } 16 | }, 17 | "config": { 18 | "overlay": { 19 | "line": true 20 | }, 21 | "scale": { 22 | "useUnaggregatedDomain": true 23 | } 24 | } 25 | } -------------------------------------------------------------------------------- /js/bin/schema2asp: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const schema2asp = require('../build/draco').schema2asp; 4 | 5 | const stdin = process.stdin; 6 | const stdout = process.stdout; 7 | const inputChunks = []; 8 | 9 | stdin.on('data', chunk => { 10 | inputChunks.push(chunk); 11 | }); 12 | 13 | stdin.on('end', () => { 14 | const inputJSON = JSON.parse(inputChunks.join('')) 15 | 16 | const result = schema2asp(inputJSON); 17 | for (const res of result) { 18 | stdout.write(res); 19 | stdout.write('\n'); 20 | } 21 | }); 22 | -------------------------------------------------------------------------------- /data/compassql_examples/output/showme_automatic-mark.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "bar", 6 | "encoding": { 7 | "x": { 8 | "aggregate": "mean", 9 | "field": "Horsepower", 10 | "type": "quantitative" 11 | }, 12 | "y": { 13 | "field": "Cylinders", 14 | "type": "ordinal" 15 | } 16 | }, 17 | "config": { 18 | "overlay": { 19 | "line": true 20 | }, 21 | "scale": { 22 | "useUnaggregatedDomain": true 23 | } 24 | } 25 | } -------------------------------------------------------------------------------- /data/compassql_examples/output/2d-QxT.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/movies.json" 4 | }, 5 | "mark": "point", 6 | "encoding": { 7 | "y": { 8 | "field": "IMDB_Rating", 9 | "type": "quantitative", 10 | "scale": {} 11 | }, 12 | "x": { 13 | "field": "Release_Date", 14 | "type": "temporal", 15 | "scale": {} 16 | } 17 | }, 18 | "config": { 19 | "overlay": { 20 | "line": true 21 | }, 22 | "scale": { 23 | "useUnaggregatedDomain": true 24 | } 25 | } 26 | } -------------------------------------------------------------------------------- /data/compassql_examples/output/cql_2.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "point", 6 | "encoding": { 7 | "x": { 8 | "field": "Horsepower", 9 | "type": "quantitative", 10 | "scale": {} 11 | }, 12 | "y": { 13 | "field": "Acceleration", 14 | "type": "quantitative", 15 | "scale": {} 16 | } 17 | }, 18 | "config": { 19 | "overlay": { 20 | "line": true 21 | }, 22 | "scale": { 23 | "useUnaggregatedDomain": true 24 | } 25 | } 26 | } -------------------------------------------------------------------------------- /data/compassql_examples/output/scatter.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "point", 6 | "encoding": { 7 | "x": { 8 | "field": "Acceleration", 9 | "type": "quantitative", 10 | "scale": {} 11 | }, 12 | "y": { 13 | "field": "Horsepower", 14 | "type": "quantitative", 15 | "scale": {} 16 | } 17 | }, 18 | "config": { 19 | "overlay": { 20 | "line": true 21 | }, 22 | "scale": { 23 | "useUnaggregatedDomain": true 24 | } 25 | } 26 | } -------------------------------------------------------------------------------- /asp/examples/valid.lp: -------------------------------------------------------------------------------- 1 | % ====== Data definitions ====== 2 | 
data("cars.csv"). 3 | 4 | num_rows(142). 5 | 6 | fieldtype(horsepower,number). 7 | cardinality(horsepower,94). 8 | 9 | fieldtype(acceleration,number). 10 | cardinality(acceleration,96). 11 | 12 | % ====== Visualization specification ====== 13 | 14 | mark(point). 15 | 16 | encoding(e0). 17 | field(e0,acceleration). 18 | channel(e0,x). 19 | type(e0,quantitative). 20 | zero(e0). 21 | 22 | encoding(e1). 23 | field(e1,horsepower). 24 | channel(e1,y). 25 | type(e1,quantitative). 26 | zero(e1). 27 | -------------------------------------------------------------------------------- /data/compassql_examples/input/3d-OxQxQ.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": {"url": "data/cars.json"}, 3 | "mark": "?", 4 | "encodings": [ 5 | { 6 | "channel": "?", 7 | "field": "Origin", 8 | "type": "nominal" 9 | },{ 10 | "channel": "?", 11 | "bin": "?", 12 | "aggregate": "?", 13 | "field": "Horsepower", 14 | "type": "quantitative" 15 | },{ 16 | "channel": "?", 17 | "bin": "?", 18 | "aggregate": "?", 19 | "field": "Acceleration", 20 | "type": "quantitative" 21 | } 22 | ] 23 | } 24 | -------------------------------------------------------------------------------- /data/compassql_examples/input/3d-NxQxQ.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": {"url": "data/cars.json"}, 3 | "mark": "?", 4 | "encodings": [ 5 | { 6 | "channel": "?", 7 | "field": "Cylinders", 8 | "type": "ordinal" 9 | },{ 10 | "channel": "?", 11 | "bin": "?", 12 | "aggregate": "?", 13 | "field": "Horsepower", 14 | "type": "quantitative" 15 | },{ 16 | "channel": "?", 17 | "bin": "?", 18 | "aggregate": "?", 19 | "field": "Acceleration", 20 | "type": "quantitative" 21 | } 22 | ] 23 | } 24 | -------------------------------------------------------------------------------- /asp/examples/data.lp: -------------------------------------------------------------------------------- 1 | % ====== Data definitions for 
tests ====== 2 | 3 | num_rows(100). 4 | 5 | fieldtype(n1,string). 6 | cardinality(n1,20). 7 | entropy(n1,8). % Entropy is in 0.1 units because ASP only supports integers. 8 | 9 | fieldtype(n2,string). 10 | cardinality(n2,3). 11 | entropy(n2,12). 12 | 13 | fieldtype(o1,string). 14 | cardinality(o1,7). 15 | entropy(o1,1). 16 | 17 | fieldtype(q1,number). 18 | cardinality(q1,12). 19 | entropy(q1,21). 20 | extent(q1,0,10). 21 | 22 | fieldtype(q2,number). 23 | cardinality(q2,100). 24 | entropy(q2,8). 25 | extent(q2,10,30). 26 | -------------------------------------------------------------------------------- /data/compassql_examples/output/bin-maxbins.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "bar", 6 | "encoding": { 7 | "x": { 8 | "bin": { 9 | "maxbins": 5 10 | }, 11 | "field": "Miles_per_Gallon", 12 | "type": "quantitative" 13 | }, 14 | "y": { 15 | "aggregate": "count", 16 | "field": "*", 17 | "type": "quantitative" 18 | } 19 | }, 20 | "config": { 21 | "overlay": { 22 | "line": true 23 | }, 24 | "scale": { 25 | "useUnaggregatedDomain": true 26 | } 27 | } 28 | } -------------------------------------------------------------------------------- /js/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "esnext", 4 | "module": "es2015", 5 | "moduleResolution": "node", 6 | "declaration": true, 7 | "strict": true, 8 | "outDir": "./build", 9 | "inlineSourceMap": true, 10 | "importHelpers": true, 11 | "suppressImplicitAnyIndexErrors": true, 12 | "resolveJsonModule": true, 13 | "esModuleInterop": true, 14 | "allowSyntheticDefaultImports": true, 15 | "strictNullChecks": false, 16 | "types": ["jest"] 17 | }, 18 | "files": ["src/index.ts"], 19 | "include": ["typings/*.d.ts"] 20 | } 21 | --------------------------------------------------------------------------------
/data/compassql_examples/output/3d-NxOxQ.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "point", 6 | "encoding": { 7 | "x": { 8 | "field": "Cylinders", 9 | "type": "ordinal" 10 | }, 11 | "y": { 12 | "field": "Origin", 13 | "type": "nominal" 14 | }, 15 | "size": { 16 | "aggregate": "mean", 17 | "field": "Acceleration", 18 | "type": "quantitative" 19 | } 20 | }, 21 | "config": { 22 | "overlay": { 23 | "line": true 24 | }, 25 | "scale": { 26 | "useUnaggregatedDomain": true 27 | } 28 | } 29 | } -------------------------------------------------------------------------------- /examples/scatter.vl.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json", 3 | "data": { 4 | "url": "data/cars.csv" 5 | }, 6 | "encoding": { 7 | "x": { 8 | "field": "horsepower", 9 | "scale": { 10 | "zero": true 11 | }, 12 | "type": "quantitative" 13 | }, 14 | "y": { 15 | "field": "acceleration", 16 | "scale": { 17 | "zero": true 18 | }, 19 | "type": "quantitative" 20 | } 21 | }, 22 | "mark": "point" 23 | } 24 | -------------------------------------------------------------------------------- /js/src/index.ts: -------------------------------------------------------------------------------- 1 | import asp2vl from './asp2vl'; 2 | import * as constraints from './constraints'; 3 | import constraints2json, { Constraint } from './constraints2json'; 4 | import cql2asp from './cql2asp'; 5 | import data2schema from './data2schema'; 6 | import json2constraints, { ConstraintAsp } from './json2constraints'; 7 | import schema2asp from './schema2asp'; 8 | import vl2asp from './vl2asp'; 9 | 10 | export { 11 | vl2asp, 12 | asp2vl, 13 | cql2asp, 14 | data2schema, 15 | schema2asp, 16 | constraints, 17 | constraints2json, 18 | Constraint, 19 | json2constraints, 20 | ConstraintAsp, 21 | }; 22 | 
-------------------------------------------------------------------------------- /asp/kim2018.lp: -------------------------------------------------------------------------------- 1 | % only three encodings 2 | 3 { encoding(E): encoding(E) } 3. 3 | 4 | % two quantitative, one nominal 5 | 2 { type(E,quantitative): encoding(E) } 2. 6 | 1 { type(E,nominal): encoding(E) } 1. 7 | :- type(_,ordinal). 8 | 9 | % need to use x and y 10 | :- not channel(_,(x;y)). 11 | 12 | % only scatterplots 13 | :- not mark(point). 14 | 15 | % always use zero for quantitative 16 | :- type(E,quantitative), not zero(E). 17 | 18 | % no aggregations 19 | :- aggregate(E,_). 20 | 21 | % no binning 22 | :- bin(E,_). 23 | 24 | % no column 25 | :- channel(E,column). 26 | 27 | % no shape 28 | :- channel(E,shape). 29 | -------------------------------------------------------------------------------- /js/src/data2schema.ts: -------------------------------------------------------------------------------- 1 | import read from 'datalib/src/import/read'; 2 | import dlstats from 'datalib/src/stats'; 3 | 4 | export interface Schema { 5 | stats: any; 6 | size: number; 7 | } 8 | 9 | export default function data2schema(data: any[]): Schema { 10 | const readData = read(data); 11 | const summary = dlstats.summary(readData); 12 | 13 | const keyedSummary = {}; 14 | summary.forEach((column: any) => { 15 | const field = column.field; 16 | delete column.field; 17 | keyedSummary[field] = column; 18 | }); 19 | 20 | return { 21 | stats: keyedSummary, 22 | size: data.length, 23 | }; 24 | } 25 | -------------------------------------------------------------------------------- /data/compassql_examples/output/3d-OxQxQ.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "point", 6 | "encoding": { 7 | "y": { 8 | "field": "Origin", 9 | "type": "nominal" 10 | }, 11 | "size": { 12 | "aggregate": "mean", 13 | "field": "Horsepower", 
14 | "type": "quantitative" 15 | }, 16 | "x": { 17 | "bin": {}, 18 | "field": "Acceleration", 19 | "type": "quantitative" 20 | } 21 | }, 22 | "config": { 23 | "overlay": { 24 | "line": true 25 | }, 26 | "scale": { 27 | "useUnaggregatedDomain": true 28 | } 29 | } 30 | } -------------------------------------------------------------------------------- /data/compassql_examples/output/3d-NxQxQ.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "point", 6 | "encoding": { 7 | "y": { 8 | "field": "Cylinders", 9 | "type": "ordinal" 10 | }, 11 | "size": { 12 | "aggregate": "mean", 13 | "field": "Horsepower", 14 | "type": "quantitative" 15 | }, 16 | "x": { 17 | "bin": {}, 18 | "field": "Acceleration", 19 | "type": "quantitative" 20 | } 21 | }, 22 | "config": { 23 | "overlay": { 24 | "line": true 25 | }, 26 | "scale": { 27 | "useUnaggregatedDomain": true 28 | } 29 | } 30 | } -------------------------------------------------------------------------------- /asp/examples/saket2018.lp: -------------------------------------------------------------------------------- 1 | % test with `clingo asp/_saket2018.lp asp/examples/saket2018.lp --opt-mode=optN --quiet=1 --project` 2 | 3 | % ====== Data definitions ====== 4 | num_rows(407). 5 | 6 | fieldtype(n,string). 7 | cardinality(n,9). 8 | 9 | fieldtype(o1,number). 10 | cardinality(o1,25). 11 | 12 | fieldtype(q2,number). 13 | cardinality(q2,407). 14 | 15 | 16 | % ====== Query constraints ====== 17 | encoding(e0). 18 | :- not channel(e0,y). 19 | :- not field(e0,q2). 20 | :- not type(e0,quantitative). 21 | 22 | encoding(e1). 23 | :- not channel(e1,x). 24 | :- not field(e1,o1). 25 | :- not type(e1,ordinal). 26 | 27 | 28 | % ====== Task constraint ====== 29 | task(value). 
30 | -------------------------------------------------------------------------------- /data/compassql_examples/output/2d-NxN.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/movies.json" 4 | }, 5 | "mark": "point", 6 | "encoding": { 7 | "x": { 8 | "field": "Major_Genre", 9 | "type": "nominal", 10 | "scale": { 11 | "rangeStep": 12 12 | } 13 | }, 14 | "y": { 15 | "field": "Creative_Type", 16 | "type": "nominal" 17 | }, 18 | "size": { 19 | "aggregate": "count", 20 | "field": "*", 21 | "type": "quantitative" 22 | } 23 | }, 24 | "config": { 25 | "overlay": { 26 | "line": true 27 | }, 28 | "scale": { 29 | "useUnaggregatedDomain": true 30 | } 31 | } 32 | } -------------------------------------------------------------------------------- /data/compassql_examples/output/showme_add-to-sheet.json: -------------------------------------------------------------------------------- 1 | { 2 | "data": { 3 | "url": "data/cars.json" 4 | }, 5 | "mark": "bar", 6 | "encoding": { 7 | "x": { 8 | "aggregate": "mean", 9 | "field": "Horsepower", 10 | "type": "quantitative" 11 | }, 12 | "y": { 13 | "field": "Cylinders", 14 | "type": "ordinal", 15 | "scale": { 16 | "rangeStep": 12 17 | } 18 | }, 19 | "row": { 20 | "field": "Origin", 21 | "type": "nominal" 22 | } 23 | }, 24 | "config": { 25 | "overlay": { 26 | "line": true 27 | }, 28 | "scale": { 29 | "useUnaggregatedDomain": true 30 | } 31 | } 32 | } -------------------------------------------------------------------------------- /js/src/schema2asp.ts: -------------------------------------------------------------------------------- 1 | import { Schema } from './data2schema'; 2 | 3 | export default function schema2asp(schema: Schema): string[] { 4 | if (!schema) { 5 | throw Error('No data has been prepared'); 6 | } 7 | 8 | const stats = schema.stats; 9 | const decl = [`num_rows(${schema.size}).\n`]; 10 | 11 | Object.keys(stats).forEach((field, i) => { 12 | const fieldName 
= `\"${field}\"`; 13 | const fieldStats = stats[field]; 14 | const fieldType = `fieldtype(${fieldName},${fieldStats.type}).`; 15 | const cardinality = `cardinality(${fieldName}, ${fieldStats.distinct}).`; 16 | 17 | decl.push(`${fieldType}\n${cardinality}`); 18 | }); 19 | 20 | return decl; 21 | } 22 | -------------------------------------------------------------------------------- /js/concat_lp.sh: -------------------------------------------------------------------------------- 1 | # usage: ./concat_lp.sh srcdir destdir 2 | 3 | declare -a files=("topk-lua" 4 | "define" 5 | "generate" 6 | "hard" 7 | "hard-integrity" 8 | "soft" 9 | "weights" 10 | "assign_weights" 11 | "optimize" 12 | "output" 13 | ) 14 | 15 | newline=$'\n\n' 16 | output="// GENERATED WITH concat_lp.sh. DO NOT MODIFY.${newline}" 17 | 18 | i=0 19 | for file in "${files[@]}" 20 | do 21 | path="${1}/${file}.lp" 22 | lp=$(cat "$path" | sed -e s/\`/\'/g) 23 | const=$(echo "$file" | tr a-z A-Z | tr \- _) 24 | if [ "$i" -ne 0 ] 25 | then 26 | output+="${newline}" 27 | fi 28 | output+="export const ${const}: string = \`${lp}${newline}\`;" 29 | let "i++" 30 | done 31 | 32 | echo "$output" > "$2"/constraints.ts 33 | -------------------------------------------------------------------------------- /run_pipeline.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | #set -x 5 | 6 | mkdir -p __tmp__ 7 | 8 | output_dir="__tmp__" 9 | input_file="examples/scatter.lp" 10 | 11 | if [ $# -ge 1 ] 12 | then 13 | input_file=$1 14 | fi 15 | 16 | input_file_fullname=$(basename "$input_file") 17 | target_name="${input_file_fullname%.*}" 18 | 19 | output_spec="$output_dir/$target_name.vl.json" 20 | output_png="$output_dir/$target_name.png" 21 | 22 | echo "🌟 [OK] Start processing file $input_file..." 
23 | 24 | draco $input_file --out $output_spec 25 | 26 | echo "🌟 [OK] Output spec: $output_spec" 27 | 28 | ./node_modules/.bin/vl2png --silent -b examples -- $output_spec > $output_png 29 | 30 | echo "🌟 [OK] Output png: $output_png" 31 | 32 | open $output_png 33 | -------------------------------------------------------------------------------- /js/test/asp2vl.test.ts: -------------------------------------------------------------------------------- 1 | import 'jest'; 2 | import { asp2vl } from '../src'; 3 | 4 | test('parses results correctly', () => { 5 | expect( 6 | asp2vl([ 7 | 'mark(bar).', 8 | 9 | 'encoding(e0).', 10 | 'channel(e0,x).', 11 | 'field(e0,"foo").', 12 | 'type(e0,ordinal).', 13 | 14 | 'encoding(e1).', 15 | 'channel(e1,y).', 16 | 'aggregate(e1,count).', 17 | 'type(e1,quantitative).', 18 | 'zero(e1).', 19 | ]) 20 | ).toEqual({ 21 | $schema: 'https://vega.github.io/schema/vega-lite/v5.json', 22 | data: { url: 'data/cars.json' }, 23 | mark: 'bar', 24 | encoding: { 25 | x: { field: 'foo', type: 'ordinal' }, 26 | y: { aggregate: 'count', type: 'quantitative', scale: { zero: true } }, 27 | }, 28 | }); 29 | }); 30 | -------------------------------------------------------------------------------- /asp/examples/benchmark.lp: -------------------------------------------------------------------------------- 1 | % ====== Data definitions ====== 2 | num_rows(142). 3 | 4 | fieldtype(a,number). 5 | fieldtype(b,number). 6 | fieldtype(c,number). 7 | fieldtype(d,number). 8 | fieldtype(e,number). 9 | fieldtype(f,number). 10 | fieldtype(g,number). 11 | 12 | fieldtype(h,string). 13 | fieldtype(i,string). 14 | fieldtype(j,string). 15 | fieldtype(k,string). 16 | fieldtype(l,string). 17 | fieldtype(m,string). 18 | fieldtype(n,string). 19 | fieldtype(o,string). 20 | fieldtype(p,string). 21 | fieldtype(q,string). 22 | fieldtype(r,string). 23 | fieldtype(s,string). 24 | 25 | fieldtype(t,datetime). 26 | fieldtype(u,datetime). 27 | fieldtype(w,datetime). 
28 | fieldtype(x,datetime). 29 | fieldtype(y,datetime). 30 | 31 | % ====== Query constraints ====== 32 | encoding(e0). 33 | encoding(e1). 34 | encoding(e3). 35 | encoding(e4). 36 | -------------------------------------------------------------------------------- /asp/examples/kim2018.lp: -------------------------------------------------------------------------------- 1 | % test with `clingo asp/_kim2018.lp asp/examples/kim2018.lp --opt-mode=optN --quiet=1 --project` 2 | 3 | % ====== Data definitions ====== 4 | num_rows(30). 5 | 6 | fieldtype(n,string). 7 | cardinality(n,10). 8 | interesting(n). 9 | 10 | fieldtype(q1,number). 11 | cardinality(q1,30). 12 | interesting(q1). 13 | entropy(q1,38). 14 | 15 | fieldtype(q2,number). 16 | cardinality(q2,30). 17 | entropy(q2,39). 18 | 19 | % ====== Query constraints ====== 20 | mark(point). 21 | 22 | encoding(enc_n). 23 | :- not type(enc_n,nominal). 24 | :- not field(enc_n,n). 25 | 26 | encoding(enc_q1). 27 | :- not type(enc_q1,quantitative). 28 | :- not field(enc_q1,q1). 29 | 30 | encoding(enc_q2). 31 | :- not type(enc_q2,quantitative). 32 | :- not field(enc_q2,q2). 33 | 34 | % ====== Task constraints ====== 35 | task(summary). 
import { vl2asp } from '../src';

// Vega-Lite -> ASP direction, the inverse of asp2vl.test.ts. Both sides are
// sorted before comparison because the order of facts is not significant.
test('generates correct asp', () => {
  expect(
    vl2asp({
      $schema: 'https://vega.github.io/schema/vega-lite/v5.json',
      data: { url: 'data/cars.json' },
      mark: 'bar',
      encoding: {
        x: { field: 'foo', type: 'ordinal' },
        y: { aggregate: 'count', type: 'quantitative', scale: { zero: true } },
      },
    }).sort()
  ).toEqual(
    [
      // dataset url and mark become top-level facts
      'data("data/cars.json").',
      'mark(bar).',

      // one encoding id per channel; one fact per encoding property
      'encoding(e0).',
      'channel(e0,x).',
      'field(e0,"foo").',
      'type(e0,ordinal).',

      'encoding(e1).',
      'channel(e1,y).',
      'aggregate(e1,count).',
      'type(e1,quantitative).',
      // scale: { zero: true } becomes a bare zero(e1) fact
      'zero(e1).',
    ].sort()
  );
});
40 | -------------------------------------------------------------------------------- /asp/generate.lp: -------------------------------------------------------------------------------- 1 | % ====== Generators ====== 2 | 3 | % encodings 4 | 5 | % maximum number for each multi channel encoding 6 | #const max_extra_encs = 5. 7 | 8 | obj_id(1..max_extra_encs). 9 | 10 | { encoding(E): obj_id(E) }. 11 | 12 | :- not encoding(ID), encoding(ID-1), obj_id(ID), obj_id(ID-1). 13 | 14 | % properties of encodings 15 | 16 | % channel and type have to be present 17 | { channel(E,C): channel(C) } = 1 :- encoding(E). 18 | { type(E,T): type(T) } = 1 :- encoding(E). 19 | 20 | % other properties that are not required 21 | 0 { field(E,F): field(F) } 1 :- encoding(E). 22 | 0 { aggregate(E,A): aggregate_op(A) } 1 :- encoding(E). 23 | 0 { bin(E,B): binning(B) } 1 :- encoding(E). 24 | 0 { zero(E) } 1 :- encoding(E). 25 | 0 { log(E) } 1 :- encoding(E). 26 | 27 | % pick one mark type 28 | 29 | { mark(M) : marktype(M) } = 1. 30 | 31 | % stacking 32 | 33 | 0 { stack(S): stacking(S) } 1. 34 | -------------------------------------------------------------------------------- /asp/process_soft.py: -------------------------------------------------------------------------------- 1 | """ 2 | Reads the weights file and generates assign_weights.lp and weights.json 3 | """ 4 | 5 | import json 6 | import os 7 | import re 8 | 9 | 10 | def absolute_path(p: str) -> str: 11 | return os.path.join(os.path.dirname(os.path.abspath(__file__)), p) 12 | 13 | 14 | def main(): 15 | with open(absolute_path("weights.lp")) as weight_constants, open( 16 | absolute_path("assign_weights.lp"), "w" 17 | ) as assign, open(absolute_path("../data/weights.json"), "w") as weights_json: 18 | 19 | assign.write("%% GENERATED FILE. 
DO NOT EDIT.\n\n") 20 | 21 | weights = {} 22 | 23 | for line in weight_constants.readlines(): 24 | match = re.search("#const (.*)_weight = ([\-0-9]*)", line) 25 | if match: 26 | name = match.group(1) 27 | value = int(match.group(2)) 28 | 29 | weights[f"{name}_weight"] = value 30 | 31 | assign.write(f"soft_weight({name},{name}_weight).\n") 32 | 33 | json.dump(weights, weights_json, indent=2) 34 | 35 | 36 | if __name__ == "__main__": 37 | main() 38 | -------------------------------------------------------------------------------- /asp/_apt.lp: -------------------------------------------------------------------------------- 1 | % implementation of APT 2 | 3 | #include "asp/define.lp". 4 | #include "asp/generate.lp". 5 | #include "asp/hard.lp". 6 | 7 | :~ type(E,quantitative), channel(E,x), priority(E,P). [1@P,E] 8 | :~ type(E,quantitative), channel(E,y), priority(E,P). [1@P,E] 9 | :~ type(E,quantitative), channel(E,size), priority(E,P). [2@P,E] 10 | :~ type(E,quantitative), channel(E,color), priority(E,P). [3@P,E] 11 | 12 | :~ type(E,ordinal), channel(E,x), priority(E,P). [1@P,E] 13 | :~ type(E,ordinal), channel(E,y), priority(E,P). [1@P,E] 14 | :~ type(E,ordinal), channel(E,color), priority(E,P). [2@P,E] 15 | :~ type(E,ordinal), channel(E,size), priority(E,P). [3@P,E] 16 | 17 | :~ type(E,nominal), channel(E,x), priority(E,P). [1@P,E] 18 | :~ type(E,nominal), channel(E,y), priority(E,P). [1@P,E] 19 | :~ type(E,nominal), channel(E,color), priority(E,P). [2@P,E] 20 | :~ type(E,nominal), channel(E,shape), priority(E,P). [3@P,E] 21 | :~ type(E,nominal), channel(E,size), priority(E,P). [4@P,E] 22 | 23 | :- channel(_,text). 24 | :- channel(_,detail). 25 | :- channel(_,row). 26 | :- channel(_,column). 27 | 28 | % don't bin, use log, or aggregate 29 | :- bin(_). 30 | :- log(_). 31 | :- aggregate(_,_). 32 | 33 | #show mark/1. 34 | #show channel/2. 
#!/usr/bin/env node

// Batch-runs CompassQL over every partial spec in the input folder and writes
// the top recommended full Vega-Lite spec to the output folder under the
// same file name.

const cql = require('compassql');
const dl = require('datalib');
const fs = require('fs');
const path = require('path');

// the folder containing input partial specs
const inputDir = 'data/compassql_examples/input/';
// the folder for output full specs
const outputDir = 'data/compassql_examples/output/';

// FIX: files/input/output were previously undeclared implicit globals,
// which breaks under strict mode; declare everything with const.
const files = fs.readdirSync(inputDir);

for (const file of files) {
  console.log('[OK] Processing ' + file);

  const input = path.join(inputDir, file);
  const output = path.join(outputDir, file);

  // read spec
  const rawSpec = fs.readFileSync(input, 'utf8');
  const spec = JSON.parse(rawSpec);

  // compile data schema for compassql from the dataset the spec points at
  const data = dl.json(spec.data.url);
  const schema = cql.schema.build(data);

  const query = {
    spec,
    chooseBy: 'effectiveness',
    config: { autoAddCount: true }
  };

  // take the top-ranked recommendation only
  const recommendation = cql.recommend(query, schema);
  const vlSpec = recommendation.result.items[0].toSpec();

  fs.writeFileSync(output, JSON.stringify(vlSpec, null, 2), 'utf8');
}
#script(lua)

-- Enumerate the top-k optimal models: repeatedly solve to optimality, then
-- add a constraint that rules out the cost level just found so the next
-- round yields the next-best models. k is taken from clingo's --models
-- option; requires --opt-mode=optN. Lua counterpart of asp/topk-py.lp.
function main(prg)
    local count = tonumber(prg.configuration.solve.models)
    local backend = prg:backend()

    -- the observer records the literals of the #minimize program so their
    -- weighted sum can be constrained between solve rounds
    local observer = {
        minimize_literals = {}
    }
    function observer:minimize (priority, literals)
        self.minimize_literals = literals
    end

    prg:register_observer(observer)

    prg:ground({{"base", {}}}, self)

    while count > 0 do
        local cost = 0

        prg.configuration.solve.models = count
        local it = prg:solve{yield=true}
        -- pcall so the solve handle is always closed even if iteration throws
        local ret, err = pcall(function()
            if it:get().unsatisfiable then
                count = 0
                return
            end

            for m in it:iter() do
                if m.optimality_proven then
                    -- first cost component (Lua tables are 1-indexed; the
                    -- Python variant reads cost[0])
                    cost = m.cost[1]
                    count = count-1
                end
            end
        end)
        it:close()
        if not ret then
            error(err)
        end

        if count > 0 then
            -- exclude the cost level just enumerated: aux holds when the
            -- minimize sum reaches cost+1, and the extra rule forces aux.
            -- NOTE(review): topk-py.lp adds this as an integrity constraint
            -- (empty head) instead -- confirm both encodings are equivalent.
            local aux = backend:add_atom()
            backend:add_weight_rule{{aux}, cost+1, observer.minimize_literals}
            backend:add_rule{{aux}, {-aux}}
        end
    end
end
#end.
49 | -------------------------------------------------------------------------------- /js/src/json2constraints.ts: -------------------------------------------------------------------------------- 1 | import { Constraint } from './constraints2json'; 2 | 3 | export interface ConstraintAsp { 4 | definitions: string; 5 | weights?: string; 6 | assigns?: string; 7 | } 8 | 9 | export default function json2constraints(json: Constraint[]): ConstraintAsp { 10 | const type = json[0].type; 11 | // tslint:disable-next-line 12 | json.forEach((constraint) => { 13 | if (constraint.type !== type) { 14 | throw new Error(`constraints not all of type ${type}`); 15 | } 16 | }); 17 | 18 | let definitions = ''; 19 | let weights; 20 | let assigns; 21 | if (type === 'soft') { 22 | weights = ''; 23 | assigns = ''; 24 | } 25 | 26 | for (const constraint of json) { 27 | const def = `% @constraint ${constraint.description} 28 | ${constraint.asp}`; 29 | definitions += def; 30 | definitions += '\n\n'; 31 | 32 | if (type === 'soft') { 33 | const weight = `#const ${constraint.name}_weight = ${constraint.weight}.`; 34 | weights += weight; 35 | weights += '\n'; 36 | 37 | const assign = `soft_weight(${constraint.name}, ${constraint.name}_weight).`; 38 | assigns += assign; 39 | assigns += '\n'; 40 | } 41 | } 42 | 43 | if (type === 'hard') { 44 | return { definitions }; 45 | } else { 46 | return { 47 | definitions, 48 | weights, 49 | assigns, 50 | }; 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /data/spec_pairs/data.json: -------------------------------------------------------------------------------- 1 | { 2 | "headers": { 3 | "first": { 4 | "title": "Some spec", 5 | "subtitle": "Some description" 6 | }, 7 | "second": { 8 | "title": "Some other spec", 9 | "subtitle": "Some description" 10 | } 11 | }, 12 | "specs": [{ 13 | "first": { 14 | "mark": "point", 15 | "encoding": { 16 | "y": { 17 | "scale": { 18 | "zero": true 19 | }, 20 | "field": 
"Miles_per_Gallon", 21 | "type": "quantitative", 22 | "aggregate": "sum" 23 | } 24 | }, 25 | "data": { 26 | "url": "data/cars.json" 27 | }, 28 | "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json" 29 | }, 30 | "second": { 31 | "mark": "tick", 32 | "encoding": { 33 | "y": { 34 | "scale": { 35 | "zero": false 36 | }, 37 | "field": "Miles_per_Gallon", 38 | "type": "quantitative" 39 | } 40 | }, 41 | "data": { 42 | "url": "data/cars.json" 43 | }, 44 | "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json" 45 | }, 46 | "properties": {} 47 | }] 48 | } 49 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: ["push", "pull_request"] 4 | 5 | jobs: 6 | test: 7 | runs-on: ubuntu-latest 8 | 9 | # We want to run on external PRs, but not on our own internal PRs as they'll be run by the push to the branch. 10 | if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository 11 | 12 | steps: 13 | - uses: actions/checkout@v2 14 | 15 | - uses: conda-incubator/setup-miniconda@v2 16 | with: 17 | auto-update-conda: true 18 | python-version: 3.8 19 | 20 | - shell: bash -l {0} 21 | run: | 22 | which pip 23 | pip --version 24 | which python 25 | python --version 26 | which conda 27 | conda --version 28 | 29 | - name: Install 30 | shell: bash -l {0} 31 | run: | 32 | conda install -c potassco clingo 33 | pip install -r requirements.txt 34 | pip install -e . 
35 | 36 | - name: Setup Node 37 | uses: actions/setup-node@v3 38 | with: 39 | node-version: 16 40 | 41 | - name: Install Node dependencies 42 | run: yarn --cwd js --frozen-lockfile 43 | 44 | - name: Node Build 45 | run: yarn --cwd js build 46 | 47 | - name: Tests 48 | shell: bash -l {0} 49 | run: python setup.py test 50 | 51 | - name: Upload coverage to Codecov 52 | uses: codecov/codecov-action@v1 53 | with: 54 | fail_ci_if_error: true 55 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2017, Dominik Moritz and Chenglong Wang 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | 7 | 1. Redistributions of source code must retain the above copyright notice, this 8 | list of conditions and the following disclaimer. 9 | 10 | 2. Redistributions in binary form must reproduce the above copyright notice, 11 | this list of conditions and the following disclaimer in the documentation 12 | and/or other materials provided with the distribution. 13 | 14 | 3. Neither the name of the copyright holder nor the names of its contributors 15 | may be used to endorse or promote products derived from this software 16 | without specific prior written permission. 17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 19 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 20 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 21 | DISCLAIMED. 
import json
import os

from jsonschema import validate

from draco.run import run
from draco.helper import data_to_asp, read_data_to_asp
from draco.js import cql2asp

# Directory holding the example CompassQL query specs used as test input.
EXAMPLES_DIR = os.path.join("examples")


class TestFull:
    """End-to-end check: every example query must solve to a schema-valid Vega-Lite spec."""

    def test_output_schema(self):
        # All example queries, excluding full Vega-Lite specs (*.vl.json).
        json_files = [
            os.path.join(EXAMPLES_DIR, fname)
            for fname in os.listdir(EXAMPLES_DIR)
            if fname.endswith(".json") and not fname.endswith(".vl.json")
        ]

        # The Vega-Lite JSON schema shipped with the node dependencies.
        with open("js/node_modules/vega-lite/build/vega-lite-schema.json") as sf:
            schema = json.load(sf)

        for fname in json_files:
            with open(fname, "r") as f:
                query_spec = json.load(f)

            # Turn the spec's data into ASP facts describing its schema.
            if "url" in query_spec["data"]:
                # url is relative to the spec file itself
                data = read_data_to_asp(
                    os.path.join(
                        os.path.dirname(f.name), query_spec["data"]["url"]
                    )
                )
            elif "values" in query_spec["data"]:
                # FIX: inline values are a list of rows, not a file path, so
                # they must go through data_to_asp (read_data_to_asp expects
                # a filename and would crash on a list).
                data = data_to_asp(query_spec["data"]["values"])
            else:
                raise Exception("no data found in spec")
            print(data)  # debug aid: shown by pytest only on failure (or with -s)
            query = cql2asp(query_spec)
            program = query + data
            result = run(program)
            # The solved spec must validate against the Vega-Lite schema.
            validate(result.as_vl(), schema)
import json
from typing import List

import pandas as pd

from draco.js import data2schema, schema2asp
from draco.run import run_clingo


def is_valid(draco_query: List[str], debug=False) -> bool:
    """Check a task against the hard constraints.

    Args:
        draco_query: a list of facts
        debug: whether to run clingo in debug mode
    Returns:
        whether the task is valid, i.e. the program is not UNSATISFIABLE
    """
    _, stdout = run_clingo(
        draco_query,
        files=["define.lp", "hard.lp", "hard-integrity.lp"],
        silence_warnings=True,
        debug=debug,
    )

    return json.loads(stdout)["Result"] != "UNSATISFIABLE"


def data_to_asp(data: List) -> List[str]:
    """Reads the data array and generates the ASP definition.

    Args:
        data: the data as a list of objects
    Returns:
        the asp definition.
    """
    return schema2asp(data2schema(data))


def read_data_to_asp(file: str) -> List[str]:
    """Reads the given JSON or CSV file and generates the ASP definition.

    Args:
        file: path to a .json or .csv data file
    Returns:
        the asp definition.
    Raises:
        ValueError: if the file is neither .json nor .csv
    """
    if file.endswith(".json"):
        with open(file) as f:
            data = json.load(f)
        return schema2asp(data2schema(data))
    elif file.endswith(".csv"):
        # keep_default_na=False with na_values=None: strings like "NA" stay
        # strings; only genuinely missing cells become NaN, replaced by None
        df = pd.read_csv(file, keep_default_na=False, na_values=None)
        df = df.where(pd.notnull(df), None)
        data = list(df.T.to_dict().values())
        return schema2asp(data2schema(data))
    else:
        raise ValueError(f"invalid file type: {file}")
2,1900,50,1411981 61 | 2,2000,50,8911133 62 | 2,1900,55,1064632 63 | 2,2000,55,6921268 64 | 2,1900,60,887508 65 | 2,2000,60,5668961 66 | 2,1900,65,640212 67 | 2,2000,65,4804784 68 | 2,1900,70,440007 69 | 2,2000,70,5184855 70 | 2,1900,75,265879 71 | 2,2000,75,4355644 72 | 2,1900,80,132449 73 | 2,2000,80,3221898 74 | 2,1900,85,48614 75 | 2,2000,85,1981156 76 | 2,1900,90,20093 77 | 2,2000,90,1064581 78 | -------------------------------------------------------------------------------- /js/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "draco-core", 3 | "description": "Visualization Knowledge as Constraints.", 4 | "version": "0.0.6", 5 | "author": "Dominik Moritz", 6 | "license": "BSD-3-Clause", 7 | "main": "build/index.js", 8 | "module": "build/index.js", 9 | "typings": "build/index.d.ts", 10 | "dependencies": { 11 | "datalib": "^1.9.3", 12 | "vega": "^5.22.1", 13 | "vega-lite": "^5.3.0" 14 | }, 15 | "devDependencies": { 16 | "@rollup/plugin-commonjs": "^22.0.1", 17 | "@rollup/plugin-node-resolve": "^13.3.0", 18 | "@types/jest": "^28.1.6", 19 | "@types/json-stable-stringify": "^1.0.34", 20 | "@types/node": "^18.6.1", 21 | "jest": "^28.1.3", 22 | "prettier": "^2.7.1", 23 | "rollup": "^2.77.0", 24 | "ts-jest": "^28.0.7", 25 | "tslint": "^6.1.3", 26 | "tslint-config-prettier": "^1.18.0", 27 | "typescript": "^4.7.4", 28 | "vega-typings": "^0.22.3" 29 | }, 30 | "bin": { 31 | "vl2asp": "./bin/vl2asp", 32 | "asp2vl": "./bin/asp2vl" 33 | }, 34 | "scripts": { 35 | "build": "npm run concat && rm -rf build && yarn tsc && rollup -c", 36 | "concat": "bash concat_lp.sh ../asp src", 37 | "format": "tslint -p . --fix && prettier --write '{src,test}/**/*.ts'", 38 | "lint": "tslint -p . 
&& prettier --list-different '{src,test}/**/*.ts'", 39 | "prepublishOnly": "npm run test && npm run build", 40 | "test": "jest" 41 | }, 42 | "jest": { 43 | "transform": { 44 | "^.+\\.tsx?$": "ts-jest" 45 | }, 46 | "testRegex": "(/__tests__/.*|(\\.|/)(test|spec))\\.(jsx?|tsx?)$", 47 | "moduleFileExtensions": [ 48 | "ts", 49 | "tsx", 50 | "js", 51 | "jsx", 52 | "json", 53 | "node" 54 | ], 55 | "testPathIgnorePatterns": [ 56 | "node_modules", 57 | "/build", 58 | "src" 59 | ] 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /data/to_label/scale.json: -------------------------------------------------------------------------------- 1 | { 2 | "1": [], 3 | "2": [], 4 | "3": [ 5 | [ 6 | { 7 | "data": { 8 | "url": "data/cars_mod.json" 9 | }, 10 | "encoding": { 11 | "color": { 12 | "field": "Rating", 13 | "type": "nominal" 14 | }, 15 | "x": { 16 | "aggregate": "sum", 17 | "field": "Retail Price", 18 | "scale": { 19 | "zero": true 20 | }, 21 | "type": "quantitative" 22 | }, 23 | "y": { 24 | "bin": { 25 | "maxbins": 20 26 | }, 27 | "field": "Horsepower", 28 | "scale": { 29 | "zero": true 30 | }, 31 | "type": "quantitative" 32 | } 33 | }, 34 | "mark": "bar" 35 | }, 36 | { 37 | "data": { 38 | "url": "data/cars_mod.json" 39 | }, 40 | "encoding": { 41 | "color": { 42 | "field": "Rating", 43 | "type": "nominal" 44 | }, 45 | "x": { 46 | "aggregate": "sum", 47 | "field": "Retail Price", 48 | "scale": { 49 | "zero": true 50 | }, 51 | "type": "quantitative" 52 | }, 53 | "y": { 54 | "bin": { 55 | "maxbins": 20 56 | }, 57 | "field": "Horsepower", 58 | "type": "quantitative" 59 | } 60 | }, 61 | "mark": "bar" 62 | } 63 | ] 64 | ], 65 | "4": [] 66 | } -------------------------------------------------------------------------------- /js/src/asp2vl.ts: -------------------------------------------------------------------------------- 1 | import { TopLevelUnitSpec } from 'vega-lite/build/src/spec/unit'; 2 | import { Field } from 
'vega-lite/build/src/channeldef'; 3 | 4 | const REGEX = /(\w+)\(([\w\.\/]+)(,([\w\.]+))?\)/; 5 | const DEFAULT_DATASET = 'data/cars.json'; 6 | 7 | /** 8 | * Convert from ASP to Vega-Lite. 9 | */ 10 | export default function asp2vl(facts: string[], url: string = DEFAULT_DATASET): TopLevelUnitSpec { 11 | let mark = ''; 12 | const encodings: { [enc: string]: any } = {}; 13 | 14 | for (const value of facts) { 15 | // TODO: Better handle quoted fields. We currently simply remove all ". 16 | const cleanedValue = value.replace(/\"/g, ''); 17 | const negSymbol = value.trim().startsWith(':-'); // TODO: remove this 18 | const [_, predicate, first, __, second] = REGEX.exec(cleanedValue) as any; 19 | 20 | if (predicate === 'mark') { 21 | mark = first; 22 | } else if (predicate === 'data') { 23 | url = first; 24 | } else if (predicate !== 'soft') { 25 | if (!encodings[first]) { 26 | encodings[first] = {}; 27 | } 28 | // if it contains the neg symbol, and the field is a boolean field, its value would be false 29 | // e.g., for the case ":- zero(e3)" 30 | encodings[first][predicate] = second || !negSymbol; 31 | } 32 | } 33 | 34 | const encoding: { [channel: string]: any } = {}; 35 | 36 | for (const e of Object.keys(encodings)) { 37 | const enc = encodings[e]; 38 | 39 | // if quantitative encoding and zero is not set, set zero to false 40 | if (enc.type === 'quantitative' && enc.zero === undefined && enc.bin === undefined) { 41 | enc.zero = false; 42 | } 43 | 44 | const scale = { 45 | ...(enc.log ? { type: 'log' } : {}), 46 | ...(enc.zero === undefined ? {} : enc.zero ? { zero: true } : { zero: false }), 47 | }; 48 | 49 | encoding[enc.channel] = { 50 | type: enc.type, 51 | ...(enc.aggregate ? { aggregate: enc.aggregate } : {}), 52 | ...(enc.field ? { field: enc.field } : {}), 53 | ...(enc.stack ? { stack: enc.stack } : {}), 54 | ...(enc.bin !== undefined ? (+enc.bin === 10 ? { bin: true } : { bin: { maxbins: +enc.bin } }) : {}), 55 | ...(Object.keys(scale).length ? 
{ scale } : {}), 56 | }; 57 | } 58 | 59 | return { 60 | $schema: 'https://vega.github.io/schema/vega-lite/v5.json', 61 | data: { url: `${url}` }, 62 | mark, 63 | encoding, 64 | } as TopLevelUnitSpec; 65 | } 66 | -------------------------------------------------------------------------------- /js/src/vl2asp.ts: -------------------------------------------------------------------------------- 1 | import { TopLevelUnitSpec } from 'vega-lite/build/src/spec/unit'; 2 | import { Field } from 'vega-lite/build/src/channeldef'; 3 | 4 | /** 5 | * Convert from Vega-Lite to ASP. 6 | */ 7 | export default function vl2asp(spec: TopLevelUnitSpec): string[] { 8 | const facts = [`mark(${spec.mark}).`]; 9 | 10 | if ('data' in spec && 'url' in spec.data) { 11 | facts.push(`data("${spec.data.url}").`); 12 | } 13 | 14 | const encoding = spec.encoding || {}; 15 | 16 | let i = 0; 17 | for (const channel of Object.keys(encoding)) { 18 | const eid = `e${i++}`; 19 | facts.push(`encoding(${eid}).`); 20 | facts.push(`channel(${eid},${channel}).`); 21 | 22 | let encFieldType = null; 23 | let encZero = null; 24 | let encBinned = null; 25 | 26 | // translate encodings 27 | for (const field of Object.keys(encoding[channel])) { 28 | const fieldContent = encoding[channel][field]; 29 | if (field === 'type') { 30 | encFieldType = fieldContent; 31 | } 32 | if (field === 'bin') { 33 | encBinned = fieldContent; 34 | } 35 | if (field === 'scale') { 36 | // translate two boolean fields 37 | if ('zero' in fieldContent) { 38 | encZero = fieldContent.zero; 39 | if (fieldContent.zero) { 40 | facts.push(`zero(${eid}).`); 41 | } else { 42 | facts.push(`:- zero(${eid}).`); 43 | } 44 | } 45 | if ('log' in fieldContent) { 46 | if (fieldContent.log) { 47 | facts.push(`log(${eid}).`); 48 | } else { 49 | facts.push(`:-log(${eid}).`); 50 | } 51 | } 52 | } else if (field === 'bin') { 53 | if (fieldContent.maxbins) { 54 | facts.push(`${field}(${eid},${fieldContent.maxbins}).`); 55 | } else { 56 | 
facts.push(`${field}(${eid},10).`); 57 | } 58 | } else if (field === 'field') { 59 | // fields can have spaces and start with capital letters 60 | facts.push(`${field}(${eid},"${fieldContent}").`); 61 | } else { 62 | // translate normal fields 63 | if (field !== 'bin') { 64 | facts.push(`${field}(${eid},${fieldContent}).`); 65 | } 66 | } 67 | } 68 | 69 | if (encFieldType === 'quantitative' && encZero === null && encBinned === null) { 70 | facts.push(`zero(${eid}).`); 71 | } 72 | } 73 | 74 | return facts; 75 | } 76 | -------------------------------------------------------------------------------- /js/Readme.md: -------------------------------------------------------------------------------- 1 | # Draco Core 2 | 3 | JavaScript module with the Draco knowledge base and helper functions to convert from Draco ASP to Vega-Lite and 4 | vice-versa as well as a function to convert from CompassQL to Draco ASP. 5 | 6 | If you wish to run Draco in a web browser, consider using draco-vis, which bundles the Clingo solver as a WebAssembly 7 | module. The Draco-Core API does not include this functionality by itself. It merely handles the logic of translating 8 | between the various interface languages. 9 | 10 | ## Draco-Core API (Typescript / Javascript) 11 | 12 | **vl2asp** *(spec: TopLevelUnitSpec): string[]* [<>](https://github.com/uwdata/draco/blob/master/js/src/vl2asp.ts) 13 | 14 | > Translates a Vega-Lite specification into a list of ASP Draco facts. 15 | 16 | **cql2asp** *(spec: any): string[]* [<>](https://github.com/uwdata/draco/blob/master/js/src/cql2asp.ts) 17 | 18 | > Translates a CompassQL specification into a list of ASP Draco constraints. 19 | 20 | **asp2vl** *(facts: string[]): TopLevelUnitSpec* [<>](https://github.com/uwdata/draco/blob/master/js/src/asp2vl.ts) 21 | 22 | > Interprets a list of ASP Draco facts as a Vega-Lite specification. 
23 | 24 | **data2schema** *(data: any[]): Schema* [<>](https://github.com/uwdata/draco/blob/master/js/src/data2schema.ts) 25 | 26 | > Reads a list of rows and generates a data schema for the dataset. `data` should be given as a list of dictionaries. 27 | 28 | **schema2asp** *(schema: Schema): string[]* [<>](https://github.com/uwdata/draco/blob/master/js/src/schema2asp.ts) 29 | 30 | > Translates a data schema into an ASP declaration of the data it describes. 31 | 32 | **constraints2json** *(constraintsAsp: string, weightsAsp?: string): 33 | Constraint[]* [<>](https://github.com/uwdata/draco/blob/master/js/src/constraints2json.ts) 34 | 35 | > Translates the given ASP constraints and matching weights (i.e. for soft constraints) into JSON format. 36 | 37 | **json2constraints** *(constraints: Constraint[]): 38 | ConstraintAsp* [<>](https://github.com/uwdata/draco/blob/master/js/src/json2constraints.ts) 39 | 40 | > Translates the given JSON format ASP constraints into ASP strings for definitions and weights (if applicable, i.e. for 41 | > soft constraints). 42 | 43 | ## Examples 44 | 45 | You can run vl2asp, asp2vl, and cql2asp on the command line. 46 | 47 | ```sh 48 | cat ../examples/ab.json | ./bin/vl2asp | clingo 49 | ``` 50 | 51 | ## Local Testing 52 | 53 | Before running tests locally, you need to generate `./src/constraints.ts` by executing the following command: 54 | 55 | ```sh 56 | yarn concat 57 | ``` 58 | 59 | After this, the execution of `yarn test` should yield no errors.
import json
import logging
import os
import subprocess
from typing import Dict, List, Optional, Tuple

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def absolute_path(p: str) -> str:
    """Resolve p relative to the directory containing this module."""
    return os.path.join(os.path.dirname(os.path.abspath(__file__)), p)


def _run_node(script: str, payload: bytes) -> bytes:
    """Run a node script (path relative to this module), feed payload on
    stdin, and return its stdout.

    Any stderr output is logged as an error; the call is still treated as
    successful and callers parse whatever arrived on stdout. Factored out of
    the five converters below, which previously duplicated this boilerplate.
    """
    proc = subprocess.Popen(
        args=["node", absolute_path(script)],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    stdout, stderr = proc.communicate(payload)

    if stderr:
        logger.error("stderr: %s", stderr)

    return stdout


def vl2asp(vl: Dict) -> List[str]:
    """Translate a Vega-Lite spec into a list of ASP facts (empty lines dropped)."""
    stdout = _run_node("../js/bin/vl2asp", json.dumps(vl).encode("utf8"))
    return list(filter(lambda x: x, stdout.decode("utf-8").split("\n")))


def asp2vl(asp: List[str]) -> Dict:
    """Interpret a list of ASP facts as a Vega-Lite spec."""
    stdout = _run_node("../js/bin/asp2vl", "\n".join(asp).encode("utf8"))
    return json.loads(stdout)


def cql2asp(cql: Dict) -> List[str]:
    """Translate a CompassQL spec into a list of ASP constraints."""
    stdout = _run_node("../js/bin/cql2asp", json.dumps(cql).encode("utf8"))
    return stdout.decode("utf-8").split("\n")


def data2schema(data: List) -> Dict:
    """Build a data schema from a list of rows."""
    stdout = _run_node("../js/bin/data2schema", json.dumps(data).encode("utf8"))
    return json.loads(stdout)


def schema2asp(schema: Dict) -> List[str]:
    """Translate a data schema into its ASP declaration."""
    stdout = _run_node("../js/bin/schema2asp", json.dumps(schema).encode("utf8"))
    return stdout.decode("utf-8").split("\n")
fieldContent) { 53 | if (fieldContent.log) { 54 | facts.push(`log(${eid}).`); 55 | } else { 56 | facts.push(`:-log(${eid}).`); 57 | } 58 | } 59 | } else if (field === 'bin') { 60 | if (fieldContent.maxbins) { 61 | facts.push(`${field}(${eid},${fieldContent.maxbins}).`); 62 | } else if (fieldContent) { 63 | facts.push(`:- not bin(${eid},_).`); 64 | } else { 65 | facts.push(`:- bin(${eid},_).`); 66 | } 67 | } else if (field === 'field') { 68 | // fields can have spaces and start with capital letters 69 | facts.push(`${field}(${eid},"${fieldContent}").`); 70 | } else { 71 | // translate normal fields 72 | if (field !== 'bin') { 73 | facts.push(`${field}(${eid},${fieldContent}).`); 74 | } 75 | } 76 | } 77 | 78 | if (encFieldType === 'quantitative' && encZero === null && encBinned === null) { 79 | facts.push(`zero(${eid}).`); 80 | } 81 | } 82 | 83 | return facts; 84 | } 85 | 86 | function subst_if_hole(v: any) { 87 | return v !== HOLE ? v : null; 88 | } 89 | 90 | function remove_if_star(v: any) { 91 | return v !== '*' ? 
v : null; 92 | } 93 | -------------------------------------------------------------------------------- /data/training/schema.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-06/schema", 3 | "type": "object", 4 | "additionalProperties": false, 5 | "required": ["data", "source"], 6 | "properties": { 7 | "source": { 8 | "type": "string" 9 | }, 10 | "$schema": { 11 | "type": "string" 12 | }, 13 | "data": { 14 | "type": "array", 15 | "items": { 16 | "type": "object", 17 | "properties": { 18 | "fields": { 19 | "type": "array", 20 | "items": { 21 | "type": "object", 22 | "properties": { 23 | "name": { 24 | "type": "string" 25 | }, 26 | "type": { 27 | "type": "string" 28 | }, 29 | "entropy": { 30 | "type": "number" 31 | }, 32 | "cardinality": { 33 | "type": "number" 34 | }, 35 | "interesting": { 36 | "type": "boolean" 37 | } 38 | }, 39 | "additionalProperties": false, 40 | "required": [ 41 | "name", 42 | "type" 43 | ] 44 | } 45 | }, 46 | "num_rows": { 47 | "type": "number" 48 | }, 49 | "pvalue": { 50 | "type": "number" 51 | }, 52 | "task": { 53 | "type": "string", 54 | "enum": [ 55 | "value", 56 | "summary" 57 | ] 58 | }, 59 | "negative": { 60 | "$ref": "https://vega.github.io/schema/vega-lite/v2.json" 61 | }, 62 | "positive": { 63 | "$ref": "https://vega.github.io/schema/vega-lite/v2.json" 64 | } 65 | }, 66 | "additionalProperties": false, 67 | "required": [ 68 | "fields", 69 | "negative", 70 | "positive" 71 | ] 72 | } 73 | } 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from codecs import open 4 | from os.path import abspath, dirname, join 5 | from subprocess import call 6 | from time import time 7 | from typing import List 8 | 9 | from draco import __version__ 10 | from setuptools import Command, setup 11 
#!/usr/bin/env python

from codecs import open
from os.path import abspath, dirname, join
from subprocess import call
from time import time
from typing import List

from draco import __version__
from setuptools import Command, setup

this_dir = abspath(dirname(__file__))
with open(join(this_dir, "README.md"), encoding="utf-8") as file:
    long_description = file.read()


class RunTests(Command):
    """Run all tests: ansunit, JS, pytest, mypy, black, and prettier."""

    description = "run tests"
    user_options: List[str] = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        """Run every suite; exit status is the sum of all suite exit codes."""
        print("=> Running Ansunit Tests:")
        errno_ansunit = call(["ansunit", "asp/tests.yaml", "-v"])

        print("=> Running JS Tests:")
        errno_js = call(["yarn", "--cwd", "js", "test"])

        print("\n\n=> Running Python Tests:")
        start = int(round(time() * 1000))

        errno_pytest = call(
            [
                "pytest",
                "tests",
                "--cov=draco",
                "--cov-report=xml",
                "--cov-report=term-missing",
            ]
        )

        end = int(round(time() * 1000))

        print("\n\n RAN IN: {0} sec".format((end - start) / 1000))

        print("\n\n=> Running MyPy:")
        errno_mypy = call(["mypy", "draco", "tests", "--ignore-missing-imports"])

        print("\n\n=> Running Black:")
        # BUG FIX: this assignment previously overwrote errno_mypy, so mypy
        # failures were silently discarded from the exit status.
        errno_black = call(["black", "--check", "."])

        print("=> Running Prettier:")
        errno_prettier = call(["yarn", "--cwd", "js", "lint"])

        raise SystemExit(
            errno_ansunit
            + errno_js
            + errno_pytest
            + errno_mypy
            + errno_black
            + errno_prettier
        )


setup(
    name="draco",
    version=__version__,
    description="Visualization recommendation using constraints",
    long_description=long_description,
    author="Dominik Moritz, Chenglong Wang",
    author_email="domoritz@cs.washington.edu, clwang@cs.washington.edu",
    license="BSD-3",
    url="https://github.com/uwdata/draco",
    packages=["draco"],
    entry_points={"console_scripts": ["draco=draco.cli:main"]},
    install_requires=["clyngor"],
    include_package_data=True,
    extras_require={
        "test": ["coverage", "pytest", "pytest-cov", "black", "ansunit", "mypy"]
    },
    package_data={
        "draco": [
            "../asp/*.lp",
            "../js/bin/*",
            "../js/build/draco.js*",
            "../LICENSE",
            "../README.md",
        ]
    },
    cmdclass={"test": RunTests},
)
25 | `) 26 | ).toEqual([ 27 | { 28 | name: 'enc_type_valid', 29 | description: 'Primitive type has to support data type.', 30 | type: 'hard', 31 | asp: 'hard(enc_type_valid,E,F) :- type(E,quantitative), field(E,F), fieldtype(F,(string;boolean)).\nhard(enc_type_valid,E,F) :- type(E,temporal), field(E,F), not fieldtype(F,datetime).', 32 | }, 33 | { 34 | name: 'bin_q_o', 35 | description: 'Can only bin quantitative or ordinal.', 36 | type: 'hard', 37 | asp: 'hard(bin_q_o,E,T) :- type(E,T), bin(E,_), T != quantitative, T != ordinal.', 38 | }, 39 | ]); 40 | expect( 41 | constraints2json( 42 | ` 43 | % @constraint Prefer to use raw (no aggregate). 44 | soft(aggregate,E) :- aggregate(E,_). 45 | `, 46 | ` 47 | #const aggregate_weight = 1. 48 | ` 49 | ) 50 | ).toEqual([ 51 | { 52 | name: 'aggregate', 53 | description: 'Prefer to use raw (no aggregate).', 54 | type: 'soft', 55 | asp: 'soft(aggregate,E) :- aggregate(E,_).', 56 | weight: 1, 57 | }, 58 | ]); 59 | expect( 60 | constraints2json( 61 | ` 62 | % @constraint Prefer to use raw (no aggregate). 63 | soft(aggregate,E) :- aggregate(E,_). 64 | 65 | % @constraint Prefer to not bin. 66 | soft(bin,E) :- bin(E,_). 67 | `, 68 | ` 69 | #const aggregate_weight = 1. 70 | #const bin_weight = 2. 
71 | ` 72 | ) 73 | ).toEqual([ 74 | { 75 | name: 'aggregate', 76 | description: 'Prefer to use raw (no aggregate).', 77 | type: 'soft', 78 | asp: 'soft(aggregate,E) :- aggregate(E,_).', 79 | weight: 1, 80 | }, 81 | { 82 | name: 'bin', 83 | description: 'Prefer to not bin.', 84 | type: 'soft', 85 | asp: 'soft(bin,E) :- bin(E,_).', 86 | weight: 2, 87 | }, 88 | ]); 89 | }); 90 | -------------------------------------------------------------------------------- /js/test/cql2asp.test.ts: -------------------------------------------------------------------------------- 1 | import { cql2asp } from '../src'; 2 | 3 | test('generates correct asp', () => { 4 | expect( 5 | cql2asp({ 6 | $schema: 'https://vega.github.io/schema/vega-lite/v5.json', 7 | data: { url: 'data/cars.json' }, 8 | mark: 'bar', 9 | encodings: [ 10 | { channel: 'x', field: 'foo', type: 'ordinal' }, 11 | { channel: 'y', aggregate: 'count', type: 'quantitative', scale: { zero: true } }, 12 | ], 13 | }).sort() 14 | ).toEqual( 15 | [ 16 | 'data("data/cars.json").', 17 | 'mark(bar).', 18 | 19 | 'encoding(e0).', 20 | 'channel(e0,x).', 21 | 'field(e0,"foo").', 22 | 'type(e0,ordinal).', 23 | 24 | 'encoding(e1).', 25 | 'channel(e1,y).', 26 | 'aggregate(e1,count).', 27 | 'type(e1,quantitative).', 28 | 'zero(e1).', 29 | ].sort() 30 | ); 31 | expect( 32 | cql2asp({ 33 | $schema: 'https://vega.github.io/schema/vega-lite/v5.json', 34 | data: { url: 'data/cars.json' }, 35 | mark: 'bar', 36 | encodings: [ 37 | { channel: '?', field: 'foo', type: 'ordinal' }, 38 | { channel: '?', aggregate: 'count', type: 'quantitative', scale: { zero: true } }, 39 | ], 40 | }).sort() 41 | ).toEqual( 42 | [ 43 | 'data("data/cars.json").', 44 | 'mark(bar).', 45 | 46 | 'encoding(e0).', 47 | 'field(e0,"foo").', 48 | 'type(e0,ordinal).', 49 | 50 | 'encoding(e1).', 51 | 'aggregate(e1,count).', 52 | 'type(e1,quantitative).', 53 | 'zero(e1).', 54 | ].sort() 55 | ); 56 | expect( 57 | cql2asp({ 58 | $schema: 
'https://vega.github.io/schema/vega-lite/v5.json', 59 | data: { url: 'data/cars.json' }, 60 | mark: '?', 61 | encodings: [ 62 | { channel: 'x', field: 'foo', type: 'ordinal' }, 63 | { channel: 'y', aggregate: 'count', type: 'quantitative', scale: { zero: true } }, 64 | ], 65 | }).sort() 66 | ).toEqual( 67 | [ 68 | 'data("data/cars.json").', 69 | 70 | 'encoding(e0).', 71 | 'channel(e0,x).', 72 | 'field(e0,"foo").', 73 | 'type(e0,ordinal).', 74 | 75 | 'encoding(e1).', 76 | 'channel(e1,y).', 77 | 'aggregate(e1,count).', 78 | 'type(e1,quantitative).', 79 | 'zero(e1).', 80 | ].sort() 81 | ); 82 | expect( 83 | cql2asp({ 84 | $schema: 'https://vega.github.io/schema/vega-lite/v5.json', 85 | data: { url: 'data/cars.json' }, 86 | mark: 'bar', 87 | encodings: [ 88 | { channel: 'x', field: 'foo', type: 'ordinal' }, 89 | { channel: 'y', aggregate: 'count', type: 'quantitative', scale: { zero: true }, bin: true }, 90 | ], 91 | }).sort() 92 | ).toEqual( 93 | [ 94 | 'data("data/cars.json").', 95 | 'mark(bar).', 96 | 97 | 'encoding(e0).', 98 | 'channel(e0,x).', 99 | 'field(e0,"foo").', 100 | 'type(e0,ordinal).', 101 | 102 | 'encoding(e1).', 103 | 'channel(e1,y).', 104 | ':- not bin(e1,_).', 105 | 'aggregate(e1,count).', 106 | 'type(e1,quantitative).', 107 | 'zero(e1).', 108 | ].sort() 109 | ); 110 | }); 111 | -------------------------------------------------------------------------------- /js/src/constraints2json.ts: -------------------------------------------------------------------------------- 1 | interface Doc { 2 | description: string; 3 | } 4 | 5 | interface Asp { 6 | type: string; 7 | name: string; 8 | asp: string; 9 | } 10 | 11 | export interface Constraint extends Doc, Asp { 12 | weight?: number; 13 | } 14 | 15 | export default function constraints2json(constraintsAsp: string, weightsAsp?: string): Constraint[] { 16 | const constraints = constraintsAsp.match(CONSTRAINT_MATCH); 17 | if (!constraints) { 18 | throw Error('invalid constraints'); 19 | } 20 | 21 | const result = 
constraints.map((s: string): Constraint => { 22 | const doc = getDoc(s); 23 | const asp = getAsp(s); 24 | return { 25 | ...doc, 26 | ...asp, 27 | }; 28 | }); 29 | 30 | if (weightsAsp) { 31 | const weights = weightsAsp.match(WEIGHTS_MATCH); 32 | const weightMap = getWeightMap(weights); 33 | 34 | if (!weights) { 35 | throw Error('invalid weights'); 36 | } 37 | 38 | for (const constraint of result) { 39 | const name = constraint.name; 40 | constraint.weight = weightMap[name]; 41 | } 42 | } 43 | 44 | return result; 45 | } 46 | 47 | function getDoc(s: string): Doc { 48 | const docMatch = s.match(DOC_MATCH); 49 | if (docMatch) { 50 | const docString = docMatch[0]; 51 | const descriptionParts = DESCRIPTION_EXTRACT.exec(docString); 52 | 53 | if (descriptionParts) { 54 | return { 55 | description: descriptionParts[1], 56 | }; 57 | } 58 | } 59 | 60 | return null; 61 | } 62 | 63 | function getAsp(s: string): Asp { 64 | const aspMatch = s.match(ASP_MATCH); 65 | if (aspMatch) { 66 | const asp = aspMatch.join('\n'); 67 | const typeExtract = TYPE_EXTRACT.exec(asp); 68 | 69 | if (!typeExtract) { 70 | throw Error(`invalid asp: ${asp}`); 71 | } 72 | const type = typeExtract[1]; 73 | 74 | const nameExtract = NAME_EXTRACT.exec(asp); 75 | if (!nameExtract) { 76 | throw Error(`invalid asp: ${asp}`); 77 | } 78 | const name = nameExtract[1]; 79 | 80 | return { 81 | type, 82 | name, 83 | asp, 84 | }; 85 | } 86 | 87 | return null; 88 | } 89 | 90 | function getWeightMap(weights: string[]): { [s: string]: number } { 91 | const map = {}; 92 | for (const weight of weights) { 93 | const nameExtract = WEIGHT_NAME_EXTRACT.exec(weight); 94 | if (!nameExtract) { 95 | throw Error(`invalid weight: ${weight}`); 96 | } 97 | const name = nameExtract[1]; 98 | 99 | const valueExtract = WEIGHT_VALUE_EXTRACT.exec(weight); 100 | if (!valueExtract) { 101 | throw Error(`invalid weight: ${weight}`); 102 | } 103 | const value = +valueExtract[1]; 104 | 105 | map[name] = value; 106 | } 107 | 108 | return map; 109 | 
} 110 | 111 | const CONSTRAINT_MATCH = /%\s*@constraint(?:(.+)\n)+/g; 112 | const DOC_MATCH = /(%.*\n)+/g; 113 | const DESCRIPTION_EXTRACT = /@constraint\s+(.*)/; 114 | const ASP_MATCH = /^[^%].*/gm; 115 | const TYPE_EXTRACT = /(\w+)\(/; 116 | const NAME_EXTRACT = /\((\w+),?.*?\)/; 117 | 118 | const WEIGHTS_MATCH = /#const.*/g; 119 | const WEIGHT_NAME_EXTRACT = /#const\s+(\w+?)_weight/; 120 | const WEIGHT_VALUE_EXTRACT = /=\s*(\d+)/; 121 | -------------------------------------------------------------------------------- /logos/dark/logo-dark.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | logo-dark 5 | Created with Sketch. 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | -------------------------------------------------------------------------------- /data/training/manual.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "./schema.json", 3 | "source": "manual", 4 | "data": [ 5 | { 6 | "fields": [ 7 | { 8 | "name": "q1", 9 | "type": "number", 10 | "cardinality": 100, 11 | "entropy": 1 12 | }, 13 | { 14 | "name": "q2", 15 | "type": "number", 16 | "cardinality": 100, 17 | "entropy": 1 18 | }, 19 | { 20 | "name": "n", 21 | "type": "string", 22 | "cardinality": 5 23 | } 24 | ], 25 | "num_rows": 100, 26 | "negative": { 27 | "mark": "point", 28 | "encoding": { 29 | "x": { 30 | "field": "q1", 31 | "type": "quantitative" 32 | }, 33 | "y": { 34 | "field": "q1", 35 | "type": "quantitative" 36 | } 37 | } 38 | }, 39 | "positive": { 40 | "mark": "point", 41 | "encoding": { 42 | "x": { 43 | "field": "q1", 44 | "type": "quantitative" 45 | }, 46 | "y": { 47 | "field": "q2", 48 | "type": "quantitative" 49 | } 50 | } 51 | } 52 | }, 53 | { 54 | "fields": [ 55 | { 56 | "name": "q1", 57 | "type": "number", 58 | "cardinality": 100, 59 | "entropy": 1 
60 | }, 61 | { 62 | "name": "q2", 63 | "type": "number", 64 | "cardinality": 100, 65 | "entropy": 1 66 | }, 67 | { 68 | "name": "n", 69 | "type": "string", 70 | "cardinality": 5 71 | } 72 | ], 73 | "num_rows": 100, 74 | "negative": { 75 | "mark": "point", 76 | "encoding": { 77 | "x": { 78 | "field": "q1", 79 | "type": "quantitative" 80 | }, 81 | "color": { 82 | "field": "q2", 83 | "type": "quantitative" 84 | } 85 | } 86 | }, 87 | "positive": { 88 | "mark": "point", 89 | "encoding": { 90 | "x": { 91 | "field": "q1", 92 | "type": "quantitative" 93 | }, 94 | "y": { 95 | "field": "q2", 96 | "type": "quantitative" 97 | } 98 | } 99 | } 100 | } 101 | ] 102 | } 103 | -------------------------------------------------------------------------------- /data/driving.json: -------------------------------------------------------------------------------- 1 | [ 2 | {"side": "left", "year": 1956, "miles": 3675, "gas": 2.38}, 3 | {"side": "right", "year": 1957, "miles": 3706, "gas": 2.40}, 4 | {"side": "bottom", "year": 1958, "miles": 3766, "gas": 2.26}, 5 | {"side": "top", "year": 1959, "miles": 3905, "gas": 2.31}, 6 | {"side": "right", "year": 1960, "miles": 3935, "gas": 2.27}, 7 | {"side": "bottom", "year": 1961, "miles": 3977, "gas": 2.25}, 8 | {"side": "right", "year": 1962, "miles": 4085, "gas": 2.22}, 9 | {"side": "bottom", "year": 1963, "miles": 4218, "gas": 2.12}, 10 | {"side": "bottom", "year": 1964, "miles": 4369, "gas": 2.11}, 11 | {"side": "bottom", "year": 1965, "miles": 4538, "gas": 2.14}, 12 | {"side": "top", "year": 1966, "miles": 4676, "gas": 2.14}, 13 | {"side": "bottom", "year": 1967, "miles": 4827, "gas": 2.14}, 14 | {"side": "right", "year": 1968, "miles": 5038, "gas": 2.13}, 15 | {"side": "right", "year": 1969, "miles": 5207, "gas": 2.07}, 16 | {"side": "right", "year": 1970, "miles": 5376, "gas": 2.01}, 17 | {"side": "bottom", "year": 1971, "miles": 5617, "gas": 1.93}, 18 | {"side": "bottom", "year": 1972, "miles": 5973, "gas": 1.87}, 19 | {"side": "right", 
"year": 1973, "miles": 6154, "gas": 1.90}, 20 | {"side": "left", "year": 1974, "miles": 5943, "gas": 2.34}, 21 | {"side": "bottom", "year": 1975, "miles": 6111, "gas": 2.31}, 22 | {"side": "bottom", "year": 1976, "miles": 6389, "gas": 2.32}, 23 | {"side": "top", "year": 1977, "miles": 6630, "gas": 2.36}, 24 | {"side": "bottom", "year": 1978, "miles": 6883, "gas": 2.23}, 25 | {"side": "left", "year": 1979, "miles": 6744, "gas": 2.68}, 26 | {"side": "left", "year": 1980, "miles": 6672, "gas": 3.30}, 27 | {"side": "right", "year": 1981, "miles": 6732, "gas": 3.30}, 28 | {"side": "right", "year": 1982, "miles": 6835, "gas": 2.92}, 29 | {"side": "right", "year": 1983, "miles": 6943, "gas": 2.66}, 30 | {"side": "right", "year": 1984, "miles": 7130, "gas": 2.48}, 31 | {"side": "right", "year": 1985, "miles": 7323, "gas": 2.36}, 32 | {"side": "left", "year": 1986, "miles": 7558, "gas": 1.76}, 33 | {"side": "top", "year": 1987, "miles": 7770, "gas": 1.76}, 34 | {"side": "bottom", "year": 1988, "miles": 8089, "gas": 1.68}, 35 | {"side": "left", "year": 1989, "miles": 8397, "gas": 1.75}, 36 | {"side": "top", "year": 1990, "miles": 8529, "gas": 1.88}, 37 | {"side": "right", "year": 1991, "miles": 8535, "gas": 1.78}, 38 | {"side": "right", "year": 1992, "miles": 8662, "gas": 1.69}, 39 | {"side": "left", "year": 1993, "miles": 8855, "gas": 1.60}, 40 | {"side": "bottom", "year": 1994, "miles": 8909, "gas": 1.59}, 41 | {"side": "bottom", "year": 1995, "miles": 9150, "gas": 1.60}, 42 | {"side": "top", "year": 1996, "miles": 9192, "gas": 1.67}, 43 | {"side": "right", "year": 1997, "miles": 9416, "gas": 1.65}, 44 | {"side": "bottom", "year": 1998, "miles": 9590, "gas": 1.39}, 45 | {"side": "right", "year": 1999, "miles": 9687, "gas": 1.50}, 46 | {"side": "top", "year": 2000, "miles": 9717, "gas": 1.89}, 47 | {"side": "left", "year": 2001, "miles": 9699, "gas": 1.77}, 48 | {"side": "bottom", "year": 2002, "miles": 9814, "gas": 1.64}, 49 | {"side": "right", "year": 2003, "miles": 9868, 
"gas": 1.86}, 50 | {"side": "left", "year": 2004, "miles": 9994, "gas": 2.14}, 51 | {"side": "left", "year": 2005, "miles": 10067, "gas": 2.53}, 52 | {"side": "right", "year": 2006, "miles": 10037, "gas": 2.79}, 53 | {"side": "right", "year": 2007, "miles": 10025, "gas": 2.95}, 54 | {"side": "left", "year": 2008, "miles": 9880, "gas": 3.31}, 55 | {"side": "bottom", "year": 2009, "miles": 9657, "gas": 2.38}, 56 | {"side": "left", "year": 2010, "miles": 9596, "gas": 2.61} 57 | ] -------------------------------------------------------------------------------- /js/test/json2constraints.test.ts: -------------------------------------------------------------------------------- 1 | import 'jest'; 2 | import { Constraint, json2constraints } from '../src'; 3 | import { ConstraintAsp } from '../src/json2constraints'; 4 | 5 | test('generates constraint asp from json', () => { 6 | let input: Constraint[] = [ 7 | { 8 | name: 'enc_type_valid', 9 | description: 'Primitive type has to support data type.', 10 | type: 'hard', 11 | asp: 'hard(enc_type_valid,E,F) :- type(E,quantitative), field(E,F), fieldtype(F,(string;boolean)).\nhard(enc_type_valid,E,F) :- type(E,temporal), field(E,F), not fieldtype(F,datetime).', 12 | }, 13 | ]; 14 | let output: ConstraintAsp = { 15 | definitions: `% @constraint Primitive type has to support data type. 16 | hard(enc_type_valid,E,F) :- type(E,quantitative), field(E,F), fieldtype(F,(string;boolean)). 17 | hard(enc_type_valid,E,F) :- type(E,temporal), field(E,F), not fieldtype(F,datetime). 
18 | 19 | `, 20 | }; 21 | expect(json2constraints(input)).toEqual(output); 22 | 23 | input = [ 24 | { 25 | name: 'enc_type_valid', 26 | description: 'Primitive type has to support data type.', 27 | type: 'hard', 28 | asp: 'hard(enc_type_valid,E,F) :- type(E,quantitative), field(E,F), fieldtype(F,(string;boolean)).\nhard(enc_type_valid,E,F) :- type(E,temporal), field(E,F), not fieldtype(F,datetime).', 29 | }, 30 | { 31 | name: 'bin_q_o', 32 | description: 'Can only bin quantitative or ordinal.', 33 | type: 'hard', 34 | asp: 'hard(bin_q_o,E,T) :- type(E,T), bin(E,_), T != quantitative, T != ordinal.', 35 | }, 36 | ]; 37 | 38 | output = { 39 | definitions: `% @constraint Primitive type has to support data type. 40 | hard(enc_type_valid,E,F) :- type(E,quantitative), field(E,F), fieldtype(F,(string;boolean)). 41 | hard(enc_type_valid,E,F) :- type(E,temporal), field(E,F), not fieldtype(F,datetime). 42 | 43 | % @constraint Can only bin quantitative or ordinal. 44 | hard(bin_q_o,E,T) :- type(E,T), bin(E,_), T != quantitative, T != ordinal. 45 | 46 | `, 47 | }; 48 | expect(json2constraints(input)).toEqual(output); 49 | 50 | input = [ 51 | { 52 | name: 'aggregate', 53 | description: 'Prefer to use raw (no aggregate).', 54 | type: 'soft', 55 | asp: 'soft(aggregate,E) :- aggregate(E,_).', 56 | weight: 1, 57 | }, 58 | ]; 59 | 60 | output = { 61 | definitions: `% @constraint Prefer to use raw (no aggregate). 62 | soft(aggregate,E) :- aggregate(E,_). 63 | 64 | `, 65 | weights: `#const aggregate_weight = 1. 66 | `, 67 | assigns: `soft_weight(aggregate, aggregate_weight). 
68 | `, 69 | }; 70 | expect(json2constraints(input)).toEqual(output); 71 | 72 | input = [ 73 | { 74 | name: 'aggregate', 75 | description: 'Prefer to use raw (no aggregate).', 76 | type: 'soft', 77 | asp: 'soft(aggregate,E) :- aggregate(E,_).', 78 | weight: 1, 79 | }, 80 | { 81 | name: 'bin', 82 | description: 'Prefer to not bin.', 83 | type: 'soft', 84 | asp: 'soft(bin,E) :- bin(E,_).', 85 | weight: 2, 86 | }, 87 | ]; 88 | 89 | output = { 90 | definitions: `% @constraint Prefer to use raw (no aggregate). 91 | soft(aggregate,E) :- aggregate(E,_). 92 | 93 | % @constraint Prefer to not bin. 94 | soft(bin,E) :- bin(E,_). 95 | 96 | `, 97 | weights: `#const aggregate_weight = 1. 98 | #const bin_weight = 2. 99 | `, 100 | assigns: `soft_weight(aggregate, aggregate_weight). 101 | soft_weight(bin, bin_weight). 102 | `, 103 | }; 104 | 105 | expect(json2constraints(input)).toEqual(output); 106 | }); 107 | -------------------------------------------------------------------------------- /logos/light/logo-light.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | logo-light 5 | Created with Sketch. 
6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | -------------------------------------------------------------------------------- /tests/test_recommendation.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from draco.helper import data_to_asp 4 | from draco.js import cql2asp, vl2asp 5 | from draco.run import run 6 | 7 | 8 | def get_rec(data_schema, spec, relax_hard=False): 9 | query = cql2asp(spec) 10 | return run(data_schema + query, relax_hard=relax_hard) 11 | 12 | 13 | def run_spec(data_schema, spec, relax_hard=False): 14 | query = vl2asp(spec) 15 | return run(data_schema + query, relax_hard=relax_hard) 16 | 17 | 18 | spec_schema = [ 19 | 'data("data.csv").', 20 | "num_rows(100).", 21 | 'fieldtype("q1",number).', 22 | 'cardinality("q1",100).', 23 | 'entropy("q1",1).', 24 | 'fieldtype("q2",number).', 25 | 'cardinality("q2",100).', 26 | 'entropy("q2",1).', 27 | 'fieldtype("o1",number).', 28 | 'cardinality("o1",6).', 29 | 'entropy("o1",1).', 30 | 'fieldtype("n1",string).', 31 | 'cardinality("n1",5).', 32 | 'entropy("n1",1).', 33 | ] 34 | 35 | 36 | class TestSpecs: 37 | def test_scatter(self): 38 | recommendation = get_rec( 39 | spec_schema, 40 | {"encodings": [{"channel": "x", "field": "q1"}, {"field": "q2"}]}, 41 | ).as_vl() 42 | 43 | assert recommendation == { 44 | "$schema": "https://vega.github.io/schema/vega-lite/v5.json", 45 | "data": {"url": "data.csv"}, 46 | "mark": "point", 47 | "encoding": { 48 | "x": {"field": "q1", "type": "quantitative", "scale": {"zero": True}}, 49 | "y": {"field": "q2", "type": "quantitative", "scale": {"zero": True}}, 50 | }, 51 | } 52 | 53 | def test_histogram(self): 54 | recommendation = get_rec( 55 | spec_schema, {"encodings": [{"field": "q1", "bin": True, "channel": "x"}]} 56 | ).as_vl() 57 | 58 | print(recommendation) 59 | assert 
recommendation == { 60 | "$schema": "https://vega.github.io/schema/vega-lite/v5.json", 61 | "data": {"url": "data.csv"}, 62 | "mark": "bar", 63 | "encoding": { 64 | "x": {"field": "q1", "type": "quantitative", "bin": True}, 65 | "y": { 66 | "aggregate": "count", 67 | "type": "quantitative", 68 | "scale": {"zero": True}, 69 | }, 70 | }, 71 | } 72 | 73 | def test_strip(self): 74 | recommendation = get_rec(spec_schema, {"encodings": [{"field": "q1"}]}).as_vl() 75 | 76 | assert recommendation == { 77 | "$schema": "https://vega.github.io/schema/vega-lite/v5.json", 78 | "data": {"url": "data.csv"}, 79 | "mark": "tick", 80 | "encoding": { 81 | "x": {"field": "q1", "type": "quantitative", "scale": {"zero": True}} 82 | }, 83 | } 84 | 85 | def test_disable_hard_integrity(self): 86 | recommendation = get_rec( 87 | spec_schema, 88 | {"encodings": [{"field": "n1", "scale": {"log": True}}]}, 89 | relax_hard=True, 90 | ) 91 | assert recommendation is not None 92 | 93 | 94 | class TestTypeChannel: 95 | def get_spec(self, t, channel): 96 | return { 97 | "mark": "point", 98 | "encoding": { 99 | "y": {"field": "q1", "type": "quantitative"}, 100 | channel: {"field": "q2" if t == "quantitative" else "o1", "type": t}, 101 | }, 102 | } 103 | 104 | def test_q(self): 105 | comparisons = [("x", "size"), ("size", "color")] 106 | 107 | for c0, c1 in comparisons: 108 | a = run_spec(spec_schema, self.get_spec("quantitative", c0)).cost 109 | b = run_spec(spec_schema, self.get_spec("quantitative", c1)).cost 110 | 111 | assert a < b, f"Channel {c0} is not better than {c1}." 112 | 113 | def test_o(self): 114 | comparisons = [("x", "color"), ("color", "size")] 115 | 116 | for c0, c1 in comparisons: 117 | a = run_spec(spec_schema, self.get_spec("ordinal", c0)).cost 118 | b = run_spec(spec_schema, self.get_spec("ordinal", c1)).cost 119 | 120 | assert a < b, f"Channel {c0} is not better than {c1}." 
#!/usr/bin/env python3

import argparse
import io
import json
import logging
import os
import sys
from enum import Enum

from draco import __version__
from draco.js import cql2asp, vl2asp
from draco.run import run

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class ArgEnum(Enum):
    """Enum base whose members print as their value for argparse help text."""

    def __str__(self):
        return self.value

    @classmethod
    def from_string(cls, s):
        """Look up a member by name, raising ValueError for unknown names.

        BUG FIX: this was a staticmethod hard-coded to QueryType, so
        Mode.from_string would have returned members of the wrong enum.
        """
        try:
            return cls[s]
        except KeyError:
            raise ValueError()


class QueryType(ArgEnum):
    vl = "vl"
    asp = "asp"
    cql = "cql"


class Mode(ArgEnum):
    optimize = "optimize"
    violations = "violations"
    valid = "valid"


def create_parser():
    """Build the argparse parser for the draco command-line interface."""
    parser = argparse.ArgumentParser(
        description="Draco Visualization recommendation system.",
        epilog="There is a moment in every dawn when light floats, there is the possibility of magic. Creation holds its breath. — Douglas Adams, The Hitchhikers Guide to the Galaxy",
    )

    parser.add_argument(
        "query",
        nargs="?",
        type=argparse.FileType("r"),
        default=sys.stdin,
        help="The input query.",
    )
    parser.add_argument(
        "--type",
        "-t",
        type=QueryType,
        choices=list(QueryType),
        default=QueryType.asp,
        help="Type of query. asp (Answer Set Program, default), cql (CompassQL) or vl (Vega-Lite).",
    )
    parser.add_argument(
        "--mode",
        "-m",
        type=Mode,
        choices=list(Mode),
        default=Mode.optimize,
        help="Mode to run draco in.",
    )
    parser.add_argument(
        "--out",
        "-o",
        type=argparse.FileType("w"),
        default=sys.stdout,
        help="specify the Vega-Lite output file",
    )
    parser.add_argument(
        "--debug", "-d", help="Create debugging information.", action="store_true"
    )
    parser.add_argument("--version", action="version", version=__version__)

    return parser


def main():  # pragma: no cover
    """Entry point: parse arguments, translate the query, and run Draco."""
    parser = create_parser()
    args = parser.parse_args()

    # Validation modes need a full spec; CompassQL queries are partial.
    # BUG FIX: this previously referenced the nonexistent QueryType.draco
    # (AttributeError) and passed sys.stderr positionally to print instead of
    # using the file= keyword.
    if args.mode != Mode.optimize and args.type == QueryType.cql:
        print("Validation only works with full specs.", file=sys.stderr)
    else:
        logger.info(f"Processing query: {args.query.name} ...")
        if args.type == QueryType.asp:
            draco_query = args.query.read().split("\n")
        else:
            query_spec = json.load(args.query)
            if args.type == QueryType.vl:
                draco_query = vl2asp(query_spec)
            elif args.type == QueryType.cql:
                draco_query = cql2asp(query_spec)

        print(draco_query)

        if args.mode == Mode.violations:
            result = run(
                draco_query,
                debug=args.debug,
                files=["define.lp", "hard.lp", "soft.lp", "output.lp"],
                silence_warnings=True,
            )

            if result:
                print(result.violations, file=args.out)
        elif args.mode == Mode.valid:
            result = run(
                draco_query,
                debug=args.debug,
                files=["define.lp", "hard.lp", "output.lp"],
                silence_warnings=True,
            )

            print("valid" if result else "invalid", file=args.out)
        elif args.mode == Mode.optimize:
            result = run(draco_query, debug=args.debug)

            if result:
                print(json.dumps(result.as_vl()), file=args.out)
                logger.info(f"Cost: {result.cost}")
                outname = (
                    "stringIO" if isinstance(args.out, io.StringIO) else args.out.name
                )
                logger.info(f"Wrote Vega-Lite spec to {outname}")

    # close open files
    if args.query is not sys.stdin:
        args.query.close()

    if args.out is not sys.stdout:
        args.out.close()


if __name__ == "__main__":  # pragma: no cover
    main()

% Stacking is applied to the continuous x or y.
stack(EC,S) :- channel(EC,(x;y)), channel(ED,(x;y)), continuous(EC), discrete(ED), stack(S).
% X and y are continuous.
stack(E,S) :- channel_continuous(x), channel(E,y), continuous(E), stack(S).

stack(S) :- stack(_,S).

% Data properties
enc_cardinality(E,C) :- field(E,F), cardinality(F,C).
enc_entropy(E,EN) :- field(E,F), entropy(F,EN).
enc_interesting(E) :- field(E,F), interesting(F).
enc_extent(E,MIN,MAX) :- field(E,F), extent(F,MIN,MAX).

% Cardinality of discrete field. A binned field has the cardinality of its field.
discrete_cardinality(E,CE) :- discrete(E), enc_cardinality(E,CE), channel(E,C), not bin(E,_).
discrete_cardinality(E,CB) :- channel(E,C), bin(E,CB).

% Define a fake soft/2 for all soft/1.
soft(F,_placeholder) :- soft(F).

% Silence warnings about properties never appearing in head.
entropy(0,0) :- #false.
interesting(0) :- #false.
extent(0,0,0) :- #false.
soft(0) :- #false.
task(value) :- #false.
task(summary) :- #false.
data(0) :- #false.

% == Chart Types ==

% Continuous by continuous.
is_c_c :- channel_continuous(x), channel_continuous(y).

% Continuous by discrete (or continuous only).
is_c_d :- channel_continuous(x), not channel_continuous(y).
is_c_d :- channel_continuous(y), not channel_continuous(x).

% Discrete by discrete.
is_d_d :- channel_discrete(x), channel_discrete(y).

% == Overlap ==

% The continuous variable is a measure (it is aggregated) and all other channels are aggregated, or we use stack -> no overlap.
non_pos_unaggregated :- channel(E,C), non_positional(C), not aggregate(E,_).
no_overlap :- is_c_d, continuous(E), channel(E,(x;y)), aggregate(E,_), not non_pos_unaggregated.
no_overlap :- is_c_d, stack(_).

% the size of the discrete positional encoding
discrete_size(S) :- is_c_d, x_y_cardinality(_,S).
discrete_size(1) :- is_c_d, channel_continuous(x), not channel(_,y).
discrete_size(1) :- is_c_d, channel_continuous(y), not channel(_,x).

% Data size is as small as discrete dimension -> no overlap.
no_overlap :- is_c_d, num_rows(S), discrete_size(S).

% We definitely overlap if the data size > discrete size.
overlap :- is_c_d, not no_overlap, num_rows(S1), discrete_size(S2), S1 > S2.

% helpers to go from quadratic to linear number of grounding
x_y_cardinality(x,S) :- channel(E,x), discrete_cardinality(E,S).
x_y_cardinality(y,S) :- channel(E,y), discrete_cardinality(E,S).

% No overlap if all other dimensions are aggregated.
discrete_size(S) :- is_d_d, x_y_cardinality(x,SX), x_y_cardinality(y,SY), S = SX*SY.
no_overlap :- is_d_d, not non_pos_unaggregated.
no_overlap :- is_d_d, num_rows(S1), discrete_size(S2), S1 <= S2. % This cannot guarantee no overlap.

% We can guarantee overlap using this rule unless we are using row / column.
row_col :- channel(_,(row;column)).
overlap :- is_d_d, channel(E,C), not row_col, not no_overlap, num_rows(S1), discrete_size(S2), S1 > S2.

% == Orientation ==

% Orientation tells us which one is the dependent and independent variable.

orientation(vertical) :- mark(bar;tick;area;line), channel_discrete(x).
orientation(vertical) :- mark(area;line), channel_continuous(x), channel_continuous(y).

orientation(horizontal) :- mark(bar;tick;area;line), channel_discrete(y).
122 | -------------------------------------------------------------------------------- /data/weights.json: -------------------------------------------------------------------------------- 1 | { 2 | "type_q_weight": 0, 3 | "type_o_weight": 1, 4 | "type_n_weight": 2, 5 | "aggregate_weight": 1, 6 | "bin_weight": 2, 7 | "bin_high_weight": 10, 8 | "bin_low_weight": 6, 9 | "encoding_weight": 0, 10 | "encoding_field_weight": 6, 11 | "same_field_2_weight": 8, 12 | "same_field_gte3_weight": 16, 13 | "count_twice_weight": 50, 14 | "shape_cardinality_weight": 5, 15 | "number_nominal_weight": 10, 16 | "string_non_nominal_weight": 2, 17 | "bin_cardinality_weight": 5, 18 | "quant_bin_weight": 1, 19 | "agg_dim_weight": 2, 20 | "only_discrete_weight": 30, 21 | "multiple_non_pos_weight": 3, 22 | "non_positional_pref_weight": 10, 23 | "aggregate_group_by_raw_weight": 3, 24 | "x_y_raw_weight": 1, 25 | "log_weight": 1, 26 | "zero_weight": 1, 27 | "zero_size_weight": 3, 28 | "zero_positional_weight": 1, 29 | "zero_skew_weight": 5, 30 | "includes_zero_weight": 10, 31 | "only_x_weight": 1, 32 | "orientation_binned_weight": 1, 33 | "high_cardinality_ordinal_weight": 10, 34 | "high_cardinality_nominal_weight": 10, 35 | "high_cardinality_nominal_color_weight": 10, 36 | "horizontal_scrolling_weight": 20, 37 | "temporal_date_weight": 1, 38 | "quantitative_numbers_weight": 2, 39 | "position_entropy_weight": 2, 40 | "high_cardinality_size_weight": 1, 41 | "value_agg_weight": 1, 42 | "facet_summary_weight": 0, 43 | "x_row_weight": 1, 44 | "y_row_weight": 1, 45 | "x_column_weight": 1, 46 | "y_column_weight": 1, 47 | "color_entropy_high_weight": 0, 48 | "color_entropy_low_weight": 0, 49 | "size_entropy_high_weight": 0, 50 | "size_entropy_low_weight": 0, 51 | "c_d_column_weight": 5, 52 | "temporal_y_weight": 1, 53 | "d_d_overlap_weight": 20, 54 | "c_c_point_weight": 0, 55 | "c_c_line_weight": 20, 56 | "c_c_area_weight": 20, 57 | "c_c_text_weight": 2, 58 | "c_c_tick_weight": 5, 59 | 
"c_d_point_weight": 10, 60 | "c_d_bar_weight": 20, 61 | "c_d_line_weight": 20, 62 | "c_d_area_weight": 20, 63 | "c_d_text_weight": 50, 64 | "c_d_tick_weight": 0, 65 | "c_d_no_overlap_point_weight": 20, 66 | "c_d_no_overlap_bar_weight": 0, 67 | "c_d_no_overlap_line_weight": 20, 68 | "c_d_no_overlap_area_weight": 20, 69 | "c_d_no_overlap_text_weight": 30, 70 | "c_d_no_overlap_tick_weight": 25, 71 | "d_d_point_weight": 0, 72 | "d_d_text_weight": 1, 73 | "d_d_rect_weight": 0, 74 | "continuous_x_weight": 0, 75 | "continuous_y_weight": 0, 76 | "continuous_color_weight": 10, 77 | "continuous_size_weight": 1, 78 | "continuous_text_weight": 20, 79 | "ordered_x_weight": 1, 80 | "ordered_y_weight": 0, 81 | "ordered_color_weight": 8, 82 | "ordered_size_weight": 10, 83 | "ordered_text_weight": 32, 84 | "ordered_row_weight": 10, 85 | "ordered_column_weight": 10, 86 | "nominal_x_weight": 3, 87 | "nominal_y_weight": 0, 88 | "nominal_color_weight": 10, 89 | "nominal_shape_weight": 11, 90 | "nominal_text_weight": 12, 91 | "nominal_row_weight": 7, 92 | "nominal_column_weight": 10, 93 | "nominal_detail_weight": 20, 94 | "interesting_x_weight": 0, 95 | "interesting_y_weight": 1, 96 | "interesting_color_weight": 2, 97 | "interesting_size_weight": 2, 98 | "interesting_shape_weight": 3, 99 | "interesting_text_weight": 6, 100 | "interesting_row_weight": 6, 101 | "interesting_column_weight": 7, 102 | "interesting_detail_weight": 20, 103 | "aggregate_count_weight": 0, 104 | "aggregate_sum_weight": 2, 105 | "aggregate_mean_weight": 1, 106 | "aggregate_median_weight": 3, 107 | "aggregate_min_weight": 4, 108 | "aggregate_max_weight": 4, 109 | "aggregate_stdev_weight": 5, 110 | "value_point_weight": 0, 111 | "value_bar_weight": 0, 112 | "value_line_weight": 0, 113 | "value_area_weight": 0, 114 | "value_text_weight": 0, 115 | "value_tick_weight": 0, 116 | "value_rect_weight": 0, 117 | "summary_point_weight": 0, 118 | "summary_bar_weight": 0, 119 | "summary_line_weight": 0, 120 | 
"summary_area_weight": 0, 121 | "summary_text_weight": 0, 122 | "summary_tick_weight": 0, 123 | "summary_rect_weight": 0, 124 | "value_continuous_x_weight": 0, 125 | "value_continuous_y_weight": 0, 126 | "value_continuous_color_weight": 0, 127 | "value_continuous_size_weight": 0, 128 | "value_continuous_text_weight": 0, 129 | "value_discrete_x_weight": 0, 130 | "value_discrete_y_weight": 0, 131 | "value_discrete_color_weight": 0, 132 | "value_discrete_shape_weight": 0, 133 | "value_discrete_size_weight": 0, 134 | "value_discrete_text_weight": 0, 135 | "value_discrete_row_weight": 0, 136 | "value_discrete_column_weight": 0, 137 | "summary_continuous_x_weight": 0, 138 | "summary_continuous_y_weight": 0, 139 | "summary_continuous_color_weight": 0, 140 | "summary_continuous_size_weight": 0, 141 | "summary_continuous_text_weight": 0, 142 | "summary_discrete_x_weight": 0, 143 | "summary_discrete_y_weight": 0, 144 | "summary_discrete_color_weight": 0, 145 | "summary_discrete_shape_weight": 0, 146 | "summary_discrete_size_weight": 0, 147 | "summary_discrete_text_weight": 0, 148 | "summary_discrete_row_weight": 0, 149 | "summary_discrete_column_weight": 0, 150 | "stack_zero_weight": 0, 151 | "stack_normalize_weight": 1 152 | } -------------------------------------------------------------------------------- /draco/run.py: -------------------------------------------------------------------------------- 1 | """ 2 | Run constraint solver to complete spec. 
3 | """ 4 | 5 | import json 6 | import logging 7 | import os 8 | import subprocess 9 | import tempfile 10 | from collections import defaultdict 11 | from typing import Dict, List, Optional, Tuple, Union 12 | 13 | import clyngor 14 | from clyngor.answers import Answers 15 | 16 | from draco.js import asp2vl 17 | 18 | logging.basicConfig(level=logging.INFO) 19 | logger = logging.getLogger(__name__) 20 | 21 | DRACO_LP = [ 22 | "define.lp", 23 | "generate.lp", 24 | "hard.lp", 25 | "hard-integrity.lp", 26 | "soft.lp", 27 | "weights.lp", 28 | "assign_weights.lp", 29 | "optimize.lp", 30 | "output.lp", 31 | ] 32 | DRACO_LP_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../asp") 33 | 34 | 35 | file_cache: Dict[str, bytes] = {} 36 | 37 | 38 | class Result: 39 | props: List[str] 40 | cost: Optional[int] 41 | violations: Dict[str, int] 42 | 43 | def __init__(self, answers: Answers, cost: Optional[int] = None) -> None: 44 | violations: Dict[str, int] = defaultdict(int) 45 | props: List[str] = [] 46 | 47 | for ((head, body),) in answers: 48 | if head == "cost": 49 | cost = int(body[0]) 50 | elif head == "soft": 51 | violations[body[0]] += 1 52 | else: 53 | b = ",".join(map(str, body)) 54 | props.append(f"{head}({b}).") 55 | 56 | self.props = props 57 | self.violations = violations 58 | self.cost = cost 59 | 60 | def as_vl(self) -> Dict: 61 | return asp2vl(self.props) 62 | 63 | 64 | def load_file(path: str) -> bytes: 65 | content = file_cache.get(path) 66 | if content is not None: 67 | return content 68 | with open(path) as f: 69 | content = f.read().encode("utf8") 70 | file_cache[path] = content 71 | return content 72 | 73 | 74 | def run_clingo( 75 | draco_query: List[str], 76 | constants: Dict[str, str] = None, 77 | files: List[str] = None, 78 | relax_hard=False, 79 | silence_warnings=False, 80 | debug=False, 81 | ) -> Tuple[bytes, bytes]: 82 | """ 83 | Run draco and return stderr and stdout 84 | """ 85 | 86 | # default args 87 | files = files or DRACO_LP 88 | 
if relax_hard and "hard-integrity.lp" in files: 89 | files.remove("hard-integrity.lp") 90 | 91 | constants = constants or {} 92 | 93 | options = ["--outf=2", "--quiet=1,2,2"] 94 | if silence_warnings: 95 | options.append("--warn=no-atom-undefined") 96 | for name, value in constants.items(): 97 | options.append(f"-c {name}={value}") 98 | 99 | cmd = ["clingo"] + options 100 | logger.debug("Command: %s", " ".join(cmd)) 101 | 102 | proc = subprocess.Popen( 103 | args=cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE 104 | ) 105 | 106 | program = "\n".join(draco_query) 107 | file_names = [os.path.join(DRACO_LP_DIR, f) for f in files] 108 | asp_program = b"\n".join(map(load_file, file_names)) + program.encode("utf8") 109 | 110 | if debug: 111 | with tempfile.NamedTemporaryFile(mode="w", delete=False) as fd: 112 | fd.write(program) 113 | 114 | logger.info('Debug ASP with "clingo %s %s"', " ".join(file_names), fd.name) 115 | 116 | stdout, stderr = proc.communicate(asp_program) 117 | 118 | return (stderr, stdout) 119 | 120 | 121 | def run( 122 | draco_query: List[str], 123 | constants: Dict[str, str] = None, 124 | files: List[str] = None, 125 | relax_hard=False, 126 | silence_warnings=False, 127 | debug=False, 128 | clear_cache=False, 129 | ) -> Optional[Result]: 130 | """Run clingo to compute a completion of a partial spec or violations.""" 131 | 132 | # Clear file cache. useful during development in notebooks. 
133 | if clear_cache and file_cache: 134 | logger.warning("Cleared file cache") 135 | file_cache.clear() 136 | 137 | stderr, stdout = run_clingo( 138 | draco_query, constants, files, relax_hard, silence_warnings, debug 139 | ) 140 | 141 | try: 142 | json_result = json.loads(stdout) 143 | except json.JSONDecodeError: 144 | logger.error("stdout: %s", stdout) 145 | logger.error("stderr: %s", stderr) 146 | raise 147 | 148 | if stderr: 149 | logger.error(stderr) 150 | 151 | result = json_result["Result"] 152 | 153 | if result == "UNSATISFIABLE": 154 | logger.info("Constraints are unsatisfiable.") 155 | return None 156 | elif result == "OPTIMUM FOUND": 157 | # get the last witness, which is the best result 158 | answers = json_result["Call"][0]["Witnesses"][-1] 159 | 160 | logger.debug(answers["Value"]) 161 | 162 | return Result( 163 | clyngor.Answers(answers["Value"]).sorted, 164 | cost=json_result["Models"]["Costs"][0], 165 | ) 166 | elif result == "SATISFIABLE": 167 | answers = json_result["Call"][0]["Witnesses"][-1] 168 | 169 | assert ( 170 | json_result["Models"]["Number"] == 1 171 | ), "Should not have more than one model if we don't optimize" 172 | 173 | logger.debug(answers["Value"]) 174 | 175 | return Result(clyngor.Answers(answers["Value"]).sorted) 176 | else: 177 | logger.error("Unsupported result: %s", result) 178 | return None 179 | -------------------------------------------------------------------------------- /asp/weights.lp: -------------------------------------------------------------------------------- 1 | % Weights as constants 2 | 3 | #const type_q_weight = 0. 4 | #const type_o_weight = 1. 5 | #const type_n_weight = 2. 6 | #const aggregate_weight = 1. 7 | #const bin_weight = 2. 8 | #const bin_high_weight = 10. 9 | #const bin_low_weight = 6. 10 | #const encoding_weight = 0. 11 | #const encoding_field_weight = 6. 12 | #const same_field_2_weight = 8. 13 | #const same_field_gte3_weight = 16. 14 | #const count_twice_weight = 50. 
15 | #const shape_cardinality_weight = 5. 16 | #const number_nominal_weight = 10. 17 | #const bin_cardinality_weight = 5. 18 | #const quant_bin_weight = 1. 19 | #const agg_dim_weight = 2. 20 | #const only_discrete_weight = 30. 21 | #const multiple_non_pos_weight = 3. 22 | #const non_positional_pref_weight = 10. 23 | #const aggregate_group_by_raw_weight = 3. 24 | #const x_y_raw_weight = 1. 25 | #const log_weight = 1. 26 | #const zero_weight = 1. 27 | #const zero_size_weight = 3. 28 | #const zero_positional_weight = 1. 29 | #const zero_skew_weight = 5. 30 | #const includes_zero_weight = 10. 31 | 32 | #const only_x_weight = 1. 33 | #const orientation_binned_weight = 1. 34 | #const high_cardinality_ordinal_weight = 10. 35 | #const high_cardinality_nominal_weight = 10. 36 | #const high_cardinality_nominal_color_weight = 10. 37 | #const horizontal_scrolling_weight = 20. 38 | #const temporal_date_weight = 1. 39 | #const quantitative_numbers_weight = 2. 40 | #const position_entropy_weight = 2. 41 | #const high_cardinality_size_weight = 1. 42 | #const value_agg_weight = 1. 43 | #const facet_summary_weight = 0. 44 | #const x_row_weight = 1. 45 | #const y_row_weight = 1. 46 | #const x_column_weight = 1. 47 | #const y_column_weight = 1. 48 | #const color_entropy_high_weight = 0. 49 | #const color_entropy_low_weight = 0. 50 | #const size_entropy_high_weight = 0. 51 | #const size_entropy_low_weight = 0. 52 | 53 | #const c_d_column_weight = 5. 54 | #const temporal_y_weight = 1. 55 | #const d_d_overlap_weight = 20. 56 | 57 | #const c_c_point_weight = 0. 58 | #const c_c_line_weight = 20. 59 | #const c_c_area_weight = 20. 60 | #const c_c_text_weight = 2. 61 | #const c_c_tick_weight = 5. 62 | 63 | #const c_d_point_weight = 10. 64 | #const c_d_bar_weight = 20. 65 | #const c_d_line_weight = 20. 66 | #const c_d_area_weight = 20. 67 | #const c_d_text_weight = 50. 68 | #const c_d_tick_weight = 0. 69 | 70 | #const c_d_no_overlap_point_weight = 20. 71 | #const c_d_no_overlap_bar_weight = 0. 
72 | #const c_d_no_overlap_line_weight = 20. 73 | #const c_d_no_overlap_area_weight = 20. 74 | #const c_d_no_overlap_text_weight = 30. 75 | #const c_d_no_overlap_tick_weight = 25. 76 | 77 | #const d_d_point_weight = 0. 78 | #const d_d_text_weight = 1. 79 | #const d_d_rect_weight = 0. 80 | 81 | #const continuous_x_weight = 0. 82 | #const continuous_y_weight = 0. 83 | #const continuous_color_weight = 10. 84 | #const continuous_size_weight = 1. 85 | #const continuous_text_weight = 20. 86 | 87 | #const ordered_x_weight = 1. 88 | #const ordered_y_weight = 0. 89 | #const ordered_color_weight = 8. 90 | #const ordered_size_weight = 10. 91 | #const ordered_text_weight = 32. 92 | #const ordered_row_weight = 10. 93 | #const ordered_column_weight = 10. 94 | 95 | #const nominal_x_weight = 3. 96 | #const nominal_y_weight = 0. 97 | #const nominal_color_weight = 10. 98 | #const nominal_shape_weight = 11. 99 | #const nominal_text_weight = 12. 100 | #const nominal_row_weight = 7. 101 | #const nominal_column_weight = 10. 102 | #const nominal_detail_weight = 20. 103 | 104 | #const interesting_x_weight = 0. 105 | #const interesting_y_weight = 1. 106 | #const interesting_color_weight = 2. 107 | #const interesting_size_weight = 2. 108 | #const interesting_shape_weight = 3. 109 | #const interesting_text_weight = 6. 110 | #const interesting_row_weight = 6. 111 | #const interesting_column_weight = 7. 112 | #const interesting_detail_weight = 20. 113 | 114 | #const aggregate_count_weight = 0. 115 | #const aggregate_sum_weight = 2. 116 | #const aggregate_mean_weight = 1. 117 | #const aggregate_median_weight = 3. 118 | #const aggregate_min_weight = 4. 119 | #const aggregate_max_weight = 4. 120 | #const aggregate_stdev_weight = 5. 121 | 122 | #const value_point_weight = 0. 123 | #const value_bar_weight = 0. 124 | #const value_line_weight = 0. 125 | #const value_area_weight = 0. 126 | #const value_text_weight = 0. 127 | #const value_tick_weight = 0. 128 | #const value_rect_weight = 0. 
129 | #const summary_point_weight = 0. 130 | #const summary_bar_weight = 0. 131 | #const summary_line_weight = 0. 132 | #const summary_area_weight = 0. 133 | #const summary_text_weight = 0. 134 | #const summary_tick_weight = 0. 135 | #const summary_rect_weight = 0. 136 | 137 | #const value_continuous_x_weight = 0. 138 | #const value_continuous_y_weight = 0. 139 | #const value_continuous_color_weight = 0. 140 | #const value_continuous_size_weight = 0. 141 | #const value_continuous_text_weight = 0. 142 | #const value_discrete_x_weight = 0. 143 | #const value_discrete_y_weight = 0. 144 | #const value_discrete_color_weight = 0. 145 | #const value_discrete_shape_weight = 0. 146 | #const value_discrete_size_weight = 0. 147 | #const value_discrete_text_weight = 0. 148 | #const value_discrete_row_weight = 0. 149 | #const value_discrete_column_weight = 0. 150 | #const summary_continuous_x_weight = 0. 151 | #const summary_continuous_y_weight = 0. 152 | #const summary_continuous_color_weight = 0. 153 | #const summary_continuous_size_weight = 0. 154 | #const summary_continuous_text_weight = 0. 155 | #const summary_discrete_x_weight = 0. 156 | #const summary_discrete_y_weight = 0. 157 | #const summary_discrete_color_weight = 0. 158 | #const summary_discrete_shape_weight = 0. 159 | #const summary_discrete_size_weight = 0. 160 | #const summary_discrete_text_weight = 0. 161 | #const summary_discrete_row_weight = 0. 162 | #const summary_discrete_column_weight = 0. 163 | 164 | #const stack_zero_weight = 0. 165 | #const stack_normalize_weight = 1. 166 | -------------------------------------------------------------------------------- /asp/weights_learned.lp: -------------------------------------------------------------------------------- 1 | % Generated with `python draco/learn/linear.py`. 2 | 3 | #const agg_dim_weight = 191. 4 | #const aggregate_weight = 0. 5 | #const aggregate_count_weight = 0. 6 | #const aggregate_group_by_raw_weight = 191. 7 | #const aggregate_max_weight = 0. 
8 | #const aggregate_mean_weight = 0. 9 | #const aggregate_median_weight = 0. 10 | #const aggregate_min_weight = 0. 11 | #const aggregate_stdev_weight = 0. 12 | #const aggregate_sum_weight = 0. 13 | #const bin_weight = 0. 14 | #const bin_cardinality_weight = 0. 15 | #const bin_high_weight = 0. 16 | #const bin_low_weight = 0. 17 | #const c_c_area_weight = 0. 18 | #const c_c_line_weight = 309. 19 | #const c_c_point_weight = -397. 20 | #const c_c_text_weight = 0. 21 | #const c_c_tick_weight = 0. 22 | #const c_d_area_weight = 0. 23 | #const c_d_bar_weight = 0. 24 | #const c_d_column_weight = 0. 25 | #const c_d_line_weight = 0. 26 | #const c_d_no_overlap_area_weight = 0. 27 | #const c_d_no_overlap_bar_weight = -262. 28 | #const c_d_no_overlap_line_weight = -416. 29 | #const c_d_no_overlap_point_weight = 487. 30 | #const c_d_no_overlap_text_weight = 0. 31 | #const c_d_no_overlap_tick_weight = 0. 32 | #const c_d_point_weight = 279. 33 | #const c_d_text_weight = 0. 34 | #const c_d_tick_weight = 0. 35 | #const color_entropy_high_weight = -582. 36 | #const color_entropy_low_weight = 1618. 37 | #const continuous_color_weight = -38. 38 | #const continuous_size_weight = 317. 39 | #const continuous_text_weight = 0. 40 | #const continuous_x_weight = 3. 41 | #const continuous_y_weight = -91. 42 | #const count_twice_weight = 0. 43 | #const d_d_overlap_weight = 0. 44 | #const d_d_point_weight = 0. 45 | #const d_d_rect_weight = 0. 46 | #const d_d_text_weight = 0. 47 | #const encoding_weight = 0. 48 | #const encoding_field_weight = 0. 49 | #const facet_summary_weight = 4. 50 | #const high_cardinality_nominal_weight = 0. 51 | #const high_cardinality_nominal_color_weight = 1042. 52 | #const high_cardinality_ordinal_weight = 0. 53 | #const high_cardinality_size_weight = 216. 54 | #const horizontal_scrolling_weight = 0. 55 | #const includes_zero_weight = 0. 56 | #const interesting_color_weight = 1235. 57 | #const interesting_column_weight = 0. 58 | #const interesting_detail_weight = 0. 
59 | #const interesting_row_weight = 4. 60 | #const interesting_shape_weight = 0. 61 | #const interesting_size_weight = -112. 62 | #const interesting_text_weight = 0. 63 | #const interesting_x_weight = -439. 64 | #const interesting_y_weight = -687. 65 | #const log_weight = 0. 66 | #const multiple_non_pos_weight = 0. 67 | #const nominal_color_weight = -370. 68 | #const nominal_column_weight = 0. 69 | #const nominal_detail_weight = 0. 70 | #const nominal_row_weight = 270. 71 | #const nominal_shape_weight = 0. 72 | #const nominal_text_weight = 0. 73 | #const nominal_x_weight = 188. 74 | #const nominal_y_weight = -88. 75 | #const non_positional_pref_weight = 180. 76 | #const number_nominal_weight = 0. 77 | #const only_discrete_weight = 0. 78 | #const only_x_weight = 0. 79 | #const ordered_color_weight = 0. 80 | #const ordered_column_weight = 0. 81 | #const ordered_row_weight = 0. 82 | #const ordered_size_weight = 0. 83 | #const ordered_text_weight = 0. 84 | #const ordered_x_weight = -191. 85 | #const ordered_y_weight = 0. 86 | #const orientation_binned_weight = 0. 87 | #const position_entropy_weight = 339. 88 | #const quant_bin_weight = 0. 89 | #const quantitative_numbers_weight = -191. 90 | #const same_field_2_weight = 666. 91 | #const same_field_gte3_weight = 0. 92 | #const shape_cardinality_weight = 0. 93 | #const size_entropy_high_weight = 142. 94 | #const size_entropy_low_weight = -254. 95 | #const stack_normalize_weight = 0. 96 | #const stack_zero_weight = 0. 97 | #const summary_area_weight = 0. 98 | #const summary_bar_weight = -540. 99 | #const summary_continuous_color_weight = 106. 100 | #const summary_continuous_size_weight = -575. 101 | #const summary_continuous_text_weight = 0. 102 | #const summary_continuous_x_weight = 207. 103 | #const summary_continuous_y_weight = 261. 104 | #const summary_discrete_color_weight = 198. 105 | #const summary_discrete_column_weight = 0. 106 | #const summary_discrete_row_weight = 4. 
107 | #const summary_discrete_shape_weight = 0. 108 | #const summary_discrete_size_weight = 0. 109 | #const summary_discrete_text_weight = 0. 110 | #const summary_discrete_x_weight = -248. 111 | #const summary_discrete_y_weight = 45. 112 | #const summary_line_weight = 343. 113 | #const summary_point_weight = 196. 114 | #const summary_rect_weight = 0. 115 | #const summary_text_weight = 0. 116 | #const summary_tick_weight = 0. 117 | #const temporal_date_weight = 0. 118 | #const temporal_y_weight = 0. 119 | #const type_n_weight = 0. 120 | #const type_o_weight = -191. 121 | #const type_q_weight = 191. 122 | #const value_agg_weight = 0. 123 | #const value_area_weight = 0. 124 | #const value_bar_weight = 277. 125 | #const value_continuous_color_weight = 929. 126 | #const value_continuous_size_weight = 463. 127 | #const value_continuous_text_weight = 0. 128 | #const value_continuous_x_weight = -398. 129 | #const value_continuous_y_weight = -994. 130 | #const value_discrete_color_weight = 0. 131 | #const value_discrete_column_weight = 0. 132 | #const value_discrete_row_weight = 0. 133 | #const value_discrete_shape_weight = 0. 134 | #const value_discrete_size_weight = 0. 135 | #const value_discrete_text_weight = 0. 136 | #const value_discrete_x_weight = 0. 137 | #const value_discrete_y_weight = 0. 138 | #const value_line_weight = -451. 139 | #const value_point_weight = 173. 140 | #const value_rect_weight = 0. 141 | #const value_text_weight = 0. 142 | #const value_tick_weight = 0. 143 | #const x_column_weight = 0. 144 | #const x_row_weight = 270. 145 | #const x_y_raw_weight = 0. 146 | #const y_column_weight = 0. 147 | #const y_row_weight = 270. 148 | #const zero_weight = 0. 149 | #const zero_positional_weight = 0. 150 | #const zero_size_weight = 0. 151 | #const zero_skew_weight = 0. 
152 | -------------------------------------------------------------------------------- /tests/test_valid_specs.py: -------------------------------------------------------------------------------- 1 | from draco.helper import is_valid 2 | from draco.js import vl2asp 3 | 4 | data_schema = [ 5 | 'fieldtype("n1",string).', 6 | 'fieldtype("n2",string).', 7 | 'fieldtype("q1",number).', 8 | 'fieldtype("q2",number).', 9 | 'fieldtype("q3",number).', 10 | ] 11 | 12 | 13 | class TestValidSpecs: 14 | def test_hist(self): 15 | query = vl2asp( 16 | { 17 | "mark": "bar", 18 | "encoding": { 19 | "x": {"type": "quantitative", "field": "q1", "bin": True}, 20 | "y": {"type": "quantitative", "aggregate": "count"}, 21 | }, 22 | } 23 | ) 24 | 25 | assert is_valid(data_schema + query, True) == True 26 | 27 | def test_bar(self): 28 | query = vl2asp( 29 | { 30 | "mark": "bar", 31 | "encoding": { 32 | "x": {"type": "ordinal", "field": "n1"}, 33 | "y": {"type": "quantitative", "field": "q1"}, 34 | }, 35 | } 36 | ) 37 | 38 | assert is_valid(data_schema + query, True) == True 39 | 40 | def test_one_bar(self): 41 | query = vl2asp( 42 | {"mark": "bar", "encoding": {"y": {"type": "quantitative", "field": "q1"}}} 43 | ) 44 | 45 | assert is_valid(data_schema + query, True) == True 46 | 47 | def test_scatter(self): 48 | query = vl2asp( 49 | { 50 | "mark": "point", 51 | "encoding": { 52 | "x": {"type": "quantitative", "field": "q1"}, 53 | "y": {"type": "quantitative", "field": "q2"}, 54 | "color": {"type": "nominal", "field": "n2"}, 55 | "size": {"type": "quantitative", "field": "q3"}, 56 | }, 57 | } 58 | ) 59 | 60 | assert is_valid(data_schema + query, True) == True 61 | 62 | def test_stack(self): 63 | query = vl2asp( 64 | { 65 | "mark": "bar", 66 | "encoding": { 67 | "x": {"type": "nominal", "field": "n1"}, 68 | "y": { 69 | "type": "quantitative", 70 | "field": "q1", 71 | "stack": "zero", 72 | "aggregate": "sum", 73 | }, 74 | "color": {"type": "nominal", "field": "n2"}, 75 | }, 76 | } 77 | ) 78 | 
79 | assert is_valid(data_schema + query, True) == True 80 | 81 | def test_stack_agg(self): 82 | query = vl2asp( 83 | { 84 | "mark": "bar", 85 | "encoding": { 86 | "x": {"type": "nominal", "field": "n1"}, 87 | "y": { 88 | "type": "quantitative", 89 | "field": "q1", 90 | "stack": "zero", 91 | "aggregate": "sum", 92 | }, 93 | "detail": {"type": "nominal", "field": "n2"}, 94 | "color": { 95 | "type": "quantitative", 96 | "field": "q2", 97 | "aggregate": "mean", 98 | }, 99 | }, 100 | } 101 | ) 102 | 103 | assert is_valid(data_schema + query, True) == True 104 | 105 | def test_stack_q_q(self): 106 | query = vl2asp( 107 | { 108 | "mark": "area", 109 | "encoding": { 110 | "x": { 111 | "type": "quantitative", 112 | "field": "q1", 113 | "scale": {"zero": False}, 114 | }, 115 | "y": {"type": "quantitative", "field": "q2", "stack": "zero"}, 116 | "color": {"type": "nominal", "field": "n1"}, 117 | }, 118 | } 119 | ) 120 | 121 | assert is_valid(data_schema + query, True) == True 122 | 123 | def test_heatmap(self): 124 | query = vl2asp( 125 | { 126 | "mark": "rect", 127 | "encoding": { 128 | "x": {"type": "nominal", "field": "n1"}, 129 | "y": {"type": "ordinal", "field": "q1", "bin": True}, 130 | }, 131 | } 132 | ) 133 | 134 | assert is_valid(data_schema + query, True) == True 135 | 136 | 137 | class TestInvalidSpecs: 138 | def test_row_only(self): 139 | query = vl2asp( 140 | {"mark": "point", "encoding": {"row": {"type": "nominal", "field": "n1"}}} 141 | ) 142 | 143 | assert is_valid(data_schema + query, True) == False 144 | 145 | def test_q_q_bar(self): 146 | query = vl2asp( 147 | { 148 | "mark": "bar", 149 | "encoding": { 150 | "x": {"type": "quantitative", "field": "q1"}, 151 | "y": {"type": "quantitative", "field": "q2"}, 152 | }, 153 | } 154 | ) 155 | 156 | assert is_valid(data_schema + query, True) == False 157 | 158 | def test_only_one_agg(self): 159 | query = vl2asp( 160 | { 161 | "mark": "point", 162 | "encoding": { 163 | "x": {"type": "quantitative", "field": "q1"}, 
164 | "y": {"type": "quantitative", "field": "q2", "aggregate": "mean"}, 165 | }, 166 | } 167 | ) 168 | 169 | assert is_valid(data_schema + query, True) == False 170 | 171 | def test_stack_multiple(self): 172 | query = vl2asp( 173 | { 174 | "mark": "bar", 175 | "encoding": { 176 | "x": { 177 | "type": "quantitative", 178 | "field": "q1", 179 | "stack": "zero", 180 | "aggregate": "sum", 181 | }, 182 | "y": { 183 | "type": "quantitative", 184 | "field": "q2", 185 | "stack": "zero", 186 | "aggregate": "sum", 187 | }, 188 | "color": {"type": "nominal", "field": "n2"}, 189 | }, 190 | } 191 | ) 192 | 193 | assert is_valid(data_schema + query, True) == False 194 | -------------------------------------------------------------------------------- /asp/assign_weights.lp: -------------------------------------------------------------------------------- 1 | %% GENERATED FILE. DO NOT EDIT. 2 | 3 | soft_weight(type_q,type_q_weight). 4 | soft_weight(type_o,type_o_weight). 5 | soft_weight(type_n,type_n_weight). 6 | soft_weight(aggregate,aggregate_weight). 7 | soft_weight(bin,bin_weight). 8 | soft_weight(bin_high,bin_high_weight). 9 | soft_weight(bin_low,bin_low_weight). 10 | soft_weight(encoding,encoding_weight). 11 | soft_weight(encoding_field,encoding_field_weight). 12 | soft_weight(same_field_2,same_field_2_weight). 13 | soft_weight(same_field_gte3,same_field_gte3_weight). 14 | soft_weight(count_twice,count_twice_weight). 15 | soft_weight(shape_cardinality,shape_cardinality_weight). 16 | soft_weight(number_nominal,number_nominal_weight). 17 | soft_weight(bin_cardinality,bin_cardinality_weight). 18 | soft_weight(quant_bin,quant_bin_weight). 19 | soft_weight(agg_dim,agg_dim_weight). 20 | soft_weight(only_discrete,only_discrete_weight). 21 | soft_weight(multiple_non_pos,multiple_non_pos_weight). 22 | soft_weight(non_positional_pref,non_positional_pref_weight). 23 | soft_weight(aggregate_group_by_raw,aggregate_group_by_raw_weight). 24 | soft_weight(x_y_raw,x_y_raw_weight). 
25 | soft_weight(log,log_weight). 26 | soft_weight(zero,zero_weight). 27 | soft_weight(zero_size,zero_size_weight). 28 | soft_weight(zero_positional,zero_positional_weight). 29 | soft_weight(zero_skew,zero_skew_weight). 30 | soft_weight(includes_zero,includes_zero_weight). 31 | soft_weight(only_x,only_x_weight). 32 | soft_weight(orientation_binned,orientation_binned_weight). 33 | soft_weight(high_cardinality_ordinal,high_cardinality_ordinal_weight). 34 | soft_weight(high_cardinality_nominal,high_cardinality_nominal_weight). 35 | soft_weight(high_cardinality_nominal_color,high_cardinality_nominal_color_weight). 36 | soft_weight(horizontal_scrolling,horizontal_scrolling_weight). 37 | soft_weight(temporal_date,temporal_date_weight). 38 | soft_weight(quantitative_numbers,quantitative_numbers_weight). 39 | soft_weight(position_entropy,position_entropy_weight). 40 | soft_weight(high_cardinality_size,high_cardinality_size_weight). 41 | soft_weight(value_agg,value_agg_weight). 42 | soft_weight(facet_summary,facet_summary_weight). 43 | soft_weight(x_row,x_row_weight). 44 | soft_weight(y_row,y_row_weight). 45 | soft_weight(x_column,x_column_weight). 46 | soft_weight(y_column,y_column_weight). 47 | soft_weight(color_entropy_high,color_entropy_high_weight). 48 | soft_weight(color_entropy_low,color_entropy_low_weight). 49 | soft_weight(size_entropy_high,size_entropy_high_weight). 50 | soft_weight(size_entropy_low,size_entropy_low_weight). 51 | soft_weight(c_d_column,c_d_column_weight). 52 | soft_weight(temporal_y,temporal_y_weight). 53 | soft_weight(d_d_overlap,d_d_overlap_weight). 54 | soft_weight(c_c_point,c_c_point_weight). 55 | soft_weight(c_c_line,c_c_line_weight). 56 | soft_weight(c_c_area,c_c_area_weight). 57 | soft_weight(c_c_text,c_c_text_weight). 58 | soft_weight(c_c_tick,c_c_tick_weight). 59 | soft_weight(c_d_point,c_d_point_weight). 60 | soft_weight(c_d_bar,c_d_bar_weight). 61 | soft_weight(c_d_line,c_d_line_weight). 62 | soft_weight(c_d_area,c_d_area_weight). 
63 | soft_weight(c_d_text,c_d_text_weight). 64 | soft_weight(c_d_tick,c_d_tick_weight). 65 | soft_weight(c_d_no_overlap_point,c_d_no_overlap_point_weight). 66 | soft_weight(c_d_no_overlap_bar,c_d_no_overlap_bar_weight). 67 | soft_weight(c_d_no_overlap_line,c_d_no_overlap_line_weight). 68 | soft_weight(c_d_no_overlap_area,c_d_no_overlap_area_weight). 69 | soft_weight(c_d_no_overlap_text,c_d_no_overlap_text_weight). 70 | soft_weight(c_d_no_overlap_tick,c_d_no_overlap_tick_weight). 71 | soft_weight(d_d_point,d_d_point_weight). 72 | soft_weight(d_d_text,d_d_text_weight). 73 | soft_weight(d_d_rect,d_d_rect_weight). 74 | soft_weight(continuous_x,continuous_x_weight). 75 | soft_weight(continuous_y,continuous_y_weight). 76 | soft_weight(continuous_color,continuous_color_weight). 77 | soft_weight(continuous_size,continuous_size_weight). 78 | soft_weight(continuous_text,continuous_text_weight). 79 | soft_weight(ordered_x,ordered_x_weight). 80 | soft_weight(ordered_y,ordered_y_weight). 81 | soft_weight(ordered_color,ordered_color_weight). 82 | soft_weight(ordered_size,ordered_size_weight). 83 | soft_weight(ordered_text,ordered_text_weight). 84 | soft_weight(ordered_row,ordered_row_weight). 85 | soft_weight(ordered_column,ordered_column_weight). 86 | soft_weight(nominal_x,nominal_x_weight). 87 | soft_weight(nominal_y,nominal_y_weight). 88 | soft_weight(nominal_color,nominal_color_weight). 89 | soft_weight(nominal_shape,nominal_shape_weight). 90 | soft_weight(nominal_text,nominal_text_weight). 91 | soft_weight(nominal_row,nominal_row_weight). 92 | soft_weight(nominal_column,nominal_column_weight). 93 | soft_weight(nominal_detail,nominal_detail_weight). 94 | soft_weight(interesting_x,interesting_x_weight). 95 | soft_weight(interesting_y,interesting_y_weight). 96 | soft_weight(interesting_color,interesting_color_weight). 97 | soft_weight(interesting_size,interesting_size_weight). 98 | soft_weight(interesting_shape,interesting_shape_weight). 
99 | soft_weight(interesting_text,interesting_text_weight). 100 | soft_weight(interesting_row,interesting_row_weight). 101 | soft_weight(interesting_column,interesting_column_weight). 102 | soft_weight(interesting_detail,interesting_detail_weight). 103 | soft_weight(aggregate_count,aggregate_count_weight). 104 | soft_weight(aggregate_sum,aggregate_sum_weight). 105 | soft_weight(aggregate_mean,aggregate_mean_weight). 106 | soft_weight(aggregate_median,aggregate_median_weight). 107 | soft_weight(aggregate_min,aggregate_min_weight). 108 | soft_weight(aggregate_max,aggregate_max_weight). 109 | soft_weight(aggregate_stdev,aggregate_stdev_weight). 110 | soft_weight(value_point,value_point_weight). 111 | soft_weight(value_bar,value_bar_weight). 112 | soft_weight(value_line,value_line_weight). 113 | soft_weight(value_area,value_area_weight). 114 | soft_weight(value_text,value_text_weight). 115 | soft_weight(value_tick,value_tick_weight). 116 | soft_weight(value_rect,value_rect_weight). 117 | soft_weight(summary_point,summary_point_weight). 118 | soft_weight(summary_bar,summary_bar_weight). 119 | soft_weight(summary_line,summary_line_weight). 120 | soft_weight(summary_area,summary_area_weight). 121 | soft_weight(summary_text,summary_text_weight). 122 | soft_weight(summary_tick,summary_tick_weight). 123 | soft_weight(summary_rect,summary_rect_weight). 124 | soft_weight(value_continuous_x,value_continuous_x_weight). 125 | soft_weight(value_continuous_y,value_continuous_y_weight). 126 | soft_weight(value_continuous_color,value_continuous_color_weight). 127 | soft_weight(value_continuous_size,value_continuous_size_weight). 128 | soft_weight(value_continuous_text,value_continuous_text_weight). 129 | soft_weight(value_discrete_x,value_discrete_x_weight). 130 | soft_weight(value_discrete_y,value_discrete_y_weight). 131 | soft_weight(value_discrete_color,value_discrete_color_weight). 132 | soft_weight(value_discrete_shape,value_discrete_shape_weight). 
133 | soft_weight(value_discrete_size,value_discrete_size_weight). 134 | soft_weight(value_discrete_text,value_discrete_text_weight). 135 | soft_weight(value_discrete_row,value_discrete_row_weight). 136 | soft_weight(value_discrete_column,value_discrete_column_weight). 137 | soft_weight(summary_continuous_x,summary_continuous_x_weight). 138 | soft_weight(summary_continuous_y,summary_continuous_y_weight). 139 | soft_weight(summary_continuous_color,summary_continuous_color_weight). 140 | soft_weight(summary_continuous_size,summary_continuous_size_weight). 141 | soft_weight(summary_continuous_text,summary_continuous_text_weight). 142 | soft_weight(summary_discrete_x,summary_discrete_x_weight). 143 | soft_weight(summary_discrete_y,summary_discrete_y_weight). 144 | soft_weight(summary_discrete_color,summary_discrete_color_weight). 145 | soft_weight(summary_discrete_shape,summary_discrete_shape_weight). 146 | soft_weight(summary_discrete_size,summary_discrete_size_weight). 147 | soft_weight(summary_discrete_text,summary_discrete_text_weight). 148 | soft_weight(summary_discrete_row,summary_discrete_row_weight). 149 | soft_weight(summary_discrete_column,summary_discrete_column_weight). 150 | soft_weight(stack_zero,stack_zero_weight). 151 | soft_weight(stack_normalize,stack_normalize_weight). 
152 | -------------------------------------------------------------------------------- /asp/tests.yaml: -------------------------------------------------------------------------------- 1 | Definitions: 2 | # test data 3 | data: {filename: asp/examples/data.lp} 4 | 5 | define: {filename: asp/define.lp} 6 | generate: {filename: asp/generate.lp} 7 | hard: {filename: asp/hard.lp} 8 | hard-integrity: {filename: asp/hard-integrity.lp} 9 | features: {filename: asp/soft.lp} 10 | optimize: {filename: asp/optimize.lp} 11 | 12 | # program to test constraints 13 | no_opt: {group: [data, define, generate, hard, hard-integrity, features]} 14 | no_gen: {group: [data, define, hard, hard-integrity, features]} 15 | draco: {group: [data, define, generate, hard, hard-integrity, features, optimize]} 16 | 17 | Test a implies b: 18 | Program: | 19 | b :- a. 20 | a. 21 | :- not b. 22 | Expect: SAT 23 | 24 | Test program by itself is satisfiable: 25 | Modules: no_opt 26 | Expect: SAT 27 | 28 | Test valid values: 29 | Modules: no_opt 30 | Expect: SAT 31 | 32 | Test channel: 33 | Program: | 34 | :- not channel(_,x). 35 | 36 | Test field: 37 | Program: | 38 | :- not field(_,n1). 39 | 40 | Test type: 41 | Program: | 42 | :- not type(_,quantitative). 43 | 44 | Test aggregate: 45 | Program: | 46 | :- not aggregate(_,mean). 47 | 48 | Test bin: 49 | Program: | 50 | :- not bin(_,10). 51 | 52 | Test zero: 53 | Program: | 54 | :- not zero(_). 55 | 56 | Test fieldtype: 57 | Program: | 58 | :- not fieldtype(_,string). 59 | 60 | Test mark: 61 | Program: | 62 | :- not mark(bar). 63 | 64 | Test data size: 65 | Program: | 66 | num_rows(10). 67 | 68 | Test invalid values: 69 | Modules: no_opt 70 | Expect: UNSAT 71 | 72 | Test channel: 73 | Program: | 74 | :- not channel(_,foo). 75 | 76 | Test field: 77 | Program: | 78 | :- not field(_,foo). 79 | 80 | Test type: 81 | Program: | 82 | :- not type(_,foo). 83 | 84 | Test aggregate: 85 | Program: | 86 | :- not aggregate(_,foo). 
87 | 88 | Test bin: 89 | Program: | 90 | :- not bin(_,-1). 91 | 92 | Test fieldtype: 93 | Program: | 94 | :- not fieldtype(_,foo). 95 | 96 | Test mark: 97 | Program: | 98 | :- not mark(foo). 99 | 100 | Test data size: 101 | Program: | 102 | num_rows(-10). 103 | 104 | Test constraints: 105 | Modules: no_opt 106 | 107 | Test can use binning: 108 | Program: | 109 | encoding(e). 110 | :- not bin(e,_). 111 | Expect: SAT 112 | 113 | Test can use aggregation: 114 | Program: | 115 | encoding(e). 116 | :- not aggregate(e,_). 117 | Expect: SAT 118 | 119 | Test cannot use aggregation and binning: 120 | Program: | 121 | encoding(e). 122 | :- not aggregate(e,_). 123 | :- not bin(e,_). 124 | Expect: UNSAT 125 | 126 | Test cannot use field with count: 127 | Program: | 128 | encoding(e). 129 | :- not aggregate(e,count). 130 | :- not field(e,q1). 131 | Expect: UNSAT 132 | 133 | Test can use string as o: 134 | Program: | 135 | encoding(e). 136 | :- not field(e,n1). 137 | :- not type(e,ordinal). 138 | Expect: SAT 139 | 140 | Test cannot use string as q: 141 | Program: | 142 | encoding(e). 143 | :- not field(e,n1). 144 | :- not type(e,quantitative). 145 | Expect: UNSAT 146 | 147 | Test cannot bin temporal: 148 | Program: | 149 | fieldtype(d,datetime). 150 | encoding(e). 151 | :- not field(e,d). 152 | :- not type(e,temporal). 153 | :- not bin(e,_). 154 | Expect: UNSAT 155 | 156 | Test cannot use log with string: 157 | Program: | 158 | encoding(e). 159 | :- not field(e,n1). 160 | :- not log(e). 161 | Expect: UNSAT 162 | 163 | Test can log number: 164 | Program: | 165 | encoding(e). 166 | :- not field(e,q2). 167 | :- not log(e). 168 | Expect: SAT 169 | 170 | Test cannot use log with binned data because it is discrete: 171 | Program: | 172 | encoding(e). 173 | :- not log(e). 174 | :- not bin(e). 175 | Expect: UNSAT 176 | 177 | Test cannot use zero with string: 178 | Program: | 179 | encoding(e). 180 | :- not field(e,n1). 181 | :- not zero(e). 
182 | Expect: UNSAT 183 | 184 | Test can zero number: 185 | Program: | 186 | encoding(e). 187 | :- not field(e,q1). 188 | :- not zero(e). 189 | Expect: SAT 190 | 191 | Test cannot use zero with binned data because it is discrete: 192 | Program: | 193 | encoding(e). 194 | :- not zero(e). 195 | :- not bin(e). 196 | Expect: UNSAT 197 | 198 | Test can compute average of a number: 199 | Program: | 200 | encoding(e). 201 | :- not field(e,q1). 202 | :- not aggregate(e,mean). 203 | Expect: SAT 204 | 205 | Test cannot compute average of a string: 206 | Program: | 207 | encoding(e). 208 | :- not field(e,n1). 209 | :- not aggregate(e,mean). 210 | Expect: UNSAT 211 | 212 | Test cannot aggregate nominal: 213 | Program: | 214 | encoding(e). 215 | :- not aggregate(e,_). 216 | :- not type(e,nominal). 217 | Expect: UNSAT 218 | 219 | Test can aggregate continuous: 220 | Program: | 221 | encoding(e). 222 | :- not aggregate(e,_). 223 | Expect: SAT 224 | 225 | Test temporal requires datetime: 226 | Program: | 227 | fieldtype(t,datetime). 228 | encoding(e). 229 | :- not field(e,t). 230 | :- not type(e,temporal). 231 | Expect: SAT 232 | 233 | Test can use different fields on x and y: 234 | Program: | 235 | encoding(e0). 236 | :- not channel(e0,x). 237 | :- not field(e0,n1). 238 | encoding(e1). 239 | :- not channel(e1,y). 240 | :- not field(e1,n2). 241 | Expect: SAT 242 | 243 | Test cannot use same field on x and y: 244 | Program: | 245 | encoding(e0). 246 | :- not channel(e0,x). 247 | :- not field(e0,n1). 248 | encoding(e1). 249 | :- not channel(e1,y). 250 | :- not field(e1,n1). 251 | Expect: UNSAT 252 | 253 | Test helpers: 254 | Modules: no_gen 255 | 256 | Test overlap: 257 | Program: | 258 | mark(point). 259 | 260 | encoding(e1). 261 | field(e1,n1). 262 | channel(e1,x). 263 | type(e1,ordinal). 264 | 265 | encoding(e2). 266 | field(e2,q1). 267 | channel(e2,y). 268 | type(e2,quantitative). 269 | 270 | :- no_overlap. 
271 | Expect: SAT 272 | 273 | Test no overlap aggregation: 274 | Program: | 275 | mark(point). 276 | 277 | encoding(e1). 278 | field(e1,n1). 279 | channel(e1,x). 280 | type(e1,ordinal). 281 | 282 | encoding(e2). 283 | field(e2,q1). 284 | channel(e2,y). 285 | type(e2,quantitative). 286 | aggregate(e2,mean). 287 | 288 | :- no_overlap. 289 | Expect: UNSAT 290 | 291 | Test no overlap cardinality: 292 | Program: | 293 | mark(point). 294 | 295 | encoding(e1). 296 | field(e1,q2). 297 | channel(e1,x). 298 | type(e1,ordinal). 299 | 300 | encoding(e2). 301 | field(e2,q1). 302 | channel(e2,y). 303 | type(e2,quantitative). 304 | 305 | :- no_overlap. 306 | Expect: UNSAT 307 | 308 | Test soft constraints: 309 | Modules: no_gen 310 | 311 | Test high entropy data: 312 | Program: | 313 | fieldtype(n3,number). 314 | cardinality(n3,200). 315 | entropy(n3,1). 316 | 317 | mark(point). 318 | 319 | encoding(e). 320 | field(e,n3). 321 | channel(e,x). 322 | type(e,quantitative). 323 | 324 | :- not soft(position_entropy, e). 325 | Expect: SAT 326 | 327 | Test skew with zero: 328 | Program: | 329 | fieldtype(n3,number). 330 | cardinality(n3,10). 331 | extent(n3,10,12). 332 | 333 | mark(tick). 334 | 335 | encoding(e). 336 | field(e,n3). 337 | channel(e,x). 338 | type(e,quantitative). 339 | zero(e). 340 | 341 | :- not soft(zero_skew). 342 | Expect: SAT 343 | 344 | Test zero with data that covers it: 345 | Program: | 346 | fieldtype(n3,number). 347 | cardinality(n3,200). 348 | entropy(n3,1). 349 | extent(n3,-10,10). 350 | 351 | mark(point). 352 | 353 | encoding(e). 354 | field(e,n3). 355 | channel(e,x). 356 | type(e,quantitative). 357 | zero(e). 358 | 359 | :- not soft(includes_zero). 
360 | 361 | Expect: UNSAT 362 | -------------------------------------------------------------------------------- /asp/hard.lp: -------------------------------------------------------------------------------- 1 | % ====== Expressiveness and Well-Formedness Constraints ====== 2 | 3 | % === Within Encodings === 4 | 5 | % @constraint Primitive type has to support data type. 6 | hard(enc_type_valid,E,F) :- type(E,quantitative), field(E,F), fieldtype(F,(string;boolean)). 7 | hard(enc_type_valid,E,F) :- type(E,temporal), field(E,F), not fieldtype(F,datetime). 8 | 9 | % @constraint Can only bin quantitative or ordinal. 10 | hard(bin_q_o,E,T) :- type(E,T), bin(E,_), T != quantitative, T != ordinal. 11 | 12 | % @constraint Can only use log with quantitative. 13 | hard(log_q,E) :- log(E), not type(E,quantitative). 14 | 15 | % @constraint Can only use zero with quantitative. 16 | hard(zero_q,E) :- zero(E), not type(E,quantitative). 17 | 18 | % @constraint Cannot use log scale with discrete (which includes binned). 19 | hard(log_discrete,E) :- log(E), discrete(E). 20 | 21 | % @constraint Cannot use log and zero together. 22 | hard(log_zero,E) :- log(E), zero(E). 23 | 24 | % @constraint Cannot use log if the data is negative or zero. 25 | hard(log_non_positive,E,F) :- log(E), field(E,F), extent(F,MIN,_), MIN <= 0. 26 | 27 | % @constraint Cannot bin and aggregate. 28 | hard(bin_and_aggregate,E) :- bin(E,_), aggregate(E,_). 29 | 30 | % @constraint Ordinal only supports min, max, and median. 31 | hard(aggregate_o_valid,E,A) :- type(E,ordinal), aggregate(E,A), A != min, A != max, A != median. 32 | 33 | % @constraint Temporal only supports min and max. 34 | hard(aggregate_t_valid,E,A) :- type(E,temporal), aggregate(E,A), A != min, A != max. 35 | 36 | % @constraint Cannot aggregate nominal. 37 | hard(aggregate_nominal,E) :- aggregate(E,_), type(E,nominal). 38 | 39 | % @constraint Detail cannot be aggregated. 40 | hard(aggregate_detail,E) :- channel(E,detail), aggregate(E,_). 
41 | 42 | % @constraint Count has to be quantitative and not use a field. 43 | hard(count_q_without_field,E) :- aggregate(E,count), field(E,_). 44 | hard(count_q_without_field,E) :- aggregate(E,count), not type(E,quantitative). 45 | 46 | % @constraint Shape requires discrete and not ordered (nominal). Using ordinal wouldn't make a difference in Vega-Lite. 47 | hard(shape_discrete_non_ordered,E) :- channel(E,shape), not type(E,nominal). 48 | 49 | % @constraint Detail requires nominal. 50 | hard(detail_non_ordered,E) :- channel(E,detail), not type(E,nominal). 51 | 52 | % @constraint Size implies order so nominal is misleading. 53 | hard(size_nominal) :- channel(E,size), type(E,nominal). 54 | 55 | % @constraint Do not use size when data is negative as size implies that data is positive. 56 | hard(size_negative,E) :- channel(E,size), enc_extent(E,MIN,MAX), MIN < 0, MAX > 0. 57 | 58 | % === Across encodings and between encodings and marks === 59 | 60 | % @constraint Cannot use single channels twice. 61 | hard(repeat_channel,C):- single_channel(C), 2 { channel(_,C) }. 62 | 63 | % @constraint There has to be at least one encoding. Otherwise, the visualization doesn't show anything. 64 | hard(no_encodings) :- not encoding(_). 65 | 66 | % @constraint Row and column require discrete. 67 | hard(row_or_column_c) :- channel_continuous(row;column). 68 | 69 | % @constraint Don't use row without y. Just using y is simpler. 70 | hard(row_no_y) :- channel(_,row), not channel(_,y). 71 | 72 | % @constraint Don't use column without x. Just using x is simpler. 73 | hard(column_no_x) :- channel(_,column), not channel(_,x). 74 | 75 | % @constraint All encodings (if they have a channel) require field except if we have a count aggregate. 76 | hard(encoding_no_field_and_not_count,E) :- not field(E,_), not aggregate(E,count), encoding(E). 77 | 78 | % @constraint Count should not have a field. Having a field doesn't make a difference. 
79 | hard(count_with_field,E) :- aggregate(E,count), field(E,_). 80 | 81 | % @constraint Text mark requires text channel. 82 | hard(text_mark_without_text_channel) :- mark(text), not channel(_,text). 83 | 84 | % @constraint Text channel requires text mark. 85 | hard(text_channel_without_text_mark) :- channel(_,text), not mark(text). 86 | 87 | % @constraint Point, tick, and bar require x or y channel. 88 | hard(point_tick_bar_without_x_or_y) :- mark(point;tick;bar), not channel(_,x), not channel(_,y). 89 | 90 | % @constraint Line and area require x and y channel. 91 | hard(line_area_without_x_y) :- mark(line;area), not channel(_,(x;y)). 92 | 93 | % @constraint Line and area cannot have two discrete. 94 | hard(line_area_with_discrete) :- mark(line;area), channel_discrete(x), channel_discrete(y). 95 | 96 | % @constraint Bar and tick cannot have both x and y continuous. 97 | hard(bar_tick_continuous_x_y) :- mark(bar;tick), channel_continuous(x), channel_continuous(y). 98 | 99 | % @constraint Bar, tick, line, area require some continuous variable on x or y. 100 | hard(bar_tick_area_line_without_continuous_x_y) :- mark(bar;tick;area;line), not channel_continuous(x), not channel_continuous(y). 101 | 102 | % @constraint Bar and area mark requires scale of continuous to start at zero. 103 | hard(bar_area_without_zero) :- mark(bar;area), channel(E,x), orientation(horizontal), not zero(E). 104 | hard(bar_area_without_zero) :- mark(bar;area), channel(E,y), orientation(vertical), not zero(E). 105 | 106 | % @constraint Shape channel requires point mark. 107 | hard(shape_without_point) :- channel(_,shape), not mark(point). 108 | 109 | % @constraint Size only works with some marks. Vega-Lite can also size lines, and ticks but that would violate best practices. 110 | hard(size_without_point_text) :- channel(_,size), not mark(point), not mark(text). 111 | 112 | % @constraint Detail requires aggregation. Detail adds a field to the group by. 
Detail could also be used to add information to tooltips. We may remove this later. 113 | hard(detail_without_agg) :- channel(_,detail), not aggregate(_,_). 114 | 115 | % @constraint Do not use log for bar or area mark as they are often misleading. We may remove this rule in the future. 116 | hard(area_bar_with_log) :- mark(bar;area), log(E), channel(E,(x;y)). 117 | 118 | % @constraint Rect mark needs discrete x and y. 119 | hard(rect_without_d_d) :- mark(rect), not is_d_d. 120 | 121 | % @constraint Don't use the same field on x and y. 122 | hard(same_field_x_and_y) :- { field(E,F) : channel(E,x); field(E,F) : channel(E,y) } >= 2, field(F). 123 | 124 | % @constraint Don't use count on x and y. 125 | hard(count_on_x_and_y):- channel(EX,x), channel(EY,y), aggregate(EX,count), aggregate(EY,count). 126 | 127 | % @constraint If we use aggregation, then all continuous fields need to be aggregated. 128 | hard(aggregate_not_all_continuous):- aggregate(_,_), continuous(E), not aggregate(E,_). 129 | 130 | % @constraint Don't use count twice. 131 | hard(count_twice) :- { aggregate(_,count) } = 2. 132 | 133 | % === Global properties === 134 | 135 | % @constraint Bars and area cannot overlap. 136 | hard(bar_area_overlap) :- mark(bar;area), overlap. 137 | 138 | % @constraint Rects shouldn't overlap. They are used for discrete heatmaps. 139 | hard(rect_overlap) :- mark(rect), overlap. 140 | 141 | % == Stacking == 142 | 143 | % @constraint Only use stacking for bar and area. 144 | hard(stack_without_bar_area) :- stack(_), not mark(bar), not mark(area). 145 | 146 | % @constraint Don't stack if aggregation is not summative (summative are count, sum, distinct, valid, missing). 147 | hard(stack_without_summative_agg,E,A) :- stack(E,_), aggregate(E,A), not summative_aggregate_op(A). 148 | 149 | % @constraint Need to stack if we use bar, area with discrete color. 150 | hard(no_stack_with_bar_area_discrete_color,E) :- mark(bar;area), channel(E,color), discrete(E), not stack(_). 
151 | 152 | % @constraint Can only use stack if we also use discrete color, or detail. 153 | hard(stack_without_discrete_color_or_detail) :- stack(_), not channel_discrete(color), not channel(_,detail). 154 | 155 | % @constraint If we use stack and detail, we also have to use quantitative color. 156 | hard(stack_detail_without_q_color) :- stack(_), channel(_,detail), not channel(_,color). 157 | hard(stack_detail_without_q_color,E) :- stack(_), channel(_,detail), channel(E,color), not aggregate(E,_). 158 | 159 | % @constraint Stack can only be on continuous. 160 | hard(stack_discrete,E) :- stack(E,_), discrete(E). 161 | 162 | % @constraint Stack can only be on x or y. 163 | hard(stack_without_x_y,E) :- stack(E,_), not channel(E,x), not channel(E,y). 164 | 165 | % @constraint Cannot use non positional continuous with stack unless it's aggregated. 166 | hard(stack_with_non_positional_non_agg,E,C) :- stack(_), non_positional(C), channel(E,C), not aggregate(E,_), continuous(E). 167 | 168 | % @constraint Vega-Lite currently supports 8 shapes. 169 | hard(shape_with_cardinality_gt_eight,E,C) :- channel(E,shape), enc_cardinality(E,C), C > 8. 170 | 171 | % @constraint At most 20 categorical colors. 172 | hard(color_with_cardinality_gt_twenty,E,C) :- channel(E,color), discrete(E), enc_cardinality(E,C), C > 20. 173 | 174 | % === Type checks === 175 | 176 | % @constraint Check mark. 177 | hard(invalid_mark,M) :- mark(M), not marktype(M). 178 | 179 | % @constraint Check types of encoding properties. 180 | hard(invalid_channel,C) :- channel(_,C), not channel(C). 181 | hard(invalid_field,F) :- field(_,F), not field(F). 182 | hard(invalid_type,T) :- type(_,T), not type(T). 183 | hard(invalid_agg,A) :- aggregate(_,A), not aggregate_op(A). 184 | hard(invalid_bin,B) :- bin(_,B), not B >= 0. % @constraint Bin has to be a natural number. 185 | 186 | % @constraint Fieldtype has to be primitive type. 187 | hard(invalid_fieldtype,T) :- fieldtype(_,T), not primitive_type(T). 
188 | 189 | % @constraint Task has to be one of the tasks. 190 | hard(invalid_task,T) :- task(T), not tasks(T). 191 | 192 | % @constraint Num_rows has to be larger than 0. 193 | hard(invalid_num_rows,S) :- num_rows(S), S < 0. 194 | 195 | % @constraint Cardinality has to be larger than 0. 196 | hard(invalid_cardinality,C) :- cardinality(_,C), C < 0. 197 | 198 | % @constraint Entropy has to be positive. 199 | hard(invalid_entropy,E) :- entropy(_,E), E < 0. 200 | 201 | % @constraint Extent only allowed for numbers (for now). 202 | hard(invalid_extent_non_number,F) :- extent(F,_,_), not fieldtype(F,number). 203 | 204 | % @constraint Order has to be correct. 205 | hard(invalid_extent_order,MIN,MAX):- extent(_,MIN,MAX), MIN > MAX. 206 | 207 | % @constraint The name of a field cannot be the name of an encoding. This is to prevent errors coming from the shortcuts in define.lp. 208 | hard(encoding_field_same_name,N) :- encoding(N), field(N). 209 | -------------------------------------------------------------------------------- /data/spec_pairs/draco_cql.json: -------------------------------------------------------------------------------- 1 | {"headers": {"first": {"title": "Draco", "subtitle": "Draco Prediction"}, "second": {"title": "CQL", "subtitle": "Compassql Prediction"}}, "specs": [{"first": {"mark": "tick", "encoding": {"x": {"scale": {"zero": true}, "field": "Miles_per_Gallon", "type": "quantitative"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "second": {"mark": "tick", "encoding": {"y": {"scale": {"zero": false}, "field": "Miles_per_Gallon", "type": "quantitative"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "properties": {}}, {"first": {"mark": "tick", "encoding": {"x": {"scale": {"zero": true}, "field": "Horsepower", "type": "quantitative"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, 
"second": {"mark": "tick", "encoding": {"x": {"scale": {"zero": false}, "field": "Horsepower", "type": "quantitative"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "properties": {}}, {"first": {"mark": "tick", "encoding": {"row": {"scale": {"zero": false}, "field": "Cylinders", "type": "ordinal"}, "x": {"scale": {"zero": true}, "field": "Horsepower", "type": "quantitative"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "second": {"mark": "tick", "encoding": {"y": {"scale": {"zero": false}, "field": "Cylinders", "type": "ordinal"}, "x": {"scale": {"zero": false}, "field": "Horsepower", "type": "quantitative"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "properties": {}}, {"first": {"mark": "tick", "encoding": {"x": {"scale": {"zero": true}, "field": "Miles_per_Gallon", "type": "quantitative"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "second": {"mark": "tick", "encoding": {"x": {"scale": {"zero": false}, "field": "Miles_per_Gallon", "type": "quantitative"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "properties": {}}, {"first": {"mark": "rule", "encoding": {"row": {"scale": {"zero": false}, "field": "Origin", "type": "nominal"}, "x": {"scale": {"zero": true}, "type": "quantitative", "aggregate": "count"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "second": {"mark": "bar", "encoding": {"y": {"scale": {"zero": false}, "field": "Origin", "type": "nominal"}, "x": {"scale": {"zero": false}, "type": "quantitative", "aggregate": "count"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "properties": {}}, {"first": {"mark": "line", "encoding": {"x": {"scale": {"zero": true}, 
"field": "Horsepower", "type": "quantitative", "aggregate": "mean"}, "y": {"scale": {"zero": false}, "field": "Cylinders", "type": "ordinal", "aggregate": "min"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "second": {"mark": "bar", "encoding": {"x": {"scale": {"zero": false}, "field": "Horsepower", "type": "quantitative", "aggregate": "mean"}, "y": {"scale": {"zero": false}, "field": "Cylinders", "type": "ordinal"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "properties": {}}, {"first": {"mark": "area", "encoding": {"y": {"scale": {"zero": true}, "field": "Miles_per_Gallon", "type": "quantitative"}, "x": {"scale": {"zero": true}, "field": "Horsepower", "type": "quantitative"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "second": {"mark": "point", "encoding": {"x": {"scale": {"zero": false}, "field": "Miles_per_Gallon", "type": "quantitative", "aggregate": "mean"}, "y": {"scale": {"zero": false}, "field": "Horsepower", "type": "quantitative", "aggregate": "mean"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "properties": {}}, {"first": {"mark": "rule", "encoding": {"x": {"scale": {"zero": false}, "field": "Origin", "type": "nominal"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "second": {"mark": "bar", "encoding": {"x": {"scale": {"zero": false}, "field": "Origin", "type": "nominal"}, "y": {"scale": {"zero": false}, "type": "quantitative", "aggregate": "count"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "properties": {}}, {"first": {"mark": "rule", "encoding": {"row": {"scale": {"zero": false}, "field": "Cylinders", "type": "ordinal", "aggregate": "min"}, "x": {"scale": {"zero": true}, "type": "quantitative", "aggregate": 
"count"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "second": {"mark": "bar", "encoding": {"y": {"scale": {"zero": false}, "field": "Cylinders", "type": "ordinal"}, "x": {"scale": {"zero": false}, "type": "quantitative", "aggregate": "count"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "properties": {}}, {"first": {"mark": "area", "encoding": {"x": {"scale": {"zero": true}, "type": "quantitative", "aggregate": "count"}, "y": {"scale": {"zero": true}, "type": "quantitative", "aggregate": "count"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "second": {"mark": "bar", "encoding": {"y": {"scale": {"zero": false}, "field": "Origin", "type": "nominal"}, "x": {"scale": {"zero": false}, "type": "quantitative", "aggregate": "count"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "properties": {}}, {"first": {"mark": "area", "encoding": {"row": {"scale": {"zero": false}, "field": "Origin", "type": "ordinal", "aggregate": "min"}, "y": {"scale": {"zero": true}, "field": "Horsepower", "type": "quantitative", "aggregate": "min"}, "x": {"scale": {"zero": true}, "field": "Acceleration", "type": "quantitative", "aggregate": "min"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "second": {"mark": "point", "encoding": {"y": {"scale": {"zero": false}, "field": "Origin", "type": "ordinal"}, "size": {"scale": {"zero": false}, "field": "Horsepower", "type": "quantitative", "aggregate": "mean"}, "x": {"scale": {"zero": false}, "field": "Acceleration", "type": "quantitative", "bin": {"maxbins": 10}}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "properties": {}}, {"first": {"mark": "area", "encoding": {"row": {"scale": {"zero": false}, "field": 
"Cylinders", "type": "nominal"}, "x": {"scale": {"zero": true}, "field": "Horsepower", "type": "quantitative", "aggregate": "min"}, "y": {"scale": {"zero": true}, "field": "Acceleration", "type": "quantitative", "aggregate": "min"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "second": {"mark": "point", "encoding": {"y": {"scale": {"zero": false}, "field": "Cylinders", "type": "nominal"}, "size": {"scale": {"zero": false}, "field": "Horsepower", "type": "quantitative", "aggregate": "mean"}, "x": {"scale": {"zero": false}, "field": "Acceleration", "type": "quantitative", "bin": {"maxbins": 10}}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "properties": {}}, {"first": {"mark": "bar", "encoding": {"x": {"scale": {"zero": false}, "field": "Miles_per_Gallon", "type": "quantitative", "bin": {"maxbins": 10}}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "second": {"mark": "bar", "encoding": {"x": {"scale": {"zero": false}, "field": "Miles_per_Gallon", "type": "quantitative", "bin": {"maxbins": 5}}, "y": {"scale": {"zero": false}, "type": "quantitative", "aggregate": "count"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "properties": {}}, {"first": {"mark": "tick", "encoding": {"row": {"scale": {"zero": false}, "field": "Origin", "type": "nominal"}, "x": {"scale": {"zero": true}, "field": "Horsepower", "type": "quantitative"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "second": {"mark": "tick", "encoding": {"y": {"scale": {"zero": false}, "field": "Origin", "type": "nominal"}, "x": {"scale": {"zero": false}, "field": "Horsepower", "type": "quantitative"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "properties": {}}, {"first": {"mark": 
"line", "encoding": {"x": {"scale": {"zero": true}, "field": "Horsepower", "type": "quantitative", "aggregate": "mean"}, "y": {"scale": {"zero": false}, "field": "Cylinders", "type": "nominal"}, "row": {"scale": {"zero": false}, "field": "Origin", "type": "nominal"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "second": {"mark": "bar", "encoding": {"x": {"scale": {"zero": false}, "field": "Horsepower", "type": "quantitative", "aggregate": "mean"}, "y": {"scale": {"zero": false}, "field": "Cylinders", "type": "nominal"}, "row": {"scale": {"zero": false}, "field": "Origin", "type": "nominal"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "properties": {}}, {"first": {"mark": "line", "encoding": {"row": {"scale": {"zero": false}, "field": "Cylinders", "type": "nominal"}, "y": {"scale": {"zero": false}, "field": "Origin", "type": "ordinal", "aggregate": "min"}, "x": {"scale": {"zero": true}, "field": "Acceleration", "type": "quantitative", "aggregate": "min"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "second": {"mark": "point", "encoding": {"x": {"scale": {"zero": false}, "field": "Cylinders", "type": "nominal"}, "y": {"scale": {"zero": false}, "field": "Origin", "type": "ordinal"}, "size": {"scale": {"zero": false}, "field": "Acceleration", "type": "quantitative", "aggregate": "mean"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "properties": {}}, {"first": {"mark": "tick", "encoding": {"row": {"scale": {"zero": false}, "field": "Cylinders", "type": "ordinal"}, "x": {"scale": {"zero": true}, "field": "Horsepower", "type": "quantitative"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "second": {"mark": "tick", "encoding": {"y": {"scale": {"zero": false}, "field": "Cylinders", "type": 
"ordinal"}, "x": {"scale": {"zero": false}, "field": "Horsepower", "type": "quantitative"}}, "data": {"url": "data/cars.json"}, "$schema": "https://vega.github.io/schema/vega-lite/v2.0.json"}, "properties": {}}]} -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

2 | 3 | 4 | 5 |

6 | 7 | # Formalizing Visualization Design Knowledge as Constraints 8 | 9 | **This repository contains the source for the original Draco project. We develop a much improved Draco 2 at https://github.com/cmudig/draco2.** 10 | 11 | ![Test](https://github.com/uwdata/draco/workflows/Test/badge.svg) 12 | [![Coverage Status](https://coveralls.io/repos/github/uwdata/draco/badge.svg?branch=master)](https://coveralls.io/github/uwdata/draco?branch=master) 13 | [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/ambv/black) 14 | [![code style: prettier](https://img.shields.io/badge/code_style-prettier-ff69b4.svg?style=rounded)](https://github.com/prettier/prettier) 15 | 16 | Draco is a formal framework for representing design knowledge about effective visualization design as a collection of constraints. You can use Draco to find effective visualization designs in Vega-Lite. Draco's constraints are implemented based on Answer Set Programming (ASP) and solved with the Clingo constraint solver. We also implemented a way to learn weights for the recommendation system directly from the results of graphical perception experiments. 17 | 18 | Read our introductory [blog post about Draco](https://medium.com/@uwdata/draco-representing-applying-learning-visualization-design-guidelines-64ce20287e9d) and our [research paper](https://idl.cs.washington.edu/papers/draco/) for more details. Try Draco in the browser at https://uwdata.github.io/draco-editor. 19 | 20 | ## Status 21 | 22 | **There Be Dragons!** This project is in active development and we are working hard on cleaning up the repository and making it easier to use the recommendation model in Draco. If you want to use this right now, please talk to us. More documentation is forthcoming. 
23 | 24 | ## Overview 25 | 26 | This repository currently contains: 27 | 28 | * [**draco**](https://pypi.org/project/draco/) (pypi) The ASP programs with soft and hard constraints, a python API for [running Draco](https://github.com/uwdata/draco/blob/master/draco/run.py), the [CLI](https://github.com/uwdata/draco/blob/master/draco/cli.py), and the [python wrapper](https://github.com/uwdata/draco/blob/master/draco/js.py) for the **draco-core** API. Additionally includes some [helper functions](https://github.com/uwdata/draco/blob/master/draco/helper.py) that may prove useful. 29 | * [**draco-core**](https://www.npmjs.com/package/draco-core) (npm) Holds a Typescript / Javascript friendly copy of the ASP programs, and additionally, a Typescript / Javascript API for all the translation logic of Draco, as described below. 30 | 31 | ### Sibling Repositories 32 | 33 | Various functionality and extensions are in the following repositories 34 | 35 | * [draco-vis](https://github.com/uwdata/draco-vis) 36 | * A web-friendly Draco! Including a bundled Webassembly module of Draco's solver, Clingo. 37 | 38 | * [draco-learn](https://github.com/uwdata/draco-learn) 39 | * Runs a learning-to-rank method on results of perception experiments. 40 | 41 | * [draco-tools](https://github.com/uwdata/draco-tools) 42 | * UI tools to create annotated datasets of pairs of visualizations, look at the recommendations, and to explore large datasets of example visualizations. 43 | 44 | * [draco-analysis](https://github.com/uwdata/draco-analysis) 45 | * Notebooks to analyze the results. 46 | 47 | ## Draco API (Python) 48 | 49 | In addition to a wrapper of the Draco-Core API described below, the python API contains the following functions. 50 | 51 | *object* **Result** [<>](https://github.com/uwdata/draco/blob/2de31e3eeb6eab29577b1b09a92ab3c0fd7bd2e0/draco/run.py#L36) 52 | 53 | >The result of a Draco run, a solution to a draco_query. 
Use `result.as_vl()` to convert this solution into a Vega-Lite specification. 54 | 55 | **run** *(draco_query: List[str] [,constants, files, relax_hard, silence_warnings, debug, clear_cache]) -> Result:* [<>](https://github.com/uwdata/draco/blob/2de31e3eeb6eab29577b1b09a92ab3c0fd7bd2e0/draco/run.py#L115) 56 | 57 | >Runs a `draco_query`, defined as a list of Draco ASP facts (strings), against the given `files` ASP programs (defaults to base Draco set). Returns a `Result` if the query is satisfiable. If `relax_hard` is set to `True`, hard constraints (`hard.lp`) will not be strictly enforced, and instead will incur an infinite cost when violated. 58 | 59 | **is_valid** *(draco_query: List[str] [,debug]) -> bool:* [<>](https://github.com/uwdata/draco/blob/2de31e3eeb6eab29577b1b09a92ab3c0fd7bd2e0/draco/helper.py#L10) 60 | 61 | >Runs a `draco_query`, defined as a list of Draco ASP facts (strings), against Draco's hard constraints. Returns true if the visualization defined by the query is a valid one (does not violate hard constraints), and false otherwise. Hard constraints can be found in [`hard.lp`](https://github.com/uwdata/draco/blob/master/asp/hard.lp). 62 | 63 | **data_to_asp** *(data: List) -> List[str]:* [<>](https://github.com/uwdata/draco/blob/2de31e3eeb6eab29577b1b09a92ab3c0fd7bd2e0/draco/helper.py#L24) 64 | 65 | >Reads an array of `data` and returns the ASP declaration of it (a list of facts). 66 | 67 | **read_data_to_asp** *(file: str) -> List[str]:* [<>](https://github.com/uwdata/draco/blob/2de31e3eeb6eab29577b1b09a92ab3c0fd7bd2e0/draco/helper.py#L24) 68 | 69 | >Reads a `file` of data (either `.json` or `.csv`) and returns the ASP declaration of it (a list of facts). 70 | 71 | ## Draco-Core API (Typescript / Javascript) 72 | 73 | **vl2asp** *(spec: TopLevelUnitSpec): string[]* [<>](https://github.com/uwdata/draco/blob/master/js/src/vl2asp.ts) 74 | 75 | >Translates a Vega-Lite specification into a list of ASP Draco facts. 
76 | 77 | **cql2asp** *(spec: any): string[]* [<>](https://github.com/uwdata/draco/blob/master/js/src/cql2asp.ts) 78 | 79 | >Translates a CompassQL specification into a list of ASP Draco constraints. 80 | 81 | **asp2vl** *(facts: string[]): TopLevelUnitSpec* [<>](https://github.com/uwdata/draco/blob/master/js/src/asp2vl.ts) 82 | 83 | >Interprets a list of ASP Draco facts as a Vega-Lite specification. 84 | 85 | **data2schema** *(data: any[]): Schema* [<>](https://github.com/uwdata/draco/blob/master/js/src/data2schema.ts) 86 | 87 | >Reads a list of rows and generates a data schema for the dataset. `data` should be given as a list of dictionaries. 88 | 89 | **schema2asp** *(schema: Schema): string[]* [<>](https://github.com/uwdata/draco/blob/master/js/src/schema2asp.ts) 90 | 91 | >Translates a data schema into an ASP declaration of the data it describes. 92 | 93 | **constraints2json** *(constraintsAsp: string, weightsAsp?: string): Constraint[]* [<>](https://github.com/uwdata/draco/blob/master/js/src/constraints2json.ts) 94 | 95 | >Translates the given ASP constraints and matching weights (i.e. for soft constraints) into JSON format. 96 | 97 | **json2constraints** *(constraints: Constraint[]): ConstraintAsp* [<>](https://github.com/uwdata/draco/blob/master/js/src/json2constraints.ts) 98 | 99 | >Translates the given JSON format ASP constraints into ASP strings for definitions and weights (if applicable, i.e. for soft constraints). 100 | 101 | ## User Info 102 | 103 | ### Installation 104 | 105 | #### Python (Draco API) 106 | 107 | ##### Install Clingo 108 | 109 | You can install Clingo with conda: `conda install -c potassco clingo`. On MacOS, you can alternatively run `brew install clingo`. 
110 | 111 | ##### Install Draco (Python) 112 | 113 | `pip install draco` 114 | 115 | #### Typescript / Javascript (Draco-Core API) 116 | 117 | **STOP!** If you wish to **run** Draco in a **web browser**, consider using [**draco-vis**](https://github.com/uwdata/draco-vis), which bundles the Clingo solver as a WebAssembly module. The Draco-Core API does not include this functionality by itself. It merely handles the logic of translating between the various interface languages. 118 | 119 | `yarn add draco-core` or `npm install draco-core` 120 | 121 | ## Developer Info 122 | 123 | ### Installation 124 | 125 | #### Install Clingo. 126 | 127 | You can install Clingo with conda: `conda install -c potassco clingo`. On MacOS, you can alternatively run `brew install clingo`. 128 | 129 | #### Install node dependencies 130 | 131 | `yarn` or `npm install` 132 | 133 | You might need to activate a Python 2.7 environment to compile the canvas module. 134 | 135 | #### Build JS module 136 | 137 | `yarn build`. We are currently using typescript version 3.2.1 and greater. 138 | 139 | #### Python setup 140 | 141 | `pip install -r requirements.txt` or `conda install --file requirements.txt` 142 | 143 | Install Draco in editable mode. We expect Python 3. 144 | 145 | `pip install -e .` 146 | 147 | Now you can call the command line tool `draco`. For example `draco --version` or `draco --help`. 148 | 149 | 150 | #### Tests 151 | 152 | You should also be able to run the tests (and coverage report) 153 | 154 | `python setup.py test` 155 | 156 | ##### Run only ansunit tests 157 | 158 | `ansunit asp/tests.yaml` 159 | 160 | ##### Run only python tests 161 | 162 | `pytest -v` 163 | 164 | ##### Test types 165 | 166 | `mypy draco tests --ignore-missing-imports` 167 | 168 | ### Running Draco 169 | 170 | #### End to end example 171 | 172 | To run Draco on a partial spec. 
173 | 174 | `sh run_pipeline.sh spec` 175 | 176 | The output would be a .vl.json file (for Vega-Lite spec) and a .png file to preview the visualization (by default, outputs would be in folder `__tmp__`). 177 | 178 | #### Use CompassQL to generate examples 179 | 180 | Run `yarn build_cql_examples`. 181 | 182 | #### Run Draco directly on a set of ASP constraints 183 | 184 | You can use the helper file `asp/_all.lp`. 185 | 186 | `clingo asp/_all.lp test.lp` 187 | 188 | Alternatively, you can invoke Draco with `draco -m asp test.lp`. 189 | 190 | #### Run APT example 191 | 192 | `clingo asp/_apt.lp examples/example_apt.lp --opt-mode=optN --quiet=1 --project -c max_extra_encs=0` 193 | 194 | This only prints the relevant data and restricts the extra encodings that are being generated. 195 | 196 | ### Releases 197 | 198 | * Make sure everything works! 199 | * Update `__version__` in `draco/__init__.py` and use the right version below. 200 | * `git commit -m "bump version to 0.0.1"` 201 | * Tag the last commit `git tag -a v0.0.1`. 202 | * `git push` and `git push --tags` 203 | * Run `python setup.py sdist upload`. 204 | 205 | ## Resources 206 | 207 | ### Related Repositories 208 | 209 | Previous prototypes 210 | 211 | * https://github.com/domoritz/vis-csp 212 | * https://github.com/domoritz/vis-constraints 213 | 214 | For a bit of historical perspective, after building [Compass](https://github.com/vega/compass) and [CompassQL](https://github.com/vega/compassql) with @kanitw, @domoritz worked with colleagues at UW on a first constraint-based prototype [vis-csp](https://github.com/domoritz/vis-csp) in ASP, then tried Z3 in [vis-constraints](https://github.com/domoritz/vis-constraints) before going back to ASP for Draco. 
215 | 216 | Related software 217 | 218 | * https://github.com/uwdata/draco-vis 219 | * https://github.com/vega/compassql 220 | * https://github.com/potassco/clingo 221 | 222 | ### Guides 223 | 224 | * https://github.com/potassco/guide/releases/ 225 | --------------------------------------------------------------------------------