├── mta
│   ├── queries
│   │   ├── routes.sql
│   │   ├── latest.sql
│   │   ├── all_vehicle_points.sql
│   │   ├── chunk_metadata.sql
│   │   ├── time_bucket.sql
│   │   ├── reorder.sql
│   │   ├── off_route.sql
│   │   ├── geofence.sql
│   │   ├── gapfill.sql
│   │   ├── window.sql
│   │   └── buffer_meters.sql
│   ├── imgs
│   │   ├── bus1.png
│   │   └── bus2.png
│   ├── requirements.txt
│   ├── aws_lambda
│   │   ├── Dockerfile
│   │   ├── Makefile
│   │   ├── build
│   │   │   ├── package.sh
│   │   │   └── lambda_function.py
│   │   └── README.md
│   ├── gtfs-ingest.py
│   └── README.md
├── .gitattributes
├── air-quality
│   ├── requirements.txt
│   ├── README.md
│   ├── schema.sql
│   ├── airquality_ingest.py
│   └── grafana.json
├── compression-preview
│   ├── public
│   │   ├── robots.txt
│   │   ├── favicon.ico
│   │   ├── logo192.png
│   │   ├── logo512.png
│   │   ├── manifest.json
│   │   └── index.html
│   ├── src
│   │   ├── assets
│   │   │   └── images
│   │   │       ├── loading-arc.svg
│   │   │       ├── uncompressed.svg
│   │   │       ├── loading-circle.svg
│   │   │       ├── compressed.svg
│   │   │       ├── hasuraio-logo.svg
│   │   │       └── TimescaleLogoHorizontal1Svg.svg
│   │   ├── index.js
│   │   ├── components
│   │   │   ├── count.js
│   │   │   ├── buttons.scss
│   │   │   ├── button.js
│   │   │   ├── cardInfo.js
│   │   │   └── chunk.js
│   │   ├── App.css
│   │   ├── hooks
│   │   │   └── useOnHover.js
│   │   ├── App.js
│   │   ├── styles
│   │   │   ├── _normalize.scss
│   │   │   └── subscription.scss
│   │   └── Subscription.js
│   ├── .gitignore
│   └── package.json
├── analyze-intraday-stocks
│   ├── charts
│   │   ├── apple_price.png
│   │   ├── candlestick.png
│   │   ├── faang_prices.png
│   │   ├── candlestick_fig.png
│   │   ├── most_traded_symbols.png
│   │   ├── apple_trading_volume.png
│   │   ├── weekly_price_changes.png
│   │   └── distribution_price_changes.png
│   ├── config.py
│   ├── sql_script
│   │   ├── create_hypertable.sql
│   │   └── create_table.sql
│   ├── requirements.txt
│   ├── scrape_symbols.py
│   ├── symbols.csv
│   ├── readme.md
│   ├── insert_stocks_data.py
│   └── explore.py
├── hello-timescale
│   ├── nyc_data_rides.csv
│   └── nyc_data_setup.sql
├── crypto_tutorial
│   ├── Cryptocurrency dataset Sept 16 2019
│   │   ├── coin_names.csv
│   │   ├── btc_prices.csv
│   │   ├── eth_prices.csv
│   │   └── crypto_prices.csv
│   ├── schema.sql
│   ├── crypto_queries.sql
│   └── crypto_data_extraction.py
├── pi-light
│   ├── pi-schema.sql
│   ├── pi_photoresistor.service
│   ├── photoresistor.py
│   ├── README.md
│   └── grafana.json
├── clients
│   ├── tsdb-python-client.py
│   ├── tsdb-node-client.js
│   ├── tsdb-java-connection.java
│   └── readme.md
├── .gitignore
├── prometheus-grafana
│   └── README.md
├── README.md
├── grafana-guide
│   ├── advanced-tips
│   │   └── webinar-demo-queries.sql
│   ├── variables
│   │   ├── before_variable.json
│   │   └── after_variable.json
│   └── series-override
│       └── series_override.json
└── LICENSE
/mta/queries/routes.sql:
--------------------------------------------------------------------------------
1 | select * from route_geofences;
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | *.csv filter=lfs diff=lfs merge=lfs -text
2 |
--------------------------------------------------------------------------------
/air-quality/requirements.txt:
--------------------------------------------------------------------------------
1 | requests==2.22.0
2 | psycopg2==2.8.3
3 | pgcopy==1.4.0
4 |
--------------------------------------------------------------------------------
/mta/imgs/bus1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/timescale/examples/HEAD/mta/imgs/bus1.png
--------------------------------------------------------------------------------
/mta/imgs/bus2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/timescale/examples/HEAD/mta/imgs/bus2.png
--------------------------------------------------------------------------------
/mta/requirements.txt:
--------------------------------------------------------------------------------
1 | gtfs-realtime-bindings
2 | requests
3 | requests-cache
4 | psycopg2
5 |
--------------------------------------------------------------------------------
/compression-preview/public/robots.txt:
--------------------------------------------------------------------------------
1 | # https://www.robotstxt.org/robotstxt.html
2 | User-agent: *
3 | Disallow:
4 |
--------------------------------------------------------------------------------
/compression-preview/public/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/timescale/examples/HEAD/compression-preview/public/favicon.ico
--------------------------------------------------------------------------------
/compression-preview/public/logo192.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/timescale/examples/HEAD/compression-preview/public/logo192.png
--------------------------------------------------------------------------------
/compression-preview/public/logo512.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/timescale/examples/HEAD/compression-preview/public/logo512.png
--------------------------------------------------------------------------------
/analyze-intraday-stocks/charts/apple_price.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/timescale/examples/HEAD/analyze-intraday-stocks/charts/apple_price.png
--------------------------------------------------------------------------------
/analyze-intraday-stocks/charts/candlestick.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/timescale/examples/HEAD/analyze-intraday-stocks/charts/candlestick.png
--------------------------------------------------------------------------------
/analyze-intraday-stocks/charts/faang_prices.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/timescale/examples/HEAD/analyze-intraday-stocks/charts/faang_prices.png
--------------------------------------------------------------------------------
/analyze-intraday-stocks/charts/candlestick_fig.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/timescale/examples/HEAD/analyze-intraday-stocks/charts/candlestick_fig.png
--------------------------------------------------------------------------------
/analyze-intraday-stocks/charts/most_traded_symbols.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/timescale/examples/HEAD/analyze-intraday-stocks/charts/most_traded_symbols.png
--------------------------------------------------------------------------------
/analyze-intraday-stocks/charts/apple_trading_volume.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/timescale/examples/HEAD/analyze-intraday-stocks/charts/apple_trading_volume.png
--------------------------------------------------------------------------------
/analyze-intraday-stocks/charts/weekly_price_changes.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/timescale/examples/HEAD/analyze-intraday-stocks/charts/weekly_price_changes.png
--------------------------------------------------------------------------------
/analyze-intraday-stocks/charts/distribution_price_changes.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/timescale/examples/HEAD/analyze-intraday-stocks/charts/distribution_price_changes.png
--------------------------------------------------------------------------------
/hello-timescale/nyc_data_rides.csv:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:e0b6ff77be8ce0c50fb782d47f388296a001f6d02324d70fc4c98c66aa82004d
3 | size 1678606777
4 |
--------------------------------------------------------------------------------
/mta/queries/latest.sql:
--------------------------------------------------------------------------------
1 | set timezone to 'EST';
2 | --
3 | -- All data for the last minute
4 | --
5 |
6 | SELECT *
7 | FROM mta
8 | WHERE time > now() - interval '1 minute'
9 | ORDER BY time DESC;
--------------------------------------------------------------------------------
/mta/queries/all_vehicle_points.sql:
--------------------------------------------------------------------------------
1 | SELECT
2 | st_collect (geom), route_id
3 | FROM
4 | mta
5 | WHERE
6 | time > now() - interval '1 day'
7 | AND vid = 'MTA NYCT_1062'
8 | GROUP BY route_id
--------------------------------------------------------------------------------
/crypto_tutorial/Cryptocurrency dataset Sept 16 2019/coin_names.csv:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:cf667c37699f1dc3bda94b72c15098cd47cd31537e52466cc1d2b24b1a0c3f86
3 | size 66269
4 |
--------------------------------------------------------------------------------
/crypto_tutorial/Cryptocurrency dataset Sept 16 2019/btc_prices.csv:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:f6807e12c53bc2919d836935b6a25c74d36ebb1e9fcf1a967fc3218b8c5e5803
3 | size 5448783
4 |
--------------------------------------------------------------------------------
/crypto_tutorial/Cryptocurrency dataset Sept 16 2019/eth_prices.csv:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:68b3ddc8a3951f3f90ca664880ec46691a38841e8cf241931eaef8ddddff483a
3 | size 1733455
4 |
--------------------------------------------------------------------------------
/mta/queries/chunk_metadata.sql:
--------------------------------------------------------------------------------
1 | -- TimescaleDB metadata
2 | -- set_chunk_time_interval()
3 |
4 | select chunk_table, ranges, table_size, index_size, toast_size, total_size
5 | from chunk_relation_size_pretty('mta');
--------------------------------------------------------------------------------
/crypto_tutorial/Cryptocurrency dataset Sept 16 2019/crypto_prices.csv:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:b8c4c8b21e5be9baa1b84eb628d0ec2b508fa09a749a2ed7fa338a7592add485
3 | size 351999265
4 |
--------------------------------------------------------------------------------
/compression-preview/src/assets/images/loading-arc.svg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/timescale/examples/HEAD/compression-preview/src/assets/images/loading-arc.svg
--------------------------------------------------------------------------------
/pi-light/pi-schema.sql:
--------------------------------------------------------------------------------
1 | CREATE EXTENSION IF NOT EXISTS timescaledb CASCADE;
2 |
3 | CREATE TABLE pi_obs(
4 | time timestamptz,
5 | metric text,
6 | value numeric);
7 |
8 | SELECT create_hypertable('pi_obs', 'time', chunk_time_interval => interval '1 week');
9 |
--------------------------------------------------------------------------------
/analyze-intraday-stocks/config.py:
--------------------------------------------------------------------------------
1 | # Make sure to edit this configuration file with your database connection details
2 | # and Alpha Vantage API key
3 | DB_USER = 'user'
4 | DB_PASS = 'passwd'
5 | DB_HOST = 'host'
6 | DB_PORT = '000'
7 | DB_NAME = 'db'
8 | APIKEY = 'alpha_vantage_apikey'
--------------------------------------------------------------------------------
/mta/aws_lambda/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM amazonlinux
2 | RUN yum -y install git \
3 | python37 \
4 | python37-devel \
5 | python37-pip \
6 | zip \
7 | postgresql \
8 | gcc \
9 | && yum clean all
10 | RUN python3 -m pip install --upgrade pip \
11 | && python3 -m pip install boto3
12 |
--------------------------------------------------------------------------------
/compression-preview/src/assets/images/uncompressed.svg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/timescale/examples/HEAD/compression-preview/src/assets/images/uncompressed.svg
--------------------------------------------------------------------------------
/mta/aws_lambda/Makefile:
--------------------------------------------------------------------------------
1 | default: build_env
2 |
3 | image:
4 | docker build -t lambda-packager .
5 |
6 | build_env: image
7 | docker run --rm -v $(shell pwd)/build:/build -t lambda-packager /build/package.sh
8 |
9 |
10 | shell: image
11 | docker run --rm -v $(shell pwd)/build:/build -it lambda-packager /bin/bash
12 |
--------------------------------------------------------------------------------
/compression-preview/src/assets/images/loading-circle.svg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/timescale/examples/HEAD/compression-preview/src/assets/images/loading-circle.svg
--------------------------------------------------------------------------------
/analyze-intraday-stocks/sql_script/create_hypertable.sql:
--------------------------------------------------------------------------------
1 | /* Enable the TimescaleDB extension */
2 | CREATE EXTENSION IF NOT EXISTS timescaledb;
3 |
4 | /*
5 | Turn the 'stocks_intraday' table into a hypertable.
6 | This is important to be able to make use of TimescaleDB features later on.
7 | */
8 | SELECT create_hypertable('stocks_intraday', 'time');
--------------------------------------------------------------------------------
/pi-light/pi_photoresistor.service:
--------------------------------------------------------------------------------
1 | [Unit]
2 | Description=Run photoresistor sensor
3 | After=postgresql.service
4 |
5 | [Service]
6 | User=pi
7 | Environment=TIMESCALEDB_CONNECTION=postgres://USERNAME:PASSWORD@HOST:PORT/defaultdb?sslmode=require
8 | ExecStart=/home/pi/photoresistor.py
9 | Restart=always
10 |
11 | [Install]
12 | WantedBy=multi-user.target
13 |
--------------------------------------------------------------------------------
/clients/tsdb-python-client.py:
--------------------------------------------------------------------------------
1 | from psycopg2.extras import RealDictCursor
2 | import psycopg2
3 |
4 | uri = "postgres://YOUR-USER:YOUR-PASSWORD@YOUR-SERVICE.a.timescaledb.io:26479/defaultdb?sslmode=require"
5 |
6 | db_conn = psycopg2.connect(uri)
7 | c = db_conn.cursor(cursor_factory=RealDictCursor)
8 |
9 | c.execute("SELECT 1 = 1")
10 | result = c.fetchone()
11 |
--------------------------------------------------------------------------------
/mta/queries/time_bucket.sql:
--------------------------------------------------------------------------------
1 | set timezone to 'est';
2 | --
3 | -- Hourly count of the number of vehicles on a given route
4 | --
5 | SELECT
6 | count(distinct vid) as n_vehicles,
7 | time_bucket('1 hour', time) AS hour
8 | FROM
9 | mta
10 | WHERE
11 | time between '2019-03-05' AND '2019-03-06'
12 | AND route_id = 'M100'
13 | GROUP BY
14 | hour;
15 |
16 |
--------------------------------------------------------------------------------
/analyze-intraday-stocks/requirements.txt:
--------------------------------------------------------------------------------
1 | beautifulsoup4==4.9.3
2 | bs4==0.0.1
3 | certifi==2021.5.30
4 | chardet==4.0.0
5 | idna==2.10
6 | numpy==1.21.0
7 | pandas==1.3.0
8 | pgcopy==1.5.0
9 | plotly==5.1.0
10 | psycopg2==2.9.1
11 | python-dateutil==2.8.1
12 | pytz==2021.1
13 | requests==2.25.1
14 | six==1.16.0
15 | soupsieve==2.2.1
16 | tenacity==7.0.0
17 | urllib3==1.26.6
18 |
--------------------------------------------------------------------------------
/analyze-intraday-stocks/sql_script/create_table.sql:
--------------------------------------------------------------------------------
1 | /* Creates the table which will store stock data */
2 | CREATE TABLE public.stocks_intraday (
3 | "time" timestamp(0) NOT NULL,
4 | symbol varchar NULL,
5 | price_open float8 NULL,
6 | price_close float8 NULL,
7 | price_low float8 NULL,
8 | price_high float8 NULL,
9 | trading_volume int4 NULL
10 | );
11 |
--------------------------------------------------------------------------------
/mta/aws_lambda/build/package.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | python3 -m venv /tmp/env
3 | source /tmp/env/bin/activate
4 | python3 -m pip install protobuf
5 | python3 -m pip install gtfs-realtime-bindings
6 | python3 -m pip install -U requests psycopg2-binary
7 | rm -f /build/lambda.zip
8 | mkdir /tmp/staging
9 | cd /tmp/staging
10 | cp -r /tmp/env/lib/python3.7/site-packages/* .
11 | cp -r /build/*.py .
12 | zip -r /build/lambda.zip *
13 |
--------------------------------------------------------------------------------
/mta/queries/reorder.sql:
--------------------------------------------------------------------------------
1 | -- Count of all observations for a given route
2 | -- Impact of on-disk ordering, see "shared hit blocks" and "shared read blocks"
3 | --
4 | -- Note: already run
5 | -- SELECT reorder_chunk('_timescaledb_internal._hyper_1_41_chunk', 'idx_mta_route_id');
6 |
7 | -- explain (analyze, buffers)
8 | SELECT count(1)
9 | FROM mta
10 | WHERE time between '2019-03-05 0:00' AND '2019-03-05 23:59'
11 | AND route_id = 'S86';
--------------------------------------------------------------------------------
/compression-preview/.gitignore:
--------------------------------------------------------------------------------
1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2 |
3 | # dependencies
4 | /node_modules
5 | /.pnp
6 | .pnp.js
7 |
8 | # testing
9 | /coverage
10 |
11 | # production
12 | /build
13 |
14 | # misc
15 | .DS_Store
16 | .env.local
17 | .env.development.local
18 | .env.test.local
19 | .env.production.local
20 | .env
21 | npm-debug.log*
22 | yarn-debug.log*
23 | yarn-error.log*
24 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | **/.DS_Store
2 | /air-quality/venv
3 | .idea/examples.iml
4 | .idea/misc.xml
5 | .idea/modules.xml
6 | .idea/vcs.xml
7 | .idea/workspace.xml
8 | .idea/inspectionProfiles/profiles_settings.xml
9 | .vscode/settings.json
10 | stocks-analysis/__pycache__/config.cpython-38.pyc
11 | stocks-analysis/.vscode/settings.json
12 | .vscode/PythonImportHelper-v2-Completion.json
13 | stocks-analysis/.vscode/launch.json
14 | stocks-analysis/.vscode/PythonImportHelper-v2-Completion.json
15 |
--------------------------------------------------------------------------------
/compression-preview/src/index.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import ReactDOM from 'react-dom';
3 | import App from './App';
4 |
5 | ReactDOM.render(
6 |   <React.StrictMode>
7 |     <App />
8 |   </React.StrictMode>,
9 | document.getElementById('root')
10 | );
11 |
12 | // If you want to start measuring performance in your app, pass a function
13 | // to log results (for example: reportWebVitals(console.log))
14 | // or send to an analytics endpoint. Learn more: https://bit.ly/CRA-vitals
15 |
--------------------------------------------------------------------------------
/mta/queries/off_route.sql:
--------------------------------------------------------------------------------
1 | --
2 | -- Vehicles on M routes
3 | -- off-route in the last 15 minutes
4 | --
5 | SELECT
6 | bus.route_id,
7 | bus.time,
8 | bus.geom
9 | FROM
10 | route_geofences AS route
11 | JOIN mta AS bus
12 | ON (route.route_id = bus.route_id)
13 | WHERE
14 | bus.time > now() - interval '15 minutes'
15 | AND
16 | bus.route_id like 'M%'
17 | AND NOT
18 | st_within(bus.geom, route.geom)
19 | UNION
20 | select route_id, null, geom from route_geofences where route_id like 'M%';
--------------------------------------------------------------------------------
/mta/queries/geofence.sql:
--------------------------------------------------------------------------------
1 | set timezone to 'est';
2 | --
3 | -- What bus routes pass near 355 Madison Ave each hour?
4 | --
5 |
6 |
7 | WITH geofence AS (
8 | SELECT buffer_meters(-74.00482, 40.7233, 200) AS buffer
9 | )
10 | SELECT
11 | time_bucket_gapfill('1 hour', time) AS hour,
12 | array_agg(DISTINCT route_id) AS nearby_routes
13 | FROM
14 | mta,
15 | geofence
16 | WHERE
17 | time BETWEEN now() - interval '5 days' AND now()
18 | AND st_intersects(buffer, mta.geom)
19 | GROUP BY
20 | hour;
21 |
22 |
--------------------------------------------------------------------------------
/compression-preview/src/assets/images/compressed.svg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/timescale/examples/HEAD/compression-preview/src/assets/images/compressed.svg
--------------------------------------------------------------------------------
/mta/queries/gapfill.sql:
--------------------------------------------------------------------------------
1 | set timezone to 'est';
2 | --
3 | -- Hourly count of the number of vehicles on a given route
4 | -- with gapfill, locf, and interpolate
5 | --
6 | SELECT
7 | -- 4 ways to handle data gaps
8 | count(distinct vid) as n_vehicles,
9 | coalesce(count(distinct vid), 0) as count,
10 | locf(count(distinct vid)),
11 | interpolate(count(distinct vid)::real),
12 | time_bucket_gapfill('1 hour', time) AS hour
13 | FROM
14 | mta
15 | WHERE
16 | time between '2019-03-05' AND '2019-03-06'
17 | AND route_id = 'M100'
18 | GROUP BY
19 | hour;
--------------------------------------------------------------------------------
/mta/queries/window.sql:
--------------------------------------------------------------------------------
1 | WITH window_mta AS (
2 | SELECT
3 | time,
4 | extract(epoch from (time - lag(time, 1) OVER (ORDER by time asc))) as time_delta,
5 | st_distance(
6 | st_transform(geom, 3857), -- transform to a spatial reference system in meters
7 | lag(st_transform(geom, 3857), 1) OVER (ORDER by time asc)
8 | ) as distance
9 | FROM MTA
10 | WHERE time > now() - interval '1 day'
11 | AND vid = 'MTA NYCT_1062'
12 | )
13 | SELECT time, time_delta, distance, (distance / time_delta) * 2.23694 as mph
14 | FROM window_mta
15 | WHERE time_delta > 0
--------------------------------------------------------------------------------
/mta/queries/buffer_meters.sql:
--------------------------------------------------------------------------------
1 | create or replace function buffer_meters (lon numeric, lat numeric, meters numeric)
2 | returns geometry as $$
3 | BEGIN
4 | -- Buffers by meters in the spherical mercator projection
5 | -- (not accurate at high latitudes)
6 | RETURN ST_Transform(
7 | ST_Buffer(
8 | ST_Transform(
9 | ST_SetSRID(
10 | ST_MakePoint(lon, lat),
11 | 4326), -- longitude,latitude
12 | 3857), -- spherical mercator
13 | meters),
14 | 4326); -- back to longitude, latitude
15 | END;
16 | $$ language plpgsql;
17 |
18 | SELECT buffer_meters(-73.97854, 40.75364, 200);
--------------------------------------------------------------------------------
/compression-preview/src/components/count.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import CountUp from 'react-countup';
3 | function Count({
4 | start = 0,
5 | end,
6 | prefix,
7 | suffix,
8 | duration = .5,
9 | decimals = 0,
10 | }) {
11 |
12 | return (
13 |     <> {/* wrapper reconstructed; the original tags were stripped in export */}
14 |       <CountUp
15 |         start={start}
16 |         end={end > 50000 ? end - 5000 : end}
17 |         duration={duration}
18 |         separator=","
19 |         decimals={decimals}
20 |         prefix={prefix}
21 |         suffix={suffix}
22 |         useEasing
23 |       />
24 |     </>
25 | );
26 | }
27 |
28 | export default Count;
29 |
--------------------------------------------------------------------------------
/compression-preview/public/manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "short_name": "React App",
3 | "name": "Create React App Sample",
4 | "icons": [
5 | {
6 | "src": "favicon.ico",
7 | "sizes": "64x64 32x32 24x24 16x16",
8 | "type": "image/x-icon"
9 | },
10 | {
11 | "src": "logo192.png",
12 | "type": "image/png",
13 | "sizes": "192x192"
14 | },
15 | {
16 | "src": "logo512.png",
17 | "type": "image/png",
18 | "sizes": "512x512"
19 | }
20 | ],
21 | "start_url": ".",
22 | "display": "standalone",
23 | "theme_color": "#000000",
24 | "background_color": "#ffffff"
25 | }
26 |
--------------------------------------------------------------------------------
/clients/tsdb-node-client.js:
--------------------------------------------------------------------------------
1 | var pg = require('pg');
2 |
3 | var config = {
4 | database: "defaultdb",
5 | host: "YOUR-SERVICE.a.timescaledb.io",
6 | password: "YOUR-PASSWORD",
7 | port: 26479,
8 | ssl: "require",
9 | user: "YOUR-USER",
10 | };
11 |
12 | var client = new pg.Client(config);
13 |
14 | client.connect(function (err) {
15 | if (err)
16 | throw err;
17 | client.query('SELECT 1 AS value', [], function (err, result) {
18 | if (err)
19 | throw err;
20 |
21 | console.log(result.rows[0]);
22 | client.end(function (err) {
23 | if (err)
24 | throw err;
25 | });
26 | });
27 | });
--------------------------------------------------------------------------------
/compression-preview/src/App.css:
--------------------------------------------------------------------------------
1 | @import-normalize;
2 | .App {
3 | text-align: center;
4 | }
5 |
6 | .App-logo {
7 | height: 40vmin;
8 | pointer-events: none;
9 | }
10 |
11 | @media (prefers-reduced-motion: no-preference) {
12 | .App-logo {
13 | animation: App-logo-spin infinite 20s linear;
14 | }
15 | }
16 |
17 | .App-header {
18 | background-color: #282c34;
19 | min-height: 100vh;
20 | display: flex;
21 | flex-direction: column;
22 | align-items: center;
23 | justify-content: center;
24 | font-size: calc(10px + 2vmin);
25 | color: white;
26 | }
27 |
28 | .App-link {
29 | color: #61dafb;
30 | }
31 |
32 | @keyframes App-logo-spin {
33 | from {
34 | transform: rotate(0deg);
35 | }
36 | to {
37 | transform: rotate(360deg);
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/clients/tsdb-java-connection.java:
--------------------------------------------------------------------------------
1 | package pg;
2 |
3 | import java.sql.Connection;
4 | import java.sql.DriverManager;
5 | import java.util.Properties;
6 |
7 | public final class Connect {
8 | public static void main(String[] args) {
9 | Properties props = new Properties();
10 | props.put("jdbc.url", "jdbc:postgresql://YOUR-SERVICE.a.timescaledb.io:20985/defaultdb");
11 | props.put("user", "YOUR-USER");
12 | props.put("password", "YOUR-PASSWORD");
13 | props.put("ssl", "true");
14 | props.put("sslmode", "verify-ca");
15 | props.put("sslrootcert", "/path/to/ca.pem");
16 |
17 | try {
18 | Connection c = DriverManager.getConnection(props.getProperty("jdbc.url"), props);
19 | System.out.println("Success");
20 | c.close();
21 | } catch (Exception e) {
22 | e.printStackTrace();
23 | }
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/compression-preview/src/hooks/useOnHover.js:
--------------------------------------------------------------------------------
1 | import { useState, useEffect, useRef } from 'react';
2 | const useHover = () => {
3 | const ref = useRef();
4 | const [hovered, setHovered] = useState(false);
5 | const enter = () => setHovered(true);
6 | const leave = () => setHovered(false);
7 | // eslint-disable-next-line consistent-return
8 | useEffect(() => {
9 | const el = ref.current; // cache external ref value for cleanup use
10 | if (el) {
11 | el.addEventListener('mouseenter', enter);
12 | el.addEventListener('mouseover', enter);
13 | el.addEventListener('mouseleave', leave);
14 | return () => {
15 | el.removeEventListener('mouseenter', enter);
16 | el.removeEventListener('mouseover', enter);
17 | el.removeEventListener('mouseleave', leave);
18 | };
19 | }
20 | }, []);
21 | return [ref, hovered];
22 | };
23 | export default useHover;
24 |
--------------------------------------------------------------------------------
/analyze-intraday-stocks/scrape_symbols.py:
--------------------------------------------------------------------------------
1 | import requests, csv
2 | from bs4 import BeautifulSoup
3 |
4 | def scrape_symbols():
5 | """Scrapes ticker symbols of top 100 US companies (based on market cap)
6 |
7 | Returns:
8 | list of strings: 100 ticker symbols
9 | """
10 |
11 | url = 'https://companiesmarketcap.com/usa/largest-companies-in-the-usa-by-market-cap/'
12 | html = requests.get(url).text
13 | soup = BeautifulSoup(html, 'html.parser')
14 | return [e.text for e in soup.select('div.company-code')]
15 |
16 | def create_symbols_csv():
17 | with open("symbols.csv", "w", newline='') as f:
18 | writer = csv.writer(f)
19 | for symbol in scrape_symbols():
20 | writer.writerow([symbol])
21 |
22 | def read_symbols_csv():
23 | with open('symbols.csv') as f:
24 | reader = csv.reader(f)
25 | return [row[0] for row in reader]
26 |
27 | create_symbols_csv()
28 | symbols = read_symbols_csv()
29 | print(symbols)
--------------------------------------------------------------------------------
/compression-preview/src/App.js:
--------------------------------------------------------------------------------
1 | import { ApolloClient, ApolloProvider, InMemoryCache } from '@apollo/client';
2 | import Subscription from './Subscription';
3 | import { WebSocketLink } from '@apollo/client/link/ws';
4 | import './App.css';
5 |
6 | const createApolloClient = () => {
7 | return new ApolloClient({
8 | link: new WebSocketLink({
9 | uri: 'wss://fleet-bunny-18.hasura.app/v1/graphql',
10 | options: {
11 | reconnect: true,
12 | connectionParams: {
13 | headers: {
14 | 'x-hasura-admin-secret':
15 | process.env.REACT_APP_X_HASURA_ADMIN_SECRET,
16 | },
17 | },
18 | },
19 | }),
20 | cache: new InMemoryCache(),
21 | });
22 | };
23 |
24 |
25 | function App() {
26 | const client = createApolloClient();
27 |
28 | return (
29 |     <ApolloProvider client={client}>
30 |       <Subscription />
31 |     </ApolloProvider>
32 | );
33 | }
34 |
35 | export default App;
36 |
--------------------------------------------------------------------------------
/analyze-intraday-stocks/symbols.csv:
--------------------------------------------------------------------------------
1 | AAPL
2 | MSFT
3 | AMZN
4 | GOOG
5 | FB
6 | BRK-A
7 | TSLA
8 | V
9 | NVDA
10 | JPM
11 | JNJ
12 | WMT
13 | UNH
14 | MA
15 | BAC
16 | PYPL
17 | HD
18 | PG
19 | DIS
20 | ADBE
21 | CMCSA
22 | XOM
23 | NKE
24 | NFLX
25 | VZ
26 | KO
27 | ORCL
28 | CRM
29 | INTC
30 | LLY
31 | CSCO
32 | PFE
33 | ABT
34 | T
35 | PEP
36 | ABBV
37 | TMO
38 | CVX
39 | DHR
40 | MRK
41 | AVGO
42 | UPS
43 | TMUS
44 | WFC
45 | COST
46 | TXN
47 | MCD
48 | MS
49 | QCOM
50 | PM
51 | HON
52 | UNP
53 | BMY
54 | NEE
55 | C
56 | AMGN
57 | BA
58 | LOW
59 | CHTR
60 | INTU
61 | AXP
62 | BLK
63 | SBUX
64 | SCHW
65 | IBM
66 | RTX
67 | GS
68 | AMT
69 | AMAT
70 | TGT
71 | ZM
72 | CAT
73 | EL
74 | AMD
75 | MMM
76 | GE
77 | ISRG
78 | SQ
79 | NOW
80 | DE
81 | SNAP
82 | CVS
83 | LMT
84 | SPGI
85 | SYK
86 | UBER
87 | ANTM
88 | MRNA
89 | ABNB
90 | ZTS
91 | BKNG
92 | MU
93 | PLD
94 | FIS
95 | LRCX
96 | MDLZ
97 | CCI
98 | MO
99 | GILD
100 | ADP
101 |
--------------------------------------------------------------------------------
/clients/readme.md:
--------------------------------------------------------------------------------
1 | # Clients
2 |
3 | Here is a list of client libraries and code examples to connect to a TimescaleDB instance.
4 |
5 |
6 | | Language | Client | Download |
7 | |---|---|---|
8 | | Go | pq | https://github.com/lib/pq |
9 | | [Java](#java) | JDBC Driver | https://jdbc.postgresql.org/ |
10 | | [Node.js](#nodejs) | pg package | https://www.npmjs.com/package/pg |
11 | | [Python](#python) | psycopg2 | https://pypi.org/project/psycopg2/ |
12 |
13 |
14 | ### Java
15 |
16 | Include the Maven dependency for the PostgreSQL JDBC Driver:
17 |
18 |     <dependency>
19 |         <groupId>org.postgresql</groupId>
20 |         <artifactId>postgresql</artifactId>
21 |         <version>42.2.0</version>
22 |     </dependency>
23 |
24 | Java [sample code here](tsdb-java-connection.java).
25 |
26 | ### Node.js
27 |
28 | Install node-postgres [pg](https://www.npmjs.com/package/pg) package:
29 |
30 | npm install pg
31 |
32 | Run the [sample](tsdb-node-client.js):
33 |
34 | node tsdb-node-client.js
35 |
36 |
37 | ### Python
38 |
39 | Install the Python PostgreSQL Database Adapter [psycopg](http://initd.org/psycopg/) package:
40 |
41 | pip install psycopg2
42 |
43 | Run the [sample](tsdb-python-client.py):
44 |
45 | python tsdb-python-client.py
46 |
47 |
--------------------------------------------------------------------------------
/compression-preview/src/components/buttons.scss:
--------------------------------------------------------------------------------
1 | @import '~@timescale/web-styles/src/styles/utils';
2 |
3 | .btn {
4 | padding: 12px;
5 | max-width: 160px;
6 | width: 100%;
7 | @include body-copy(3);
8 | font-weight: 700;
9 | text-transform: uppercase;
10 |
11 | &__add-data {
12 | background: #ffffff;
13 | border: 2px solid #fdb515;
14 | box-sizing: border-box;
15 | border-radius: 4px;
16 | transition: background-color 0.3s;
17 |
18 | &:hover {
19 | background: color($gold, 500);
20 | cursor: pointer;
21 | }
22 | }
23 |
24 | &__compress {
25 | background-color: color($gold);
26 | border: 2px solid #fdb515;
27 | box-sizing: border-box;
28 | border-radius: 4px;
29 | text-transform: uppercase;
30 | transition: background-color 0.3s;
31 |
32 | &:hover {
33 | background: color($gold, 500);
34 | cursor: pointer;
35 | }
36 |
37 | &--disabled {
38 | pointer-events: none;
39 | background-color: color($navy, 200);
40 | }
41 | }
42 |
43 | &__decompress {
44 | background-color: color($blue);
45 | border: 2px solid color($blue);
46 | border-radius: 4px;
47 | transition: background-color 0.3s;
48 | color: white;
49 |
50 | &:hover {
51 | background: color($blue, 500);
52 | cursor: pointer;
53 | }
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
/compression-preview/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "presentation-app",
3 | "version": "0.1.0",
4 | "private": true,
5 | "dependencies": {
6 | "@apollo/client": "^3.3.19",
7 | "@testing-library/jest-dom": "^5.11.4",
8 | "@testing-library/react": "^11.1.0",
9 | "@testing-library/user-event": "^12.1.10",
10 | "@timescale/web-styles": "git+ssh://git@github.com/timescale/web-styles.git",
11 | "classnames": "^2.3.1",
12 | "graphql": "^15.5.0",
13 | "node-sass": "5.0",
14 | "normalize.css": "^8.0.1",
15 | "prismjs": "^1.23.0",
16 | "react": "^17.0.2",
17 | "react-countup": "^4.3.3",
18 | "react-dom": "^17.0.2",
19 | "react-scripts": "4.0.3",
20 | "subscriptions-transport-ws": "^0.9.18",
21 | "web-vitals": "^1.0.1"
22 | },
23 | "scripts": {
24 | "start": "react-scripts start",
25 | "build": "react-scripts build",
26 | "test": "react-scripts test",
27 | "eject": "react-scripts eject"
28 | },
29 | "eslintConfig": {
30 | "extends": [
31 | "react-app",
32 | "react-app/jest"
33 | ]
34 | },
35 | "browserslist": {
36 | "production": [
37 | ">0.2%",
38 | "not dead",
39 | "not op_mini all"
40 | ],
41 | "development": [
42 | "last 1 chrome version",
43 | "last 1 firefox version",
44 | "last 1 safari version"
45 | ]
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/air-quality/README.md:
--------------------------------------------------------------------------------
1 | # Air Quality Collection - Sample Application
2 |
3 | This folder contains a sample application that demonstrates how users might choose
4 | to collect air quality information. We leverage the Open AQ Platform API (https://docs.openaq.org/)
5 | to collect and store data in TimescaleDB.
6 |
7 | ## Requirements
8 | - A working instance of [TimescaleDB](https://docs.timescale.com)
9 | - Python3 environment
10 | - Install python dependencies with `pip3 install -r requirements.txt`
11 |
12 | ## Content
13 | The files in this directory and how they're used:
14 |
15 | * `airquality_ingest.py`: Python script to read data from Open AQ Platform API and insert them into TimescaleDB.
16 | * `schema.sql`: Data definition (DDL) to create the necessary tables & hypertables.
17 | * `grafana.json`: Grafana dashboard configuration.
18 | * `requirements.txt`: Python dependency requirements.
19 | * `sample.json`: Sample json output from the Open AQ Platform API.
20 |
21 | ## Getting Started
22 | 0. Create a [TimescaleDB and Grafana instance](https://www.timescale.com/cloud-signup)
23 | 1. Update `airquality_ingest.py` with your TimescaleDB connection string
24 | 2. Install necessary packages as listed in `requirements.txt`
25 | 3. Initialize your TimescaleDB database with the schemas specified in `schema.sql`
26 | 4. Run `python3 airquality_ingest.py` to start the ingestion
27 | 5. In Grafana, import the dashboard: `Login to Grafana > Dashboard > Import > ./grafana.json`
28 |
29 |
--------------------------------------------------------------------------------
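
The README above describes the ingestion flow in prose; the sketch below makes one iteration of it concrete. This is an illustrative outline only, not the contents of `airquality_ingest.py`: the OpenAQ v1 `/measurements` endpoint, the `CONNECTION` string, the `fetch_page` helper, and the hard-coded `parameter_id`/`location_id` values are all assumptions for the example.

```python
# Illustrative sketch only -- not the contents of airquality_ingest.py.
# Fetch one page of OpenAQ measurements and bulk-insert them with pgcopy,
# assuming the 'measurements' hypertable from schema.sql already exists.
from datetime import datetime

import requests
import psycopg2
from pgcopy import CopyManager

CONNECTION = "postgres://user:password@host:port/airquality"  # placeholder

def fetch_page(city="Los Angeles", limit=100):
    # OpenAQ v1 measurements endpoint (an assumption; see docs.openaq.org).
    resp = requests.get("https://api.openaq.org/v1/measurements",
                        params={"city": city, "limit": limit})
    resp.raise_for_status()
    return resp.json()["results"]

with psycopg2.connect(CONNECTION) as conn:
    rows = []
    for m in fetch_page():
        # The real script resolves parameter/location names to their
        # foreign-key ids first; the 1/1 below are placeholders.
        ts = datetime.fromisoformat(m["date"]["utc"].replace("Z", "+00:00"))
        rows.append((ts, 1, 1, m["value"]))
    CopyManager(conn, "measurements",
                ["time", "parameter_id", "location_id", "value"]).copy(rows)
```

pgcopy's `CopyManager` streams rows over PostgreSQL's COPY protocol, which is why the README lists it alongside psycopg2 for bulk inserts.
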
/compression-preview/src/components/button.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import { useMutation, gql } from '@apollo/client';
3 | import './buttons.scss';
4 | import classNames from 'classnames';
5 |
6 | const COMPRESS_CHUNK = gql`
7 | mutation ($chunk: String!) {
8 | compress_chunk_named(args: {arg_1: $chunk}) {
9 | compress_chunk
10 | }
11 | }
12 | `;
13 |
14 | const DECOMPRESS_CHUNK = gql`
15 | mutation ($chunk: String!) {
16 | decompress_chunk_named(args: { arg_1: $chunk }) {
17 | compress_chunk
18 | }
19 | }
20 | `;
21 |
22 | const ADD_DATA = '';
23 |
24 | const mutationsMap = {
25 | compress: COMPRESS_CHUNK,
26 | decompress: DECOMPRESS_CHUNK,
27 | addData: ADD_DATA,
28 | };
29 |
30 | function Button({ jobComplete, setLoadModal, isCompressed, chunkName }) {
31 | const buttonType = isCompressed ? 'decompress' : isCompressed === undefined ? 'addData' : 'compress';
32 | const btnClassNames = classNames({'btn': true, [`btn__${buttonType}`]: true, [`btn__${buttonType}--disabled`]: jobComplete});
33 | const [mutation] = useMutation(mutationsMap[buttonType]);
34 | const mutationVariables = chunkName ? { variables: {chunk: chunkName} } : {variables: {}};
35 | const label = isCompressed !== undefined ? buttonType.toUpperCase() : 'ADD DATA';
36 |
37 |
38 | const handleClick = () => {
39 | setLoadModal(true);
40 | mutation(mutationVariables);
41 | }
42 |
43 |   return <button className={btnClassNames} onClick={handleClick}>{label}</button>;
44 | }
45 |
46 | export default Button;
47 |
--------------------------------------------------------------------------------
/compression-preview/src/components/cardInfo.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 |
3 | import Count from './count';
4 |
5 | const CardInfo = ({
6 | chunk_name,
7 | before_compression_total_bytes,
8 | after_compression_total_bytes,
9 | cardPosition,
10 | }) => {
11 | const getCompressionRatio = (before, after) => {
12 | if (!after) {
13 | return 0;
14 | }
15 | return (before / after).toFixed(2);
16 | };
17 |
18 | const compressionRatio = getCompressionRatio(
19 | before_compression_total_bytes,
20 | after_compression_total_bytes
21 | );
22 |
23 | const { top, bottom, left, right } = cardPosition || {};
24 |
25 | return (
26 |     // NOTE: the markup below is a reconstruction; the component's JSX
27 |     // was stripped when this file was exported.
28 |     <div
29 |       className="card-info"
30 |       style={{ top, bottom, left, right }}
31 |     >
32 |       <h4>{chunk_name}</h4>
33 |       <p>Before Compression</p>
34 |       <Count
35 |         end={before_compression_total_bytes}
36 |         suffix=" bytes"
37 |       />
38 |       <p>After Compression</p>
39 |       <Count
40 |         end={after_compression_total_bytes}
41 |         suffix=" bytes"
42 |       />
43 |       <Count
44 |         end={compressionRatio}
45 |         suffix="x compression"
46 |         decimals={2}
47 |       />
48 |     </div>
49 |   );
50 | };
51 |
52 | export default CardInfo;
53 |
--------------------------------------------------------------------------------
/pi-light/photoresistor.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | from datetime import datetime
3 | import time
4 | import os
5 |
6 | from digitalio import DigitalInOut, Direction
7 | from psycopg2.extras import execute_values
8 | import board
9 | import psycopg2
10 |
11 | print("Collecting sensor data...")
12 |
13 | """
14 | Description:
15 | make a postgres connection and talk to a GPIO pin
16 | Loop forever:
17 | Loop for one second:
18 | sleep for 99ms
19 | reset pin and see how long it takes to fill up
20 | repeat
21 | Insert one second worth of raw data.
22 | """
23 |
24 | connection_string = os.environ.get("TIMESCALEDB_CONNECTION", default="dbname=pi user=pi")
25 |
26 | with psycopg2.connect(connection_string) as conn:
27 | with conn.cursor() as cur:
28 | with DigitalInOut(board.D23) as pin:
29 | while True:
30 | start = time.time()
31 | values = []
32 | while time.time() - start < 1.0:
33 | time.sleep(0.099)
34 | pin.direction = Direction.OUTPUT
35 | pin.value = False
36 | pin.direction = Direction.INPUT
37 | reading = 0
38 | while pin.value is False:
39 | reading += 1
40 | values.append((datetime.utcnow(), "photoresistor", reading))
41 |
42 | execute_values(
43 | cur,
44 | """
45 | INSERT INTO pi_obs (time, metric, value)
46 | VALUES %s
47 | """,
48 | values,
49 | )
50 | conn.commit()
51 | print("Inserted ", len(values))
52 |
--------------------------------------------------------------------------------
/mta/aws_lambda/README.md:
--------------------------------------------------------------------------------
1 | # Deploying on AWS Lambda
2 |
3 | This document describes how to create and package an AWS Lambda function
4 | that polls the MTA API once a minute and inserts into a TimescaleDB hypertable.
5 |
6 | AWS Lambda allows you to run "serverless" functions,
7 | freeing you from server maintenance and scaling. In exchange,
8 | you must package your entire application into a zip bundle that matches the
9 | compute environment you're using.
10 |
11 | The MTA ingest script is written in Python, but several of its dependencies
12 | rely on compiled C extensions. These must be built in an environment
13 | compatible with the AWS Lambda runtime: 64-bit, RHEL-based Linux.
14 |
15 | ## Approach
16 |
17 | 1. Create a Docker image based on `amazonlinux` containing the required build tools.
18 | 2. Run the Docker container and install the Python dependencies into a virtualenv.
19 | 3. Still within the Docker container, package the contents of the virtualenv, along with your Python code, into a zip file.
20 | 4. Save the zipfile to a mounted volume so that it persists on the host.
21 | 5. Upload the zipfile to S3.
22 | 6. Configure a recurring one-minute trigger.
23 | 7. Configure the required environment variables.
24 |
25 | ## Details
26 |
27 | The lambda function itself is slightly modified from the original ingest script.
28 | Instead of looping indefinitely, it's refactored to run exactly once. The process gets restarted
29 | periodically. See `build/lambda_function.py`
30 |
31 | Steps 1-4 can be handled automatically with the Makefile system in this directory.
32 | Running `make` will create the image and run the container, resulting in a zip file on your local machine.
33 |
34 | Uploading the zipfile to s3 can be done with `aws s3 cp my.zip s3://bucket/my.zip`
35 |
36 | The remaining configuration of the Lambda function is done through the AWS console.
37 |
--------------------------------------------------------------------------------
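
For reference, steps 5-7 of the README above can also be scripted with boto3 (which the Dockerfile already installs). This is a hedged sketch, not part of the repo: the bucket, key, function, and rule names are placeholders, and it assumes the Lambda function and its IAM role already exist.

```python
# Hedged sketch of steps 5-7 using boto3; all names below are placeholders.
import boto3

BUCKET, KEY, FUNCTION = "my-bucket", "lambda.zip", "mta-ingest"

# Step 5: upload the zip bundle (equivalent to `aws s3 cp`).
boto3.client("s3").upload_file("build/lambda.zip", BUCKET, KEY)

# Point the existing function at the freshly uploaded bundle.
lam = boto3.client("lambda")
lam.update_function_code(FunctionName=FUNCTION, S3Bucket=BUCKET, S3Key=KEY)

# Step 7: environment variables read by the handler (illustrative name).
lam.update_function_configuration(
    FunctionName=FUNCTION,
    Environment={"Variables": {"TIMESCALEDB_CONNECTION": "postgres://..."}},
)

# Step 6: a recurring one-minute trigger via CloudWatch Events / EventBridge.
# (A lambda add_permission call is also needed so the rule may invoke the
# function; omitted here for brevity.)
events = boto3.client("events")
events.put_rule(Name="mta-every-minute", ScheduleExpression="rate(1 minute)")
fn_arn = lam.get_function(FunctionName=FUNCTION)["Configuration"]["FunctionArn"]
events.put_targets(Rule="mta-every-minute",
                   Targets=[{"Id": "mta-ingest-target", "Arn": fn_arn}])
```
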
/compression-preview/public/index.html:
--------------------------------------------------------------------------------
1 | <!DOCTYPE html>
2 | <html lang="en">
3 |   <head>
4 |     <meta charset="utf-8" />
5 |     <link rel="icon" href="%PUBLIC_URL%/favicon.ico" />
6 |     <meta name="viewport" content="width=device-width, initial-scale=1" />
7 |     <meta name="theme-color" content="#000000" />
8 |     <meta
9 |       name="description"
10 |       content="Web site created using create-react-app"
11 |     />
12 |     <link rel="apple-touch-icon" href="%PUBLIC_URL%/logo192.png" />
13 |     <!--
14 |       manifest.json provides metadata used when your web app is installed on a
15 |       user's mobile device or desktop. See https://developers.google.com/web/fundamentals/web-app-manifest/
16 |     -->
17 |     <link rel="manifest" href="%PUBLIC_URL%/manifest.json" />
18 |     <!--
19 |       Notice the use of %PUBLIC_URL% in the tags above.
20 |       It will be replaced with the URL of the `public` folder during the build.
21 |       Only files inside the `public` folder can be referenced from the HTML.
22 |
23 |       Unlike "/favicon.ico" or "favicon.ico", "%PUBLIC_URL%/favicon.ico" will
24 |       work correctly both with client-side routing and a non-root public URL.
25 |       Learn how to configure a non-root public URL by running `npm run build`.
26 |     -->
27 |     <title>React App</title>
28 |   </head>
29 |   <body>
30 |     <noscript>You need to enable JavaScript to run this app.</noscript>
31 |     <div id="root"></div>
32 |     <!--
33 |       This HTML file is a template.
34 |       If you open it directly in the browser, you will see an empty page.
35 |
36 |       You can add webfonts, meta tags, or analytics to this file.
37 |       The build step will place the bundled scripts into the <body> tag.
38 |
39 |       To begin the development, run `npm start` or `yarn start`.
40 |       To create a production bundle, use `npm run build` or `yarn build`.
41 |     -->
42 |   </body>
43 | </html>
44 |
--------------------------------------------------------------------------------
/air-quality/schema.sql:
--------------------------------------------------------------------------------
1 | CREATE DATABASE airquality;
2 |
3 | \c airquality
4 |
5 | CREATE EXTENSION IF NOT EXISTS timescaledb CASCADE;
6 |
7 | CREATE TABLE measurement_types (
8 | parameter_id SERIAL PRIMARY KEY,
9 | parameter TEXT NOT NULL,
10 | unit TEXT NOT NULL,
11 | UNIQUE(parameter, unit)
12 | );
13 |
14 | CREATE TABLE locations (
15 | location_id SERIAL PRIMARY KEY,
16 | city_name TEXT NOT NULL,
17 | country_name TEXT NOT NULL,
18 | location_name TEXT NOT NULL,
19 | UNIQUE(city_name, location_name, country_name)
20 | );
21 |
22 | CREATE TABLE measurements (
23 | time TIMESTAMPTZ,
24 | parameter_id INTEGER REFERENCES measurement_types(parameter_id),
25 | location_id INTEGER REFERENCES locations(location_id),
26 | value FLOAT
27 | );
28 |
29 | CREATE TABLE temp_measurements (
30 | time TIMESTAMPTZ,
31 | parameter_id INTEGER REFERENCES measurement_types(parameter_id),
32 | location_id INTEGER REFERENCES locations(location_id),
33 | value FLOAT
34 | );
35 |
36 | SELECT create_hypertable('measurements', 'time');
37 |
38 | -- Continuous Aggregates Example
39 |
40 | CREATE VIEW measurements_15min
41 | WITH (timescaledb.continuous)
42 | AS
43 | SELECT
44 | time_bucket('15 minute', time) as bucket,
45 | parameter_id,
46 | avg(value) as avg,
47 | max(value) as max,
48 | min(value) as min
49 | FROM
50 | measurements
51 | GROUP BY bucket, parameter_id;
52 |
53 | CREATE VIEW measurements_hourly
54 | WITH (timescaledb.continuous)
55 | AS
56 | SELECT
57 | time_bucket('1 hour', time) as bucket,
58 | parameter_id,
59 | avg(value) as avg,
60 | max(value) as max,
61 | min(value) as min
62 | FROM
63 | measurements
64 | GROUP BY bucket, parameter_id;
65 |
66 | CREATE VIEW measurements_daily
67 | WITH (timescaledb.continuous)
68 | AS
69 | SELECT
70 | time_bucket('1 day', time) as bucket,
71 | parameter_id,
72 | avg(value) as avg,
73 | max(value) as max,
74 | min(value) as min
75 | FROM
76 | measurements
77 | GROUP BY bucket, parameter_id;
78 |
--------------------------------------------------------------------------------
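
Once this schema is in place, the continuous aggregate views are queried like ordinary tables. A minimal sketch, assuming the `airquality` database created by the DDL above (the DSN is a placeholder):

```python
# Minimal sketch: read the hourly continuous aggregate defined above.
# The DSN is a placeholder for your own connection details.
import psycopg2

with psycopg2.connect("dbname=airquality") as conn:
    with conn.cursor() as cur:
        cur.execute("""
            SELECT bucket, parameter_id, avg, min, max
            FROM measurements_hourly
            WHERE bucket > now() - interval '1 day'
            ORDER BY bucket;
        """)
        for row in cur.fetchall():
            print(row)
```
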
/prometheus-grafana/README.md:
--------------------------------------------------------------------------------
1 | # Example Dashboards to get started analyzing Prometheus metrics using Timescale and Grafana
2 |
3 | ### Context and Intended Use
4 |
5 | These sample dashboards are from this [tutorial](https://docs.timescale.com/latest/tutorials/tutorial-use-timescale-prometheus-grafana) on how to use Grafana and Timescale to analyze Prometheus metrics stored in a PostgreSQL database. The tutorial details step by step how to build each sort of visualization. These two sample dashboards are intended to give you (1) a starting point and code recipes for building your own dashboards that visualize Prometheus metrics from your infrastructure and (2) some sample code, recipes, and ideas for useful visualizations when monitoring a database instance with Prometheus.
6 |
7 | ### Short Term Monitoring
8 |
9 | This file (shortterm-monitoring.json) contains dashboards to monitor metrics for the short term.
10 |
11 | ### Long Term Monitoring
12 |
13 | This file (longterm-monitoring.json) contains dashboards to monitor metrics for the long term, using Timescale's Continuous Aggregates feature.
14 |
15 | ### How to upload to Grafana
16 |
17 | For each file:
18 | 1. Create a new Grafana dashboard
19 | 2. Copy and paste the JSON from the file and save the dashboard
20 | 3. You should see several panels created, but all of them complaining of no data
21 | 4.1 Follow [this tutorial](https://docs.timescale.com/latest/tutorials/tutorial-setup-timescale-prometheus) to set up a monitoring system that generates data to populate your dashboards.
22 | 4.2 Alternatively, you can follow [this tutorial](https://docs.timescale.com/latest/tutorials/tutorial-use-timescale-prometheus-grafana) and download the sample prometheus metrics dataset at the start of the tutorial.
23 |
24 | ### Note about metric ids
25 |
26 | Note that the metric IDs may be different for your PostgreSQL database, so there is a chance they won't match the IDs used to create the dashboard. Should this occur, find the correct ID and substitute it into the query.
27 |
--------------------------------------------------------------------------------
/crypto_tutorial/schema.sql:
--------------------------------------------------------------------------------
1 | --schema.sql
2 | --SQL statements for a database schema cryptocurrency analysis
3 | --Timescale Inc.
4 | --Author: Avthar Sewrathan
5 |
6 | --Schema for currency_info table
7 | DROP TABLE IF EXISTS "currency_info";
8 | CREATE TABLE "currency_info"(
9 | currency_code VARCHAR (10),
10 | currency TEXT
11 | );
12 |
13 | --Schema for btc_prices table
14 | DROP TABLE IF EXISTS "btc_prices";
15 | CREATE TABLE "btc_prices"(
16 | time TIMESTAMP WITH TIME ZONE NOT NULL,
17 | opening_price DOUBLE PRECISION,
18 | highest_price DOUBLE PRECISION,
19 | lowest_price DOUBLE PRECISION,
20 | closing_price DOUBLE PRECISION,
21 | volume_btc DOUBLE PRECISION,
22 | volume_currency DOUBLE PRECISION,
23 | currency_code VARCHAR (10)
24 | );
25 |
26 | --Schema for crypto_prices table
27 | DROP TABLE IF EXISTS "crypto_prices";
28 | CREATE TABLE "crypto_prices"(
29 | time TIMESTAMP WITH TIME ZONE NOT NULL,
30 | opening_price DOUBLE PRECISION,
31 | highest_price DOUBLE PRECISION,
32 | lowest_price DOUBLE PRECISION,
33 | closing_price DOUBLE PRECISION,
34 | volume_crypto DOUBLE PRECISION,
35 | volume_btc DOUBLE PRECISION,
36 | currency_code VARCHAR (10)
37 | );
38 |
39 | --Schema for eth_prices table
40 | DROP TABLE IF EXISTS "eth_prices";
41 | CREATE TABLE "eth_prices"(
42 | time TIMESTAMP WITH TIME ZONE NOT NULL,
43 | opening_price DOUBLE PRECISION,
44 | highest_price DOUBLE PRECISION,
45 | lowest_price DOUBLE PRECISION,
46 | closing_price DOUBLE PRECISION,
47 | volume_eth DOUBLE PRECISION,
48 | volume_currency DOUBLE PRECISION,
49 | currency_code VARCHAR (10)
50 | );
51 |
52 | --Timescale specific statements to create hypertables for better performance
53 | SELECT create_hypertable('btc_prices', 'time', 'opening_price', 2);
54 | SELECT create_hypertable('eth_prices', 'time', 'opening_price', 2);
55 | SELECT create_hypertable('crypto_prices', 'time', 'currency_code', 2);
--------------------------------------------------------------------------------
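
As a quick sanity check that the hypertables work, here is a minimal sketch of a `time_bucket` query against `btc_prices`; the connection string is a placeholder:

```python
# Minimal sketch: weekly average BTC closing price in USD from the
# btc_prices hypertable above; the DSN is a placeholder.
import psycopg2

with psycopg2.connect("dbname=crypto") as conn:
    with conn.cursor() as cur:
        cur.execute("""
            SELECT time_bucket('7 days', time) AS week,
                   avg(closing_price) AS avg_close
            FROM btc_prices
            WHERE currency_code = 'USD'
            GROUP BY week
            ORDER BY week;
        """)
        for week, avg_close in cur.fetchall():
            print(week, avg_close)
```
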
/compression-preview/src/assets/images/hasuraio-logo.svg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/timescale/examples/HEAD/compression-preview/src/assets/images/hasuraio-logo.svg
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Example Applications
2 |
3 | Collection of example applications to help you get familiar with TimescaleDB.
4 |
5 | | Example | Type | Description |
6 | |---|---|---|
7 | | [Hello Timescale](hello-timescale)| Geospatial | Analyze taxi cab data in New York City. |
8 | | [Raspberry Pi Light Sensor](pi-light)| Edge | Record light changes using a photoresistor & Raspberry Pi. |
9 | | [MTA](mta) | Geospatial | Store & visualize bus activity in New York City. |
10 | | [Clients](clients) | Client Libraries | Sample code for connecting to TimescaleDB. |
11 | | [Air Quality](air-quality) | IoT | Sample code for collecting air quality measurements. |
12 | | [Crypto Tutorial](crypto_tutorial) | Financial | Sample code and dataset for analyzing cryptocurrency market data. |
13 | | [Compression Preview](compression-preview) | Visualization | Preview compression data from hypertables with React and GraphQL. |
14 | | [Analyze intraday stocks data](analyze-intraday-stocks) | Financial | Store & analyze intraday candlestick data. |
15 |
16 |
17 | ### Integration Catalog
18 |
19 | Collection of technologies you can use with TimescaleDB.
20 |
21 | #### Ingest
22 |
23 | - [Kafka](https://streamsets.com/blog/ingesting-data-apache-kafka-timescaledb/)
24 | - [Telegraf](https://docs.timescale.com/latest/tutorials/telegraf-output-plugin)
25 | - [Netdata](https://github.com/mahlonsmith/netdata-timescale-relay)
26 |
27 | #### Visualization
28 |
29 | - [Grafana](https://docs.timescale.com/latest/using-timescaledb/visualizing-data#grafana)
30 | - [Seeq](https://seeq12.atlassian.net/wiki/spaces/KB/pages/376963207/SQL+Connection+Configuration#SQLConnectionConfiguration-TimescaleDB)
31 | - [Tableau, PowerBI, Others](https://docs.timescale.com/latest/using-timescaledb/visualizing-data#other-viz-tools)
32 | - [Hasura, GraphQL, React](https://docs.timescale.com/latest/timescaledb/tutorials/visualize-compression-status)
33 |
34 | #### Monitoring
35 |
36 | - [Prometheus](https://docs.timescale.com/latest/tutorials/prometheus-adapter)
37 | - [Zabbix](https://support.zabbix.com/browse/ZBXNEXT-4868)
38 |
--------------------------------------------------------------------------------
/hello-timescale/nyc_data_setup.sql:
--------------------------------------------------------------------------------
1 | -- nyc_data_setup.sql
2 | -- Timescale Inc.
3 |
4 | -- Create table 'rides' which will store trip data
5 | DROP TABLE IF EXISTS "rides";
6 | CREATE TABLE "rides"(
7 | vendor_id TEXT,
8 | pickup_datetime TIMESTAMP WITHOUT TIME ZONE NOT NULL,
9 | dropoff_datetime TIMESTAMP WITHOUT TIME ZONE NOT NULL,
10 | passenger_count NUMERIC,
11 | trip_distance NUMERIC,
12 | pickup_longitude NUMERIC,
13 | pickup_latitude NUMERIC,
14 | rate_code INTEGER,
15 | dropoff_longitude NUMERIC,
16 | dropoff_latitude NUMERIC,
17 | payment_type INTEGER,
18 | fare_amount NUMERIC,
19 | extra NUMERIC,
20 | mta_tax NUMERIC,
21 | tip_amount NUMERIC,
22 | tolls_amount NUMERIC,
23 | improvement_surcharge NUMERIC,
24 | total_amount NUMERIC
25 | );
26 |
27 | -- Create hypertable for rides
28 | -- This allows us to take advantage of TimescaleDB's space and time partitioning
29 | SELECT create_hypertable('rides', 'pickup_datetime', 'payment_type', 2, create_default_indexes=>FALSE);
30 | -- Create indexes (special lookup structures/pointers) on the following columns to speed up data retrieval
31 | CREATE INDEX ON rides (vendor_id, pickup_datetime desc);
32 | CREATE INDEX ON rides (pickup_datetime desc, vendor_id);
33 | CREATE INDEX ON rides (rate_code, pickup_datetime DESC);
34 | CREATE INDEX ON rides (passenger_count, pickup_datetime desc);
35 |
36 | -- Create table 'payment_types' to store description of payment types for easy lookup
37 | CREATE TABLE IF NOT EXISTS "payment_types"(
38 | payment_type INTEGER,
39 | description TEXT
40 | );
41 | INSERT INTO payment_types(payment_type, description) VALUES
42 | (1, 'credit card'),
43 | (2, 'cash'),
44 | (3, 'no charge'),
45 | (4, 'dispute'),
46 | (5, 'unknown'),
47 | (6, 'voided trip');
48 |
49 | -- Create table 'rates' to store description of rate codes for easy lookup
50 | CREATE TABLE IF NOT EXISTS "rates"(
51 | rate_code INTEGER,
52 | description TEXT
53 | );
54 | INSERT INTO rates(rate_code, description) VALUES
55 | (1, 'standard rate'),
56 | (2, 'JFK'),
57 | (3, 'Newark'),
58 | (4, 'Nassau or Westchester'),
59 | (5, 'negotiated fare'),
60 | (6, 'group ride');
61 |
--------------------------------------------------------------------------------
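
The tree lists `nyc_data_rides.csv` (reduced to a Git LFS pointer in this export) alongside this setup script. A minimal sketch of bulk-loading it with psycopg2's COPY support, assuming the file has been pulled from LFS; add `HEADER` to the COPY options if your copy of the file includes a header row:

```python
# Minimal sketch: bulk-load nyc_data_rides.csv into the 'rides' hypertable
# created by this script, using PostgreSQL COPY via psycopg2.
import psycopg2

with psycopg2.connect("dbname=nyc_data") as conn:  # placeholder DSN
    with conn.cursor() as cur, open("nyc_data_rides.csv") as f:
        cur.copy_expert("COPY rides FROM STDIN WITH (FORMAT csv)", f)
    conn.commit()
```
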
/pi-light/README.md:
--------------------------------------------------------------------------------
1 | # Raspberry Pi Light Sensor
2 |
3 | This example will read data from a light sensor (wired to a Raspberry Pi device) and send that
4 | data to a TimescaleDB instance (hosted by [Timescale Cloud](https://www.timescale.com/cloud)).
5 |
6 | ## Contents
7 |
8 | A brief overview of the files in this directory and how they're used:
9 |
10 | * `photoresistor.py`: Python script to read sensor values and insert them into TimescaleDB.
11 | * `pi-schema.sql`: data definition (DDL) to create the necessary hypertables.
12 | * `grafana.json`: Grafana dashboard configuration.
13 | * `pi_photoresistor.service`: systemd service definition to ensure the sensor is restarted on reboot.
14 |
15 | ## The Cloud ([Timescale Cloud](https://www.timescale.com/cloud))
16 |
17 | Prepare TimescaleDB instance by creating the pi schema
18 |
19 | psql postgres://USERNAME:PASSWORD@HOST:PORT/defaultdb?sslmode=require -f ./pi-schema.sql
20 |
21 | Prepare Grafana instance by creating a datasource
22 |
23 | Login to Grafana > Configuration > Data Sources > Add data source > PostgreSQL
24 |
25 | Create Grafana dashboard
26 |
27 | Login to Grafana > Dashboard > Import > ./grafana.json
28 |
29 | ## The Edge (Raspberry Pi)
30 |
31 | On device, install [PostgreSQL Database Adapter](https://github.com/psycopg/psycopg2) for python
32 |
33 | sudo apt-get install libpq-dev
34 | pip3 install psycopg2
35 |
36 | On device, install [CircuitPython](https://learn.adafruit.com/circuitpython-on-raspberrypi-linux/installing-circuitpython-on-raspberry-pi) libraries
37 |
38 | pip3 install adafruit-blinka
39 |
40 | Copy python script to device
41 |
42 | scp ./photoresistor.py pi@10.0.1.14:/home/pi
43 |
44 | > The `photoresistor.py` script assumes that you're implementing a pull-down resistor on GPIO pin 23.
45 | You'll need to modify this depending on the specifics of your own sensor configuration.
46 |
47 | Copy the systemd service unit into place
48 |
49 | scp ./pi_photoresistor.service /etc/systemd/system
50 |
51 | > Be sure to set the TIMESCALEDB_CONNECTION string in the service file to the Service URI ("postgres://...") of your TimescaleDB instance.
52 |
53 | On device, start the service
54 |
55 | sudo systemctl start pi_photoresistor.service
56 |
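57 | To have the service come back automatically after a reboot (standard systemd practice), also enable it
58 | 
59 |     sudo systemctl daemon-reload
60 |     sudo systemctl enable pi_photoresistor.service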
--------------------------------------------------------------------------------
/analyze-intraday-stocks/readme.md:
--------------------------------------------------------------------------------
1 | # Analyze historical intraday stock data
2 |
3 | This project is an example of how to collect, store, and analyze intraday stock data with Python and
4 | TimescaleDB. [To read the full tutorial, go to the TimescaleDB documentation!](https://docs.timescale.com/timescaledb/latest/tutorials/analyze-intraday-stocks/)
5 |
6 | ## Prerequisites
7 |
8 | * Python 3
9 | * TimescaleDB (see [installation options](https://docs.timescale.com/timescaledb/latest/how-to-guides/install-timescaledb/))
10 | * Alpha Vantage API key ([get one for free](https://www.alphavantage.co/support/#api-key))
11 | * Virtualenv (`pip install virtualenv`)
12 |
13 | ## Installation and database setup
14 |
15 | ### Clone repository and open analyze-intraday-stocks folder
16 |
17 | ```bash
18 | git clone https://github.com/timescale/examples.git
19 | cd analyze-intraday-stocks/
20 | ```
21 |
22 | ### Create new virtual environment
23 |
24 | ```bash
25 | virtualenv env
26 | source env/bin/activate
27 | ```
28 |
29 | ### Install requirements
30 |
31 | ```bash
32 | pip install -r requirements.txt
33 | ```
34 |
35 | ### Create table
36 | Run `sql_script/create_table.sql`:
37 |
38 | ```sql
39 | CREATE TABLE public.stocks_intraday (
40 | "time" timestamp(0) NOT NULL,
41 | symbol varchar NULL,
42 | price_open float8 NULL,
43 | price_close float8 NULL,
44 | price_low float8 NULL,
45 | price_high float8 NULL,
46 | 	trading_volume int4 NULL
47 | );
48 | ```
49 |
50 | ### Turn it into a hypertable
51 | Run `sql_script/create_hypertable.sql`:
52 |
53 | ```sql
54 | /* Enable the TimescaleDB extension */
55 | CREATE EXTENSION IF NOT EXISTS timescaledb;
56 |
57 | /*
58 | Turn the 'stocks_intraday' table into a hypertable.
59 | This is important to be able to make use of TimescaleDB features later on.
60 | */
61 | SELECT create_hypertable('stocks_intraday', 'time');
62 | ```
63 |
64 | ### Edit configuration file
65 | Edit `config.py` according to your database connection details.
66 | ```python
67 | # Make sure to edit this configuration file with your database connection details
68 | # and Alpha Vantage API key
69 | DB_USER = 'user'
70 | DB_PASS = 'passwd'
71 | DB_HOST = 'host'
72 | DB_PORT = '000'
73 | DB_NAME = 'db'
74 | APIKEY = 'alpha_vantage_apikey'
75 | ```
76 | `config.py` also holds your Alpha Vantage API key, so be sure to set that here too.
77 |
78 |
79 | ## Usage
80 |
81 | Run `explore.py` to run your first query against your database.
82 |
83 | ```bash
84 | python explore.py
85 | ```
86 |
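87 | Note that `explore.py` reads from the `stocks_intraday` table, so load some data
88 | first, for example with the ingestion script in this folder (after editing
89 | `config.py`):
90 | 
91 | ```bash
92 | python insert_stocks_data.py
93 | ```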
--------------------------------------------------------------------------------
/grafana-guide/advanced-tips/webinar-demo-queries.sql:
--------------------------------------------------------------------------------
1 | -----------------------------------
2 | -- Demo 1
3 | -- 3 day time shift
4 | -----------------------------------
5 | -- What to name the series
6 | SELECT time, ride_count, CASE WHEN step = 0 THEN 'today' ELSE (-interval)::text END AS metric
7 | FROM
8 | -- sub-query to generate the intervals
9 | ( SELECT step, (step||'day')::interval AS interval FROM generate_series(0,3) g(step)) g_offsets
10 | JOIN LATERAL (
11 | -- subquery to select the rides
12 | SELECT
13 |         -- add the interval to shift past data onto the current time window
14 | time_bucket('15m',pickup_datetime + interval)::timestamptz AS time, count(*) AS ride_count FROM rides
15 |         -- subtract the interval from the dashboard's time range to select the shifted window
16 |         -- (step 0 = today, step 1 = 1 day ago, etc.)
17 | WHERE
18 | pickup_datetime BETWEEN $__timeFrom()::timestamptz - interval AND $__timeTo()::timestamptz - interval
19 | GROUP BY 1
20 | ORDER BY 1
21 | ) l ON true
22 |
23 |
24 | -----------------------------------
25 | -- Demo 1
26 | -- 7 day time shift
27 | -----------------------------------
28 | SELECT time, ride_count, CASE WHEN step = 0 THEN 'today' ELSE (-interval)::text END AS metric
29 | FROM
30 | ( SELECT step, (step||'week')::interval AS interval FROM generate_series(0,1) g(step)) g_offsets
31 | JOIN LATERAL (
32 | SELECT
33 | time_bucket('15m',pickup_datetime + interval)::timestamptz AS time, count(*) AS ride_count FROM rides
34 | WHERE
35 | pickup_datetime BETWEEN $__timeFrom()::timestamptz - interval AND $__timeTo()::timestamptz - interval
36 | GROUP BY 1
37 | ORDER BY 1
38 | ) l ON true
39 |
40 | -----------------------------------
41 | -- Demo 2
42 | -- Automatically switch which aggregate is queried based on the dashboard time range
43 | -----------------------------------
44 | -- Use Daily aggregate for intervals greater than 14 days
45 | SELECT day as time, ride_count, 'daily' AS metric
46 | FROM rides_daily
47 | WHERE
48 | $__timeTo()::timestamp - $__timeFrom()::timestamp > '14 days'::interval AND
49 | $__timeFilter(day)
50 | UNION ALL
51 | -- Use hourly aggregate for intervals between 3 and 14 days
52 | SELECT hour, ride_count, 'hourly' AS metric
53 | FROM rides_hourly
54 | WHERE
55 | $__timeTo()::timestamp - $__timeFrom()::timestamp BETWEEN '3 days'::interval AND '14 days'::interval AND
56 | $__timeFilter(hour)
57 | UNION ALL
58 | -- Use raw data (minute buckets) for intervals under 3 days
59 | SELECT * FROM
60 | (
61 | SELECT time_bucket('1m',pickup_datetime) AS time, count(*), 'minute' AS metric
62 | FROM rides
63 | WHERE
64 | $__timeTo()::timestamp - $__timeFrom()::timestamp < '3 days'::interval AND
65 | $__timeFilter(pickup_datetime)
66 | GROUP BY 1
67 | ) minute
68 | ORDER BY 1;
69 |
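70 | -- Note (added for clarity): rides_daily and rides_hourly are assumed to be
71 | -- pre-computed rollups of the rides table (e.g. continuous aggregates) with
72 | -- columns (day, ride_count) and (hour, ride_count) respectively.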
--------------------------------------------------------------------------------
/mta/aws_lambda/build/lambda_function.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """gtfs-ingest.py
3 | Load a GTFS feed into a PostgreSQL database with PostGIS and TimescaleDB.
4 |
5 | Assumes the schema:
6 | CREATE TABLE mta (
7 | vid text,
8 | time timestamptz,
9 | route_id text,
10 | bearing numeric,
11 | geom geometry(POINT, 4326));
12 | and converted to a TimescaleDB hypertable.
13 | SELECT create_hypertable('mta', 'time');
14 | """
15 |
16 | from datetime import timedelta, datetime
17 | import time
18 | import os
19 |
20 | from google.transit import gtfs_realtime_pb2 as gtfs
21 | import requests
22 | import psycopg2
23 | from psycopg2.extras import execute_values
24 |
25 |
26 | # Required Environment Variables
27 | API_KEY = os.environ['MTA_API_KEY']
28 | CONNECTION = os.environ['MTA_CONNECTION']
29 |
30 |
31 | def parse_vehicles(feed):
32 | """Given a GTFS feed, return a generator of 5-element tuples,
33 | each matching the following insert statement
34 | INSERT INTO mta (vid, time, route_id, bearing, geom)
35 | VALUES (...);
36 | """
37 |
38 | for entity in list(feed.entity):
39 | value = datetime.fromtimestamp(entity.vehicle.timestamp)
40 | timestamp = value.strftime('%d %B %Y %H:%M:%S')
41 | yield (
42 | entity.id,
43 | timestamp,
44 | entity.vehicle.trip.route_id,
45 | entity.vehicle.position.bearing,
46 | "SRID=4326;POINT( %f %f )" % (
47 | entity.vehicle.position.longitude,
48 | entity.vehicle.position.latitude))
49 |
50 |
51 |
52 | # Global config
53 | # Using http://bustime.mta.info/wiki/Developers/Index
54 | URL = f"http://gtfsrt.prod.obanyc.com/vehiclePositions?key={API_KEY}"
55 |
56 |
57 | def lambda_handler(event, context):
58 | with psycopg2.connect(CONNECTION) as conn:
59 | with conn.cursor() as cursor:
60 | response = requests.get(URL)
61 | feed = gtfs.FeedMessage()
62 | feed.ParseFromString(response.content)
63 |
64 | # performant way to batch inserts
65 |             # see http://initd.org/psycopg/docs/extras.html#psycopg2.extras.execute_values
66 | start = time.time()
67 | execute_values(
68 | cursor,
69 | "INSERT INTO mta (vid, time, route_id, bearing, geom)"
70 | "VALUES %s", parse_vehicles(feed))
71 | conn.commit()
72 | end = time.time()
73 | nrows = len(feed.entity)
74 |
75 | return {
76 | 'statusCode': 200,
77 | 'body': f"INSERTED {nrows} rows at {end}, (elapsed: {end - start})"
78 | }
79 |
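80 | # Illustrative local smoke test (not part of the Lambda deployment): with
81 | # MTA_API_KEY and MTA_CONNECTION exported, the handler can be invoked directly
82 | # with dummy event/context arguments.
83 | if __name__ == "__main__":
84 |     print(lambda_handler(None, None))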
--------------------------------------------------------------------------------
/analyze-intraday-stocks/insert_stocks_data.py:
--------------------------------------------------------------------------------
1 | import csv
2 | import pandas as pd
3 | import config
4 | import psycopg2
5 | from pgcopy import CopyManager
6 |
7 | conn = psycopg2.connect(database=config.DB_NAME,
8 | host=config.DB_HOST,
9 | user=config.DB_USER,
10 | password=config.DB_PASS,
11 | port=config.DB_PORT)
12 | columns = ('time', 'price_open', 'price_high',
13 |            'price_low', 'price_close', 'trading_volume', 'symbol')
14 |
15 | def get_symbols():
16 | """Read symbols from a csv file.
17 |
18 | Returns:
19 | [list of strings]: symbols
20 | """
21 | with open('symbols.csv') as f:
22 | reader = csv.reader(f)
23 | return [row[0] for row in reader]
24 |
25 | def fetch_stock_data(symbol, month):
26 | """Fetches historical intraday data for one ticker symbol (1-min interval)
27 |
28 | Args:
29 | symbol (string): ticker symbol
30 | month (int): month value as an integer 1-24 (for example month=4 will fetch data from the last 4 months)
31 |
32 | Returns:
33 | list of tuples: intraday (candlestick) stock data
34 | """
35 | interval = '1min'
36 |     slice = 'year1month' + str(month) if month <= 12 else 'year2month' + str(month - 12)
37 | apikey = config.APIKEY
38 | CSV_URL = 'https://www.alphavantage.co/query?function=TIME_SERIES_INTRADAY_EXTENDED&' \
39 | 'symbol={symbol}&interval={interval}&slice={slice}&apikey={apikey}' \
40 | .format(symbol=symbol, slice=slice, interval=interval,apikey=apikey)
41 | df = pd.read_csv(CSV_URL)
42 | df['symbol'] = symbol
43 |
44 | df['time'] = pd.to_datetime(df['time'], format='%Y-%m-%d %H:%M:%S')
45 | df = df.rename(columns={'time': 'time',
46 | 'open': 'price_open',
47 | 'close': 'price_close',
48 | 'high': 'price_high',
49 | 'low': 'price_low',
50 | 'volume': 'trading_volume'}
51 | )
52 | return [row for row in df.itertuples(index=False, name=None)]
53 |
54 | def main():
55 | symbols = get_symbols()
56 | for symbol in symbols:
57 | print("Fetching data for: ", symbol)
58 |         for month in range(1, 3): # last 2 months; you can go up to 24 months if you want
59 | stock_data = fetch_stock_data(symbol, month)
60 | print('Inserting data...')
61 | mgr = CopyManager(conn, 'stocks_intraday', columns)
62 | mgr.copy(stock_data)
63 | conn.commit()
64 |
65 |
66 | if __name__ == '__main__':
67 | main()
68 |
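69 | # Note: Alpha Vantage rate-limits free API keys, so a long symbols.csv may hit
70 | # the limit mid-run. A simple (illustrative) mitigation is to pause between
71 | # requests in main(), e.g. `import time` and time.sleep(15) after each
72 | # fetch_stock_data() call.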
--------------------------------------------------------------------------------
/mta/gtfs-ingest.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """gtfs-ingest.py
3 |
4 | Load a GTFS feed into a PostgreSQL database with PostGIS and TimescaleDB.
5 |
6 | Assumes the schema:
7 |
8 | CREATE TABLE mta (
9 | vid text,
10 | time timestamptz,
11 | route_id text,
12 | bearing numeric,
13 | geom geometry(POINT, 4326));
14 |
15 | and converted to a TimescaleDB hypertable.
16 |
17 | SELECT create_hypertable('mta', 'time');
18 |
19 | example GTFS entity
20 |
21 | id: "MTABC_6048"
22 | vehicle {
23 | trip {
24 | trip_id: "22504538-LGPA9-LG_A9-Weekday-10"
25 | start_date: "20190108"
26 | route_id: "Q53+"
27 | direction_id: 0
28 | }
29 | position {
30 | latitude: 40.71529006958008
31 | longitude: -73.8602294921875
32 | bearing: 134.45303344726562
33 | }
34 | timestamp: 1547004538
35 | stop_id: "553375"
36 | vehicle {
37 | id: "MTABC_6048"
38 | }
39 | }
40 |
41 | Full spec https://github.com/google/transit/blob/master/gtfs-realtime/spec/en/reference.md#element-index
42 | """
43 |
44 | from datetime import timedelta, datetime
45 | import time
46 | import os
47 |
48 | from google.transit import gtfs_realtime_pb2 as gtfs
49 | import requests
50 | import requests_cache
51 | import psycopg2
52 | from psycopg2.extras import execute_values
53 |
54 |
55 | def parse_vehicles(feed):
56 | """Given a GTFS feed, return a generator of 5-element tuples,
57 | each matching the following insert statement
58 |
59 | INSERT INTO mta (vid, time, route_id, bearing, geom)
60 | VALUES (...);
61 | """
62 |
63 | for entity in list(feed.entity):
64 | value = datetime.fromtimestamp(entity.vehicle.timestamp)
65 | timestamp = value.strftime('%d %B %Y %H:%M:%S')
66 | yield (
67 | entity.id,
68 | timestamp,
69 | entity.vehicle.trip.route_id,
70 | entity.vehicle.position.bearing,
71 | "SRID=4326;POINT( %f %f )" % (
72 | entity.vehicle.position.longitude,
73 | entity.vehicle.position.latitude))
74 |
75 |
76 | # Required Environment Variables
77 | API_KEY = os.environ['MTA_API_KEY']
78 | CONNECTION = os.environ['MTA_CONNECTION']
79 |
80 | # Global config
81 | # Using http://bustime.mta.info/wiki/Developers/Index
82 | URL = f"http://gtfsrt.prod.obanyc.com/vehiclePositions?key={API_KEY}"
83 | POLLING_INTERVAL = 85 # seconds
84 | requests_cache.install_cache('.gtfs-cache', expire_after=timedelta(seconds=POLLING_INTERVAL))
85 |
86 |
87 | if __name__ == "__main__":
88 | with psycopg2.connect(CONNECTION) as conn:
89 | while True:
90 | with conn.cursor() as cursor:
91 | response = requests.get(URL)
92 | feed = gtfs.FeedMessage()
93 | feed.ParseFromString(response.content)
94 |
95 | # performant way to batch inserts
96 |                 # see http://initd.org/psycopg/docs/extras.html#psycopg2.extras.execute_values
97 | start = time.time()
98 | execute_values(
99 | cursor,
100 | "INSERT INTO mta (vid, time, route_id, bearing, geom)"
101 | "VALUES %s", parse_vehicles(feed))
102 | conn.commit()
103 | end = time.time()
104 |
105 | nrows = len(feed.entity)
106 | print(f"INSERTED {nrows} rows at {end}, (elapsed: {end - start})")
107 | time.sleep(POLLING_INTERVAL)
108 |
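109 | # Note: the requests_cache configured above expires entries after
110 | # POLLING_INTERVAL seconds, so back-to-back requests within that window are
111 | # served from the local cache instead of hitting the MTA API again.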
--------------------------------------------------------------------------------
/grafana-guide/variables/before_variable.json:
--------------------------------------------------------------------------------
1 | {
2 | "annotations": {
3 | "list": [
4 | {
5 | "builtIn": 1,
6 | "datasource": "-- Grafana --",
7 | "enable": true,
8 | "hide": true,
9 | "iconColor": "rgba(0, 211, 255, 1)",
10 | "name": "Annotations & Alerts",
11 | "type": "dashboard"
12 | }
13 | ]
14 | },
15 | "editable": true,
16 | "gnetId": null,
17 | "graphTooltip": 0,
18 | "id": 4,
19 | "links": [],
20 | "panels": [
21 | {
22 | "circleMaxSize": "1",
23 | "circleMinSize": "2",
24 | "colors": [
25 | "#F2495C",
26 | "#5794F2",
27 | "#B877D9",
28 | "#FF9830",
29 | "#FADE2A"
30 | ],
31 | "datasource": "MTA Bus DB",
32 | "decimals": 0,
33 | "esMetric": "Count",
34 | "gridPos": {
35 | "h": 22,
36 | "w": 21,
37 | "x": 0,
38 | "y": 0
39 | },
40 | "hideEmpty": false,
41 | "hideZero": false,
42 | "id": 14,
43 | "initialZoom": "11",
44 | "locationData": "table",
45 | "mapCenter": "custom",
46 | "mapCenterLatitude": "40.879070000000056",
47 | "mapCenterLongitude": "-73.88537600000012",
48 | "maxDataPoints": 1,
49 | "mouseWheelZoom": true,
50 | "options": {},
51 | "showLegend": true,
52 | "stickyLabels": false,
53 | "tableQueryOptions": {
54 | "geohashField": "geohash",
55 | "labelField": "route_id",
56 | "latitudeField": "latitude",
57 | "longitudeField": "longitude",
58 | "metricField": "color",
59 | "queryType": "coordinates"
60 | },
61 | "targets": [
62 | {
63 | "format": "table",
64 | "group": [],
65 | "metricColumn": "none",
66 | "rawQuery": true,
67 | "rawSql": "SELECT\n max(time) as \"time\",\n vid AS \"vehicle_id\",\n route_id,\n CASE WHEN route_id like 'M%' THEN 1\n WHEN route_id like 'B%' THEN 2\n WHEN route_id like 'Q%' THEN 3\n WHEN route_id like 'S%' THEN 4\n ELSE 0\n END AS \"color\",\n ST_X(geom) AS \"longitude\",\n ST_Y(geom) AS \"latitude\"\nFROM mta where time > now()-interval '1.5 minutes'\ngroup by vid, route_id, geom order by 1;",
68 | "refId": "A",
69 | "select": [
70 | [
71 | {
72 | "params": [
73 | "value"
74 | ],
75 | "type": "column"
76 | }
77 | ]
78 | ],
79 | "timeColumn": "time",
80 | "where": [
81 | {
82 | "name": "$__timeFilter",
83 | "params": [],
84 | "type": "macro"
85 | }
86 | ]
87 | }
88 | ],
89 | "thresholds": "1,2,3,4",
90 | "timeFrom": null,
91 | "timeShift": null,
92 | "title": "Current MTA Bus Locations",
93 | "type": "grafana-worldmap-panel",
94 | "unitPlural": "",
95 | "unitSingle": "",
96 | "valueName": "current"
97 | }
98 | ],
99 | "schemaVersion": 22,
100 | "style": "dark",
101 | "tags": [],
102 | "templating": {
103 | "list": []
104 | },
105 | "time": {
106 | "from": "now-30d",
107 | "to": "now"
108 | },
109 | "timepicker": {
110 | "refresh_intervals": [
111 | "5s",
112 | "10s",
113 | "30s",
114 | "1m",
115 | "5m",
116 | "15m",
117 | "30m",
118 | "1h",
119 | "2h",
120 | "1d"
121 | ]
122 | },
123 | "timezone": "",
124 | "title": "Variables MTA Bus Example",
125 | "uid": "P07kQLjZz",
126 | "version": 15
127 | }
--------------------------------------------------------------------------------
/mta/README.md:
--------------------------------------------------------------------------------
1 | # MTA Bus Demo
2 |
3 | This project is a demo implementation of a TimescaleDB-backed database to ingest and query real-time mass transit information from the New York Metropolitan Transportation Authority (MTA).
4 |
5 | 
6 |
7 | ## Installation
8 |
9 | * Set up PostgreSQL with TimescaleDB and PostGIS extensions.
10 | * Install Python 3.6
11 | * Install Python dependencies with `pip install -r requirements.txt`
12 |   - Uses Google’s GTFS realtime protobuf spec and the related Python library for parsing.
13 | - Uses psycopg2 batch inserts for efficiency.
14 |
15 | ## Usage
16 |
17 | First, create the database, users and tables as required. See Data Model below.
18 |
19 | Run `python3 gtfs-ingest.py` and the script will run indefinitely, polling the MTA data feed at
20 | regular intervals and inserting the data into our table.
21 |
22 | The deployment-specific variables must be set as environment variables. The required
23 | variables are:
24 |
25 | ```bash
26 | # Obtain a key from http://bustime.mta.info/wiki/Developers/Index
27 | export MTA_API_KEY='abc-123'
28 | export MTA_CONNECTION="host=localhost dbname=mta user=mta"
29 | python3 gtfs-ingest.py
30 | ```
31 |
32 | ## Dataset
33 |
34 | We’ll be using the SIRI Real-Time API and the OneBusAway "Discovery" API provided by the MTA. See http://bustime.mta.info/wiki/Developers/Index for details.
35 |
36 | This should be generalizable to other transit systems using the General Transit Feed Specification or GTFS (see https://developers.google.com/transit/gtfs-realtime/).
37 |
38 |
39 | ## Data Model
40 |
41 | DDL:
42 |
43 | ```sql
44 | CREATE EXTENSION timescaledb;
45 | CREATE EXTENSION postgis;
46 | CREATE TABLE mta (
47 | vid text,
48 | time timestamptz,
49 | route_id text,
50 | bearing numeric,
51 | geom geometry(POINT, 4326));
52 |
53 | SELECT create_hypertable('mta', 'time');
54 |
55 | CREATE INDEX idx_mta_geom ON mta USING GIST (geom);
56 | CREATE INDEX idx_mta_route_id ON mta USING btree (route_id);
57 | ```
58 |
59 | ### spatial data
60 |
61 | In order to answer certain questions, we need additional geographic data from the MTA defining the bus route.
62 | The goal is to implement a traffic anomaly detection system capable of alerting stakeholders when any bus deviates from its scheduled route. This information could be used to detect traffic problems in near real time, which is useful to logistics companies, law enforcement, and transportation planners for identifying traffic situations or adjusting routing strategies.
63 |
64 | * Download the bus routes shapefile from http://web.mta.info/developers/developer-data-terms.html#data.
65 | * Import the MTA bus routes as LineString geometries:
66 |
67 | ```bash
68 | shp2pgsql -s 4326 "NYCT Bus Routes" public.bus_routes | psql -U mta -d mta -h localhost
69 | ```
70 |
71 | * Buffer each route line into a polygon, adding a small margin of error for point-location imprecision (+/- 0.0002 decimal degrees, or ~16 meters at 45N latitude):
72 |
73 | ```sql
74 | CREATE TABLE route_geofences AS
75 | SELECT route_id,
76 | St_buffer(St_collect(geom), 0.0002) AS geom
77 | FROM bus_routes
78 | GROUP BY route_id;
79 | ```
80 |
81 | ## Queries
82 |
83 | ### Example 1: Vehicles off-route in the last 15 minutes
84 |
85 | ```sql
86 | --
87 | -- Vehicles off-route in the last 15 minutes
88 | --
89 | SELECT
90 | bus.route_id,
91 | bus.time,
92 | bus.geom
93 | FROM
94 | route_geofences AS route
95 | JOIN mta AS bus
96 | ON (route.route_id = bus.route_id)
97 | WHERE
98 | bus.time > now() - interval '15 minutes'
99 | AND NOT
100 |   st_within(bus.geom, route.geom);
101 | ```
102 |
103 | The red dots are observations not within the route in yellow.
104 |
105 | 
106 |
107 | ### Example 2: What bus routes pass near 100 6th Ave each hour?
108 |
109 | ```sql
110 | --
111 | -- What bus routes pass near 100 6th Ave each hour?
112 | --
113 | SELECT
114 | time_bucket_gapfill(
115 | '1 hour',
116 | time,
117 | '2019-01-20 09:00',
118 | '2019-01-25 09:00') as bucket,
119 | array_agg(distinct route_id) as nearby_routes
120 | FROM
121 | mta
122 | WHERE
123 | time between '2019-01-20 09:00' and '2019-01-25 09:00'
124 | AND
125 | -- Search within ~160 meters of this building
126 | st_dwithin('SRID=4326;POINT(-74.00482 40.7233)', mta.geom, 0.002)
127 | GROUP BY
128 | bucket;
129 | ```
130 |
131 | ### Example 3: What is the hourly bus traffic on the M100 route?
132 |
133 | ```sql
134 | --
135 | -- What is the hourly bus traffic on the M100 route?
136 | --
137 | SELECT
138 | time_bucket_gapfill(
139 | '1 hour',
140 | time,
141 | '2019-01-23 09:00',
142 | '2019-01-25 09:00') as bucket,
143 | coalesce(count(distinct vid), 0) as n_unique_vehicles
144 | FROM
145 | mta
146 | WHERE
147 | time between '2019-01-23 09:00' and '2019-01-25 09:00'
148 | AND
149 | route_id = 'M100'
150 | GROUP BY
151 | bucket;
152 | ```
153 |
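154 | As an aside, `time_bucket_gapfill` pairs with `locf()` when you want empty buckets to carry the last observed value instead of `0`; an illustrative variant of Example 3:
155 | 
156 | ```sql
157 | SELECT
158 |   time_bucket_gapfill(
159 |     '1 hour',
160 |     time,
161 |     '2019-01-23 09:00',
162 |     '2019-01-25 09:00') as bucket,
163 |   locf(count(distinct vid)) as n_unique_vehicles
164 | FROM
165 |   mta
166 | WHERE
167 |   time between '2019-01-23 09:00' and '2019-01-25 09:00'
168 |   AND
169 |   route_id = 'M100'
170 | GROUP BY
171 |   bucket;
172 | ```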
--------------------------------------------------------------------------------
/compression-preview/src/components/chunk.js:
--------------------------------------------------------------------------------
1 | import React, { useState, useEffect } from 'react';
2 | import classNames from 'classnames';
3 | import useHover from '../hooks/useOnHover';
4 | import { useMutation, gql } from '@apollo/client';
5 |
6 | const COMPRESS_CHUNK = gql`
7 | mutation ($chunk: String!) {
8 | compress_chunk_named(args: { arg_1: $chunk }) {
9 | compress_chunk
10 | }
11 | }
12 | `;
13 |
14 | const DECOMPRESS_CHUNK = gql`
15 | mutation ($chunk: String!) {
16 | decompress_chunk_named(args: { arg_1: $chunk }) {
17 | compress_chunk
18 | }
19 | }
20 | `;
21 |
22 | function Chunk({
23 | after_compression_total_bytes,
24 | before_compression_total_bytes,
25 | biggestChunk,
26 | chunk_name,
27 | index,
28 | range_start,
29 | range_end,
30 | handleCardInfo,
31 | handleBiggestChunk,
32 | handleCompressingModal,
33 | screenDimensions,
34 | totalChunks,
35 | totalBytesUncompressed,
36 | }) {
37 | const [ref, hovered] = useHover();
38 |
39 | const [isCompressed, setIsCompressed] = useState(
40 | after_compression_total_bytes !== null
41 | );
42 |
43 | const [loadModal, setLoadModal] = useState(true);
44 |
45 | const [radioSize, setRadioSize] = useState(24);
46 |
47 | const [cardPosition, setCardPosition] = useState({});
48 |
49 | const [spreadFactor, setSpreadFactor] = useState(() => {
50 |     if (typeof window !== 'undefined') {
51 | const pixelsPerByte = (window.innerWidth * window.innerHeight) / totalBytesUncompressed;
52 | return Math.sqrt(pixelsPerByte) / totalChunks;
53 | }
54 | }
55 | );
56 |
57 | const [circlePosition, setCirclePosition] = useState({
58 | cx: 700,
59 | cy: 300,
60 | });
61 |
62 | const [mutation] = useMutation(
63 | isCompressed ? DECOMPRESS_CHUNK : COMPRESS_CHUNK
64 | );
65 |
66 | const circleClassNames = classNames(
67 | 'ts-compression__inner__chunks__cards-wrapper__card',
68 | {
69 | 'ts-compression__inner__chunks__cards-wrapper__card--compressed':
70 | isCompressed,
71 | 'ts-compression__inner__chunks__cards-wrapper__card--decompressed':
72 | !isCompressed,
73 | 'ts-compression__inner__chunks__cards-wrapper__card--hovered': hovered,
74 | }
75 | );
76 |
77 | const handleCirclePosition = () => {
78 | const squaredTotalChunks = Math.sqrt(totalChunks);
79 |
80 | const circlePosition = document.getElementById('chunks').getBoundingClientRect();
81 |
82 | const compensationRatio = circlePosition.width / circlePosition.height;
83 | const widthRatio = circlePosition.width / squaredTotalChunks;
84 | const heightRatio = compensationRatio * (circlePosition.height / squaredTotalChunks);
85 |
86 | const cx = 20 + ((widthRatio * ((index+1) % squaredTotalChunks)) * 0.97);
87 | const cy = 20 + ((heightRatio * ((index+1) / squaredTotalChunks)) * 0.5);
88 |
89 | setCirclePosition({ cx, cy});
90 | };
91 |
92 | const handleSpreadFactor = () =>
93 | setSpreadFactor(
94 |       typeof window !== 'undefined' &&
95 | Math.sqrt(
96 | (window.innerWidth * window.innerHeight) /
97 | totalBytesUncompressed
98 | ) / totalChunks
99 | );
100 |
101 | const handleClick = () => {
102 | setLoadModal(true);
103 | handleCompressingModal(true);
104 | mutation({ variables: { chunk: chunk_name } });
105 | };
106 |
107 | const getCardPosition = () => document.getElementById(chunk_name).getBoundingClientRect();
108 |
109 | useEffect(() => {
110 | setLoadModal(false);
111 | setIsCompressed(after_compression_total_bytes !== null);
112 | }, [after_compression_total_bytes, before_compression_total_bytes]);
113 |
114 | useEffect(() => {
115 | if (hovered)
116 | return handleCardInfo({
117 | chunk_name,
118 | before_compression_total_bytes,
119 | after_compression_total_bytes,
120 | range_start,
121 | range_end,
122 | cardPosition,
123 | });
124 | return handleCardInfo({});
125 | }, [hovered]);
126 |
127 | useEffect(() => {
128 | handleBiggestChunk({ chunk_name, before_compression_total_bytes });
129 | }, []);
130 |
131 | useEffect(() => {
132 | setRadioSize(() => {
133 | if (after_compression_total_bytes)
134 | return after_compression_total_bytes * spreadFactor;
135 | return before_compression_total_bytes * spreadFactor;
136 | });
137 | handleCirclePosition();
138 | setCardPosition(getCardPosition());
139 | }, [isCompressed, biggestChunk, totalChunks]);
140 |
141 | useEffect(() => {
142 | handleSpreadFactor();
143 | }, [totalBytesUncompressed]);
144 |
145 | useEffect(() => {
146 | handleCirclePosition();
147 | }, []);
148 |
149 | return (
150 | <>
151 |
161 | >
162 | );
163 | }
164 |
165 | export default Chunk;
166 |
--------------------------------------------------------------------------------
/crypto_tutorial/crypto_queries.sql:
--------------------------------------------------------------------------------
1 | -- crypto_queries.sql
2 | -- A set of questions and queries to run on a cryptocurrency market dataset
3 | -- Author: Avthar Sewrathan
4 | -- Timescale Inc
5 | -- 16 September 2019
6 |
7 | --Query 1
8 | -- How did Bitcoin price in USD vary over time?
9 | -- BTC 7 day prices
10 | SELECT time_bucket('7 days', time) as period,
11 | last(closing_price, time) AS last_closing_price
12 | FROM btc_prices
13 | WHERE currency_code = 'USD'
14 | GROUP BY period
15 | ORDER BY period
16 |
17 | --Query 2
18 | -- How did BTC daily returns vary over time?
19 | -- Which days had the worst and best returns?
20 | -- BTC daily return
21 | SELECT time,
22 | closing_price / lead(closing_price) over prices AS daily_factor
23 | FROM (
24 | SELECT time,
25 | closing_price
26 | FROM btc_prices
27 | WHERE currency_code = 'USD'
28 | GROUP BY 1,2
29 | ) sub window prices AS (ORDER BY time DESC)
30 |
31 | --Query 3
32 | -- How did the trading volume of Bitcoin vary over time in different fiat currencies?
33 | -- BTC volume in different fiat in 7 day intervals
34 | SELECT time_bucket('7 days', time) as period,
35 | currency_code,
36 | sum(volume_btc)
37 | FROM btc_prices
38 | GROUP BY currency_code, period
39 | ORDER BY period
40 |
41 | -- Q4
42 | -- How did Ethereum (ETH) price in BTC vary over time?
43 | -- ETH prices in BTC in 7 day intervals
44 | SELECT
45 | time_bucket('7 days', time) AS time_period,
46 | last(closing_price, time) AS closing_price_btc
47 | FROM crypto_prices
48 | WHERE currency_code='ETH'
49 | GROUP BY time_period
50 | ORDER BY time_period
51 |
52 | --Q5
53 | -- How did ETH prices, in different fiat currencies, vary over time?
54 | -- (using the BTC/Fiat exchange rate at the time)
55 | -- ETH prices in fiat
56 | SELECT time_bucket('7 days', c.time) AS time_period,
57 | last(c.closing_price, c.time) AS last_closing_price_in_btc,
58 | last(c.closing_price, c.time) * last(b.closing_price, c.time) FILTER (WHERE b.currency_code = 'USD') AS last_closing_price_in_usd,
59 | last(c.closing_price, c.time) * last(b.closing_price, c.time) FILTER (WHERE b.currency_code = 'EUR') AS last_closing_price_in_eur,
60 | last(c.closing_price, c.time) * last(b.closing_price, c.time) FILTER (WHERE b.currency_code = 'CNY') AS last_closing_price_in_cny,
61 | last(c.closing_price, c.time) * last(b.closing_price, c.time) FILTER (WHERE b.currency_code = 'JPY') AS last_closing_price_in_jpy,
62 | last(c.closing_price, c.time) * last(b.closing_price, c.time) FILTER (WHERE b.currency_code = 'KRW') AS last_closing_price_in_krw
63 | FROM crypto_prices c
64 | JOIN btc_prices b
65 | ON time_bucket('1 day', c.time) = time_bucket('1 day', b.time)
66 | WHERE c.currency_code = 'ETH'
67 | GROUP BY time_period
68 | ORDER BY time_period
69 |
70 | --Q6
71 | -- Which are the newest cryptocurrencies?
72 | -- Crypto by date of first data
73 | SELECT ci.currency_code, min(c.time)
74 | FROM currency_info ci JOIN crypto_prices c ON ci.currency_code = c.currency_code
75 | AND c.closing_price > 0
76 | GROUP BY ci.currency_code
77 | ORDER BY min(c.time) DESC
78 |
79 | --Q7
80 | -- Number of new cryptocurrencies by day
81 | -- Which days had the most new cryptocurrencies added?
82 | SELECT day, COUNT(code)
83 | FROM (
84 | SELECT min(c.time) AS day, ci.currency_code AS code
85 | FROM currency_info ci JOIN crypto_prices c ON ci.currency_code = c.currency_code
86 | AND c.closing_price > 0
87 | GROUP BY ci.currency_code
88 | ORDER BY min(c.time)
89 | )a
90 | GROUP BY day
91 | ORDER BY day DESC
92 |
93 |
94 | --Q8
95 | -- Which cryptocurrencies had the most transaction volume in the past 14 days?
96 | -- Crypto transaction volume during a certain time period
97 | SELECT 'BTC' as currency_code,
98 | sum(b.volume_currency) as total_volume_in_usd
99 | FROM btc_prices b
100 | WHERE b.currency_code = 'USD'
101 | AND now() - date(b.time) < INTERVAL '14 day'
102 | GROUP BY b.currency_code
103 | UNION
104 | SELECT c.currency_code as currency_code,
105 | sum(c.volume_btc) * avg(b.closing_price) as total_volume_in_usd
106 | FROM crypto_prices c JOIN btc_prices b ON date(c.time) = date(b.time)
107 | WHERE c.volume_btc > 0
108 | AND b.currency_code = 'USD'
109 | AND now() - date(b.time) < INTERVAL '14 day'
110 | AND now() - date(c.time) < INTERVAL '14 day'
111 | GROUP BY c.currency_code
112 | ORDER BY total_volume_in_usd DESC
113 |
114 | --Q9
115 | -- Which cryptocurrencies had the top daily return?
116 | -- Top crypto by daily return
117 | WITH
118 | prev_day_closing AS (
119 | SELECT
120 | currency_code,
121 | time,
122 | closing_price,
123 | LEAD(closing_price) OVER (PARTITION BY currency_code ORDER BY TIME DESC) AS prev_day_closing_price
124 | FROM
125 | crypto_prices
126 | )
127 | , daily_factor AS (
128 | SELECT
129 | currency_code,
130 | time,
131 | CASE WHEN prev_day_closing_price = 0 THEN 0 ELSE closing_price/prev_day_closing_price END AS daily_factor
132 | FROM
133 | prev_day_closing
134 | )
135 | SELECT
136 | time,
137 | LAST(currency_code, daily_factor) as currency_code,
138 | MAX(daily_factor) as max_daily_factor
139 | FROM
140 | daily_factor
141 | GROUP BY
142 | TIME
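143 | 
144 | -- Note (added for clarity): LAST(currency_code, daily_factor) uses TimescaleDB's
145 | -- last() ordered by daily_factor rather than by time, so it returns the currency
146 | -- whose daily_factor equals MAX(daily_factor) within each time group.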
--------------------------------------------------------------------------------
/air-quality/airquality_ingest.py:
--------------------------------------------------------------------------------
1 | """airquality_ingest.py
2 |
3 | Assumes the schema as specified in schema.sql.
4 | """
5 |
6 | import requests
7 | import time
8 | import datetime
9 | import psycopg2
10 | from pgcopy import CopyManager
11 |
12 | URL_MEASUREMENTS = "https://api.openaq.org/v1/measurements"
13 | # WARNING: in the real world, make this an environment variable
14 | CONNECTION = "postgres://[user]:[password]@192.168.99.101:5432/airquality"
15 | POLL_INTERVAL = 300
16 | # Make global dicts to cache meta data
17 | measurements_dict = {}
18 | locations_dict = {}
19 |
20 | # Make the actual API request
21 | def make_request(url, params):
22 | r = requests.get(url, params=params)
23 |     # If no valid response from the API, print error and retry
24 | while not r:
25 | print("API is not returning a 200 response")
26 | time.sleep(5)
27 | r = requests.get(url, params=params)
28 | return r
29 |
30 | # Force populate the cache
31 | def populate_cache(conn):
32 | try:
33 | cursor = conn.cursor()
34 | cursor.execute("SELECT * FROM measurement_types")
35 | for row in cursor.fetchall():
36 | measurements_dict[row[1]] = [row[0], row[2]]
37 | cursor.execute("SELECT * FROM locations")
38 | for row in cursor.fetchall():
39 | locations_dict[row[1]+' '+row[3]] = row[0]
40 | cursor.close()
41 | except (Exception, psycopg2.Error) as error:
42 | print("Error thrown while trying to populate cache")
43 | print(error)
44 | print("Finished populating cache")
45 |
46 | # Iterate through paginated API responses
47 | def iterate_request(url, params):
48 | r = make_request(url, params)
49 | meta = r.json()['meta']
50 | num_pages = int(meta['found'] / meta['limit'] + 1)
51 | dataset = r.json()['results']
52 | for i in range(2, num_pages+1):
53 | params['page'] = i
54 | r = make_request(url, params)
55 | print("Requesting page " + str(i) + " of " + str(num_pages) + " from API")
56 | dataset = dataset + r.json()['results']
57 | return dataset
58 |
59 | # Parse all measurements from dataset returned by iterate_request
60 | def parse_measurements(country_name, conn):
61 | url = URL_MEASUREMENTS
62 | params = {}
63 | params['limit'] = 10000
64 | params['country'] = country_name
65 | dataset = iterate_request(url, params)
66 | request = list()
67 | measurements_request = list()
68 | locations_request = list()
69 | for entry in dataset:
70 | parameter = entry['parameter']
71 | unit = entry['unit']
72 | parameter_id = 0
73 | measurements_cache_result = measurements_dict.get(parameter)
74 | # Measurement found in cached dict, so use the associated measurement_id
75 | if measurements_cache_result:
76 | parameter_id = measurements_cache_result[0]
77 | print("Found "+ str(parameter_id) +" in measurements cache")
78 | # Measurement not found in cached dict, so need to update metadata table
79 | else:
80 | measurements_request.append((parameter_id, parameter, unit))
81 | try:
82 | cursor = conn.cursor()
83 | cursor.execute("INSERT INTO measurement_types (parameter, unit) VALUES (%s, %s) ON CONFLICT DO NOTHING", (parameter, unit))
84 | conn.commit()
85 | cursor.execute("SELECT * FROM measurement_types WHERE parameter = %s AND unit = %s", (parameter, unit))
86 | parameter_id = cursor.fetchall()[0][0]
87 | cursor.close()
88 | measurements_dict[parameter] = [parameter_id, unit]
89 | print("Updated measurements cache")
90 | except (Exception, psycopg2.Error) as error:
91 | print(error.pgerror)
92 | city_name = entry['city']
93 | location_name = entry['location']
94 | city_and_location = city_name + ' ' + location_name
95 | location_id = 0
96 | locations_cache_result = locations_dict.get(city_and_location)
97 | # Location found in cached dict, so use the associated city_id
98 | if locations_cache_result:
99 | location_id = locations_cache_result
100 | print("Found "+ str(location_id) +" in locations cache")
101 | # Location not found in cached dict, so need to update metadata table
102 | else:
103 | try:
104 | cursor = conn.cursor()
105 | cursor.execute("INSERT INTO locations (city_name, location_name, country_name) VALUES (%s, %s, %s) ON CONFLICT DO NOTHING", (city_name, location_name, country_name))
106 | conn.commit()
107 | cursor.execute("SELECT * FROM locations WHERE city_name = %s AND location_name = %s AND country_name = %s", (city_name, location_name, country_name))
108 | location_id = cursor.fetchall()[0][0]
109 | cursor.close()
110 | locations_dict[city_and_location] = location_id
111 | print("Updated locations cache")
112 | except (Exception, psycopg2.Error) as error:
113 | print(error.pgerror)
114 | timestamp = datetime.datetime.strptime(entry['date']['utc'],'%Y-%m-%dT%H:%M:%S.%fZ')
115 | value = entry['value']
116 | request.append((timestamp,parameter_id,location_id, value))
117 | cols = ('time', 'parameter_id', 'location_id','value')
118 | mgr = CopyManager(conn, 'temp_measurements', cols)
119 | mgr.copy(request)
120 | try:
121 | cursor = conn.cursor()
122 | cursor.execute("INSERT INTO measurements SELECT * FROM temp_measurements ON CONFLICT DO NOTHING")
123 | cursor.execute("TRUNCATE temp_measurements CASCADE")
124 | cursor.close()
125 | except (Exception, psycopg2.Error) as error:
126 | print(error.pgerror)
127 | conn.commit()
128 |
129 | if __name__ == "__main__":
130 |     # Reset the metadata caches on startup
131 | measurements_dict = {}
132 | locations_dict = {}
133 | with psycopg2.connect(CONNECTION) as conn:
134 |         # Populate the caches from the existing metadata tables
135 | populate_cache(conn)
136 | while True:
137 | parse_measurements('GB', conn)
138 | print("SLEEPING FOR " + str(POLL_INTERVAL))
139 | time.sleep(POLL_INTERVAL)
140 |
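141 | # As the warning above notes, in production read the connection string from the
142 | # environment rather than hard-coding it, e.g. (variable name is illustrative):
143 | #   import os
144 | #   CONNECTION = os.environ["AIRQUALITY_CONNECTION"]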
--------------------------------------------------------------------------------
/analyze-intraday-stocks/explore.py:
--------------------------------------------------------------------------------
1 | import plotly.express as px
2 | import pandas as pd
3 | import psycopg2
4 | import config
5 |
6 | conn = psycopg2.connect(database=config.DB_NAME,
7 | host=config.DB_HOST,
8 | user=config.DB_USER,
9 | password=config.DB_PASS,
10 | port=config.DB_PORT)
11 |
12 |
13 | # Which symbols have the highest transaction volumes?
14 | def query1():
15 | query = """
16 | SELECT symbol, sum(trading_volume) AS volume
17 | FROM stocks_intraday
18 |     WHERE time > now() - INTERVAL '{bucket}'
19 | GROUP BY symbol
20 | ORDER BY volume DESC
21 | LIMIT 5
22 | """.format(bucket="14 day")
23 | df = pd.read_sql(query, conn)
24 | fig = px.bar(df, x='symbol', y='volume', title="Most traded symbols in the last 14 days")
25 | fig.show()
26 |
27 | # How did Apple's trading volume change over time?
28 | def query2():
29 | query = """
30 | SELECT time_bucket('{bucket}', time) AS bucket, sum(trading_volume) AS volume
31 | FROM stocks_intraday
32 | WHERE symbol = '{symbol}'
33 | GROUP BY bucket
34 | ORDER BY bucket
35 | """.format(bucket="1 day", symbol="AAPL")
36 | df = pd.read_sql(query, conn)
37 | fig = px.line(df, x='bucket', y='volume', title="Apple's daily trading volume over time")
38 | fig.show()
39 |
40 | # How did Apple's stock price change over time?
41 | def query3():
42 | query = """
43 | SELECT time_bucket('{bucket}', time) AS bucket,
44 | last(price_close, time) AS last_closing_price
45 | FROM stocks_intraday
46 | WHERE symbol = '{symbol}'
47 | GROUP BY bucket
48 | ORDER BY bucket
49 | """.format(bucket="7 days", symbol="AAPL")
50 | df = pd.read_sql(query, conn)
51 | fig = px.line(df, x='bucket', y='last_closing_price')
52 | fig.show()
53 |
54 | # Which symbols had the highest weekly gains?
55 | def query4():
56 | query = """
57 | SELECT symbol, bucket, max((closing_price-opening_price)/closing_price*100) AS price_change_pct
58 | FROM (
59 | SELECT
60 | symbol,
61 | time_bucket('{bucket}', time) AS bucket,
62 | first(price_open, time) AS opening_price,
63 | last(price_close, time) AS closing_price
64 | FROM stocks_intraday
65 | GROUP BY bucket, symbol
66 | ) s
67 | GROUP BY symbol, s.bucket
68 | ORDER BY price_change_pct {orderby}
69 | LIMIT 5
70 | """.format(bucket="7 days", orderby="DESC")
71 | df = pd.read_sql(query, conn)
72 | print(df)
73 |
74 | # Weekly FAANG prices over time?
75 | def query5():
76 | query = """
77 | SELECT symbol, time_bucket('{bucket}', time) AS bucket,
78 | last(price_close, time) AS last_closing_price
79 | FROM stocks_intraday
80 | WHERE symbol in {symbols}
81 | GROUP BY bucket, symbol
82 | ORDER BY bucket
83 | """.format(bucket="7 days", symbols="('AAPL', 'FB', 'AMZN', 'NFLX', 'GOOG')")
84 | df = pd.read_sql(query, conn)
85 | fig = px.line(df, x='bucket', y='last_closing_price', color='symbol', title="FAANG prices over time")
86 | fig.show()
87 |
88 | # Weekly price changes of Apple, Facebook, Google?
89 | def query6():
90 | query = """
91 | SELECT symbol, bucket, max((closing_price-opening_price)/closing_price) AS price_change_pct
92 | FROM (
93 | SELECT
94 | symbol,
95 |                 time_bucket('{bucket}', time) AS bucket,
96 | first(price_open, time) AS opening_price,
97 | last(price_close, time) AS closing_price
98 | FROM stocks_intraday
99 | WHERE symbol IN {symbols}
100 | GROUP BY bucket, symbol
101 | ) s
102 | GROUP BY symbol, s.bucket
103 | ORDER BY bucket
104 | """.format(bucket="7 days", symbols="('AAPL', 'FB', 'GOOG')")
105 | df = pd.read_sql(query, conn)
106 | figure = px.line(df, x="bucket", y="price_change_pct", color="symbol", title="Apple, Facebook, Google weekly price changes")
107 | figure = figure.update_layout(yaxis={'tickformat': '.2%'})
108 | figure.show()
109 |
110 | # Distribution of daily price changes of Amazon and Zoom
111 | def query7():
112 | query = """
113 | SELECT symbol, bucket, max((closing_price-opening_price)/closing_price) AS price_change_pct
114 | FROM (
115 | SELECT
116 | symbol,
117 | time_bucket('{bucket}', time) AS bucket,
118 | first(price_open, time) AS opening_price,
119 | last(price_close, time) AS closing_price
120 | FROM stocks_intraday
121 | WHERE symbol IN {symbols}
122 | GROUP BY bucket, symbol
123 | ) s
124 | GROUP BY symbol, s.bucket
125 | ORDER BY bucket
126 | """.format(bucket="1 day", symbols="('ZM', 'AMZN')")
127 | df = pd.read_sql(query, conn)
128 | figure = px.scatter(df, x="price_change_pct", color="symbol", title="Distribution of daily price changes (Amazon, Zoom)")
129 | figure = figure.update_layout(xaxis={'tickformat': '.2%'})
130 | figure.show()
131 |
132 | # Apple 15-min candlestick chart
133 | def query8():
134 | import plotly.graph_objects as go
135 | query = """
136 | SELECT time_bucket('{bucket}', time) AS bucket,
137 | FIRST(price_open, time) AS price_open,
138 | LAST(price_close, time) AS price_close,
139 | MAX(price_high) AS price_high,
140 | MIN(price_low) AS price_low
141 | FROM stocks_intraday
142 | WHERE symbol = '{symbol}' AND date(time) = date('{date}')
143 | GROUP BY bucket
144 | """.format(bucket="15 min", symbol="AAPL", date="2021-06-09")
145 | df = pd.read_sql(query, conn)
146 | figure = go.Figure(data=[go.Candlestick(x=df['bucket'],
147 | open=df['price_open'],
148 | high=df['price_high'],
149 | low=df['price_low'],
150 | close=df['price_close'],)])
151 | figure.update_layout(title="15-min candlestick chart of Apple, 2021-06-09")
152 | figure.show()
153 |
154 |
155 | def main():
156 | query1()
157 | #query2()
158 | #query3()
159 | #query4()
160 | #query5()
161 | #query6()
162 | #query7()
163 | #query8()
164 |
165 | if __name__=='__main__':
166 | main()
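167 | 
168 | # Note: these queries interpolate constants via str.format, which is fine for a
169 | # demo. With untrusted input, prefer parameter binding, e.g. (illustrative):
170 | #   pd.read_sql("SELECT * FROM stocks_intraday WHERE symbol = %(sym)s", conn,
171 | #               params={"sym": "AAPL"})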
--------------------------------------------------------------------------------
/pi-light/grafana.json:
--------------------------------------------------------------------------------
1 | {
2 | "annotations": {
3 | "list": [
4 | {
5 | "builtIn": 1,
6 | "datasource": "-- Grafana --",
7 | "enable": true,
8 | "hide": true,
9 | "iconColor": "rgba(0, 211, 255, 1)",
10 | "name": "Annotations & Alerts",
11 | "type": "dashboard"
12 | }
13 | ]
14 | },
15 | "editable": true,
16 | "gnetId": null,
17 | "graphTooltip": 0,
18 | "id": 2,
19 | "links": [],
20 | "panels": [
21 | {
22 | "alert": {
23 | "conditions": [
24 | {
25 | "evaluator": {
26 | "params": [
27 | 10,
28 | 512
29 | ],
30 | "type": "outside_range"
31 | },
32 | "operator": {
33 | "type": "and"
34 | },
35 | "query": {
36 | "params": [
37 | "A",
38 | "4s",
39 | "now"
40 | ]
41 | },
42 | "reducer": {
43 | "params": [],
44 | "type": "avg"
45 | },
46 | "type": "query"
47 | }
48 | ],
49 | "executionErrorState": "alerting",
50 | "for": "14s",
51 | "frequency": "2s",
52 | "handler": 1,
53 | "message": "Hey, it's getting dark in here.",
54 | "name": "Photoresistor, Raspberry Pi alert",
55 | "noDataState": "alerting",
56 | "notifications": [
57 | {
58 | "uid": "264vOAWWz"
59 | },
60 | {
61 | "uid": "_J32ALZWk"
62 | }
63 | ]
64 | },
65 | "aliasColors": {},
66 | "bars": false,
67 | "dashLength": 10,
68 | "dashes": false,
69 | "datasource": "Timescale Cloud",
70 | "decimals": 0,
71 | "description": "",
72 | "fill": 1,
73 | "gridPos": {
74 | "h": 14,
75 | "w": 24,
76 | "x": 0,
77 | "y": 0
78 | },
79 | "id": 2,
80 | "interval": "2s",
81 | "legend": {
82 | "avg": true,
83 | "current": true,
84 | "max": true,
85 | "min": true,
86 | "rightSide": false,
87 | "show": true,
88 | "total": false,
89 | "values": true
90 | },
91 | "lines": true,
92 | "linewidth": 1,
93 | "links": [],
94 | "nullPointMode": "null",
95 | "percentage": false,
96 | "pointradius": 1,
97 | "points": false,
98 | "renderer": "flot",
99 | "seriesOverrides": [],
100 | "spaceLength": 10,
101 | "stack": false,
102 | "steppedLine": true,
103 | "targets": [
104 | {
105 | "format": "time_series",
106 | "group": [
107 | {
108 | "params": [
109 | "$__interval",
110 | "none"
111 | ],
112 | "type": "time"
113 | }
114 | ],
115 | "metricColumn": "metric",
116 | "rawQuery": false,
117 | "rawSql": "SELECT\n $__timeGroupAlias(\"time\",$__interval),\n metric AS metric,\n avg(value) AS \"value\"\nFROM pi_obs\nWHERE\n $__timeFilter(\"time\")\nGROUP BY 1,2\nORDER BY 1,2",
118 | "refId": "A",
119 | "select": [
120 | [
121 | {
122 | "params": [
123 | "value"
124 | ],
125 | "type": "column"
126 | },
127 | {
128 | "params": [
129 | "avg"
130 | ],
131 | "type": "aggregate"
132 | },
133 | {
134 | "params": [
135 | "value"
136 | ],
137 | "type": "alias"
138 | }
139 | ]
140 | ],
141 | "table": "pi_obs",
142 | "timeColumn": "\"time\"",
143 | "timeColumnType": "timestamptz",
144 | "where": [
145 | {
146 | "name": "$__timeFilter",
147 | "params": [],
148 | "type": "macro"
149 | }
150 | ]
151 | }
152 | ],
153 | "thresholds": [
154 | {
155 | "colorMode": "critical",
156 | "fill": true,
157 | "line": true,
158 | "op": "lt",
159 | "value": 10
160 | },
161 | {
162 | "colorMode": "critical",
163 | "fill": true,
164 | "line": true,
165 | "op": "gt",
166 | "value": 512
167 | }
168 | ],
169 | "timeFrom": null,
170 | "timeRegions": [],
171 | "timeShift": null,
172 | "title": "Photoresistor, Raspberry Pi",
173 | "tooltip": {
174 | "shared": true,
175 | "sort": 0,
176 | "value_type": "individual"
177 | },
178 | "type": "graph",
179 | "xaxis": {
180 | "buckets": null,
181 | "mode": "time",
182 | "name": null,
183 | "show": true,
184 | "values": []
185 | },
186 | "yaxes": [
187 | {
188 | "format": "short",
189 | "label": null,
190 | "logBase": 10,
191 | "max": null,
192 | "min": "8",
193 | "show": true
194 | },
195 | {
196 | "format": "short",
197 | "label": null,
198 | "logBase": 1,
199 | "max": null,
200 | "min": null,
201 | "show": true
202 | }
203 | ],
204 | "yaxis": {
205 | "align": false,
206 | "alignLevel": null
207 | }
208 | }
209 | ],
210 | "refresh": "2s",
211 | "schemaVersion": 18,
212 | "style": "dark",
213 | "tags": [],
214 | "templating": {
215 | "list": []
216 | },
217 | "time": {
218 | "from": "now-5m",
219 | "to": "now"
220 | },
221 | "timepicker": {
222 | "refresh_intervals": [
223 | "2s",
224 | "5s",
225 | "10s",
226 | "30s",
227 | "1m",
228 | "5m",
229 | "15m",
230 | "30m",
231 | "1h",
232 | "2h",
233 | "1d"
234 | ],
235 | "time_options": [
236 | "5m",
237 | "15m",
238 | "1h",
239 | "6h",
240 | "12h",
241 | "24h",
242 | "2d",
243 | "7d",
244 | "30d"
245 | ]
246 | },
247 | "timezone": "",
248 | "title": "Raspberry Pi -> Timescale Cloud -> Grafana",
249 | "uid": "iOzzdAWWk",
250 | "version": 35
251 | }
252 |
--------------------------------------------------------------------------------
/crypto_tutorial/crypto_data_extraction.py:
--------------------------------------------------------------------------------
1 | ###################################################################
2 | # crypto_data_extraction.py
3 | # Script to construct a CSV data set of historical crypto prices
4 | # using data from cryptocompare.com
5 | # Author: Avthar Sewrathan
6 | # Timescale Inc
7 | # 20 August 2019
8 | ###################################################################
9 | # The author apologizes in advance for any bad style practices.
10 | # Extraneous statements have been added to help users of this script
11 | # visualize the progress of constructing the CSVs
12 | ###################################################################
13 | import urllib.request
14 | import json
15 | import csv
16 | from datetime import datetime
17 | import argparse
18 |
19 | # accepting api key as a command line argument
20 | # run this script with an argument:
21 | # python crypto_data_extraction.py -a my_api_key
22 | # or
23 | # python crypto_data_extraction.py --apikey my_api_key
24 | parser = argparse.ArgumentParser(description='Api key',
25 | formatter_class=argparse.ArgumentDefaultsHelpFormatter)
26 | parser.add_argument('-a', '--apikey', help='valid api key', required=True)
27 | args = parser.parse_args()
28 | config = vars(args)
29 | apikey = config['apikey']  # argparse's required=True guarantees this is set
30 |
31 | # If you don't provide an `apikey` argument when running the script,
32 | # replace and uncomment this string below with your cryptocompare API Key or
33 | # store it as an environment variable
34 | # Note: the script will not run properly if you do not use your own valid API key
35 | # apikey = 'YOUR_CRYPTO_COMPARE_API_KEY'
36 |
37 | #attach to end of URLstring
38 | url_api_part = '&api_key=' + apikey
39 |
40 | #####################################################################
41 | #1. Populate list of all coin names
42 | #####################################################################
43 | #URL to get a list of coins from cryptocompare API
44 | URLcoinslist = 'https://min-api.cryptocompare.com/data/all/coinlist'
45 |
46 | #Get list of cryptos with their symbols
47 | with urllib.request.urlopen(URLcoinslist) as response:
48 | res1 = response.read()
49 | res1_json = json.loads(res1.decode('utf-8'))
50 | data1 = res1_json['Data']
51 | symbol_array = []
52 | cryptoDict = dict(data1)
53 |
54 | #write to CSV
55 | with open('coin_names.csv', mode = 'w') as test_file:
56 | test_file_writer = csv.writer(test_file, delimiter = ',', quotechar = '"', quoting=csv.QUOTE_MINIMAL)
57 | for coin in cryptoDict.values():
58 | name = coin['Name']
59 | symbol = coin['Symbol']
60 | symbol_array.append(symbol)
61 | coin_name = coin['CoinName']
62 | full_name = coin['FullName']
63 | entry = [symbol, coin_name]
64 | test_file_writer.writerow(entry)
65 | print('Done getting crypto names and symbols. See coin_names.csv for result')
66 |
67 | #####################################################################
68 | #2. Populate historical price for each crypto in BTC
69 | #####################################################################
70 | #Note: this part might take a while to run since we're populating data for 4k+ coins
71 | #counter variable for progress made
72 | progress = 0
73 | num_cryptos = str(len(symbol_array))
74 | for symbol in symbol_array:
75 | # get data for that currency
76 | URL = 'https://min-api.cryptocompare.com/data/histoday?fsym='+ symbol +'&tsym=BTC&allData=true' + url_api_part
77 | with urllib.request.urlopen(URL) as response:
78 | res = response.read()
79 | res_json = json.loads(res.decode('utf-8'))
80 | data = res_json['Data']
81 | # write required fields into csv
82 | with open('crypto_prices.csv', mode = 'a') as test_file:
83 | test_file_writer = csv.writer(test_file, delimiter = ',', quotechar = '"', quoting=csv.QUOTE_MINIMAL)
84 | for day in data:
85 | rawts = day['time']
86 | ts = datetime.utcfromtimestamp(rawts).strftime('%Y-%m-%d %H:%M:%S')
87 | o = day['open']
88 | h = day['high']
89 | l = day['low']
90 | c = day['close']
91 | vfrom = day['volumefrom']
92 | vto = day['volumeto']
93 | entry = [ts, o, h, l, c, vfrom, vto, symbol]
94 | test_file_writer.writerow(entry)
95 | progress = progress + 1
96 | print('Processed ' + str(symbol))
97 | print(str(progress) + ' currencies out of ' + num_cryptos + ' written to csv')
98 | print('Done getting price data for all coins. See crypto_prices.csv for result')
99 |
100 | #####################################################################
101 | #3. Populate BTC prices in different fiat currencies
102 | #####################################################################
103 | # List of fiat currencies we want to query
104 | # You can expand this list, but CryptoCompare does not have
105 | # a comprehensive fiat list on their site
106 | fiatList = ['AUD', 'CAD', 'CNY', 'EUR', 'GBP', 'GOLD', 'HKD',
107 | 'ILS', 'INR', 'JPY', 'KRW', 'PLN', 'RUB', 'SGD', 'UAH', 'USD', 'ZAR']
108 |
109 | #counter variable for progress made
110 | progress2 = 0
111 | for fiat in fiatList:
112 | # get data for bitcoin price in that fiat
113 | URL = 'https://min-api.cryptocompare.com/data/histoday?fsym=BTC&tsym='+fiat+'&allData=true' + url_api_part
114 | with urllib.request.urlopen(URL) as response:
115 | res = response.read()
116 | res_json = json.loads(res.decode('utf-8'))
117 | data = res_json['Data']
118 | # write required fields into csv
119 | with open('btc_prices.csv', mode = 'a') as test_file:
120 | test_file_writer = csv.writer(test_file, delimiter = ',', quotechar = '"', quoting=csv.QUOTE_MINIMAL)
121 | for day in data:
122 | rawts = day['time']
123 | ts = datetime.utcfromtimestamp(rawts).strftime('%Y-%m-%d %H:%M:%S')
124 | o = day['open']
125 | h = day['high']
126 | l = day['low']
127 | c = day['close']
128 | vfrom = day['volumefrom']
129 | vto = day['volumeto']
130 | entry = [ts, o, h, l, c, vfrom, vto, fiat]
131 | test_file_writer.writerow(entry)
132 | progress2 = progress2 + 1
133 | print('processed ' + str(fiat))
134 | print(str(progress2) + ' currencies out of 17 written')
135 | print('Done getting price data for btc. See btc_prices.csv for result')
136 |
137 | #####################################################################
138 | #4. Populate ETH prices in different fiat currencies
139 | #####################################################################
140 | #counter variable for progress made
141 | progress3 = 0
142 | for fiat in fiatList:
143 |     # get data for the ETH price in that fiat
144 | URL = 'https://min-api.cryptocompare.com/data/histoday?fsym=ETH&tsym='+fiat+'&allData=true' + url_api_part
145 | with urllib.request.urlopen(URL) as response:
146 | res = response.read()
147 | res_json = json.loads(res.decode('utf-8'))
148 | data = res_json['Data']
149 | # write required fields into csv
150 | with open('eth_prices.csv', mode = 'a') as test_file:
151 | test_file_writer = csv.writer(test_file, delimiter = ',', quotechar = '"', quoting=csv.QUOTE_MINIMAL)
152 | for day in data:
153 | rawts = day['time']
154 | ts = datetime.utcfromtimestamp(rawts).strftime('%Y-%m-%d %H:%M:%S')
155 | o = day['open']
156 | h = day['high']
157 | l = day['low']
158 | c = day['close']
159 | vfrom = day['volumefrom']
160 | vto = day['volumeto']
161 | entry = [ts, o, h, l, c, vfrom, vto, fiat]
162 | test_file_writer.writerow(entry)
163 | progress3 = progress3 + 1
164 | print('processed ' + str(fiat))
165 | print(str(progress3) + ' currencies out of 17 written')
166 | print('Done getting price data for eth. See eth_prices.csv for result')
167 |
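168 | # Note: sections 2-4 open their CSVs in append mode ('a'), so re-running the
169 | # script appends duplicate rows. Delete crypto_prices.csv, btc_prices.csv and
170 | # eth_prices.csv before a fresh run.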
--------------------------------------------------------------------------------
/compression-preview/src/styles/_normalize.scss:
--------------------------------------------------------------------------------
1 | /*! normalize.css v8.0.1 | MIT License | github.com/necolas/normalize.css */
2 |
3 | /* Document
4 | ========================================================================== */
5 |
6 | /**
7 | * 1. Correct the line height in all browsers.
8 | * 2. Prevent adjustments of font size after orientation changes in iOS.
9 | */
10 |
11 | html {
12 | line-height: 1.15; /* 1 */
13 | -webkit-text-size-adjust: 100%; /* 2 */
14 | }
15 |
16 | /* Sections
17 | ========================================================================== */
18 |
19 | /**
20 | * Remove the margin in all browsers.
21 | */
22 |
23 | body {
24 | margin: 0;
25 |
26 | * {
27 | box-sizing: border-box;
28 | -webkit-font-smoothing: antialiased;
29 | -moz-osx-font-smoothing: grayscale;
30 | }
31 | }
32 |
33 | /**
34 | * Render the `main` element consistently in IE.
35 | */
36 |
37 | main {
38 | display: block;
39 | }
40 |
41 | /**
42 | * Correct the font size and margin on `h1` elements within `section` and
43 | * `article` contexts in Chrome, Firefox, and Safari.
44 | */
45 |
46 | h1 {
47 | font-size: 2em;
48 | margin: 0.67em 0;
49 | }
50 |
51 | /* Grouping content
52 | ========================================================================== */
53 |
54 | /**
55 | * 1. Add the correct box sizing in Firefox.
56 | * 2. Show the overflow in Edge and IE.
57 | */
58 |
59 | hr {
60 | box-sizing: content-box; /* 1 */
61 | height: 0; /* 1 */
62 | overflow: visible; /* 2 */
63 | }
64 |
65 | /**
66 | * 1. Correct the inheritance and scaling of font size in all browsers.
67 | * 2. Correct the odd `em` font sizing in all browsers.
68 | */
69 |
70 | pre {
71 | font-family: monospace, monospace; /* 1 */
72 | font-size: 1em; /* 2 */
73 | }
74 |
75 | /* Text-level semantics
76 | ========================================================================== */
77 |
78 | /**
79 | * Remove the gray background on active links in IE 10.
80 | */
81 |
82 | a {
83 | background-color: transparent;
84 | }
85 |
86 | /**
87 | * 1. Remove the bottom border in Chrome 57-
88 | * 2. Add the correct text decoration in Chrome, Edge, IE, Opera, and Safari.
89 | */
90 |
91 | abbr[title] {
92 | border-bottom: none; /* 1 */
93 | text-decoration: underline; /* 2 */
94 | text-decoration: underline dotted; /* 2 */
95 | }
96 |
97 | /**
98 | * Add the correct font weight in Chrome, Edge, and Safari.
99 | */
100 |
101 | b,
102 | strong {
103 | font-weight: bolder;
104 | }
105 |
106 | /**
107 | * 1. Correct the inheritance and scaling of font size in all browsers.
108 | * 2. Correct the odd `em` font sizing in all browsers.
109 | */
110 |
111 | code,
112 | kbd,
113 | samp {
114 | font-family: monospace, monospace; /* 1 */
115 | font-size: 1em; /* 2 */
116 | }
117 |
118 | /**
119 | * Add the correct font size in all browsers.
120 | */
121 |
122 | small {
123 | font-size: 80%;
124 | }
125 |
126 | /**
127 | * Prevent `sub` and `sup` elements from affecting the line height in
128 | * all browsers.
129 | */
130 |
131 | sub,
132 | sup {
133 | font-size: 75%;
134 | line-height: 0;
135 | position: relative;
136 | vertical-align: baseline;
137 | }
138 |
139 | sub {
140 | bottom: -0.25em;
141 | }
142 |
143 | sup {
144 | top: -0.5em;
145 | }
146 |
147 | /* Embedded content
148 | ========================================================================== */
149 |
150 | /**
151 | * Remove the border on images inside links in IE 10.
152 | */
153 |
154 | img {
155 | border-style: none;
156 | }
157 |
158 | /* Forms
159 | ========================================================================== */
160 |
161 | /**
162 | * 1. Change the font styles in all browsers.
163 | * 2. Remove the margin in Firefox and Safari.
164 | */
165 |
166 | button,
167 | input,
168 | optgroup,
169 | select,
170 | textarea {
171 | font-family: inherit; /* 1 */
172 | font-size: 100%; /* 1 */
173 | line-height: 1.15; /* 1 */
174 | margin: 0; /* 2 */
175 | }
176 |
177 | /**
178 | * Show the overflow in IE.
179 | * 1. Show the overflow in Edge.
180 | */
181 |
182 | button,
183 | input { /* 1 */
184 | overflow: visible;
185 | }
186 |
187 | /**
188 | * Remove the inheritance of text transform in Edge, Firefox, and IE.
189 | * 1. Remove the inheritance of text transform in Firefox.
190 | */
191 |
192 | button,
193 | select { /* 1 */
194 | text-transform: none;
195 | }
196 |
197 | /**
198 | * Correct the inability to style clickable types in iOS and Safari.
199 | */
200 |
201 | button,
202 | [type="button"],
203 | [type="reset"],
204 | [type="submit"] {
205 | -webkit-appearance: button;
206 | }
207 |
208 | /**
209 | * Remove the inner border and padding in Firefox.
210 | */
211 |
212 | button::-moz-focus-inner,
213 | [type="button"]::-moz-focus-inner,
214 | [type="reset"]::-moz-focus-inner,
215 | [type="submit"]::-moz-focus-inner {
216 | border-style: none;
217 | padding: 0;
218 | }
219 |
220 | /**
221 | * Restore the focus styles unset by the previous rule.
222 | */
223 |
224 | button:-moz-focusring,
225 | [type="button"]:-moz-focusring,
226 | [type="reset"]:-moz-focusring,
227 | [type="submit"]:-moz-focusring {
228 | outline: 1px dotted ButtonText;
229 | }
230 |
231 | /**
232 | * Correct the padding in Firefox.
233 | */
234 |
235 | fieldset {
236 | padding: 0.35em 0.75em 0.625em;
237 | }
238 |
239 | /**
240 | * 1. Correct the text wrapping in Edge and IE.
241 | * 2. Correct the color inheritance from `fieldset` elements in IE.
242 | * 3. Remove the padding so developers are not caught out when they zero out
243 | * `fieldset` elements in all browsers.
244 | */
245 |
246 | legend {
247 | box-sizing: border-box; /* 1 */
248 | color: inherit; /* 2 */
249 | display: table; /* 1 */
250 | max-width: 100%; /* 1 */
251 | padding: 0; /* 3 */
252 | white-space: normal; /* 1 */
253 | }
254 |
255 | /**
256 | * Add the correct vertical alignment in Chrome, Firefox, and Opera.
257 | */
258 |
259 | progress {
260 | vertical-align: baseline;
261 | }
262 |
263 | /**
264 | * Remove the default vertical scrollbar in IE 10+.
265 | */
266 |
267 | textarea {
268 | overflow: auto;
269 | }
270 |
271 | /**
272 | * 1. Add the correct box sizing in IE 10.
273 | * 2. Remove the padding in IE 10.
274 | */
275 |
276 | [type="checkbox"],
277 | [type="radio"] {
278 | box-sizing: border-box; /* 1 */
279 | padding: 0; /* 2 */
280 | }
281 |
282 | /**
283 | * Correct the cursor style of increment and decrement buttons in Chrome.
284 | */
285 |
286 | [type="number"]::-webkit-inner-spin-button,
287 | [type="number"]::-webkit-outer-spin-button {
288 | height: auto;
289 | }
290 |
291 | /**
292 | * 1. Correct the odd appearance in Chrome and Safari.
293 | * 2. Correct the outline style in Safari.
294 | */
295 |
296 | [type="search"] {
297 | -webkit-appearance: textfield; /* 1 */
298 | outline-offset: -2px; /* 2 */
299 | }
300 |
301 | /**
302 | * Remove the inner padding in Chrome and Safari on macOS.
303 | */
304 |
305 | [type="search"]::-webkit-search-decoration {
306 | -webkit-appearance: none;
307 | }
308 |
309 | /**
310 | * 1. Correct the inability to style clickable types in iOS and Safari.
311 | * 2. Change font properties to `inherit` in Safari.
312 | */
313 |
314 | ::-webkit-file-upload-button {
315 | -webkit-appearance: button; /* 1 */
316 | font: inherit; /* 2 */
317 | }
318 |
319 | /* Interactive
320 | ========================================================================== */
321 |
322 | /*
323 | * Add the correct display in Edge, IE 10+, and Firefox.
324 | */
325 |
326 | details {
327 | display: block;
328 | }
329 |
330 | /*
331 | * Add the correct display in all browsers.
332 | */
333 |
334 | summary {
335 | display: list-item;
336 | }
337 |
338 | /* Misc
339 | ========================================================================== */
340 |
341 | /**
342 | * Add the correct display in IE 10+.
343 | */
344 |
345 | template {
346 | display: none;
347 | }
348 |
349 | /**
350 | * Add the correct display in IE 10.
351 | */
352 |
353 | [hidden] {
354 | display: none;
355 | }
356 | h1, h2, h3, h4, h5, h6, p {
357 | margin-top: 0;
358 | }
359 |
--------------------------------------------------------------------------------
/grafana-guide/variables/after_variable.json:
--------------------------------------------------------------------------------
1 | {
2 | "annotations": {
3 | "list": [
4 | {
5 | "builtIn": 1,
6 | "datasource": "-- Grafana --",
7 | "enable": true,
8 | "hide": true,
9 | "iconColor": "rgba(0, 211, 255, 1)",
10 | "name": "Annotations & Alerts",
11 | "type": "dashboard"
12 | }
13 | ]
14 | },
15 | "editable": true,
16 | "gnetId": null,
17 | "graphTooltip": 0,
18 | "id": 7,
19 | "iteration": 1586991162850,
20 | "links": [],
21 | "panels": [
22 | {
23 | "circleMaxSize": "1",
24 | "circleMinSize": "2",
25 | "colors": [
26 | "#F2495C",
27 | "#5794F2",
28 | "#FADE2A",
29 | "#B877D9",
30 | "#FF9830"
31 | ],
32 | "datasource": "MTA Bus DB",
33 | "decimals": 0,
34 | "esMetric": "Count",
35 | "gridPos": {
36 | "h": 15,
37 | "w": 12,
38 | "x": 0,
39 | "y": 0
40 | },
41 | "hideEmpty": false,
42 | "hideZero": false,
43 | "id": 15,
44 | "initialZoom": "11",
45 | "locationData": "table",
46 | "mapCenter": "custom",
47 | "mapCenterLatitude": "40.879070000000056",
48 | "mapCenterLongitude": "-73.88537600000012",
49 | "maxDataPoints": 1,
50 | "mouseWheelZoom": true,
51 | "options": {},
52 | "showLegend": true,
53 | "stickyLabels": false,
54 | "tableQueryOptions": {
55 | "geohashField": "geohash",
56 | "labelField": "route_id",
57 | "latitudeField": "latitude",
58 | "longitudeField": "longitude",
59 | "metricField": "color",
60 | "queryType": "coordinates"
61 | },
62 | "targets": [
63 | {
64 | "format": "table",
65 | "group": [],
66 | "metricColumn": "none",
67 | "rawQuery": true,
68 | "rawSql": "SELECT\n max(time) as \"time\",\n vid AS \"vehicle_id\",\n route_id,\n CASE WHEN route_id like 'M%' THEN 1\n WHEN route_id like 'B%' THEN 2\n WHEN route_id like 'Q%' THEN 3\n WHEN route_id like 'S%' THEN 4\n ELSE 0\n END AS \"color\",\n ST_X(geom) AS \"longitude\",\n ST_Y(geom) AS \"latitude\"\nFROM mta where time > now()-interval '1.5 minutes' AND substring(route_id,1,1) IN ($route)\ngroup by vid, route_id, geom order by 1;",
69 | "refId": "A",
70 | "select": [
71 | [
72 | {
73 | "params": [
74 | "value"
75 | ],
76 | "type": "column"
77 | }
78 | ]
79 | ],
80 | "timeColumn": "time",
81 | "where": [
82 | {
83 | "name": "$__timeFilter",
84 | "params": [],
85 | "type": "macro"
86 | }
87 | ]
88 | }
89 | ],
90 | "thresholds": "1,2,3,4",
91 | "timeFrom": null,
92 | "timeShift": null,
93 | "title": "Current MTA Bus Locations (with filter)",
94 | "type": "grafana-worldmap-panel",
95 | "unitPlural": "",
96 | "unitSingle": "",
97 | "valueName": "current"
98 | },
99 | {
100 | "circleMaxSize": "1",
101 | "circleMinSize": "2",
102 | "colors": [
103 | "#F2495C",
104 | "#73BF69"
105 | ],
106 | "datasource": "MTA Bus DB",
107 | "decimals": 0,
108 | "esMetric": "Count",
109 | "gridPos": {
110 | "h": 15,
111 | "w": 12,
112 | "x": 12,
113 | "y": 0
114 | },
115 | "hideEmpty": false,
116 | "hideZero": false,
117 | "id": 9,
118 | "initialZoom": "11",
119 | "locationData": "table",
120 | "mapCenter": "custom",
121 | "mapCenterLatitude": "40.879070000000056",
122 | "mapCenterLongitude": "-73.88537600000012",
123 | "maxDataPoints": 1,
124 | "mouseWheelZoom": false,
125 | "options": {},
126 | "showLegend": true,
127 | "stickyLabels": false,
128 | "tableQueryOptions": {
129 | "geohashField": "geohash",
130 | "labelField": "",
131 | "latitudeField": "latitude",
132 | "longitudeField": "longitude",
133 | "metricField": "on_route",
134 | "queryType": "coordinates"
135 | },
136 | "targets": [
137 | {
138 | "format": "table",
139 | "group": [],
140 | "metricColumn": "none",
141 | "rawQuery": true,
142 | "rawSql": "-- How many busses are off-route in past 5 mins?\nSELECT\n max(bus.time) AS \"time\",\n vid AS \"vehicle_id\",\n CASE WHEN st_within(bus.geom, route.geom) THEN 1 ELSE 0 END AS \"on_route\",\n -- bus.route_id,\n ST_X(bus.geom) AS \"longitude\",\n ST_Y(bus.geom) AS \"latitude\"\nFROM\n route_geofences AS route \n JOIN mta AS bus \n ON (route.route_id = bus.route_id) \nWHERE\n bus.time > now() - interval '1.5 minutes' AND substring(bus.route_id,1,1) IN ($route)\nGROUP BY vid, bus.geom, route.geom order by 1",
143 | "refId": "A",
144 | "select": [
145 | [
146 | {
147 | "params": [
148 | "value"
149 | ],
150 | "type": "column"
151 | }
152 | ]
153 | ],
154 | "timeColumn": "time",
155 | "where": [
156 | {
157 | "name": "$__timeFilter",
158 | "params": [],
159 | "type": "macro"
160 | }
161 | ]
162 | }
163 | ],
164 | "thresholds": "1",
165 | "timeFrom": null,
166 | "timeShift": null,
167 | "title": "How many buses are off-route?",
168 | "type": "grafana-worldmap-panel",
169 | "unitPlural": "",
170 | "unitSingle": "",
171 | "valueName": "current"
172 | }
173 | ],
174 | "schemaVersion": 22,
175 | "style": "dark",
176 | "tags": [],
177 | "templating": {
178 | "list": [
179 | {
180 | "allValue": "",
181 | "current": {
182 | "text": "Manhattan + Bronx/Brooklyn + Queens + Staten Island",
183 | "value": [
184 | "M",
185 | "B",
186 | "Q",
187 | "S"
188 | ]
189 | },
190 | "datasource": "MTA Bus DB",
191 | "definition": "SELECT k AS \"__text\", v AS \"__value\" from (values ('Manhattan','M'),('Bronx/Brooklyn' ,'B'),('Queens','Q'),('Staten Island','S')) v(k,v);",
192 | "hide": 0,
193 | "includeAll": true,
194 | "label": "MTA Bus Route",
195 | "multi": true,
196 | "name": "route",
197 | "options": [
198 | {
199 | "selected": false,
200 | "text": "All",
201 | "value": "$__all"
202 | },
203 | {
204 | "selected": true,
205 | "text": "Manhattan",
206 | "value": "M"
207 | },
208 | {
209 | "selected": true,
210 | "text": "Bronx/Brooklyn",
211 | "value": "B"
212 | },
213 | {
214 | "selected": true,
215 | "text": "Queens",
216 | "value": "Q"
217 | },
218 | {
219 | "selected": true,
220 | "text": "Staten Island",
221 | "value": "S"
222 | }
223 | ],
224 | "query": "SELECT k AS \"__text\", v AS \"__value\" from (values ('Manhattan','M'),('Bronx/Brooklyn' ,'B'),('Queens','Q'),('Staten Island','S')) v(k,v);",
225 | "refresh": 0,
226 | "regex": "",
227 | "skipUrlSync": false,
228 | "sort": 0,
229 | "tagValuesQuery": "",
230 | "tags": [],
231 | "tagsQuery": "",
232 | "type": "query",
233 | "useTags": false
234 | }
235 | ]
236 | },
237 | "time": {
238 | "from": "now-30d",
239 | "to": "now"
240 | },
241 | "timepicker": {
242 | "refresh_intervals": [
243 | "5s",
244 | "10s",
245 | "30s",
246 | "1m",
247 | "5m",
248 | "15m",
249 | "30m",
250 | "1h",
251 | "2h",
252 | "1d"
253 | ]
254 | },
255 | "timezone": "",
256 | "title": "Variables Example MTA Bus",
257 | "uid": "Wci68m3Zk",
258 | "version": 2
259 | }
--------------------------------------------------------------------------------
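
The `after_variable.json` dashboard above drives its panels from a `$route` template variable that Grafana expands into a list of borough prefixes inside `substring(route_id,1,1) IN ($route)`. Below is a sketch of running the same query outside Grafana, with the prefixes passed as a query parameter; the connection string is a placeholder, and `psycopg2` is assumed here because the repo's Python ingestors already depend on it:

```python
import psycopg2

# The dashboard's panel query, with the $route variable replaced by a
# placeholder that psycopg2 expands from a Python tuple.
QUERY = """
    SELECT max(time) AS time, vid AS vehicle_id, route_id,
           ST_X(geom) AS longitude, ST_Y(geom) AS latitude
    FROM mta
    WHERE time > now() - interval '1.5 minutes'
      AND substring(route_id, 1, 1) IN %s
    GROUP BY vid, route_id, geom
    ORDER BY 1;
"""

with psycopg2.connect('postgres://user:password@localhost:5432/mta') as conn:
    with conn.cursor() as cur:
        # ('M', 'B', 'Q', 'S') mirrors selecting every option of the $route variable
        cur.execute(QUERY, (('M', 'B', 'Q', 'S'),))
        for row in cur.fetchall():
            print(row)
```

psycopg2 adapts a Python tuple bound to `IN %s` into a SQL value list, which is why the parameter is wrapped in a one-element tuple.
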
/compression-preview/src/assets/images/TimescaleLogoHorizontal1Svg.svg:
--------------------------------------------------------------------------------
1 |
40 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 |
179 | Copyright (c) 2019 Timescale, Inc. All Rights Reserved.
180 |
181 | Licensed under the Apache License, Version 2.0 (the "License");
182 | you may not use this file except in compliance with the License.
183 | You may obtain a copy of the License at
184 |
185 | http://www.apache.org/licenses/LICENSE-2.0
186 |
187 | Unless required by applicable law or agreed to in writing, software
188 | distributed under the License is distributed on an "AS IS" BASIS,
189 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
190 | See the License for the specific language governing permissions and
191 | limitations under the License.
192 |
--------------------------------------------------------------------------------
/grafana-guide/series-override/series_override.json:
--------------------------------------------------------------------------------
1 | {
2 | "annotations": {
3 | "list": [
4 | {
5 | "builtIn": 1,
6 | "datasource": "-- Grafana --",
7 | "enable": true,
8 | "hide": true,
9 | "iconColor": "rgba(0, 211, 255, 1)",
10 | "name": "Annotations & Alerts",
11 | "type": "dashboard"
12 | }
13 | ]
14 | },
15 | "editable": true,
16 | "gnetId": null,
17 | "graphTooltip": 0,
18 | "id": 9,
19 | "iteration": 1586991523872,
20 | "links": [],
21 | "panels": [
22 | {
23 | "aliasColors": {
24 | "total_cases": "yellow",
25 | "total_deaths": "red"
26 | },
27 | "bars": false,
28 | "dashLength": 10,
29 | "dashes": false,
30 | "datasource": "COVID-19 Data",
31 | "description": "COVID cases and deaths in USA using Data from New York Times",
32 | "fill": 1,
33 | "fillGradient": 0,
34 | "gridPos": {
35 | "h": 9,
36 | "w": 12,
37 | "x": 0,
38 | "y": 0
39 | },
40 | "hiddenSeries": false,
41 | "id": 18,
42 | "legend": {
43 | "avg": false,
44 | "current": false,
45 | "max": false,
46 | "min": false,
47 | "show": true,
48 | "total": false,
49 | "values": false
50 | },
51 | "lines": true,
52 | "linewidth": 1,
53 | "nullPointMode": "null",
54 | "options": {
55 | "dataLinks": []
56 | },
57 | "percentage": false,
58 | "pointradius": 2,
59 | "points": true,
60 | "renderer": "flot",
61 | "seriesOverrides": [],
62 | "spaceLength": 10,
63 | "stack": false,
64 | "steppedLine": false,
65 | "targets": [
66 | {
67 | "format": "time_series",
68 | "group": [],
69 | "metricColumn": "none",
70 | "rawQuery": true,
71 | "rawSql": "SELECT date as \"time\", sum (cases) as total_cases, sum(deaths) as total_deaths\nFROM states\nGROUP BY time\nORDER BY time;\n",
72 | "refId": "A",
73 | "select": [
74 | [
75 | {
76 | "params": [
77 | "avg"
78 | ],
79 | "type": "column"
80 | }
81 | ]
82 | ],
83 | "table": "metrics_5mins",
84 | "timeColumn": "bucket",
85 | "timeColumnType": "timestamp",
86 | "where": [
87 | {
88 | "name": "$__timeFilter",
89 | "params": [],
90 | "type": "macro"
91 | }
92 | ]
93 | }
94 | ],
95 | "thresholds": [],
96 | "timeFrom": null,
97 | "timeRegions": [],
98 | "timeShift": null,
99 | "title": "COVID Cases in USA (no-series override)",
100 | "tooltip": {
101 | "shared": true,
102 | "sort": 0,
103 | "value_type": "individual"
104 | },
105 | "type": "graph",
106 | "xaxis": {
107 | "buckets": null,
108 | "mode": "time",
109 | "name": null,
110 | "show": true,
111 | "values": []
112 | },
113 | "yaxes": [
114 | {
115 | "format": "short",
116 | "label": null,
117 | "logBase": 1,
118 | "max": null,
119 | "min": null,
120 | "show": true
121 | },
122 | {
123 | "format": "short",
124 | "label": null,
125 | "logBase": 1,
126 | "max": null,
127 | "min": null,
128 | "show": true
129 | }
130 | ],
131 | "yaxis": {
132 | "align": false,
133 | "alignLevel": null
134 | }
135 | },
136 | {
137 | "aliasColors": {
138 | "total_cases": "yellow",
139 | "total_deaths": "red"
140 | },
141 | "bars": false,
142 | "dashLength": 10,
143 | "dashes": false,
144 | "datasource": "COVID-19 Data",
145 | "description": "COVID cases and deaths in USA using Data from New York Times",
146 | "fill": 1,
147 | "fillGradient": 0,
148 | "gridPos": {
149 | "h": 9,
150 | "w": 12,
151 | "x": 12,
152 | "y": 0
153 | },
154 | "hiddenSeries": false,
155 | "id": 19,
156 | "legend": {
157 | "avg": false,
158 | "current": false,
159 | "max": false,
160 | "min": false,
161 | "show": true,
162 | "total": false,
163 | "values": false
164 | },
165 | "lines": true,
166 | "linewidth": 1,
167 | "nullPointMode": "null",
168 | "options": {
169 | "dataLinks": []
170 | },
171 | "percentage": false,
172 | "pointradius": 2,
173 | "points": true,
174 | "renderer": "flot",
175 | "seriesOverrides": [],
176 | "spaceLength": 10,
177 | "stack": false,
178 | "steppedLine": false,
179 | "targets": [
180 | {
181 | "format": "time_series",
182 | "group": [],
183 | "metricColumn": "none",
184 | "rawQuery": true,
185 | "rawSql": "SELECT date as \"time\", sum (deaths) as total_deaths\nFROM states\nGROUP BY time\nORDER BY time;\n",
186 | "refId": "B",
187 | "select": [
188 | [
189 | {
190 | "params": [
191 | "avg"
192 | ],
193 | "type": "column"
194 | }
195 | ]
196 | ],
197 | "table": "metrics_5mins",
198 | "timeColumn": "bucket",
199 | "timeColumnType": "timestamp",
200 | "where": [
201 | {
202 | "name": "$__timeFilter",
203 | "params": [],
204 | "type": "macro"
205 | }
206 | ]
207 | }
208 | ],
209 | "thresholds": [],
210 | "timeFrom": null,
211 | "timeRegions": [],
212 | "timeShift": null,
213 | "title": "COVID Deaths in USA",
214 | "tooltip": {
215 | "shared": true,
216 | "sort": 0,
217 | "value_type": "individual"
218 | },
219 | "type": "graph",
220 | "xaxis": {
221 | "buckets": null,
222 | "mode": "time",
223 | "name": null,
224 | "show": true,
225 | "values": []
226 | },
227 | "yaxes": [
228 | {
229 | "format": "short",
230 | "label": null,
231 | "logBase": 1,
232 | "max": null,
233 | "min": null,
234 | "show": true
235 | },
236 | {
237 | "format": "short",
238 | "label": null,
239 | "logBase": 1,
240 | "max": null,
241 | "min": null,
242 | "show": true
243 | }
244 | ],
245 | "yaxis": {
246 | "align": false,
247 | "alignLevel": null
248 | }
249 | },
250 | {
251 | "aliasColors": {
252 | "total_cases": "yellow",
253 | "total_deaths": "red"
254 | },
255 | "bars": false,
256 | "dashLength": 10,
257 | "dashes": false,
258 | "datasource": "COVID-19 Data",
259 | "description": "COVID-19 in USA: Cases and Deaths",
260 | "fill": 1,
261 | "fillGradient": 0,
262 | "gridPos": {
263 | "h": 9,
264 | "w": 24,
265 | "x": 0,
266 | "y": 9
267 | },
268 | "hiddenSeries": false,
269 | "id": 21,
270 | "legend": {
271 | "alignAsTable": false,
272 | "avg": false,
273 | "current": true,
274 | "max": false,
275 | "min": false,
276 | "rightSide": false,
277 | "show": true,
278 | "total": false,
279 | "values": true
280 | },
281 | "lines": true,
282 | "linewidth": 1,
283 | "nullPointMode": "null",
284 | "options": {
285 | "dataLinks": []
286 | },
287 | "percentage": false,
288 | "pointradius": 2,
289 | "points": true,
290 | "renderer": "flot",
291 | "seriesOverrides": [
292 | {
293 | "alias": "total_deaths",
294 | "yaxis": 2
295 | }
296 | ],
297 | "spaceLength": 10,
298 | "stack": false,
299 | "steppedLine": false,
300 | "targets": [
301 | {
302 | "format": "time_series",
303 | "group": [],
304 | "metricColumn": "none",
305 | "rawQuery": true,
306 | "rawSql": "SELECT date as \"time\", sum (cases) as total_cases, sum(deaths) as total_deaths\nFROM states\nGROUP BY time\nORDER BY time;\n",
307 | "refId": "A",
308 | "select": [
309 | [
310 | {
311 | "params": [
312 | "value"
313 | ],
314 | "type": "column"
315 | }
316 | ]
317 | ],
318 | "timeColumn": "time",
319 | "where": [
320 | {
321 | "name": "$__timeFilter",
322 | "params": [],
323 | "type": "macro"
324 | }
325 | ]
326 | }
327 | ],
328 | "thresholds": [],
329 | "timeFrom": null,
330 | "timeRegions": [],
331 | "timeShift": null,
332 | "title": "COVID-19 in USA (w/series override)",
333 | "tooltip": {
334 | "shared": true,
335 | "sort": 0,
336 | "value_type": "individual"
337 | },
338 | "type": "graph",
339 | "xaxis": {
340 | "buckets": null,
341 | "mode": "time",
342 | "name": null,
343 | "show": true,
344 | "values": []
345 | },
346 | "yaxes": [
347 | {
348 | "format": "short",
349 | "label": null,
350 | "logBase": 1,
351 | "max": null,
352 | "min": null,
353 | "show": true
354 | },
355 | {
356 | "format": "short",
357 | "label": null,
358 | "logBase": 1,
359 | "max": null,
360 | "min": null,
361 | "show": true
362 | }
363 | ],
364 | "yaxis": {
365 | "align": false,
366 | "alignLevel": null
367 | }
368 | }
369 | ],
370 | "schemaVersion": 22,
371 | "style": "dark",
372 | "tags": [],
373 | "templating": {
374 | "list": []
375 | },
376 | "time": {
377 | "from": "now-30d",
378 | "to": "now"
379 | },
380 | "timepicker": {
381 | "refresh_intervals": [
382 | "5s",
383 | "10s",
384 | "30s",
385 | "1m",
386 | "5m",
387 | "15m",
388 | "30m",
389 | "1h",
390 | "2h",
391 | "1d"
392 | ]
393 | },
394 | "timezone": "",
395 | "title": "Series Override Example",
396 | "uid": "Y4OIQm3Zk",
397 | "version": 8
398 | }
--------------------------------------------------------------------------------
/compression-preview/src/styles/subscription.scss:
--------------------------------------------------------------------------------
1 | @import './_normalize.scss';
2 | @import '~@timescale/web-styles/src/styles/utils';
3 | @import '~@timescale/web-styles/src/styles/styles';
4 | @keyframes spinner {
5 | 0% {
6 | stroke-dashoffset: (0.66 * 108);
7 | transform: rotate(0deg);
8 | }
9 | 50% {
10 | stroke-dashoffset: (3.14 * 108);
11 | /* rotate to 720deg at the halfway point */
12 | transform: rotate(720deg);
13 | }
14 | 100% {
15 | stroke-dashoffset: (0.66 * 108);
16 | transform: rotate(1080deg);
17 | }
18 | }
19 |
20 | @keyframes compress-inner {
21 | 0% {
22 | fill: #141e35;
23 | }
24 | 100% {
25 | fill: #fdb515;
26 | }
27 | }
28 |
29 | @keyframes compress-outer {
30 | 0% {
31 | stroke: #141e35;
32 | }
33 | 100% {
34 | stroke: #fdb515;
35 | }
36 | }
37 |
38 | @keyframes decompress-inner {
39 | 0% {
40 | fill: #fdb515;
41 | }
42 | 100% {
43 | fill: #141e35;
44 | }
45 | }
46 |
47 | @keyframes decompress-outer {
48 | 0% {
49 | stroke: #fdb515;
50 | }
51 | 100% {
52 | stroke: #141e35;
53 | }
54 | }
55 |
56 | @keyframes dotFlashing {
57 | 0% {
58 | background-color: color($navy);
59 | }
60 | 50%,
61 | 100% {
62 | background-color: color($navy, 300);
63 | }
64 | }
65 |
66 |
67 | .dot-flashing {
68 | position: relative;
69 | width: $half-gutter;
70 | height: $half-gutter;
71 | border-radius: 5px;
72 | background-color: color($navy);
73 | color: color($navy);
74 | animation: dotFlashing 1s infinite linear alternate;
75 | animation-delay: 0.5s;
76 | }
77 |
78 | .dot-flashing::before,
79 | .dot-flashing::after {
80 | content: '';
81 | display: inline-block;
82 | position: absolute;
83 | top: 0;
84 | }
85 |
86 | .dot-flashing::before {
87 | left: -15px;
88 | width: $half-gutter;
89 | height: $half-gutter;
90 | border-radius: 5px;
91 | background-color: color($navy);
92 | color: color($navy);
93 | animation: dotFlashing 1s infinite alternate;
94 | animation-delay: 0s;
95 | }
96 |
97 | .dot-flashing::after {
98 | left: 15px;
99 | width: $half-gutter;
100 | height: $half-gutter;
101 | border-radius: 5px;
102 | background-color: color($navy);
103 | color: color($navy);
104 | animation: dotFlashing 1s infinite alternate;
105 | animation-delay: 1s;
106 | }
107 |
108 |
109 | .ts-compression {
110 | display: flex;
111 | justify-content: center;
112 | background-color: color($blue, 100);
113 | width: 100%;
114 | height: 100%;
115 | min-height: 100vh;
116 |
117 | &__buttons {
118 | max-width: 800px;
119 | width: 100%;
120 | display: flex;
121 | justify-content: center;
122 | gap: $gutter-4;
123 | margin-bottom: 64px;
124 | }
125 |
126 | &__grid-item {
127 | padding: $gutter-4 $gutter-4 $gutter-4 $gutter-4;
128 | position: relative;
129 | border: 1px solid gray;
130 | display: flex;
131 | flex-direction: column;
132 | align-items: center;
133 | box-sizing: border-box;
134 | background: white;
135 | border: 1px solid #c5ddfc;
136 | box-shadow: 2px 2px 8px rgba(0, 0, 0, 0.05);
137 | border-radius: 8px;
138 | z-index: 1;
139 |
140 | &__circle--compressed {
141 | display: flex;
142 | justify-content: center;
143 |
144 | & > svg {
145 | transition: transform 3s ease-out;
146 | & > circle {
147 | /* combine fill and stroke so one transition declaration does not override the other */
148 | transition: fill 3s ease-out, stroke 3s ease-out;
149 | }
150 |
151 | & > circle:first-child {
152 | animation: compress-inner 3s ease-out;
153 | fill: #fdb515;
154 | }
155 |
156 | & > circle:last-child {
157 | animation: compress-outer 3s ease-out;
158 | stroke: #fdb515;
159 | }
160 | }
161 | }
162 |
163 | &__circle--decompressed {
164 | position: relative;
165 | justify-content: center;
166 | background-size: cover;
167 | background-repeat: no-repeat;
168 |
169 | & > svg {
170 | transition: transform 3s ease-out;
171 | & > circle {
172 | /* combine fill and stroke so one transition declaration does not override the other */
173 | transition: fill 3s ease-out, stroke 3s ease-out;
174 | }
175 |
176 | & > circle:first-child {
177 | animation: decompress-inner 3s ease-out;
178 | fill: #141e35;
179 | }
180 |
181 | & > circle:last-child {
182 | animation: decompress-outer 3s ease-out;
183 | stroke: #141e35;
184 | }
185 | }
186 | }
187 |
188 | &__circle-container {
189 | flex-grow: 1;
190 | position: relative;
191 | display: flex;
192 | align-items: center;
193 | justify-content: center;
194 | margin-bottom: 16px;
195 |
196 | & > .fixed-circle {
197 | position: absolute;
198 | top: 0;
199 | }
200 | }
201 | }
202 |
203 | &__inner {
204 | display: flex;
205 | flex-direction: column;
206 | align-items: center;
207 | width: 100%;
208 | position: relative;
209 | padding-bottom: 40px;
210 |
211 | &__chunks {
212 | border: thin solid color($navy, 300);
213 | box-shadow: 0 0 0 1px #0003; /* box-shadow takes offset/blur/spread values, not border syntax */
214 | border-radius: $gutter-2;
215 | width: 100%;
216 | min-height: 300px;
217 | margin: 0 auto;
218 | max-width: 90vw;
219 | overflow: visible;
220 | position: relative;
221 |
222 | &--compressing {
223 | @include label;
224 | position: absolute;
225 | background: white;
226 | border: 1px solid color($navy, 400);
227 | box-shadow: 0 0 0 1px #0003; /* box-shadow takes offset/blur/spread values, not border syntax */
228 | border-radius: $gutter-1;
229 | display: block;
230 | display: flex;
231 | align-items: center;
232 | padding: $gutter-2;
233 | padding-right: $gutter-5;
234 | top: -$gutter-9;
235 | right: 0;
236 |
237 | > div {
238 | margin-left: $gutter-3;
239 | }
240 | }
241 |
242 | &__cards-wrapper {
243 | display: flex;
244 | flex-wrap: wrap;
245 | width: 100%;
246 |
247 | &__card {
248 | max-width: 100px;
249 | margin: 16px;
250 | position: relative;
251 | transition: 0.6s all ease-out;
252 | cursor: pointer;
253 | transition: transform 3s ease-out;
254 | justify-content: center;
255 | background-size: cover;
256 | background-repeat: no-repeat;
257 | z-index: 20;
258 |
259 | &--decompressed {
260 | /* combine fill and stroke so one transition declaration does not override the other */
261 | transition: fill 3s ease-out, stroke 3s ease-out;
262 | fill: #141e35 !important;
263 | stroke: #141e35 !important;
264 | animation: decompress-inner 3s ease-out;
265 | }
266 |
267 | &--compressed {
268 | /* combine fill and stroke so one transition declaration does not override the other */
269 | transition: fill 3s ease-out, stroke 3s ease-out;
270 | fill: #fdb515 !important;
271 | stroke: #fdb515 !important;
272 | /* run both keyframes in one declaration; a second animation declaration would override the first */
273 | animation: compress-inner 3s ease-out, compress-outer 3s ease-out;
274 | }
275 | }
276 | }
277 | }
278 |
279 | &__header {
280 | display: flex;
281 | justify-content: space-between;
282 | align-items: center;
283 | width: 100%;
284 | max-width: 1280px;
285 | margin: 0 auto;
286 | max-height: 240px;
287 |
288 | &--logo {
289 | display: flex;
290 | flex: 1;
291 | justify-content: center;
292 | align-items: center;
293 | width: 100%;
294 |
295 | svg {
296 | min-width: 100px;
297 | min-height: 60px;
298 | }
299 | }
300 | &--title {
301 | text-align: center;
302 | width: 100%;
303 | flex: 2;
304 | & > h2 {
305 | @include header(3);
306 | margin-bottom: 0px;
307 | padding-top: $gutter-4;
308 | }
309 |
310 | & > p {
311 | @include body-copy;
312 | padding-bottom: $gutter-4;
313 | }
314 |
315 | & > span {
316 | @include label;
317 | }
318 | }
319 | }
320 |
321 | &__info {
322 | position: fixed;
323 | height: 0;
324 | width: 100%;
325 | max-width: 250px;
326 | display: block;
327 | flex-direction: column;
328 | z-index: 10;
329 |
330 | &--content {
331 | border: 1px solid color($navy, 300);
332 | background-color: white;
333 | padding: 16px 24px;
334 | border-radius: 16px;
335 |
336 | h4 {
337 | @include body-copy(3, 'bold');
338 | margin-bottom: 16px;
339 | }
340 |
341 | p {
342 | @include body-copy(3);
343 | margin-bottom: 16px;
344 | }
345 | }
346 |
347 | &__wrapper {
348 | position: relative;
349 | display: flex;
350 | width: 100%;
351 | opacity: 0;
352 | z-index: -1;
353 |
354 | &--active {
355 | transition: 0.6s all ease-out;
356 | opacity: 1;
357 | z-index: 10;
358 | }
359 | }
360 | }
361 | }
362 |
363 | &__loading-screen {
364 | position: fixed;
365 | z-index: 5;
366 | top: 0;
367 | right: 0;
368 | bottom: 0;
369 | left: 0;
370 |
371 | &__inner {
372 | width: 100%;
373 | height: 100%;
374 | display: flex;
375 | justify-content: center;
376 | align-items: center;
377 | position: sticky;
378 | top: 0;
379 | }
380 |
381 | &__card {
382 | padding: 64px 128px;
383 | background: white;
384 | border: 1px solid #c5ddfc;
385 | box-shadow: 2px 2px 8px rgba(0, 0, 0, 0.05);
386 | border-radius: 8px;
387 | display: flex;
388 | flex-direction: column;
389 | align-items: center;
390 | position: sticky;
391 | top: 0;
392 |
393 | & > h2 {
394 | @include header(2);
395 | }
396 |
397 | & > svg {
398 | width: 108px;
399 | height: 108px;
400 | & > circle {
401 | fill: transparent;
402 | stroke: #5b9cf2;
403 | stroke-width: 4;
404 | stroke-linecap: round;
405 | stroke-dasharray: (3.14 * 108);
406 | transform-origin: (0.5px * 108) (0.5px * 108) 0;
407 | animation: spinner 4s linear infinite;
408 | }
409 | }
410 | }
411 | }
412 |
413 | &__loading-screen-single {
414 | position: absolute;
415 | z-index: 5;
416 | top: 0;
417 | left: 0;
418 | width: 100%;
419 | height: 100%;
420 | background: rgba(0, 0, 0, 0.05);
421 | display: flex;
422 | align-items: center;
423 | justify-content: center;
424 |
425 | &__inner {
426 | display: flex;
427 | justify-content: center;
428 | align-items: center;
429 | }
430 |
431 | &__card {
432 | & > h2 {
433 | @include header(2);
434 | }
435 |
436 | & > svg {
437 | width: 108px;
438 | height: 108px;
439 | & > circle {
440 | fill: transparent;
441 | stroke: #5b9cf2;
442 | stroke-width: 4;
443 | stroke-linecap: round;
444 | stroke-dasharray: (3.14 * 108);
445 | transform-origin: (0.5px * 108) (0.5px * 108) 0;
446 | animation: spinner 4s linear infinite;
447 | }
448 | }
449 | }
450 | }
451 |
452 | .ts-select {
453 | width: 464px;
454 | &__options--open {
455 | max-height: 168px;
456 | overflow: auto;
457 | }
458 | @include break-on('small') {
459 | width: 100%;
460 | left: 0;
461 | }
462 | }
463 | }
--------------------------------------------------------------------------------
/air-quality/grafana.json:
--------------------------------------------------------------------------------
1 | {
2 | "annotations": {
3 | "list": [
4 | {
5 | "builtIn": 1,
6 | "datasource": "-- Grafana --",
7 | "enable": true,
8 | "hide": true,
9 | "iconColor": "rgba(0, 211, 255, 1)",
10 | "name": "Annotations & Alerts",
11 | "type": "dashboard"
12 | }
13 | ]
14 | },
15 | "editable": true,
16 | "gnetId": null,
17 | "graphTooltip": 0,
18 | "id": 1,
19 | "links": [],
20 | "panels": [
21 | {
22 | "content": "\n# Getting started with time_bucket()\n\nThe `time_bucket()` function was one of the first time-series specific functions built into TimescaleDB. It was designed to make it easier to aggregate metrics into \"time buckets\" and allows greater querying flexibility than the functions that already exist within PostgreSQL. Specifically, the `date_trunc()` function is limited in only being able to select by a single time unit. For example, you can query for 1 minute, 1 day, 1 month, or 1 year. `time_bucket()` allows you to define arbitrary time buckets - think 1 minute, 5 minutes, 1 day, 14 days, etc. \n\nThe syntax for this function is very straight forward - you essentially embed the function directly in a select statement. Let's try a simple example using the Air Quality sample application. This application pulls data from an open API, assigns unique ids for different locations and parameter types, and stores the measurement value obtained from different sensors. Let's say that we want to find all measurements associated with `location_id = 1`. \n\n`SELECT * FROM measurements WHERE location_id = 150;`\n\nYou'll notice that Grafana has nicely graphed each parameter in a separate color. You can achieve this by setting the metric value to `parameter_id`.\n\n\n\n",
23 | "gridPos": {
24 | "h": 10,
25 | "w": 12,
26 | "x": 0,
27 | "y": 0
28 | },
29 | "id": 2,
30 | "links": [],
31 | "mode": "markdown",
32 | "options": {},
33 | "targets": [
34 | {
35 | "format": "time_series",
36 | "group": [],
37 | "metricColumn": "none",
38 | "rawQuery": false,
39 | "rawSql": "SELECT\n \"time\" AS \"time\",\n value\nFROM measurements\nWHERE\n $__timeFilter(\"time\")\nORDER BY 1",
40 | "refId": "A",
41 | "select": [
42 | [
43 | {
44 | "params": [
45 | "value"
46 | ],
47 | "type": "column"
48 | }
49 | ]
50 | ],
51 | "table": "measurements",
52 | "timeColumn": "\"time\"",
53 | "timeColumnType": "timestamp",
54 | "where": [
55 | {
56 | "name": "$__timeFilter",
57 | "params": [],
58 | "type": "macro"
59 | }
60 | ]
61 | }
62 | ],
63 | "timeFrom": null,
64 | "timeShift": null,
65 | "title": "Step 1",
66 | "type": "text"
67 | },
68 | {
69 | "aliasColors": {
70 | "9": "purple"
71 | },
72 | "bars": false,
73 | "dashLength": 10,
74 | "dashes": false,
75 | "fill": 1,
76 | "gridPos": {
77 | "h": 10,
78 | "w": 12,
79 | "x": 12,
80 | "y": 0
81 | },
82 | "id": 4,
83 | "legend": {
84 | "avg": false,
85 | "current": false,
86 | "max": false,
87 | "min": false,
88 | "show": true,
89 | "total": false,
90 | "values": false
91 | },
92 | "lines": false,
93 | "linewidth": 1,
94 | "links": [],
95 | "nullPointMode": "null",
96 | "options": {},
97 | "percentage": false,
98 | "pointradius": 2,
99 | "points": true,
100 | "renderer": "flot",
101 | "seriesOverrides": [],
102 | "spaceLength": 10,
103 | "stack": false,
104 | "steppedLine": false,
105 | "targets": [
106 | {
107 | "format": "time_series",
108 | "group": [],
109 | "metricColumn": "parameter_id::TEXT",
110 | "rawQuery": true,
111 | "rawSql": "SELECT\n \"time\" AS \"time\",\n parameter_id::TEXT AS metric,\n value\nFROM measurements\nWHERE\n $__timeFilter(\"time\") AND\n location_id = 150\nORDER BY 1,2",
112 | "refId": "A",
113 | "select": [
114 | [
115 | {
116 | "params": [
117 | "value"
118 | ],
119 | "type": "column"
120 | }
121 | ]
122 | ],
123 | "table": "measurements",
124 | "timeColumn": "\"time\"",
125 | "timeColumnType": "timestamp",
126 | "where": [
127 | {
128 | "name": "$__timeFilter",
129 | "params": [],
130 | "type": "macro"
131 | },
132 | {
133 | "datatype": "int4",
134 | "name": "",
135 | "params": [
136 | "location_id",
137 | "=",
138 | "150"
139 | ],
140 | "type": "expression"
141 | }
142 | ]
143 | }
144 | ],
145 | "thresholds": [],
146 | "timeFrom": null,
147 | "timeRegions": [],
148 | "timeShift": null,
149 | "title": "Measurements for Location 150",
150 | "tooltip": {
151 | "shared": true,
152 | "sort": 0,
153 | "value_type": "individual"
154 | },
155 | "type": "graph",
156 | "xaxis": {
157 | "buckets": null,
158 | "mode": "time",
159 | "name": null,
160 | "show": true,
161 | "values": []
162 | },
163 | "yaxes": [
164 | {
165 | "format": "short",
166 | "label": null,
167 | "logBase": 1,
168 | "max": null,
169 | "min": null,
170 | "show": true
171 | },
172 | {
173 | "format": "short",
174 | "label": null,
175 | "logBase": 1,
176 | "max": null,
177 | "min": null,
178 | "show": true
179 | }
180 | ],
181 | "yaxis": {
182 | "align": false,
183 | "alignLevel": null
184 | }
185 | },
186 | {
187 | "content": "\n# Using time_bucket()\n\nIn the above module, you created a really simple graph that displayed all the parameters being collected for `location_id = 150`. You'll notice that we get measurements from these sensors every hour. Let's say that actually what you really care about is the average measurement over a four hour period. That's where `time_bucket()` comes in. Grafana comes with built-in support for Timescale-specific functions, so you can directly use `time_bucket()` in the Grafana interface. \n\nThe query would look something like this: \n`SELECT time_bucket('4 hours', time) AS four_hours, avg(value) FROM measurements WHERE location_id = 150 GROUP BY four_hours, parameter_id ORDER BY four_hours;`\n\n\n\n",
188 | "gridPos": {
189 | "h": 8,
190 | "w": 12,
191 | "x": 0,
192 | "y": 10
193 | },
194 | "id": 6,
195 | "links": [],
196 | "mode": "markdown",
197 | "options": {},
198 | "targets": [
199 | {
200 | "format": "time_series",
201 | "group": [],
202 | "metricColumn": "none",
203 | "rawQuery": false,
204 | "refId": "A",
205 | "select": [
206 | [
207 | {
208 | "params": [
209 | "value"
210 | ],
211 | "type": "column"
212 | }
213 | ]
214 | ],
215 | "timeColumn": "time",
216 | "where": [
217 | {
218 | "name": "$__timeFilter",
219 | "params": [],
220 | "type": "macro"
221 | }
222 | ]
223 | }
224 | ],
225 | "timeFrom": null,
226 | "timeShift": null,
227 | "title": "Step 2",
228 | "type": "text"
229 | },
230 | {
231 | "aliasColors": {
232 | "9": "purple"
233 | },
234 | "bars": false,
235 | "dashLength": 10,
236 | "dashes": false,
237 | "fill": 1,
238 | "gridPos": {
239 | "h": 8,
240 | "w": 12,
241 | "x": 12,
242 | "y": 10
243 | },
244 | "id": 8,
245 | "legend": {
246 | "avg": false,
247 | "current": false,
248 | "max": false,
249 | "min": false,
250 | "show": true,
251 | "total": false,
252 | "values": false
253 | },
254 | "lines": false,
255 | "linewidth": 1,
256 | "links": [],
257 | "nullPointMode": "null",
258 | "options": {},
259 | "percentage": false,
260 | "pointradius": 2,
261 | "points": true,
262 | "renderer": "flot",
263 | "seriesOverrides": [],
264 | "spaceLength": 10,
265 | "stack": false,
266 | "steppedLine": false,
267 | "targets": [
268 | {
269 | "format": "time_series",
270 | "group": [],
271 | "metricColumn": "none",
272 | "rawQuery": true,
273 | "rawSql": "SELECT\n time_bucket('4 hours', time) AS \"time\",\n parameter_id::TEXT AS metric,\n avg(value)\nFROM measurements\nWHERE\n $__timeFilter(\"time\") AND \n location_id = 150\nGROUP BY 1, 2\nORDER BY 1, 2",
274 | "refId": "A",
275 | "select": [
276 | [
277 | {
278 | "params": [
279 | "value"
280 | ],
281 | "type": "column"
282 | }
283 | ]
284 | ],
285 | "table": "measurements",
286 | "timeColumn": "\"time\"",
287 | "timeColumnType": "timestamp",
288 | "where": [
289 | {
290 | "name": "$__timeFilter",
291 | "params": [],
292 | "type": "macro"
293 | }
294 | ]
295 | }
296 | ],
297 | "thresholds": [],
298 | "timeFrom": null,
299 | "timeRegions": [],
300 | "timeShift": null,
301 | "title": "4 hour average for Location 150",
302 | "tooltip": {
303 | "shared": true,
304 | "sort": 0,
305 | "value_type": "individual"
306 | },
307 | "type": "graph",
308 | "xaxis": {
309 | "buckets": null,
310 | "mode": "time",
311 | "name": null,
312 | "show": true,
313 | "values": []
314 | },
315 | "yaxes": [
316 | {
317 | "format": "short",
318 | "label": null,
319 | "logBase": 1,
320 | "max": null,
321 | "min": null,
322 | "show": true
323 | },
324 | {
325 | "format": "short",
326 | "label": null,
327 | "logBase": 1,
328 | "max": null,
329 | "min": null,
330 | "show": true
331 | }
332 | ],
333 | "yaxis": {
334 | "align": false,
335 | "alignLevel": null
336 | }
337 | },
338 | {
339 | "content": "\n# Why time_bucket_gapfill()?\n\nIf you look closely at the first dashboard, you'll notice that certain parameters don't report every hour - sometimes you have gaps in data. Specifically, notice how `parameter_id = 9` reports pretty rarely and inconsistently. This is particularly common in IoT use cases, where devices might be occassionally offline. However, what if we actually want to fill in these gaps, even if the sensor didn't report back in? \n\nTimescaleDB has a couple built-in options that you can use, all built around the `time_bucket_gapfill()` function. First, let's do something quite simple - let's fill in the gaps with the last known value. We call this function `locf()`, which stands for \"last object carried forward.\"\n\nHere's what a query might look like:\n\n`SELECT time_bucket_gapfill('4 hour', time) AS four_hours, parameter_id, locf(avg(value)) FROM measurements GROUP BY four_hours, parameter_id ORDER BY four_hours;`\n\nYou'll notice that the purple line that maps to `parameter_id = 9` doesn't have any gaps anymore! You can actually also linearly interpolate instead of carrying the last object forward. \n\nThat query would look something like: \n\n`SELECT time_bucket_gapfill('4 hour', time) AS four_hours, parameter_id, interpolate(avg(value)) FROM measurements GROUP BY four_hours, parameter_id ORDER BY four_hours;`\n\nYou'll notice that `parameter_id = 7` and `parameter_id = 8` aren't filled in for the beginning portion of the graph. This is because when you run `time_bucket_gapfill()`, the function only pulls values from the specified time range. The values needed to fill in `parameter_id = 7` and `parameter_id = 8` fall out of the bounds of the specified time range. In order to fill in those gaps, you have to explicitly tell the function to go search for the necessary value outside of the existing range. For an example of how to do that, please check out our documentation https://docs.timescale.com/latest/api#interpolate. \n",
340 | "gridPos": {
341 | "h": 8,
342 | "w": 12,
343 | "x": 0,
344 | "y": 18
345 | },
346 | "id": 10,
347 | "links": [],
348 | "mode": "markdown",
349 | "options": {},
350 | "targets": [
351 | {
352 | "format": "time_series",
353 | "group": [],
354 | "metricColumn": "none",
355 | "rawQuery": false,
356 | "rawSql": "SELECT\n \"time\" AS \"time\",\n value\nFROM measurements\nWHERE\n $__timeFilter(\"time\")\nORDER BY 1",
357 | "refId": "A",
358 | "select": [
359 | [
360 | {
361 | "params": [
362 | "value"
363 | ],
364 | "type": "column"
365 | }
366 | ]
367 | ],
368 | "table": "measurements",
369 | "timeColumn": "\"time\"",
370 | "timeColumnType": "timestamp",
371 | "where": [
372 | {
373 | "name": "$__timeFilter",
374 | "params": [],
375 | "type": "macro"
376 | }
377 | ]
378 | }
379 | ],
380 | "timeFrom": null,
381 | "timeShift": null,
382 | "title": "Step 3",
383 | "type": "text"
384 | },
385 | {
386 | "aliasColors": {
387 | "7": "orange",
388 | "8": "red",
389 | "9": "purple"
390 | },
391 | "bars": false,
392 | "dashLength": 10,
393 | "dashes": false,
394 | "fill": 1,
395 | "gridPos": {
396 | "h": 8,
397 | "w": 12,
398 | "x": 12,
399 | "y": 18
400 | },
401 | "id": 11,
402 | "legend": {
403 | "avg": false,
404 | "current": false,
405 | "max": false,
406 | "min": false,
407 | "show": true,
408 | "total": false,
409 | "values": false
410 | },
411 | "lines": false,
412 | "linewidth": 1,
413 | "links": [],
414 | "nullPointMode": "null",
415 | "options": {},
416 | "percentage": false,
417 | "pointradius": 2,
418 | "points": true,
419 | "renderer": "flot",
420 | "seriesOverrides": [],
421 | "spaceLength": 10,
422 | "stack": false,
423 | "steppedLine": false,
424 | "targets": [
425 | {
426 | "format": "time_series",
427 | "group": [],
428 | "metricColumn": "none",
429 | "rawQuery": true,
430 | "rawSql": "SELECT\n time_bucket_gapfill('4 hours', time) AS \"time\",\n parameter_id::TEXT AS metric,\n locf(avg(value))\nFROM measurements\nWHERE\n $__timeFilter(\"time\") AND \n location_id = 150\nGROUP BY 1, 2\nORDER BY 1, 2",
431 | "refId": "A",
432 | "select": [
433 | [
434 | {
435 | "params": [
436 | "value"
437 | ],
438 | "type": "column"
439 | }
440 | ]
441 | ],
442 | "table": "measurements",
443 | "timeColumn": "\"time\"",
444 | "timeColumnType": "timestamp",
445 | "where": [
446 | {
447 | "name": "$__timeFilter",
448 | "params": [],
449 | "type": "macro"
450 | }
451 | ]
452 | }
453 | ],
454 | "thresholds": [],
455 | "timeFrom": null,
456 | "timeRegions": [],
457 | "timeShift": null,
458 | "title": "4 hour LOCF average for Location 150",
459 | "tooltip": {
460 | "shared": true,
461 | "sort": 0,
462 | "value_type": "individual"
463 | },
464 | "type": "graph",
465 | "xaxis": {
466 | "buckets": null,
467 | "mode": "time",
468 | "name": null,
469 | "show": true,
470 | "values": []
471 | },
472 | "yaxes": [
473 | {
474 | "format": "short",
475 | "label": null,
476 | "logBase": 1,
477 | "max": null,
478 | "min": null,
479 | "show": true
480 | },
481 | {
482 | "format": "short",
483 | "label": null,
484 | "logBase": 1,
485 | "max": null,
486 | "min": null,
487 | "show": true
488 | }
489 | ],
490 | "yaxis": {
491 | "align": false,
492 | "alignLevel": null
493 | }
494 | }
495 | ],
496 | "refresh": false,
497 | "schemaVersion": 18,
498 | "style": "dark",
499 | "tags": [],
500 | "templating": {
501 | "list": []
502 | },
503 | "time": {
504 | "from": "2019-07-31T15:42:01.232Z",
505 | "to": "2019-08-07T15:42:01.232Z"
506 | },
507 | "timepicker": {
508 | "refresh_intervals": [
509 | "5s",
510 | "10s",
511 | "30s",
512 | "1m",
513 | "5m",
514 | "15m",
515 | "30m",
516 | "1h",
517 | "2h",
518 | "1d"
519 | ],
520 | "time_options": [
521 | "5m",
522 | "15m",
523 | "1h",
524 | "6h",
525 | "12h",
526 | "24h",
527 | "2d",
528 | "7d",
529 | "30d"
530 | ]
531 | },
532 | "timezone": "",
533 | "title": "Air Quality - time_bucket()",
534 | "uid": "FAONPsDZz",
535 | "version": 13
536 | }
--------------------------------------------------------------------------------
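A note on the "Step 3" panel in the dashboard above: `locf()` and `interpolate()` can be told where to find a neighboring value that falls outside the queried time range. Below is a minimal sketch against the `measurements` table this dashboard queries; the seven-day window, the `gapfilled_avg` alias, and the correlated lookup subquery are illustrative assumptions, not queries shipped in this repo:

    -- Sketch: carry the last observation into the queried window even when
    -- that observation predates the window. Lookup subquery and 7-day range
    -- are assumptions; adjust to your schema and dashboard range.
    SELECT
      time_bucket_gapfill('4 hours', time) AS four_hours,
      parameter_id,
      locf(
        avg(value),
        -- optional "prev" argument: fetch the most recent value recorded
        -- before the start of the queried range
        (SELECT value
           FROM measurements m2
          WHERE m2.parameter_id = m.parameter_id
            AND m2.location_id = 150
            AND m2.time < now() - INTERVAL '7 days'
          ORDER BY m2.time DESC
          LIMIT 1)
      ) AS gapfilled_avg
    FROM measurements m
    WHERE location_id = 150
      AND time > now() - INTERVAL '7 days'
      AND time < now()
    GROUP BY four_hours, parameter_id
    ORDER BY four_hours, parameter_id;

`interpolate()` accepts analogous `prev` and `next` arguments, each an expression returning a `(time, value)` record to anchor the interpolation beyond the range.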
/compression-preview/src/Subscription.js:
--------------------------------------------------------------------------------
1 | import React, { useEffect, useState } from 'react';
2 | import classNames from 'classnames';
3 | import { useSubscription, gql } from '@apollo/client';
4 | import Chunk from './components/chunk';
5 | import CardInfo from './components/cardInfo';
6 | import './styles/subscription.scss';
7 |
8 | const Subscription = () => {
9 | const { data } = useSubscription(
10 | gql`
11 | subscription Chunks {
12 | chunks_with_compression {
13 | hypertable_name
14 | chunk_name
15 | range_start
16 | range_end
17 | before_compression_total_bytes
18 | after_compression_total_bytes
19 | }
20 | }
21 | `
22 | );
23 |
24 | const [loadModal, setLoadModal] = useState(false);
25 | const [compressingModal, setCompressingModal] = useState(false);
26 | const [, setCompressAllComplete] = useState(false);
27 | const [, setAllChunks] = useState([]);
28 | const [cardInfo, setCardInfo] = useState({});
29 | const [biggestChunk, setBiggestChunk] = useState({});
30 | const [chunks, setChunks] = useState([]);
31 |
32 | const handleBiggestChunk = (chunk) => {
33 | if (Object.keys(biggestChunk).length === 0) return setBiggestChunk(chunk);
34 | if (
35 | biggestChunk.before_compression_total_bytes <
36 | chunk.before_compression_total_bytes
37 | )
38 | return setBiggestChunk(chunk);
39 | return null;
40 | };
41 |
42 | const handleCardInfo = (info) => info !== cardInfo && setCardInfo(info);
43 |
44 | const handleCompressingModal = (newState) => setCompressingModal(newState);
45 |
46 | const calculateTotalBytesUncompressed = () =>
47 | chunks &&
48 | chunks.reduce((totalBytes, currentChunk) => {
49 | return totalBytes + currentChunk.before_compression_total_bytes;
50 | }, 0);
51 |
52 | const svg =
53 | typeof window !== 'undefined' && document.getElementById('chunks');
54 | const chunksRect = svg?.getBoundingClientRect();
55 |
56 | useEffect(() => {
57 | // show the loading modal until the subscription delivers data
58 | if (data === undefined) {
59 | setLoadModal(true);
60 | } else {
61 | setLoadModal(false);
62 | setAllChunks(
63 | data.chunks_with_compression.map((chunk) => chunk.chunk_name)
64 | );
65 | }
66 | }, [chunks]);
67 |
68 | useEffect(() => {
69 | // check if compression is complete
70 | const compressionComplete = data?.chunks_with_compression.every(
71 | (x) => x.after_compression_total_bytes !== null
72 | );
73 |
74 | if (compressionComplete) {
75 | setCompressAllComplete(true);
76 | setLoadModal(false);
77 | } else {
78 | setCompressAllComplete(false);
79 | }
80 | }, [chunks]);
81 |
82 | useEffect(() => {
83 | if (data && data.chunks_with_compression) {
84 | setChunks(data.chunks_with_compression);
85 | handleCompressingModal(false);
86 | }
87 | }, [data]);
88 |
89 | const cardInfoClasses = classNames('ts-compression__inner__info__wrapper', {
90 | 'ts-compression__inner__info__wrapper--active':
91 | Object.keys(cardInfo).length > 0,
92 | });
93 |
94 | return (
     /* The JSX markup originally on lines 95-316 did not survive extraction;
        only its text content and inline expressions remain. In order, the
        component rendered: a "Loading..." modal shown while `loadModal` is
        true; a "Compression" / "Interactive visualization" header; a chunk
        counter driven by:
          {loadModal ? 'Loading chunks...' : `Total chunks: ${chunks && chunks.length}`}
        the chunk visualization itself (the `#chunks` SVG measured above) with
        the card-info panel wrapped in `cardInfoClasses`; and a "Compressing
        chunks" modal gated on:
          {compressingModal && ( ... )} */
317 | );
318 | };
319 |
320 | export default Subscription;
321 |
--------------------------------------------------------------------------------