/node_modules/'],
35 |
36 | // Indicates whether each individual test should be reported during the run
37 | verbose: true,
38 | };
39 |
--------------------------------------------------------------------------------
/frontend/config/paths.js:
--------------------------------------------------------------------------------
1 | // paths.js
2 |
3 | // Paths will export some path variables that we'll
4 | // use in other Webpack config and server files
5 |
6 | const path = require('path');
7 | const fs = require('fs');
8 |
9 | const appDirectory = fs.realpathSync(process.cwd());
10 | const resolveApp = relativePath => path.resolve(appDirectory, relativePath);
11 |
12 | module.exports = {
13 | appAssets: resolveApp('src/assets'), // For images and other assets
14 | appBuild: resolveApp('build'), // Prod built files end up here
15 | appConfig: resolveApp('config'), // App config files
16 | appHtml: resolveApp('src/index.html'),
17 | appIndexJs: resolveApp('src/index.jsx'), // Main entry point
18 | appSrc: resolveApp('src'), // App source
19 | };
20 |
--------------------------------------------------------------------------------
/frontend/config/webpack-common-config.js:
--------------------------------------------------------------------------------
// webpack-common-config.js

// This file will contain configuration data that
// is shared between development and production builds.

const HtmlWebpackPlugin = require('html-webpack-plugin');
const webpack = require('webpack');
const path = require('path');

const paths = require('./paths');

module.exports = {
  plugins: [
    // Generate the HTML entry page from the template and inject the
    // built bundle <script> tags into it.
    new HtmlWebpackPlugin({
      inject: true,
      template: paths.appHtml,
    }),
    // Inline these environment values into the client bundle at build
    // time. JSON.stringify is required so the values are embedded as
    // string literals; an unset variable becomes the literal undefined.
    new webpack.DefinePlugin({
      'process.env.GRAPH_HOST': JSON.stringify(process.env.GRAPH_HOST),
      'process.env.ELASTICSEARCH_HOST': JSON.stringify(
        process.env.ELASTICSEARCH_HOST,
      ),
      'process.env.ELASTICSEARCH_PROTOCOL': JSON.stringify(
        process.env.ELASTICSEARCH_PROTOCOL,
      ),
      'process.env.ELASTICSEARCH_PORT': JSON.stringify(
        process.env.ELASTICSEARCH_PORT,
      ),
    }),
  ],
  resolve: {
    // File extensions. Add others as needed (e.g. scss, json)
    extensions: ['.js', '.jsx', '.scss', '.css'],
    modules: ['node_modules'],
    // Aliases help with shortening relative paths
    // 'Components/button' === '../../../components/button'
    alias: {
      Actions: path.resolve(paths.appSrc, 'actions'),
      Components: path.resolve(paths.appSrc, 'components'),
      Constants: path.resolve(paths.appSrc, 'constants'),
      Containers: path.resolve(paths.appSrc, 'containers'),
      Store: path.resolve(paths.appSrc, 'store'),
      Styles: path.resolve(paths.appSrc, 'styles'),
      Utils: path.resolve(paths.appSrc, 'utils'),
    },
  },
  module: {
    rules: [
      {
        // Copy image assets and rewrite imports to their emitted URLs.
        test: /\.(png|svg|jpg)$/,
        use: ['file-loader'],
      },
    ],
  },
  devServer: {
    // Serve index.html for unknown routes so client-side routing works.
    historyApiFallback: true,
  },
};
59 |
--------------------------------------------------------------------------------
/frontend/config/webpack-dev-config.js:
--------------------------------------------------------------------------------
// webpack-dev-config.js

// configuration data related to development only
const path = require('path');
const webpack = require('webpack');
const merge = require('webpack-merge');
const paths = require('./paths');

// Load dev-only environment variables (GRAPH_HOST, ELASTICSEARCH_*)
// that webpack-common-config's DefinePlugin inlines into the bundle.
require('dotenv').config({
  path: path.join(__dirname, '.env.dev'),
});

// import common webpack config
const common = require('./webpack-common-config');
module.exports = merge(common, {
  entry: [paths.appIndexJs],
  mode: 'development',
  // devtool option controls if and how source maps are generated.
  // see https://webpack.js.org/configuration/devtool/
  // If you find that you need more control of source map generation,
  // see https://webpack.js.org/plugins/source-map-dev-tool-plugin/
  devtool: 'eval',
  plugins: [
    // Hot-swap updated modules in the running page without a full reload.
    new webpack.HotModuleReplacementPlugin(),
    //new webpack.NamedModulesPlugin(),
    new webpack.DefinePlugin({
      'process.env.NODE_ENV': JSON.stringify('development'),
    }),
  ],
  module: {
    rules: [
      {
        // look for .js or .jsx files
        test: /\.(js|jsx)$/,
        // in the `src` directory
        include: path.resolve(paths.appSrc),
        exclude: /(node_modules)/,
        use: {
          // use babel for transpiling JavaScript files
          loader: 'babel-loader',
          options: {
            presets: ['@babel/react'],
          },
        },
      },
      {
        // look for .css or .scss files
        test: /\.(css|scss)$/,
        // in the `src` directory
        include: [path.resolve(paths.appSrc), /node_modules/],
        use: [
          {
            loader: 'style-loader',
          },
          {
            loader: 'css-loader',
            options: {
              modules: false,
            },
          },
          {
            loader: 'sass-loader',
            options: {
              sourceMap: true,
            },
          },
        ],
      },
    ],
  },
});
72 |
--------------------------------------------------------------------------------
/frontend/config/webpack-dev-server.js:
--------------------------------------------------------------------------------
// webpack-dev-server.js

// dev-server is responsible for running your project locally:
// it wires the dev webpack config into webpack-dev-server and
// serves static assets from `paths.appAssets`.

const WebpackDevServer = require("webpack-dev-server");
const webpack = require("webpack");
const paths = require("./paths");
const config = require("./webpack-dev-config.js");

// Change these to suit your preference. SCREAMING_SNAKE_CASE marks
// them as true module-level constants.
const PORT = 3000;
const HOST = "0.0.0.0";

const options = {
  host: HOST,
  // Enable webpack's Hot Module Replacement feature
  hot: true,
  // full-screen overlay in the browser for compiler errors or warnings
  overlay: {
    warnings: false,
    errors: true
  },
  // Show errors and warnings in console
  quiet: false,
  // Hide the build info
  noInfo: false,
  // Tell the server where to serve static files from.
  // Set this in the `paths.js` file.
  contentBase: paths.appAssets,
  // If static content changes, reload the page.
  // In other words, editing a photo within the assets
  // directory will force the page to reload.
  watchContentBase: true,
  after() {
    process.stdout.write(`dev server is running: http://${HOST}:${PORT}\n`);
  }
};

// Inject the HMR client entries matching `options` into the config
// before the compiler is created.
WebpackDevServer.addDevServerEntrypoints(config, options);
const compiler = webpack(config);
const server = new WebpackDevServer(compiler, options);

// Surface listen failures (e.g. port already in use) instead of
// silently swallowing them. (If this webpack-dev-server version does
// not pass an error to the callback, `err` is simply undefined.)
server.listen(PORT, HOST, err => {
  if (err) {
    console.error(err);
    process.exit(1);
  }
});
44 |
--------------------------------------------------------------------------------
/frontend/config/webpack-prod-config.js:
--------------------------------------------------------------------------------
// webpack-prod-config.js

// contains configuration data related to prod build

const path = require('path');

const webpack = require('webpack');
const merge = require('webpack-merge');
const TerserPlugin = require('terser-webpack-plugin');
// NOTE(review): extract-text-webpack-plugin is deprecated for
// webpack 4+ (mini-css-extract-plugin is its replacement) — confirm
// compatibility with the webpack version pinned in package.json.
const ExtractTextPlugin = require('extract-text-webpack-plugin');

const paths = require('./paths');
const common = require('./webpack-common-config.js');

module.exports = merge(common, {
  entry: {
    // Split vendor code into separate bundles
    vendor: ['react'],
    app: paths.appIndexJs,
  },
  mode: 'production',
  // Set the name of our JS bundle using a chunkhash
  // (e.g. '5124f5efa5436b5b5e7d_app.js')
  // Location where built files will go.
  output: {
    filename: '[chunkhash]_[name].js',
    path: paths.appBuild,
    publicPath: '/',
  },
  optimization: {
    // Minify the JS bundles with Terser.
    minimizer: [new TerserPlugin()],
  },
  plugins: [
    // Set process.env.NODE_ENV to production
    new webpack.DefinePlugin({
      'process.env': {
        NODE_ENV: JSON.stringify('production'),
      },
    }),
    // Extract text/(s)css from a bundle, or bundles, into a separate file.
    new ExtractTextPlugin('styles.css'),
  ],
  module: {
    rules: [
      {
        // look for .js or .jsx files
        test: /\.(js|jsx)$/,
        // in the `src` directory
        include: path.resolve(paths.appSrc),
        exclude: /node_modules/,
        use: {
          // use babel for transpiling JavaScript files
          loader: 'babel-loader',
          options: {
            presets: ['@babel/react'],
          },
        },
      },
      {
        // look for .css or .scss files.
        test: /\.(css|scss)$/,
        // in the `src` directory
        include: [path.resolve(paths.appSrc), /node_modules/],
        use: ExtractTextPlugin.extract({
          fallback: 'style-loader',
          use: [
            {
              loader: 'css-loader',
              options: {
                modules: false,
              },
            },
            {
              loader: 'sass-loader',
              options: {
                sourceMap: false,
              },
            },
          ],
        }),
      },
    ],
  },
});
85 |
--------------------------------------------------------------------------------
/frontend/src/__mocks__/apiMockResponse.js:
--------------------------------------------------------------------------------
// Canned lineage-API response (nodes + directed edges) used by the
// action and component tests.
// NOTE(review): these id literals exceed Number.MAX_SAFE_INTEGER
// (2^53 - 1) and are silently rounded at parse time. The tests still
// work because identical literals round identically, so `edges[].from/to`
// still equal the corresponding `nodes[].id` — but the values no longer
// match the backend ids exactly; consider string ids if exactness matters.
const apiMock = {
  nodes: [
    {
      id: 1333832958231356438,
      group: 'table',
      label: 'SRCETL_COMBINED_AUDITINFO_DIM',
    },
    { id: -5164229199702592055, group: 'table', label: 'STGETL_AUDITINFO_DIM' },
    {
      id: -6257875581471897958,
      group: 'table',
      label: 'FACT_QBO_PRODUCT_ACTION_EVENT',
    },
    {
      id: 8393607891502255639,
      group: 'table',
      label: 'DIM_QBO_COMPANY_PRODUCT_USAGE_KPI',
    },
    { id: 4931414091262092880, group: 'table', label: 'DIM_QBO_USER' },
    {
      id: 5505949582699571067,
      group: 'table',
      label: 'QBO_COMPANY_PRODUCT_USAGE_KPI_VW',
    },
    { id: 2897333401185500543, group: 'table', label: 'CFG_IOP_ETL_STATE' },
  ],
  edges: [
    { from: 4931414091262092880, to: -5164229199702592055 },
    { from: -5164229199702592055, to: -6257875581471897958 },
    { from: -6257875581471897958, to: 2897333401185500543 },
    { from: -6257875581471897958, to: 8393607891502255639 },
    { from: 4931414091262092880, to: 1333832958231356438 },
    { from: 8393607891502255639, to: 5505949582699571067 },
    { from: 1333832958231356438, to: -6257875581471897958 },
  ],
};

export default apiMock;
39 |
--------------------------------------------------------------------------------
/frontend/src/__mocks__/fileMock.js:
--------------------------------------------------------------------------------
// Test mock: static asset imports (png/svg/jpg) resolve to this stub string.
module.exports = 'test-file-stub';
2 |
--------------------------------------------------------------------------------
/frontend/src/__mocks__/styleMock.js:
--------------------------------------------------------------------------------
// Test mock: style imports (css/scss) resolve to an empty object.
module.exports = {};
2 |
3 |
--------------------------------------------------------------------------------
/frontend/src/__tests__/actions/LineageActions.test.js:
--------------------------------------------------------------------------------
import * as actions from 'Actions/LineageActions';
import LineageActionTypes from 'Constants/LineageActionTypes';
import LoadingState from 'Constants/LoadingState';
import moxios from 'moxios';
import configureMockStore from 'redux-mock-store';
import thunk from 'redux-thunk';
import apiMock from '../../__mocks__/apiMockResponse';

describe('actions', () => {
  it('should create an action to set lineage data', () => {
    const data = { nodes: [], edges: [] };
    const expectedAction = {
      type: LineageActionTypes.INSERT_LINEAGE,
      data,
    };
    expect(actions.setLineageData(data)).toEqual(expectedAction);
  });
});

describe('actions', () => {
  it('should create an action to set lineage loading status', () => {
    const state = LoadingState.LOADING;
    const expectedAction = {
      type: LineageActionTypes.SET_LINEAGE_LOADING,
      state,
    };
    expect(actions.setLineageLoading(state)).toEqual(expectedAction);
  });
});

const middlewares = [thunk];
const mockStore = configureMockStore(middlewares);

describe('actions', () => {
  beforeEach(() => moxios.install());
  afterEach(() => moxios.uninstall());

  it('should get the lineage data, given the name and type', () => {
    moxios.wait(() => {
      const request = moxios.requests.mostRecent();
      request.respondWith({
        status: 200,
        response: apiMock,
      });
    });

    // getLineageData(name, type, depth) also dispatches SET_DEPTH with
    // the requested traversal depth before fetching.
    const expectedActions = [
      {
        type: LineageActionTypes.SET_LINEAGE_LOADING,
        state: LoadingState.LOADING,
      },
      { type: LineageActionTypes.SET_DEPTH, depth: 'Full' },
      { type: LineageActionTypes.INSERT_LINEAGE, data: apiMock },
      {
        type: LineageActionTypes.SET_LINEAGE_LOADING,
        state: LoadingState.FINISHED_SUCCESS,
      },
    ];

    const store = mockStore({
      graph: {},
      loadingState: LoadingState.NOT_LOADED,
    });

    // Return the promise so jest waits for the async assertions;
    // without the `return` the test passed vacuously.
    return store
      .dispatch(actions.getLineageData('name', 'type', 'Full'))
      .then(() => {
        // return of async actions
        expect(store.getActions()).toEqual(expectedActions);
      });
  });
});
70 |
--------------------------------------------------------------------------------
/frontend/src/__tests__/actions/SearchActions.test.js:
--------------------------------------------------------------------------------
import * as actions from 'Actions/SearchActions';
import SearchActionTypes from 'Constants/SearchActionTypes';
import moxios from 'moxios';
import configureMockStore from 'redux-mock-store';
import thunk from 'redux-thunk';
import elasticMock from '../../__mocks__/elasticMock';

describe('actions', () => {
  it('should create an action to set search term', () => {
    const searchTerm = 'qbo';
    const expectedAction = {
      type: SearchActionTypes.SET_SEARCH_TERM,
      searchTerm,
    };
    expect(actions.setSearchTerm(searchTerm)).toEqual(expectedAction);
  });
});

describe('actions', () => {
  it('should create an action to insert suggestions from elasticsearch', () => {
    const suggestions = [];
    const expectedAction = {
      type: SearchActionTypes.INSERT_SUGGESTIONS,
      suggestions,
    };
    expect(actions.insertSuggestions(suggestions)).toEqual(expectedAction);
  });
});

const middlewares = [thunk];
const mockStore = configureMockStore(middlewares);

describe('actions', () => {
  beforeEach(() => moxios.install());
  afterEach(() => moxios.uninstall());

  it('should get the search results, given the search term', () => {
    moxios.wait(() => {
      const request = moxios.requests.mostRecent();
      request.respondWith({
        status: 200,
        response: elasticMock,
      });
    });

    // Mirror the flattening searchEntities performs on ES hits.
    const suggestions = elasticMock.hits.hits.map(item => ({
      name: item._source.name,
      type: item._source.type,
      system: item._source.platform,
      job_group: item._source.schema,
    }));

    const expectedActions = [
      { type: SearchActionTypes.SET_SEARCH_TERM, searchTerm: 'qbo' },
      { type: SearchActionTypes.INSERT_SUGGESTIONS, suggestions },
    ];

    const store = mockStore({
      searchTerm: '',
      suggestions: [],
    });

    // Return the promise so jest waits for the async assertions;
    // without the `return` the test passed vacuously.
    return store.dispatch(actions.searchEntities('qbo')).then(() => {
      expect(store.getActions()).toEqual(expectedActions);
    });
  });
});
68 |
--------------------------------------------------------------------------------
/frontend/src/__tests__/components/Lineage.test.js:
--------------------------------------------------------------------------------
import React from 'react';
import { shallow, mount } from 'enzyme';
import toJson from 'enzyme-to-json';
import configureStore from 'redux-mock-store';
import { Provider } from 'react-redux';
import thunk from 'redux-thunk';
import { Map } from 'immutable';
import ConnectedLineage, { Lineage } from 'Components/Lineage';
import LoadingState from 'Constants/LoadingState';
import Loading from 'Components/Loading';

// NOTE(review): the JSX elements in this file appear to have been
// stripped by an extraction step (the shallow(...) calls below have
// empty/blank arguments) — restore them from version control before
// running these tests.

const mockStore = configureStore([thunk]);
const initialState = Map({
  graph: { nodes: [], edges: [] },
  loadingStatus: LoadingState.NOT_LOADED,
});
const store = mockStore(initialState);

// Store variant pre-set to the LOADING status.
const loadingState = Map({
  graph: { nodes: [], edges: [] },
  loadingStatus: LoadingState.LOADING,
});
const loadingStore = mockStore(loadingState);

describe('', () => {
  describe('render()', () => {
    test('renders the component', () => {
      // Presumably shallow(<Provider store={store}><ConnectedLineage /></Provider>)
      // — TODO confirm against the original source.
      const wrapper = shallow(


        ,
      );
      const component = wrapper.dive().dive();
      expect(toJson(component)).toMatchSnapshot();
    });

    test('renders the component based on the loading status', () => {
      // Presumably shallow(<Lineage ... />) with a loadingStatus prop
      // — TODO confirm against the original source.
      const wrapper = shallow(
        ,
      );

      wrapper.update();
      expect(wrapper.find('div').text()).toBe('Lineage Not Loaded');

      wrapper.setProps({ loadingStatus: LoadingState.FINISHED_FAILURE });
      wrapper.update();
      expect(wrapper.find('div').text()).toBe('No lineage found');

      wrapper.setProps({ loadingStatus: LoadingState.LOADING });
      wrapper.update();
      expect(wrapper.find('Loading')).toBeTruthy();

      wrapper.setProps({ loadingStatus: LoadingState.FINISHED_SUCCESS });
      wrapper.update();
      expect(wrapper.find('Network')).toBeTruthy();

      wrapper.setProps({ loadingStatus: '' });
      wrapper.update();
      expect(wrapper.find('div').text()).toBe('Oops, something went wrong');
    });
  });
});
63 |
--------------------------------------------------------------------------------
/frontend/src/__tests__/components/Loading.test.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import { shallow } from 'enzyme';
3 | import Loading from 'Components/Loading';
4 |
5 | describe('', () => {
6 | test('the component renders', () => {
7 | const wrapper = shallow();
8 | expect(wrapper.find('img')).toHaveLength(1);
9 | });
10 | });
11 |
--------------------------------------------------------------------------------
/frontend/src/__tests__/components/Network.test.jsx:
--------------------------------------------------------------------------------
import React from 'react';
import { shallow, mount } from 'enzyme';
import toJson from 'enzyme-to-json';
import Network from 'Components/Network';

// NOTE(review): the JSX arguments to shallow() below appear to have been
// stripped by an extraction step (presumably <Network /> with props);
// as written, shallow() is called with no element and will throw —
// restore from version control.
describe('', () => {
  test('it renders the component', () => {
    const wrapper = shallow();
    expect(toJson(wrapper)).toMatchSnapshot();
  });

  test('network container is rendered', () => {
    const wrapper = shallow();
    expect(wrapper.find('div')).toHaveLength(1);
  });
});
17 |
--------------------------------------------------------------------------------
/frontend/src/__tests__/components/SearchBar.test.js:
--------------------------------------------------------------------------------
import React from 'react';
import { shallow } from 'enzyme';
import toJson from 'enzyme-to-json';
import configureStore from 'redux-mock-store';
import { Provider } from 'react-redux';
import thunk from 'redux-thunk';
import { Map } from 'immutable';
import ConnectedSearchBar, { SearchBar } from 'Components/SearchBar';

// NOTE(review): the JSX elements in this file appear to have been
// stripped by an extraction step (empty shallow(...) arguments below);
// restore them from version control before running these tests.

const mockStore = configureStore([thunk]);
const initialState = Map({
  searchTerm: '',
  suggestions: [],
});
const store = mockStore(initialState);
// test that the component renders
describe('', () => {
  describe('render()', () => {
    test('renders the component', () => {
      // Presumably shallow(<Provider store={store}><ConnectedSearchBar /></Provider>)
      // — TODO confirm against the original source.
      const wrapper = shallow(


        ,
      );
      // double dive to get into the component
      const component = wrapper.dive().dive();

      expect(toJson(component)).toMatchSnapshot();
    });
  });

  describe('SearchBar', () => {
    const onSearchChange = jest.fn();
    const setSearchTerm = jest.fn();
    const insertSuggestions = jest.fn();
    // Presumably shallow(<SearchBar onSearchChange={...} setSearchTerm={...}
    // insertSuggestions={...} ... />) — TODO confirm against the original source.
    const wrapper = shallow(
      ,
    );

    test('it has all three of its subcomponents', () => {
      expect(wrapper.find('.searchBarContainer')).toHaveLength(1);
      expect(wrapper.find('.search')).toHaveLength(1);
      expect(wrapper.find('.searchBar')).toHaveLength(1);
      expect(wrapper.find('.close')).toHaveLength(1);
    });

    test('it accepts input and calls on the action to set the search term', () => {
      const event = {
        target: { value: 'hello' },
      };
      wrapper.find('input').simulate('change', event);
      expect(onSearchChange).toBeCalledWith('hello');
    });

    test('it calls on the actions to clear the inputs and suggestions', () => {
      wrapper.find('.close').simulate('click');
      expect(setSearchTerm).toBeCalledWith('');
      expect(insertSuggestions).toBeCalledWith([]);
    });
  });
});
66 |
--------------------------------------------------------------------------------
/frontend/src/__tests__/components/SearchTable.test.js:
--------------------------------------------------------------------------------
import React from 'react';
import { shallow } from 'enzyme';
import toJson from 'enzyme-to-json';
import configureStore from 'redux-mock-store';
import { Provider } from 'react-redux';
import thunk from 'redux-thunk';
import { Map } from 'immutable';
import ConnectedSearchTable, { SearchTable } from 'Components/SearchTable';

// NOTE(review): the JSX elements in this file appear to have been
// stripped by an extraction step (empty shallow(...) arguments below);
// restore them from version control before running these tests.

const mockStore = configureStore([thunk]);
const initialState = Map({
  searchTerm: '',
  suggestions: [],
});
const store = mockStore(initialState);
// test that the component renders
describe('', () => {
  describe('render()', () => {
    test('renders the component', () => {
      // Presumably shallow(<Provider store={store}><ConnectedSearchTable /></Provider>)
      // — TODO confirm against the original source.
      const wrapper = shallow(


        ,
      );
      // double dive to get into the component
      const component = wrapper.dive().dive();

      expect(toJson(component)).toMatchSnapshot();
    });
  });

  describe('SearchTable', () => {
    test('it has all of its subcomponents', () => {
      // Presumably shallow(<SearchTable suggestions={[]} />) — TODO confirm.
      const wrapper = shallow();
      expect(wrapper.find('.searchTableContainer')).toHaveLength(1);
      expect(wrapper.find('ReactTable')).toHaveLength(1);
    });
  });
});
40 |
--------------------------------------------------------------------------------
/frontend/src/__tests__/containers/App.test.jsx:
--------------------------------------------------------------------------------
import React from 'react';
import { shallow } from 'enzyme';
import { MemoryRouter } from 'react-router';
import App from 'Containers/App';
import Search from 'Containers/Search';
import Dashboard from 'Containers/Dashboard';

// NOTE(review): the JSX in this file appears to have been stripped by
// an extraction step — `element = ;` below is a syntax error as written
// (presumably it was <App />, possibly wrapped in <MemoryRouter>);
// restore from version control.
describe('App', () => {
  describe('component', () => {
    let element;
    beforeEach(() => {
      element = ;
    });

    it('renders as expected', () => {
      const component = shallow(element);
      expect(component).toMatchSnapshot();
    });

    it('routes / to Search', () => {
      const component = shallow(element);
      expect(
        component
          .find('Route[exact=true][path="/"]')
          .first()
          .prop('component'),
      ).toBe(Search);
    });

    it('routes /dashboard to Dashboard', () => {
      const component = shallow(element);
      expect(
        component
          .find('Route[path="/dashboard/:entityType/:entityName/"]')
          .first()
          .prop('component'),
      ).toBe(Dashboard);
    });
  });
});
41 |
--------------------------------------------------------------------------------
/frontend/src/__tests__/containers/Dashboard.test.js:
--------------------------------------------------------------------------------
import React from 'react';
import { shallow, mount } from 'enzyme';
import toJson from 'enzyme-to-json';
import configureStore from 'redux-mock-store';
import { Provider } from 'react-redux';
import thunk from 'redux-thunk';
import { Map } from 'immutable';
import ConnectedDashboard, { Dashboard } from 'Containers/Dashboard';
import Lineage from 'Components/Lineage';

// NOTE(review): the JSX elements in this file appear to have been
// stripped by an extraction step (empty shallow(...) arguments below);
// restore them from version control before running these tests.

describe('Dashboard Container', () => {
  test('container component exists', () => {
    // Presumably shallow(<Dashboard ... />) — TODO confirm.
    const wrapper = shallow();
    expect(wrapper.exists()).toBe(true);
  });

  test('it has all of its subcomponents', () => {
    const match = {
      params: {
        entityType: '',
        entityName: '',
      },
    };
    const getLineageData = jest.fn();
    // Presumably shallow(<Dashboard match={match} getLineageData={getLineageData} />)
    // — TODO confirm against the original source.
    const wrapper = shallow(
      ,
    );
    expect(wrapper.find('.dashboardContainer')).toHaveLength(1);
    expect(wrapper.find('h1')).toHaveLength(1);
    expect(wrapper.find(Lineage)).toHaveLength(1);
  });
});
33 |
--------------------------------------------------------------------------------
/frontend/src/__tests__/containers/Search.test.js:
--------------------------------------------------------------------------------
import React from 'react';
import { shallow, mount } from 'enzyme';
import toJson from 'enzyme-to-json';
import configureStore from 'redux-mock-store';
import { Provider } from 'react-redux';
import thunk from 'redux-thunk';
import { Map } from 'immutable';
import Search from 'Containers/Search';
import SearchBar from 'Components/SearchBar';
import SearchTable from 'Components/SearchTable';

// NOTE(review): the JSX arguments to shallow() below appear to have
// been stripped by an extraction step (presumably <Search />); as
// written shallow() is called with no element and will throw —
// restore from version control.

const mockStore = configureStore([thunk]);
const initialState = Map({
  searchTerm: '',
  suggestions: [],
});
const store = mockStore(initialState);

describe('Search Container', () => {
  test('container component exists', () => {
    const wrapper = shallow();
    expect(wrapper.exists()).toBe(true);
  });

  // TODO: see that it has all of its children
  test('it has all of its subcomponents', () => {
    const wrapper = shallow();
    expect(wrapper.find('.searchContainer')).toHaveLength(1);
    expect(wrapper.find('.searchTopContainer')).toHaveLength(1);
    expect(wrapper.find('.brand')).toHaveLength(1);
    expect(wrapper.find(SearchBar)).toHaveLength(1);
    expect(wrapper.find(SearchTable)).toHaveLength(1);
  });
});
35 |
--------------------------------------------------------------------------------
/frontend/src/__tests__/reducers/LineageReducer.test.js:
--------------------------------------------------------------------------------
import LineageReducer from 'Reducers/LineageReducer';
import LineageActionTypes from 'Constants/LineageActionTypes';
import { Map } from 'immutable';
import LoadingState from 'Constants/LoadingState';

describe('lineage reducer', () => {
  // State the reducer must produce before any action has been handled.
  const initialState = Map({
    graph: { nodes: [], edges: [] },
    loadingStatus: LoadingState.NOT_LOADED,
  });

  it('should return the initial state', () => {
    const result = LineageReducer(undefined, {});
    expect(result).toEqual(initialState);
  });

  it('should handle INSERT_LINEAGE', () => {
    const graph = { nodes: [], edges: [] };
    const insertAction = {
      type: LineageActionTypes.INSERT_LINEAGE,
      data: graph,
    };
    // Only the graph changes; the loading status keeps its default.
    const expected = Map({
      graph,
      loadingStatus: LoadingState.NOT_LOADED,
    });
    expect(LineageReducer(undefined, insertAction)).toEqual(expected);
  });

  it('should handle SET_LINEAGE_LOADING', () => {
    const status = LoadingState.LOADING;
    const loadingAction = {
      type: LineageActionTypes.SET_LINEAGE_LOADING,
      state: status,
    };
    // Only the loading status changes; the graph keeps its default.
    const expected = Map({
      graph: { nodes: [], edges: [] },
      loadingStatus: status,
    });
    expect(LineageReducer(undefined, loadingAction)).toEqual(expected);
  });
});
44 |
--------------------------------------------------------------------------------
/frontend/src/__tests__/reducers/SearchReducer.test.js:
--------------------------------------------------------------------------------
import SearchReducer from 'Reducers/SearchReducer';
import SearchActionTypes from 'Constants/SearchActionTypes';
import { Map } from 'immutable';

// Fixed copy-pasted suite name: this file tests SearchReducer,
// not the lineage reducer.
describe('search reducer', () => {
  const initialState = Map({
    searchTerm: '',
    suggestions: [],
  });

  it('should return the initial state', () => {
    expect(SearchReducer(undefined, {})).toEqual(initialState);
  });

  it('should handle SET_SEARCH_TERM', () => {
    const term = 'qbo';
    const action = {
      type: SearchActionTypes.SET_SEARCH_TERM,
      searchTerm: term,
    };
    // Only the search term changes; suggestions keep their default.
    expect(SearchReducer(undefined, action)).toEqual(
      Map({
        searchTerm: term,
        suggestions: [],
      }),
    );
  });

  it('should handle INSERT_SUGGESTIONS', () => {
    const suggestion = ['test'];
    const action = {
      type: SearchActionTypes.INSERT_SUGGESTIONS,
      suggestions: suggestion,
    };
    // Only the suggestions change; the search term keeps its default.
    expect(SearchReducer(undefined, action)).toEqual(
      Map({
        searchTerm: '',
        suggestions: suggestion,
      }),
    );
  });
});
43 |
--------------------------------------------------------------------------------
/frontend/src/__tests__/setup/setupEnzyme.js:
--------------------------------------------------------------------------------
// One-time Enzyme setup: registers the React 16 adapter before the
// test suites run.
import Enzyme from 'enzyme';
import Adapter from 'enzyme-adapter-react-16';

Enzyme.configure({ adapter: new Adapter() });
5 |
--------------------------------------------------------------------------------
/frontend/src/actions/AxiosInstance.js:
--------------------------------------------------------------------------------
import axios from 'axios';

// Shared axios instance for the lineage REST API.
// process.env.GRAPH_HOST is inlined at build time by
// webpack.DefinePlugin (see config/webpack-common-config.js).
export default axios.create({
  baseURL: `${process.env.GRAPH_HOST}/api/v1/`,
});
6 |
--------------------------------------------------------------------------------
/frontend/src/actions/LineageActions.js:
--------------------------------------------------------------------------------
import LineageActionTypes from 'Constants/LineageActionTypes';
import LoadingState from 'Constants/LoadingState';
import API from './AxiosInstance';

// Action creator: records the current lineage loading state.
export const setLineageLoading = state => ({
  type: LineageActionTypes.SET_LINEAGE_LOADING,
  state,
});

// Action creator: stores the lineage graph (nodes/edges) from the API.
export const setLineageData = data => ({
  type: LineageActionTypes.INSERT_LINEAGE,
  data,
});

// Action creator: records the requested traversal depth.
export const setDepthTraversal = depth => ({
  type: LineageActionTypes.SET_DEPTH,
  depth,
});

// Thunk: fetches lineage for an entity, bracketing the request with
// loading-state dispatches. entityDepth === 'Full' means no bw/fw
// traversal limit is sent to the API.
export const getLineageData = (entityName, entityType, entityDepth) => dispatch => {
  dispatch(setLineageLoading(LoadingState.LOADING));
  dispatch(setDepthTraversal(entityDepth));
  const params =
    entityDepth === 'Full' ? '' : `?bw=${entityDepth}&fw=${entityDepth}`;

  return API.get(`lineage/${entityType}/${entityName}${params}`)
    .then(res => {
      dispatch(setLineageData(res.data));
      dispatch(setLineageLoading(LoadingState.FINISHED_SUCCESS));
    })
    .catch(err => {
      /* istanbul ignore next */ console.error(err);
      dispatch(setLineageLoading(LoadingState.FINISHED_FAILURE));
    });
};
36 |
--------------------------------------------------------------------------------
/frontend/src/actions/SearchActions.js:
--------------------------------------------------------------------------------
1 | import elasticsearch from 'elasticsearch';
2 | import SearchActionTypes from 'Constants/SearchActionTypes';
3 |
4 | const log = process.env.NODE_ENV === 'production' ? '' : 'trace';
5 | const searchClient = new elasticsearch.Client({
6 | host: process.env.ELASTICSEARCH_HOST,
7 | log,
8 | });
9 |
10 | export const setSearchTerm = searchTerm => ({
11 | type: SearchActionTypes.SET_SEARCH_TERM,
12 | searchTerm,
13 | });
14 |
// Action creator: replaces the suggestion list shown in the results table.
export const insertSuggestions = suggestions => {
  return {
    type: SearchActionTypes.INSERT_SUGGESTIONS,
    suggestions,
  };
};
19 |
/**
 * Thunk: records the search term, queries the 'lineage' Elasticsearch
 * index for up to 50 fuzzy name matches (exact matches boosted to the
 * top), and inserts the mapped hits as suggestions.
 *
 * Fixed: search failures are now logged in every environment. Previously
 * the error handler was a no-op outside development, silently swallowing
 * Elasticsearch errors in production.
 */
export const searchEntities = searchTerm => dispatch => {
  dispatch(setSearchTerm(searchTerm));
  return searchClient
    .search({
      index: 'lineage',
      body: {
        size: 50,
        query: {
          bool: {
            must: [
              {
                match: {
                  name: {
                    query: searchTerm,
                    // this part makes sure all the keywords are present
                    operator: 'and',
                    fuzziness: 1,
                  },
                },
              },
            ],
            should: [
              {
                term: {
                  name: {
                    // this also boosts the exact match results to the top
                    value: searchTerm,
                    boost: 20,
                  },
                },
              },
            ],
          },
        },
      },
    })
    .then(
      result => {
        // Flatten the ES hit documents into the row shape SearchTable uses.
        const suggestions = result.hits.hits.map(item => ({
          name: item._source.name,
          type: item._source.type,
          system: item._source.platform,
          job_group: item._source.schema,
        }));

        // When we receive the suggestions, insert them into the store.
        dispatch(insertSuggestions(suggestions));
      },
      error => {
        // Do not swallow failures: surface them in every environment.
        /* istanbul ignore next */ console.error('Elasticsearch error:');
        /* istanbul ignore next */ console.error(error);
      },
    );
};
76 |
--------------------------------------------------------------------------------
/frontend/src/assets/loadinga.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/intuit/superglue/5c5e03846cfdfa72663c4c689c4d279480a73d4d/frontend/src/assets/loadinga.png
--------------------------------------------------------------------------------
/frontend/src/assets/loadingb.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/intuit/superglue/5c5e03846cfdfa72663c4c689c4d279480a73d4d/frontend/src/assets/loadingb.png
--------------------------------------------------------------------------------
/frontend/src/assets/spilledglue.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/intuit/superglue/5c5e03846cfdfa72663c4c689c4d279480a73d4d/frontend/src/assets/spilledglue.png
--------------------------------------------------------------------------------
/frontend/src/components/Lineage.jsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import Network from 'Components/Network';
3 | import { getLineageData } from 'Actions/LineageActions';
4 | import { connect } from 'react-redux';
5 | import LoadingState from 'Constants/LoadingState';
6 | import Loading from 'Components/Loading';
7 | import spilledglue from '../assets/spilledglue.png';
8 |
// Presentational switch over the lineage fetch lifecycle: renders a
// not-loaded placeholder, a spinner, the graph, or an error view
// depending on loadingStatus (provided by connect() below).
// NOTE(review): the JSX element markup in this render appears to have
// been stripped by text extraction — the bare `return` statements below
// originally returned elements (presumably <Loading />, <Network
// graph={this.props.graph} />, and a "No lineage found" view using the
// spilledglue image). Recover the originals from version control before
// editing; do not trust the literal text here.
export class Lineage extends React.Component {
  render() {
    switch (this.props.loadingStatus) {
      case LoadingState.NOT_LOADED:
        return Lineage Not Loaded
;
      case LoadingState.LOADING:
        return ;
      case LoadingState.FINISHED_SUCCESS:
        return ;
      case LoadingState.FINISHED_FAILURE:
        return (


No lineage found

        );
      default:
        return Oops, something went wrong
;
    }
  }
}
30 |
// Expose the lineage slice's fetch status and graph to the component.
/* istanbul ignore next */ const mapStateToProps = ({ lineage }) => ({
  loadingStatus: lineage.get('loadingStatus'),
  graph: lineage.get('graph'),
});

// Fixed: forward the depth argument. The previous two-argument wrapper
// silently dropped it, so the getLineageData thunk always received an
// undefined depth from this component.
/* istanbul ignore next */ const mapDispatchToProps = dispatch => ({
  getLineageData: (name, type, depth) => dispatch(getLineageData(name, type, depth)),
});

export default connect(
  mapStateToProps,
  mapDispatchToProps,
)(Lineage);
44 |
--------------------------------------------------------------------------------
/frontend/src/components/Loading.jsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import glue from '../assets/loadingb.png';
3 |
// Bouncing-glue loading indicator (animation lives in lineage.scss).
// NOTE(review): the <img src={glue} .../> JSX appears to have been
// stripped by text extraction; recover the markup from version control.
const Loading = props => (

);

export default Loading;
9 |
--------------------------------------------------------------------------------
/frontend/src/components/Network.jsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import Graph from 'react-graph-vis';
3 |
// vis.js network configuration passed to react-graph-vis.
// Fixed: added the trailing commas missing from the edges.color entries
// so the literal matches the comma style used everywhere else in it.
const options = {
  autoResize: true,
  width: '100%',
  height: '100%',
  nodes: {
    shape: 'box',
    shapeProperties: {
      borderRadius: 2,
    },
    font: {
      face: 'Avenir',
    },
  },
  edges: {
    // Directed edges point at the downstream entity.
    arrows: 'to',
    color: {
      color: '#D3D3D3',
    },
  },
  layout: {
    // Left-to-right hierarchy so lineage reads source -> destination.
    hierarchical: {
      levelSeparation: 250,
      nodeSpacing: 150,
      blockShifting: true,
      edgeMinimization: true,
      direction: 'LR',
      parentCentralization: false,
      sortMethod: 'directed',
    },
  },
  interaction: {
    hover: true,
    tooltipDelay: 50,
    navigationButtons: true,
    zoomView: false,
  },
  // Physics off: positions come entirely from the hierarchical layout.
  physics: false,
  groups: {
    selected: {
      nodes: {
        color: '#7777FF',
      },
    },
    table: {
      color: '#D3D3D3',
    },
  },
};
52 |
// Wrapper around react-graph-vis that renders the lineage graph with the
// module-level `options` and keeps a handle to the vis network instance.
export default class Network extends React.Component {
  constructor(props) {
    super(props);

    // Populated once the underlying vis network instance is created.
    this.state = {
      network: undefined,
    };
  }

  render() {
    const { graph } = this.props;

    // NOTE(review): the <Graph .../> JSX appears stripped by text
    // extraction; the surviving fragment suggests a
    // getNetwork={network => this.setState({ network })} callback prop.
    // Recover the original markup from version control before editing.
    return (

        this.setState({ network })}
      />

    );
  }
}
76 |
--------------------------------------------------------------------------------
/frontend/src/components/SearchBar.jsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import { connect } from 'react-redux';
3 | import MaterialIcon from '@material/react-material-icon';
4 | import {
5 | searchEntities,
6 | setSearchTerm,
7 | insertSuggestions,
8 | } from 'Actions/SearchActions';
9 |
// Controlled search input wired to the search Redux slice via connect().
export class SearchBar extends React.Component {
  // Clears the query text and any currently shown suggestions.
  clearInput = () => {
    this.props.setSearchTerm('');
    this.props.insertSuggestions([]);
  };

  // Dispatches a search for the current input value on each change event.
  handleChange = event => {
    const term = event.target.value;
    this.props.onSearchChange(term);
  };

  render() {
    // NOTE(review): the input / MaterialIcon JSX appears stripped by
    // text extraction; recover the markup from version control.
    return (

    );
  }
}
43 |
// Expose the current query string from the immutable search slice.
/* istanbul ignore next */ const mapStateToProps = ({ search }) => ({
  searchTerm: search.get('searchTerm'),
});

// Wire the search thunk and the two plain action creators into props.
/* istanbul ignore next */ const mapDispatchToProps = dispatch => ({
  onSearchChange: searchTerm => dispatch(searchEntities(searchTerm)),
  setSearchTerm: term => dispatch(setSearchTerm(term)),
  insertSuggestions: suggestions => dispatch(insertSuggestions(suggestions)),
});

// exporting the connected component for the app
export default connect(
  mapStateToProps,
  mapDispatchToProps,
)(SearchBar);
59 |
--------------------------------------------------------------------------------
/frontend/src/components/SearchTable.jsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import { connect } from 'react-redux';
3 | import ReactTable from 'react-table';
4 | import { withRouter } from 'react-router';
5 | import 'react-table/react-table.css';
6 |
// react-table column definitions for the search-suggestion rows.
const columns = [
  {
    Header: 'Schema',
    accessor: 'schema',
    width: 80,
  },
  {
    Header: 'Name',
    accessor: 'name',
  },
  {
    Header: 'Type',
    id: 'type',
    // Normalize the entity type to lowercase; the stylesheet's
    // text-transform re-capitalizes it for display (see search.scss).
    accessor: /* istanbul ignore next */ d => d.type.toLowerCase(),
    width: 80,
  },
  {
    Header: 'Platform',
    accessor: 'platform',
    width: 80,
  },
];
29 |
// Builds the dashboard route for a clicked suggestion row.
/* istanbul ignore next */ const getLink = suggestion =>
  `/dashboard/table/${suggestion.name}/`;
33 |
// Suggestion results table; clicking a row navigates to that entity's
// dashboard route via withRouter's history.
// NOTE(review): the <ReactTable .../> JSX appears stripped by text
// extraction; the surviving fragment is the getTrProps row-click handler.
// Recover the original markup from version control before editing.
export const SearchTable = props => (

  ({
    onClick: /* istanbul ignore next */ e =>
      props.history.push(`${getLink(rowInfo.original)}`),
  })}
  />

);

// Rows come straight from the search slice's suggestion list.
/* istanbul ignore next */ const mapStateToProps = ({ search }) => ({
  suggestions: search.get('suggestions'),
});

// No dispatched actions needed; navigation goes through history instead.
/* istanbul ignore next */ const mapDispatchToProps = dispatch => ({});

export default connect(
  mapStateToProps,
  mapDispatchToProps,
)(withRouter(SearchTable));
63 |
--------------------------------------------------------------------------------
/frontend/src/components/SelectDepth.jsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 |
// Dropdown selecting the lineage traversal depth; changing it triggers a
// re-fetch for the current entity at the new depth.
class SelectDepth extends React.Component {
  constructor(props){
    super(props);
    // Selected depth; '1' mirrors the lineage reducer's default depth.
    this.state = { value: '1'};
  }

  // Record the choice locally and re-fetch lineage at the chosen depth.
  handleChange = event => {
    this.setState({value: event.target.value});
    const depth = event.target.value;
    this.props.getLineageData(this.props.entityName, this.props.entityType, depth);
  }

  render() {
    // NOTE(review): the <select>/<option> JSX appears stripped by text
    // extraction; recover the original markup from version control.
    return (

    )
  }
};

export default SelectDepth;
32 |
--------------------------------------------------------------------------------
/frontend/src/constants/LineageActionTypes.js:
--------------------------------------------------------------------------------
// Action type identifiers for the lineage Redux slice.
// Frozen so the shared constant table cannot be mutated at runtime.
const LineageActionTypes = Object.freeze({
  SET_LINEAGE_LOADING: 'SET_LINEAGE_LOADING',
  INSERT_LINEAGE: 'INSERT_LINEAGE',
  SET_DEPTH: 'SET_DEPTH',
});

export default LineageActionTypes;
8 |
--------------------------------------------------------------------------------
/frontend/src/constants/LoadingState.js:
--------------------------------------------------------------------------------
// Lifecycle states for async fetches (used by the lineage slice).
// Frozen so the shared constant table cannot be mutated at runtime.
const LoadingState = Object.freeze({
  NOT_LOADED: 'NOT_LOADED',
  LOADING: 'LOADING',
  FINISHED_SUCCESS: 'FINISHED_SUCCESS',
  FINISHED_FAILURE: 'FINISHED_FAILURE',
});

export default LoadingState;
9 |
--------------------------------------------------------------------------------
/frontend/src/constants/SearchActionTypes.js:
--------------------------------------------------------------------------------
// Action type identifiers for the search Redux slice.
// Frozen so the shared constant table cannot be mutated at runtime.
const SearchActionTypes = Object.freeze({
  SET_SEARCH_TERM: 'SET_SEARCH_TERM',
  INSERT_SUGGESTIONS: 'INSERT_SUGGESTIONS',
});

export default SearchActionTypes;
7 |
--------------------------------------------------------------------------------
/frontend/src/containers/App.jsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import { hot } from 'react-hot-loader';
3 | import { HashRouter, Route, Switch } from 'react-router-dom';
4 | import Search from './Search';
5 | import Dashboard from './Dashboard';
6 |
// Root component: hash-based routing between the Search landing page and
// the entity Dashboard, wrapped for react-hot-loader.
// NOTE(review): the <HashRouter>/<Switch>/<Route> JSX appears stripped by
// text extraction; recover the route definitions from version control.
const App = () => (

);

export default hot(module)(App);
17 |
--------------------------------------------------------------------------------
/frontend/src/containers/Dashboard.jsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import Lineage from 'Components/Lineage';
3 | import { getLineageData} from '../actions/LineageActions';
4 | import { connect } from 'react-redux';
5 | import SelectDepth from '../components/SelectDepth';
6 |
// Dashboard page: fetches and displays lineage for the routed entity.
export class Dashboard extends React.Component {
  // Kick off the initial lineage fetch for the routed entity.
  // NOTE(review): componentWillMount is deprecated since React 16.3;
  // componentDidMount is the recommended home for this fetch.
  /* istanbul ignore next */ componentWillMount() {
    this.props.getLineageData(
      this.props.match.params.entityName,
      this.props.match.params.entityType,
      this.props.depth
    );
  }

  // Only re-render (and re-fetch) when the routed entity name changes.
  // NOTE(review): dispatching a fetch inside shouldComponentUpdate is a
  // side effect in a lifecycle expected to be pure — consider
  // componentDidUpdate instead.
  /* istanbul ignore next */ shouldComponentUpdate(nextProps, nextState) {
    if (
      nextProps.match.params.entityName !== this.props.match.params.entityName
    ) {
      this.props.getLineageData(
        nextProps.match.params.entityName,
        nextProps.match.params.entityType,
        nextProps.depth,
      );
      return true;
    }
    return false;
  }

  render() {
    // NOTE(review): JSX markup appears stripped by text extraction; this
    // presumably rendered a heading, <SelectDepth ...>, and <Lineage />.
    return (

        Lineage for {this.props.match.params.entityName}

    );
  }
}
40 |
// Selects the lineage-slice fields the Dashboard reads from the store.
/* istanbul ignore next */ const mapStateToProps = ({ lineage }) => {
  return {
    loadingStatus: lineage.get('loadingStatus'),
    graph: lineage.get('graph'),
    depth: lineage.get('depth'),
  };
};

// Wires the lineage-fetch thunk into the component's props.
/* istanbul ignore next */ const mapDispatchToProps = dispatch => {
  return {
    getLineageData: (name, type, depth) =>
      dispatch(getLineageData(name, type, depth)),
  };
};

export default connect(
  mapStateToProps,
  mapDispatchToProps,
)(Dashboard);
55 |
56 |
--------------------------------------------------------------------------------
/frontend/src/containers/Search.jsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import SearchBar from 'Components/SearchBar';
3 | import SearchTable from 'Components/SearchTable';
4 |
// Landing page: brand heading, the search bar, and the results table.
// NOTE(review): JSX markup appears stripped by text extraction — only
// the "superglue" brand text survives; this presumably composed
// <SearchBar /> and <SearchTable />. Recover from version control.
const Search = () => (

superglue

);

export default Search;
16 |
--------------------------------------------------------------------------------
/frontend/src/enhancers/monitorReducer.js:
--------------------------------------------------------------------------------
// Rounds a millisecond duration to two decimal places for log output.
const round = value => Math.round(value * 100) / 100;

/**
 * Store enhancer that wraps the root reducer and logs how long each
 * dispatch takes to reduce (milliseconds, two-decimal precision).
 */
const monitorReducerEnhancer = createStore => (
  reducer,
  initialState,
  enhancer,
) => {
  const timedReducer = (state, action) => {
    const startedAt = performance.now();
    const nextState = reducer(state, action);
    const elapsed = round(performance.now() - startedAt);
    console.log('reducer process time:', elapsed);
    return nextState;
  };

  return createStore(timedReducer, initialState, enhancer);
};

export default monitorReducerEnhancer;
23 |
--------------------------------------------------------------------------------
/frontend/src/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
9 | QD SuperGlue
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
--------------------------------------------------------------------------------
/frontend/src/index.jsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import { render } from 'react-dom';
3 | import { Provider } from 'react-redux';
4 | import './styles/main.scss';
5 | import App from 'Containers/App';
6 | import configureStore from './store/configureStore';
7 |
const store = configureStore();

// Mounts the app, wrapped in the Redux provider, at the #root element.
// NOTE(review): the <Provider store={store}><App /></Provider> JSX
// appears stripped by text extraction; recover from version control.
const renderApp = () =>
  render(

,
    document.getElementById('root'),
  );

// Re-render in place when App is hot-replaced during development.
if (process.env.NODE_ENV !== 'production' && module.hot) {
  module.hot.accept('./containers/App.jsx', renderApp);
}

renderApp();
23 |
--------------------------------------------------------------------------------
/frontend/src/middleware/logger.js:
--------------------------------------------------------------------------------
// Redux middleware: logs each dispatched action and the resulting state,
// grouped in the console under the action's type.
const logger = store => next => action => {
  const { type } = action;
  console.group(type);
  console.info('dispatching', action);
  const outcome = next(action);
  console.log('next state', store.getState());
  console.groupEnd();
  return outcome;
};

export default logger;
11 |
--------------------------------------------------------------------------------
/frontend/src/reducers/LineageReducer.js:
--------------------------------------------------------------------------------
1 | import { Map } from 'immutable';
2 | import LoadingState from 'Constants/LoadingState';
3 | import LineageActionTypes from 'Constants/LineageActionTypes';
4 |
// Initial lineage state: empty graph, nothing loaded yet, and a depth of
// '1' (matches the default option in the SelectDepth component).
const initialState = Map({
  graph: { nodes: [], edges: [] },
  loadingStatus: LoadingState.NOT_LOADED,
  depth: '1',
});

/**
 * Reducer for the lineage slice: graph data, fetch status, and the
 * selected traversal depth.
 * Fixed: added the missing statement terminator on the SET_DEPTH case and
 * normalized the depth literal's quoting to the file's single-quote style.
 */
const LineageReducer = (state = initialState, action) => {
  switch (action.type) {
    case LineageActionTypes.INSERT_LINEAGE:
      return state.set('graph', action.data);
    case LineageActionTypes.SET_LINEAGE_LOADING:
      return state.set('loadingStatus', action.state);
    case LineageActionTypes.SET_DEPTH:
      return state.set('depth', action.depth);
    default:
      return state;
  }
};

export default LineageReducer;
25 |
--------------------------------------------------------------------------------
/frontend/src/reducers/SearchReducer.js:
--------------------------------------------------------------------------------
1 | import { Map } from 'immutable';
2 | import SearchActionTypes from 'Constants/SearchActionTypes';
3 |
// Search slice: the current query string plus its matching suggestions.
const initialState = Map({
  searchTerm: '',
  suggestions: [],
});

// Handles term updates and suggestion inserts from SearchActions.
const SearchReducer = (state = initialState, action) => {
  if (action.type === SearchActionTypes.SET_SEARCH_TERM) {
    return state.set('searchTerm', action.searchTerm);
  }
  if (action.type === SearchActionTypes.INSERT_SUGGESTIONS) {
    return state.set('suggestions', action.suggestions);
  }
  return state;
};

export default SearchReducer;
21 |
--------------------------------------------------------------------------------
/frontend/src/reducers/index.js:
--------------------------------------------------------------------------------
1 | import { combineReducers } from 'redux';
2 | import LineageReducer from './LineageReducer';
3 | import SearchReducer from './SearchReducer';
4 |
5 | const rootReducer = combineReducers({
6 | lineage: LineageReducer,
7 | search: SearchReducer,
8 | });
9 |
10 | export default rootReducer;
11 |
--------------------------------------------------------------------------------
/frontend/src/store/configureStore.js:
--------------------------------------------------------------------------------
1 | import { createStore, compose, applyMiddleware } from 'redux';
2 | import thunkMiddleware from 'redux-thunk';
3 |
4 | import monitorReducersEnhancer from '../enhancers/monitorReducer';
5 | import loggerMiddleware from '../middleware/logger';
6 | import rootReducer from '../reducers/index';
7 |
// Builds the Redux store: logger + thunk middleware, the reducer-timing
// monitor enhancer, and hot reducer replacement in development.
export default function configureStore(preloadedState) {
  // applyMiddleware composes these left-to-right: logger runs first.
  const middlewares = [loggerMiddleware, thunkMiddleware];
  const middlewareEnhancer = applyMiddleware(...middlewares);

  // Enhancer order is significant: middleware wraps the monitored store.
  const enhancers = [middlewareEnhancer, monitorReducersEnhancer];
  const composedEnhancers = compose(...enhancers);

  const store = createStore(rootReducer, preloadedState, composedEnhancers);

  // Swap in updated reducers during development without losing state.
  if (process.env.NODE_ENV !== 'production' && module.hot) {
    module.hot.accept('../reducers', () => store.replaceReducer(rootReducer));
  }

  return store;
}
23 |
--------------------------------------------------------------------------------
/frontend/src/styles/app.scss:
--------------------------------------------------------------------------------
1 | #root {
2 | position: relative;
3 | background: $gray08;
4 | padding: 2em;
5 | box-sizing: border-box;
6 | height: 100%;
7 | }
8 |
--------------------------------------------------------------------------------
/frontend/src/styles/dashboard.scss:
--------------------------------------------------------------------------------
1 | .dashboardContainer {
2 | @include flex(column);
3 | @include card;
4 | height: 100%;
5 |
6 | h1 {
7 | font-size: 1rem;
8 | }
9 | }
10 |
11 | .networkContainer {
12 | height: 100%;
13 | }
14 |
--------------------------------------------------------------------------------
/frontend/src/styles/lineage.scss:
--------------------------------------------------------------------------------
1 | .loadingImage {
2 | width: 100px;
3 | margin: auto;
4 | animation-name: bounce;
5 | animation-duration: 2s;
6 | animation-iteration-count: infinite;
7 | }
8 |
9 | .notFound {
10 | height: 100%;
11 | margin: auto;
12 | @include flex(column);
13 | justify-content: center;
14 |
15 | img {
16 | display: block;
17 | width: 50%;
18 | margin: 0 auto;
19 | }
20 | p {
21 | margin-top: 1em;
22 | text-align: center;
23 | }
24 | }
25 |
26 | @keyframes bounce {
27 | 0% { transform: translateY(15px); opacity: 0.5}
28 | 50% { transform: translateY(-15px); opacity: 1}
29 | 100% { transform: translateY(15px); opacity: 0.5}
30 | }
31 |
32 |
--------------------------------------------------------------------------------
/frontend/src/styles/main.scss:
--------------------------------------------------------------------------------
1 | @import "./global";
2 | @import "./variables";
3 | @import "./app";
4 | @import "./search";
5 | @import "./dashboard";
6 |
7 | @import "./network";
8 | @import "./lineage";
9 |
--------------------------------------------------------------------------------
/frontend/src/styles/network.scss:
--------------------------------------------------------------------------------
1 | .networkContainer {
2 |
3 | .vis-network {
4 | overflow: visible;
5 | }
6 |
7 | .vis-tooltip {
8 | background: $gray05;
9 | border-radius: 3px;
10 | color: $white;
11 | font-size: 0.4em;
12 | padding: 0.625em;
13 | position: absolute;
14 | transform: translate(-45%, 65px);
15 |
16 | &::after {
17 | border-color: transparent transparent $gray05;
18 | border-style: solid;
19 | border-width: 8px;
20 | content: '';
21 | height: 0;
22 | left: 50%;
23 | margin-left: -8px;
24 | position: absolute;
25 | top: -15px;
26 | width: 0;
27 | }
28 | }
29 |
30 | /*
31 | * navigation buttons on the graph
32 | */
33 | .vis-navigation {
34 | position: fixed;
35 | bottom: 3em;
36 | right: 3em;
37 | background: white;
38 | box-shadow: 0px 1px 4px rgba(0, 0, 0, 0.3);
39 | border-radius: 2px;
40 | }
41 |
42 | .vis-button {
43 | padding: 10px 15px;
44 | background-image: none !important;
45 | display: inline-block;
46 | font-weight: 700;
47 | border-right: 1px solid #eee;
48 | @include transition(background 0.8s);
49 | background-position: center;
50 | background-color: white;
51 |
52 | &:hover {
53 | box-shadow: none !important;
54 | background: $gray07 radial-gradient(circle, transparent 1%, $gray07 1%)
55 | center/1500000%;
56 | }
57 |
58 | &:after {
59 | font-size: 1em;
60 | color: $gray02;
61 | }
62 |
63 | &:active {
64 | background-color: $gray08;
65 | background-size: 100%;
66 | @include transition(background 0s);
67 | }
68 | }
69 |
70 | .vis-up,
71 | .vis-down,
72 | .vis-left,
73 | .vis-right {
74 | display: none;
75 | }
76 |
77 | .vis-zoomIn:after {
78 | content: '+';
79 | }
80 |
81 | .vis-zoomOut:after {
82 | content: '−';
83 | }
84 |
85 | .vis-zoomExtends:after {
86 | content: '⤢';
87 | }
88 | }
89 |
--------------------------------------------------------------------------------
/frontend/src/styles/search.scss:
--------------------------------------------------------------------------------
1 | .brand {
2 | text-align: center;
3 | color: $green01;
4 | font-size: 3rem;
5 | margin: 0;
6 | letter-spacing: -0.15rem;
7 | margin-right: 0.5em;
8 | }
9 |
10 | .searchContainer {
11 | width: 100%;
12 | max-width: 1300px;
13 | margin: auto;
14 | }
15 |
16 | .searchTopContainer {
17 | @include flex(row);
18 | margin-bottom: 1em;
19 | }
20 |
21 | .searchBarContainer {
22 | @include flex(row);
23 | background: $white;
24 | width: 100%;
25 | padding: 0 1em;
26 | border-radius: 30px;
27 | @include boxShadow(0, 2px, 4px, 0, rgba(0, 0, 0, .1));
28 |
29 | .material-icons {
30 | color: $gray04;
31 | line-height: 2em;
32 | margin-top: auto;
33 | margin-bottom: auto;
34 | }
35 |
36 | .search {
37 | margin-right: 0.3em;
38 | cursor: default;
39 | }
40 |
41 | .close {
42 | cursor: pointer;
43 | }
44 |
45 | .searchBar {
46 | font-size: 0.8rem;
47 | border: none;
48 | width: 100%;
49 | margin: auto;
50 | font-weight: 500;
51 | color: $gray01;
52 | @include inputPlaceholder {
53 | color: $gray04;
54 | }
55 | }
56 | }
57 |
58 | .searchTableContainer {
59 | font-size: 0.7rem;
60 | @include card;
61 | }
62 |
63 | .ReactTable {
64 | text-align: left;
65 | border: none !important;
66 | font-weight: 500;
67 | color: $gray03;
68 |
69 | .rt-thead {
70 | border-bottom: 2px solid $gray08;
71 | padding: 1em 0;
72 | box-shadow: none !important;
73 |
74 | .rt-th {
75 | border: none !important;
76 | text-align: left;
77 | }
78 | }
79 |
80 | .rt-tbody {
81 | .rt-td {
82 | border: none !important;
83 |
84 | // entity name column
85 | &:nth-child(2) {
86 | font-weight: 700;
87 | color: $green02;
88 | }
89 | // entity type column
90 | &:nth-child(3) {
91 | text-transform: capitalize;
92 | }
93 | }
94 |
95 | .rt-tr-group {
96 | padding: 1em 0;
97 | cursor: pointer;
98 | }
99 | }
100 |
101 | .-pagination {
102 | margin-top: 1em;
103 | border-top: none !important;
104 | box-shadow: none !important;
105 |
106 | .-btn {
107 | font-weight: 500;
108 | padding: 1em !important;
109 | border-radius: 15px !important;
110 | background: $green03 !important;
111 | color: $white !important;
112 | @include transition(background 0.5s ease-in-out !important);
113 |
114 | &:hover {
115 | background: $green02 !important;
116 | }
117 | }
118 | }
119 | }
120 |
--------------------------------------------------------------------------------
/frontend/src/styles/variables.scss:
--------------------------------------------------------------------------------
1 | // COLORS
2 | $green01: #108000;
3 | $green02: #2ca01c;
4 | $green03: #53b700;
5 | $gray01: #393a3d;
6 | $gray02: #6b6c72;
7 | $gray03: #8d9096;
8 | $gray04: #babec5;
9 | $gray05: #d4d7dc;
10 | $gray06: #e3e5e8;
11 | $gray07: #eceef1;
12 | $gray08: #f4f5f8;
13 | $teal03: #00c1bf;
14 | $white: #fff;
15 | $black: #000;
16 |
17 | @mixin flex($direction) {
18 | display: -webkit-box;
19 | display: -moz-box;
20 | display: -ms-flexbox;
21 | display: -webkit-flex;
22 | display: flex;
23 | @if $direction == column
24 | {
25 | -webkit-flex-direction:vertical;
26 | -moz-flex-direction:vertical;
27 | -ms-flex-direction:column;
28 | -webkit-flex-direction:column;
29 | flex-direction:column;
30 | }
31 | @else
32 | {
33 | -webkit-flex-direction:horizontal;
34 | -moz-flex-direction:horizontal;
35 | -ms-flex-direction:row;
36 | -webkit-flex-direction:row;
37 | flex-direction:row;
38 | }
39 | }
40 |
41 | @mixin autoMargins {
42 | margin: {
43 | left: auto;
44 | right: auto;
45 | }
46 | }
47 |
48 | @mixin truncate($truncation-boundary) {
49 | max-width: $truncation-boundary;
50 | white-space: nowrap;
51 | overflow: hidden;
52 | text-overflow: ellipsis;
53 | }
54 |
55 | @mixin opacity($opacity) {
56 | opacity: $opacity;
57 | filter: alpha(opacity=($opacity * 100));
58 | }
59 |
60 | @mixin boxShadow( $h: 10px , $v: 10px , $b: 0px , $s: 0px , $c: #000000 ) {
61 | -webkit-box-shadow: $h $v $b $s $c;
62 | -moz-box-shadow: $h $v $b $s $c;
63 | box-shadow: $h $v $b $s $c;
64 | }
65 |
66 | @mixin verticalAlign {
67 | position: relative;
68 | top: 50%;
69 | -webkit-transform: translateY(-50%);
70 | -ms-transform: translateY(-50%);
71 | transform: translateY(-50%);
72 | }
73 |
74 | @mixin transition($args...) {
75 | -webkit-transition: $args;
76 | -moz-transition: $args;
77 | -ms-transition: $args;
78 | -o-transition: $args;
79 | transition: $args;
80 | }
81 |
// Applies @content to an input's placeholder text across vendor-prefixed
// pseudo-selectors, plus the standard ::placeholder. Works both nested
// inside a selector (attaches to the parent `&`) and at the root.
// Fixed: the nested branch emitted the invalid `{$selector}` — it needs
// the parent-selector interpolation `&#{$selector}` to compile.
@mixin inputPlaceholder() {
  $selector: '';
  $prefixes: (
    moz: "::-moz",
    webkit: "::-webkit",
    ie: ":-ms"
  );
  @each $prop, $value in $prefixes {
    @if $prop != "moz" {
      $selector: #{$value}-input-placeholder;
    } @else {
      $selector: #{$value}-placeholder;
    }
    @if & {
      &#{$selector} {
        @content;
      }
    } @else {
      #{$selector} {
        @content;
      }
    }
  }
  &::placeholder {
    @content;
  }
}
109 |
// NOTE(review): the two @include blocks below (red placeholder text for
// every input, padded textarea placeholders) look like leftover demo
// usage from the article this mixin came from — they apply globally;
// confirm they are intentional before keeping them.
@include inputPlaceholder() {
  color: red;
  font-weight: 300;
  padding-top: 5px;
}
textarea {
  @include inputPlaceholder() {
    padding: 15px;
  }
}
120 |
121 | @mixin card {
122 | border-radius: 10px;
123 | @include boxShadow(0, 2px, 4px, 0, rgba(0, 0, 0, .1));
124 | background: white;
125 | padding: 1.5em;
126 | }
127 |
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | org.gradle.parallel=true
2 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/intuit/superglue/5c5e03846cfdfa72663c4c689c4d279480a73d4d/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Mon Jan 28 09:32:07 PST 2019
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-5.1.1-all.zip
7 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | set DIRNAME=%~dp0
12 | if "%DIRNAME%" == "" set DIRNAME=.
13 | set APP_BASE_NAME=%~n0
14 | set APP_HOME=%DIRNAME%
15 |
16 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
17 | set DEFAULT_JVM_OPTS="-Xmx64m"
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 |
53 | :win9xME_args
54 | @rem Slurp the command line arguments.
55 | set CMD_LINE_ARGS=
56 | set _SKIP=2
57 |
58 | :win9xME_args_slurp
59 | if "x%~1" == "x" goto execute
60 |
61 | set CMD_LINE_ARGS=%*
62 |
63 | :execute
64 | @rem Setup the command line
65 |
66 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
67 |
68 | @rem Execute Gradle
69 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
70 |
71 | :end
72 | @rem End local scope for the variables with windows NT shell
73 | if "%ERRORLEVEL%"=="0" goto mainEnd
74 |
75 | :fail
76 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
77 | rem the _cmd.exe /c_ return code!
78 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
79 | exit /b 1
80 |
81 | :mainEnd
82 | if "%OS%"=="Windows_NT" endlocal
83 |
84 | :omega
85 |
--------------------------------------------------------------------------------
/parser/.gitignore:
--------------------------------------------------------------------------------
1 | .idea/
2 | build/
3 | out/
4 | .gradle/
5 | !gradle-wrapper.jar
6 |
7 | *.class
8 | *.log
9 | **/*.sc
10 |
11 | # sbt specific
12 | .cache
13 | .history
14 | .lib/
15 | dist/*
16 | target/
17 | lib_managed/
18 | src_managed/
19 | project/boot/
20 | project/plugins/project/
21 |
22 | # Scala-IDE specific
23 | .scala_dependencies
24 | .worksheet
25 |
26 | *.swp
27 |
--------------------------------------------------------------------------------
/parser/build.gradle:
--------------------------------------------------------------------------------
plugins {
    // Apply the scala plugin to add support for Scala
    id "scala"
    // Scala code-coverage instrumentation and reporting (scoverage)
    id "org.scoverage" version "2.5.0"
    // Runs ScalaTest suites through the Gradle `test` task
    id "com.github.maiflai.scalatest" version "0.23"
}

repositories {
    // Use jcenter for resolving your dependencies.
    // You can declare any Maven/Ivy/file repository here.
    // NOTE(review): JCenter has been read-only since 2021; consider dropping
    // jcenter() and relying on mavenCentral() alone — verify all artifacts resolve.
    jcenter()
    mavenCentral()
}

// Scala binary version suffix appended to all Scala artifact names below
def scala_minor_version = "2.12"

dependencies {

    // Persistence layer shared with the rest of the project
    implementation project(":dao")

    // Automatic resource management
    implementation "io.tmos:arm4s_${scala_minor_version}:1.1.0"

    // Json serialization
    implementation "com.typesafe.play:play-json_${scala_minor_version}:2.6.10"

    // Stream editing
    implementation "com.github.rwitzel.streamflyer:streamflyer-core:1.2.0"

    // Slick DB dependencies
    implementation "com.typesafe.slick:slick_${scala_minor_version}:3.3.0"
    implementation "com.typesafe.slick:slick-hikaricp_${scala_minor_version}:3.3.0"
    implementation "mysql:mysql-connector-java:8.0.15"

    // Use Calcite parser
    implementation 'org.apache.calcite:calcite-core:1.27.0'
    implementation 'org.apache.calcite:calcite-server:1.27.0'

    // In-memory database used by tests
    implementation "com.h2database:h2:1.4.199"

    // Compiler plugin + runtime needed by scoverage instrumentation
    scoverage "org.scoverage:scalac-scoverage-plugin_${scala_minor_version}:1.4.0-M5",
            "org.scoverage:scalac-scoverage-runtime_${scala_minor_version}:1.4.0-M5"
}

// Fail the build when branch coverage drops below 75%
checkScoverage {
    minimumRate = 0.75
    coverageType = "Branch"
}
49 |
--------------------------------------------------------------------------------
/parser/src/main/resources/reference.conf:
--------------------------------------------------------------------------------
# Default (reference) configuration for the parsing pipeline.
# Applications override these keys in their own application.conf.
com.intuit.superglue.pipeline {
  parsers {
    # Input kinds routed to the SQL parser
    sql.input-kinds = ["sql"]
  }
  outputs {
    # Pretty-printed JSON summary on stdout (off by default)
    console.enabled=false
    database {
      enabled = false
      # Number of rows committed per transaction
      batch-size = 50
      timeout = 1000 # Seconds
    }
    reporter {
      enabled=false
      # When true, only scripts/statements with errors are reported
      errors-only = false
    }
  }
}
--------------------------------------------------------------------------------
/parser/src/main/scala/com/intuit/superglue/pipeline/Metadata.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline
2 |
3 | import java.time.LocalDateTime
4 |
/** Data types describing the results of parsing scripts and statements. */
object Metadata {

  // Placeholder statement type used when a parser could not classify a statement.
  val UNKNOWN_STATEMENT_TYPE = "UNKNOWN"

  /**
   * Metadata collected from parsing one whole script.
   *
   * @param scriptName Name of the script (e.g. its file path).
   * @param scriptSource Where the script came from (e.g. "FILE").
   * @param scriptKind Input kind (e.g. "sql") used to select a parser.
   * @param scriptDialect Optional dialect hint forwarded to the parser.
   * @param scriptParser Name of the parser that processed this script.
   * @param scriptParseStartTime When parsing of the script began.
   * @param scriptParseEndTime When parsing of the script finished.
   * @param statementsMetadata Per-statement results, in script order.
   * @param errors Script-level errors encountered while parsing.
   */
  case class ScriptMetadata(
    scriptName: String,
    scriptSource: String,
    scriptKind: String,
    scriptDialect: Option[String],
    scriptParser: String,
    scriptParseStartTime: LocalDateTime,
    scriptParseEndTime: LocalDateTime,
    statementsMetadata: List[StatementMetadata],
    errors: List[Throwable],
  )

  /**
   * Metadata for one statement within a script.
   *
   * @param statementText The raw text of the statement.
   * @param statementIndex Zero-based position of the statement in its script.
   * @param statementParseStartTime When parsing of this statement began.
   * @param statementParseEndTime When parsing of this statement finished.
   * @param statementMetadataFragment Parser-produced details (type, inputs, outputs, errors).
   */
  case class StatementMetadata(
    statementText: String,
    statementIndex: Int,
    statementParseStartTime: LocalDateTime,
    statementParseEndTime: LocalDateTime,
    statementMetadataFragment: StatementMetadataFragment,
  )

  /**
   * The parser-specific portion of a statement's metadata.
   *
   * @param statementParser Name of the parser that produced this fragment.
   * @param statementType Statement classification (e.g. "INSERT"), or [[UNKNOWN_STATEMENT_TYPE]].
   * @param inputObjects Names of objects the statement reads from.
   * @param outputObjects Names of objects the statement writes to.
   * @param errors Errors encountered while parsing this statement.
   */
  case class StatementMetadataFragment(
    statementParser: String,
    statementType: String,
    inputObjects: List[String],
    outputObjects: List[String],
    errors: List[Throwable],
  )

  object StatementMetadataFragment {
    /** Convenience constructor for a failed parse: unknown type, no objects, only errors. */
    def apply(statementParser: String, errors: List[Throwable]): StatementMetadataFragment = {
      StatementMetadataFragment(
        statementParser,
        UNKNOWN_STATEMENT_TYPE,
        List.empty[String],
        List.empty[String],
        errors,
      )
    }
  }
}
49 |
--------------------------------------------------------------------------------
/parser/src/main/scala/com/intuit/superglue/pipeline/ParsingPipeline.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline
2 |
3 | import com.intuit.superglue.pipeline.Metadata._
4 | import com.intuit.superglue.pipeline.parsers.{ScriptParser, SqlScriptParser}
5 | import com.intuit.superglue.pipeline.producers.ScriptInput
6 | import com.typesafe.config.{Config => TypesafeConfig}
7 | import com.typesafe.scalalogging.Logger
8 |
9 | import scala.concurrent.Future
10 |
/**
 * Routes each script input from a [[Source]] to a parser that accepts its
 * kind, producing a stream of asynchronous parse results.
 */
class ParsingPipeline(
  source: Source,
  customParsers: (String, ScriptParser)*,
)(implicit rootConfig: TypesafeConfig) {
  private val logger = Logger[ParsingPipeline]

  // Built-in parsers, with custom ones layered on top (custom entries win on key clash).
  private lazy val parsers: Map[String, ScriptParser] =
    Map[String, ScriptParser]("sql" -> new SqlScriptParser()) ++ customParsers.toMap

  /**
   * Transforms the stream of [[ScriptInput]]s from the [[Source]] into a stream of
   * [[ScriptMetadata]]s by passing each input to an accepting [[ScriptParser]].
   *
   * Inputs whose kind no parser accepts are dropped (with a warning).
   *
   * @return The stream of processed [[ScriptMetadata]] objects.
   */
  def stream(): Iterator[Future[ScriptMetadata]] =
    for {
      input <- source.stream()
      parser <- checkParserKind(input.name, input.kind).iterator
    } yield parser.parse(input)

  /**
   * Checks if there exists a parser that can accept the given input kind.
   *
   * @param name The name of the input to check.
   * @param kind The kind of the input to check.
   * @return Some(parser) with a parser accepting "kind", or None if one was not found.
   */
  private def checkParserKind(name: String, kind: String): Option[ScriptParser] =
    parsers.values.find(_.acceptsKind(kind)) match {
      case found @ Some(_) => found
      case None =>
        logger.warn(s"Skipping input kind with no parser: '$kind' ($name)")
        None
    }
}
51 |
--------------------------------------------------------------------------------
/parser/src/main/scala/com/intuit/superglue/pipeline/Sink.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline
2 |
3 | import java.io.PrintStream
4 | import java.time.LocalDateTime
5 |
6 | import Metadata.ScriptMetadata
7 | import com.intuit.superglue.pipeline.consumers.OutputConsumer.{EndOfStream, Message, StartOfStream}
8 | import com.intuit.superglue.pipeline.consumers.{ConsoleConsumer, DatabaseConsumer, OutputConsumer}
9 | import com.typesafe.config.{Config => TypesafeConfig}
10 |
11 | import scala.concurrent.Future
12 |
/**
 * Terminal stage of the pipeline: fans each parse result out to every
 * configured [[OutputConsumer]] (console, database, plus any custom ones).
 */
class Sink(
  pipeline: ParsingPipeline,
  customConsumers: (String, OutputConsumer[Future[ScriptMetadata]])*,
)(implicit out: PrintStream, rootConfig: TypesafeConfig) {
  // Built-in consumers, with custom ones layered on top (custom entries win on key clash).
  private lazy val consumers: Map[String, OutputConsumer[Future[ScriptMetadata]]] = Map(
    "console" -> new ConsoleConsumer(),
    "database" -> new DatabaseConsumer(),
  ) ++ customConsumers.toMap

  /**
   * Sends each [[ScriptMetadata]] object as an event to each [[OutputConsumer]].
   *
   * Outputs are sent as a payload in a [[Message]], and after all outputs
   * have been delivered, an [[EndOfStream]] event is sent so that each
   * consumer can flush messages and release resources.
   *
   * @return The consumers by name, so callers can inspect them after draining.
   */
  def drain(): Map[String, OutputConsumer[Future[ScriptMetadata]]] = {
    val startTime = LocalDateTime.now()
    // Event order: StartOfStream, then one Message per parsed script, then EndOfStream.
    Iterator(
      Iterator(StartOfStream(startTime)),
      pipeline.stream().map(Message(_)),
      Iterator(EndOfStream)
    ).flatten.foreach(metadata => consumers.values.foreach(_.accept(metadata)))
    consumers
  }
}
39 |
--------------------------------------------------------------------------------
/parser/src/main/scala/com/intuit/superglue/pipeline/Source.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline
2 |
3 | import java.nio.file.FileSystem
4 |
5 | import com.intuit.superglue.pipeline.producers.{ScriptFileProvider, ScriptInput, ScriptProvider}
6 | import com.typesafe.config.{Config => TypesafeConfig}
7 |
8 | import scala.collection.JavaConverters._
9 |
10 | /**
11 | * A Source is a helper class that takes a configuration and creates a stream of
12 | * all of the [[ScriptInput]]s from all of the described [[ScriptProvider]]s.
13 | */
class Source(customProviders: ScriptProvider*)
  (implicit rootConfig: TypesafeConfig, fs: FileSystem) {
  // Custom providers (e.g. in tests) take precedence; otherwise providers are
  // built from the "inputs.files" section of the configuration.
  private val providers = if (customProviders.nonEmpty) customProviders else {
    val inputConfig = rootConfig.getConfig("com.intuit.superglue.pipeline.inputs")
    inputConfig.getConfigList("files").asScala
      .map(FileInputConfig(_))
      // Configs that fail to load yield None and are silently dropped here.
      .collect { case Some(fileConfig) => ScriptFileProvider(fileConfig) }
  }

  /** Lazily concatenates the input streams of all providers, in order. */
  def stream(): Iterator[ScriptInput] = providers.iterator.flatMap(_.stream())
}
25 |
--------------------------------------------------------------------------------
/parser/src/main/scala/com/intuit/superglue/pipeline/consumers/ConsoleConsumer.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline.consumers
2 |
3 | import java.io.PrintStream
4 |
5 | import com.intuit.superglue.pipeline.Metadata._
6 | import com.intuit.superglue.pipeline.consumers.ConsoleConsumer.ScriptView
7 | import com.intuit.superglue.pipeline.consumers.OutputConsumer.{EndOfStream, Message, StartOfStream}
8 | import com.typesafe.config.{Config => TypesafeConfig}
9 | import play.api.libs.json.{JsValue, Json, OFormat}
10 |
11 | import scala.concurrent.{Await, Future}
12 | import scala.concurrent.duration._
13 | import scala.concurrent.ExecutionContext.Implicits.global
14 | import scala.language.postfixOps
15 |
/**
 * Consumer that buffers parse results and, at end of stream, prints them all
 * to the given PrintStream as one pretty-printed JSON array.
 */
class ConsoleConsumer(implicit out: PrintStream, rootConfig: TypesafeConfig) extends OutputConsumer[Future[ScriptMetadata]] {
  private val consoleConfig = rootConfig.getConfig("com.intuit.superglue.pipeline.outputs.console")
  // When disabled, accept() is a no-op for every event.
  private val enabled = consoleConfig.getBoolean("enabled")
  // Buffers one future view per received Message until EndOfStream.
  private var printObjects: Seq[Future[Option[ScriptView]]] = List.empty[Future[Option[ScriptView]]]

  /**
   * Handles stream events: buffers Message payloads as [[ScriptView]]s and,
   * on EndOfStream, prints all buffered views together.
   */
  override def accept(event: OutputConsumer.Event[Future[ScriptMetadata]]): Unit = {
    if (!enabled) return
    event match {
      case StartOfStream(_) => // We don't need to print the start time to the console
      case Message(futureMetadata) =>
        printObjects :+= futureMetadata.map(ConsoleConsumer.ScriptView(_))
      case EndOfStream =>
        out.println(Json.prettyPrint(scriptJson(printObjects)))
    }
  }

  // Awaits all buffered views and serializes the defined ones to a JSON array.
  private def scriptJson(scriptViews: Seq[Future[Option[ScriptView]]]): JsValue = {
    val futureViews = Future.sequence(scriptViews)
    val result = futureViews.map { maybeViews =>
      maybeViews.collect { case Some(view) => view }
    }
    // NOTE(review): 60-second wait is hard-coded; consider reading it from config
    // like DatabaseConsumer's "timeout" setting.
    val views = Await.result(result, 60 second)
    Json.toJson(views)
  }
}
41 |
object ConsoleConsumer {

  /** JSON view of one parsed script: its name plus the statements worth reporting. */
  case class ScriptView(
    name: String,
    statements: Seq[StatementView],
  )

  object ScriptView {
    /**
     * Builds a view of a script, keeping only statements that reference at
     * least one input or output object. Returns None when nothing remains.
     *
     * NOTE(review): `filterUnknown` is accepted but never read (same in
     * StatementView.apply) — either wire it into the filtering or remove it.
     */
    def apply(scriptMetadata: ScriptMetadata, filterUnknown: Boolean = true): Option[ScriptView] = {
      val scriptView = ScriptView(
        scriptMetadata.scriptName,
        scriptMetadata.statementsMetadata.map(StatementView(_)).filter { statement =>
          statement.inputObjects.nonEmpty || statement.outputObjects.nonEmpty
        }
      )
      if (scriptView.statements.nonEmpty) Some(scriptView) else None
    }

    // Play-JSON (de)serializer derived from the case class fields.
    implicit val scriptFormat: OFormat[ScriptView] = Json.format[ScriptView]
  }

  /** JSON view of one statement: its type and the objects it reads/writes. */
  case class StatementView(
    `type`: String,
    inputObjects: Seq[String],
    outputObjects: Seq[String],
  )

  object StatementView {
    /** Projects a [[StatementMetadata]] onto the fields shown in console output. */
    def apply(statementMetadata: StatementMetadata, filterUnknown: Boolean = true): StatementView = {
      StatementView(
        statementMetadata.statementMetadataFragment.statementType,
        statementMetadata.statementMetadataFragment.inputObjects,
        statementMetadata.statementMetadataFragment.outputObjects,
      )
    }

    // Play-JSON (de)serializer derived from the case class fields.
    implicit val statementFormat: OFormat[StatementView] = Json.format[StatementView]
  }
}
--------------------------------------------------------------------------------
/parser/src/main/scala/com/intuit/superglue/pipeline/consumers/OutputConsumer.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline.consumers
2 |
3 | import java.time.LocalDateTime
4 |
5 | import com.intuit.superglue.pipeline.consumers.OutputConsumer.Event
6 |
7 | /**
8 | * An OutputConsumer receives the output metadata from parsing statements
9 | * from a script.
10 | *
11 | * An OutputConsumer can receive events of two types: A
12 | * [[OutputConsumer.Message]] which
13 | * contains a payload of type T, or an
14 | * [[OutputConsumer.EndOfStream]],
15 | * which signals that the consumer will not receive any more events.
16 | *
17 | * Implementations may choose to buffer messages and flush them at
18 | * any point. An example usage of this would be to commit 50 rows to
19 | * a database in one transaction, rather than in 50 transactions.
20 | * If a consumer receives the EndOfStream event, it should flush any
21 | * buffered messages immediately, as it will not be called again.
22 | *
23 | * @tparam T The type of payload carried by a Message event.
24 | */
trait OutputConsumer[T] {
  /** Handles one lifecycle event; see the [[OutputConsumer.Event]] subtypes. */
  def accept(event: Event[T]): Unit
}

object OutputConsumer {
  /** Lifecycle events delivered to consumers; covariant in the payload type. */
  sealed trait Event[+T]
  /** Sent once, before any Message, carrying the pipeline start time. */
  case class StartOfStream(startTime: LocalDateTime) extends Event[Nothing]
  /** Carries one payload of parsed output. */
  case class Message[T](payload: T) extends Event[T]
  /** Sent once after the last Message; consumers should flush and release resources. */
  case object EndOfStream extends Event[Nothing]
}
35 |
--------------------------------------------------------------------------------
/parser/src/main/scala/com/intuit/superglue/pipeline/package.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue
2 |
3 | import com.typesafe.config.{Config => TypesafeConfig}
4 | import pureconfig.generic.auto._
5 |
package object pipeline {
  /**
   * Configuration for one file-based input source.
   *
   * @param base Root directory to search for scripts.
   * @param kind Optional input kind (e.g. "sql") applied to discovered files.
   * @param dialect Optional dialect hint forwarded to the parser.
   * @param includes Glob patterns of files to include.
   * @param excludes Glob patterns of files to exclude.
   */
  case class FileInputConfig(
    base: String,
    kind: Option[String] = None,
    dialect: Option[String] = None,
    includes: List[String] = List.empty,
    excludes: List[String] = List.empty,
  )

  object FileInputConfig {
    /** Loads a [[FileInputConfig]] from a Typesafe config block; None on failure. */
    def apply(config: TypesafeConfig): Option[FileInputConfig] =
      pureconfig.loadConfig[FileInputConfig](config).toOption
  }

  object Implicits {
    implicit class StringEtc(s: String) {
      /**
       * A postfix operator for trimming a string to a given length
       * and adding ellipses if the string was too long.
       *
       * {{{
       * import Implicits.StringEtc
       * val string = "INFO: Some really long status that we only need the first few words from"
       * val printString = string etc 30
       * assertEquals(printString, "INFO: Some really long stat...")
       * }}}
       *
       * @param i The length of the trimmed string to be output.
       */
      def etc(i: Int): String = {
        if (s.length <= i) s
        // FIX: previously substring(0, i-3) threw StringIndexOutOfBoundsException
        // for i < 3; with no room for "...", just truncate (clamping negatives to 0).
        else if (i < 3) s.substring(0, math.max(i, 0))
        else s.substring(0, i - 3) + "..."
      }
    }
  }
}
41 |
--------------------------------------------------------------------------------
/parser/src/main/scala/com/intuit/superglue/pipeline/parsers/NopPreprocessor.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline.parsers
2 |
3 | import java.io.Reader
4 |
5 | /**
6 | * An implementation of Preparser that does not edit the Reader stream.
7 | */
object NopPreprocessor extends Preprocessor {
  /** Identity preprocessing: returns the input Reader unchanged. */
  override def preprocess(input: Reader): Reader = input
}
11 |
--------------------------------------------------------------------------------
/parser/src/main/scala/com/intuit/superglue/pipeline/parsers/NopStatementParser.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline.parsers
2 |
3 | import com.intuit.superglue.pipeline.Metadata.StatementMetadataFragment
4 |
/** A no-op parser: produces an empty metadata fragment for every statement. */
object NopStatementParser extends StatementParser {
  override def parseStatement(statement: String, dialect: Option[String]): StatementMetadataFragment =
    StatementMetadataFragment(
      statementParser = getClass.getName,
      statementType = "",
      inputObjects = Nil,
      outputObjects = Nil,
      errors = Nil,
    )
}
16 |
--------------------------------------------------------------------------------
/parser/src/main/scala/com/intuit/superglue/pipeline/parsers/Preprocessor.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline.parsers
2 |
3 | import java.io.Reader
4 |
/**
 * Performs text-editing on the whole-body input of a given script.
 *
 * Preprocessors may be composed with one another, so the input to a given
 * preprocessor may not be the raw input of the script, but may already
 * be preprocessed by a previous Preprocessor.
 *
 * Preprocessing is performed on [[Reader]]s in order to allow lazy evaluation
 * and potentially reduce memory footprint. A given implementation of
 * Preprocessor may choose to buffer the input to a String, or may use
 * stream-editing techniques on the Reader itself.
 */
trait Preprocessor {
  /**
   * Given an input-text Reader, return a Reader that applies text modifications
   * to the stream.
   *
   * @param input The whole-script input Reader.
   * @return A Reader with applied text-editing.
   */
  def preprocess(input: Reader): Reader
}
27 |
--------------------------------------------------------------------------------
/parser/src/main/scala/com/intuit/superglue/pipeline/parsers/ScriptParser.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline.parsers
2 |
3 | import com.intuit.superglue.pipeline.Metadata.ScriptMetadata
4 | import com.intuit.superglue.pipeline.producers.ScriptInput
5 |
6 | import scala.concurrent.Future
7 |
/** A parser that can asynchronously process whole script inputs of some kind(s). */
trait ScriptParser {
  /** Whether this parser can process inputs of the given kind (e.g. "sql"). */
  def acceptsKind(kind: String): Boolean
  /** Parses the given input, returning its metadata asynchronously. */
  def parse(input: ScriptInput): Future[ScriptMetadata]
  /** A short identifying name for this parser (e.g. "sql"). */
  def parserName: String
}
13 |
--------------------------------------------------------------------------------
/parser/src/main/scala/com/intuit/superglue/pipeline/parsers/SimpleStatementSplitter.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline.parsers
2 |
3 | import java.util
4 |
object SimpleStatementSplitter extends StatementSplitter {
  private val DELIMITER: Char = ';'

  /**
   * Splits a SQL script on unquoted semicolons.
   *
   * Single-quote characters toggle an "in quotes" flag, so semicolons inside
   * string literals do not end a statement. Empty statements (e.g. from ";;")
   * are dropped, and each returned statement is trimmed of whitespace.
   *
   * @param body The whole body of a script.
   * @return A collection of the individual statements in the script.
   */
  override def splitStatements(body: String): util.Collection[String] = {
    val statements = new util.ArrayList[String]()
    // Start index of the statement currently being accumulated.
    var startIndexOfUnquoted = 0
    var inQuotes = false
    for ((ch, currentIndex) <- body.toCharArray.iterator.zipWithIndex) {
      // We're at the end of the string if the index is length-1
      val atEnd = currentIndex == body.length - 1
      if (ch == '\'') inQuotes = !inQuotes

      if (atEnd) {
        // When we reach the end of the script, add the last unquoted string as a statement
        // (excluding a trailing delimiter if present), then return early.
        val end = if (ch == DELIMITER) { currentIndex } else { currentIndex + 1 }
        val stmt = body.substring(startIndexOfUnquoted, end).trim
        if (!"".equals(stmt)) statements.add(stmt)
        return statements
      }
      // When we see an unquoted semicolon, that's the end of a statement
      if (ch == DELIMITER && !inQuotes) {
        val stmt = body.substring(startIndexOfUnquoted, currentIndex).trim
        if (!"".equals(stmt)) statements.add(stmt)
        startIndexOfUnquoted = currentIndex + 1
      }
    }
    // Only reached when body is empty (the loop otherwise returns at the last char).
    statements
  }
}
40 |
--------------------------------------------------------------------------------
/parser/src/main/scala/com/intuit/superglue/pipeline/parsers/SqlScriptParser.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline.parsers
2 |
3 | import com.typesafe.config.{Config => TypesafeConfig}
4 |
/**
 * The staged parser for SQL scripts: no-op preprocessing, splitting on
 * unquoted semicolons, and per-statement parsing via Calcite by default.
 * Each stage can be overridden through the constructor (e.g. in tests).
 */
class SqlScriptParser(
  override val preparsers: List[Preprocessor] = List(NopPreprocessor),
  override val splitter: StatementSplitter = SimpleStatementSplitter,
  override val statementParser: StatementParser = new CalciteStatementParser(),
)(implicit rootConfig: TypesafeConfig) extends StagedScriptParser(
  preparsers,
  splitter,
  statementParser,
) {
  override def parserName: String = "sql"
}
16 |
--------------------------------------------------------------------------------
/parser/src/main/scala/com/intuit/superglue/pipeline/parsers/StatementParser.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline.parsers
2 |
3 | import com.intuit.superglue.pipeline.Metadata.StatementMetadataFragment
4 |
/**
 * A StatementParser defines how to read a single statement and extract
 * some metadata from it.
 */
trait StatementParser {
  /**
   * Parses one statement, optionally guided by a SQL dialect hint.
   * Implementations report failures via the fragment's `errors` list.
   */
  def parseStatement(statement: String, dialect: Option[String] = None): StatementMetadataFragment
}
12 |
--------------------------------------------------------------------------------
/parser/src/main/scala/com/intuit/superglue/pipeline/parsers/StatementSplitter.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline.parsers
2 |
3 | import java.util
4 |
5 | /**
6 | * A statement splitter takes in the whole body of a script and returns
7 | * a list of the individual statements.
8 | */
trait StatementSplitter {
  /**
   * Takes the whole body of a (perhaps preprocessed) script and splits it
   * into a collection of its individual statements.
   * @param body The whole body of a script.
   * @return A collection of the individual statements in the script
   *         (a Java collection for interop with Java-based parsers).
   */
  def splitStatements(body: String): util.Collection[String]
}
18 |
--------------------------------------------------------------------------------
/parser/src/main/scala/com/intuit/superglue/pipeline/producers/ScriptFileInput.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline.producers
2 |
3 | import java.io.{BufferedInputStream, InputStream}
4 | import java.nio.file.{Files, Path}
5 |
6 | import io.tmos.arm.ArmMethods.manage
7 |
8 | import scala.util.Try
9 |
/**
 * An input to the parser given by a file on disk.
 *
 * @param path The filepath where to read the file from.
 * @param name The filename (stringified path) of the input.
 * @param kind The type of this input. Used to determine which
 *             parser will process this input.
 * @param dialect Optional dialect hint forwarded to the parser.
 */
case class ScriptFileInput(
  path: Path,
  name: String,
  kind: String,
  dialect: Option[String]
) extends ScriptInput {
  override def source: String = "FILE"
  // Opens the file, wraps it in a buffer, and hands the stream to `f`.
  // `manage` (arm4s) closes both streams when the for-block completes —
  // presumably also on exception; confirm against arm4s docs.
  override def readInputStream[R](f: InputStream => R): Try[R] = Try {
    for {
      fileInputStream <- manage(Files.newInputStream(path))
      bufferedInputStream <- manage(new BufferedInputStream(fileInputStream))
    } yield {
      f(bufferedInputStream)
    }
  }
}
34 |
--------------------------------------------------------------------------------
/parser/src/main/scala/com/intuit/superglue/pipeline/producers/ScriptInput.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline.producers
2 |
3 | import java.io.InputStream
4 |
5 | import scala.util.Try
6 |
/**
 * Inputs to the parser have a name, a kind, and a way to read
 * from an InputStream.
 *
 * The name is just for readability, but the kind is used to allow
 * parser implementations to filter for only the inputs they support.
 */
trait ScriptInput {
  /** Human-readable identifier of the input (e.g. a file path). */
  def name: String
  /** Input kind (e.g. "sql") used to select an accepting parser. */
  def kind: String
  /** Optional dialect hint forwarded to the parser. */
  def dialect: Option[String]
  /** Where the input came from (e.g. "FILE"). */
  def source: String

  /**
   * Sends the [[InputStream]] of this Input to the given function.
   * This function returns the same value that the given function returns.
   *
   * @param f A function that takes the [[InputStream]].
   * @tparam R The type of the value returned by the given function.
   * @return The same value received from executing the given function.
   */
  def readInputStream[R](f: InputStream => R): Try[R]
}
30 |
--------------------------------------------------------------------------------
/parser/src/main/scala/com/intuit/superglue/pipeline/producers/ScriptProvider.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline.producers
2 |
/**
 * The type of objects which can read scripts from various sources
 * in order to be parsed. See [[ScriptFileProvider]] as an example
 * implementation.
 */
trait ScriptProvider {
  /** Lazily yields every script input this provider can supply. */
  def stream(): Iterator[ScriptInput]
}
11 |
--------------------------------------------------------------------------------
/parser/src/test/resources/logback-test.xml:
--------------------------------------------------------------------------------
<!-- NOTE(review): the XML tags of this file were stripped during extraction;
     reconstructed as a standard logback console configuration around the
     surviving pattern string — verify against the original file. -->
<configuration>
  <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
    <encoder>
      <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
    </encoder>
  </appender>
  <root level="INFO">
    <appender-ref ref="STDOUT"/>
  </root>
</configuration>
--------------------------------------------------------------------------------
/parser/src/test/scala/com/intuit/superglue/integration/ParserDaoIntegrationTest.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.integration
2 |
3 | import java.io.PrintStream
4 | import java.nio.file.FileSystem
5 |
6 | import com.intuit.superglue.dao.SuperglueRepository
7 | import com.intuit.superglue.pipeline.consumers.DatabaseConsumer
8 | import com.intuit.superglue.pipeline.{FsSpec, ParsingPipeline, ScriptInputSpec, Sink, Source}
9 | import com.typesafe.config.{ConfigFactory, Config => TypesafeConfig}
10 |
11 | import scala.concurrent.Await
12 | import scala.concurrent.ExecutionContext.Implicits.global
13 | import scala.concurrent.duration._
14 | import scala.language.postfixOps
15 | import scala.util.Random
16 |
/**
 * End-to-end test: runs the parsing pipeline over a virtual SQL file and
 * verifies that the DatabaseConsumer persisted the expected entities.
 */
class ParserDaoIntegrationTest extends ScriptInputSpec with FsSpec {

  /** Builds a config pointing the DAO at a uniquely-named in-memory H2 database. */
  private def inMemoryDbConfig(dbName: String): TypesafeConfig = ConfigFactory.parseString(
    s"""
       |com.intuit.superglue {
       |  dao {
       |    backend = "relational"
       |    relational {
       |      profile = "slick.jdbc.H2Profile$$"
       |      dataSourceClass = "slick.jdbc.DatabaseUrlDataSource"
       |      numThreads = 1
       |      db {
       |        driver = "org.h2.Driver"
       |        url = "jdbc:h2:mem:$dbName"
       |        user = ""
       |        password = ""
       |      }
       |    }
       |  }
       |  pipeline {
       |    parsers.sql {
       |      enabled = true
       |      input-kinds = ["sql"]
       |    }
       |    outputs.console.enabled = false
       |    outputs.database {
       |      enabled = true
       |      batch-size = 50
       |      timeout = 10
       |    }
       |  }
       |}
     """.stripMargin)

  "A SqlParser" should "write parsed data to a database" in {
    implicit val out: PrintStream = System.out
    // BUG FIX: `.toString` on the lazy character stream produced a
    // "Stream(...)" debug string, not a random name — so the H2 database name
    // was not unique per run. `.mkString` yields the intended 10-char name.
    implicit val rootConfig: TypesafeConfig = inMemoryDbConfig(Random.alphanumeric.take(10).mkString)
    implicit val fs: FileSystem = new Fixture(Seq("fake/script/fileA.sql")).fs
    val superglue = SuperglueRepository(rootConfig).get

    // Initialize database for test
    Await.result(superglue.initialize(testMode = true), 1 second)

    // One fake script with a single INSERT..SELECT statement.
    val testProvider = TestScriptProvider(List(
      TestScriptInput("fake/script/fileA.sql", "sql", None,
        """
          |INSERT INTO output_table SELECT * FROM input_table
        """.stripMargin),
    ))
    val source = new Source(testProvider)
    val pipeline = new ParsingPipeline(source)
    val sink = new Sink(pipeline)
    val consumers = sink.drain()
    assert(consumers("database").isInstanceOf[DatabaseConsumer])

    // Query the entities that the parser inserted
    val query = for {
      scripts <- superglue.scriptRepository.getAll
      statements <- superglue.statementRepository.getAll
      tables <- superglue.tableRepository.getAll
      scriptTableRelations <- superglue.scriptTableRepository.getAll
      statementTableRelations <- superglue.statementTableRepository.getAll
    } yield (scripts, statements, tables, scriptTableRelations, statementTableRelations)
    val (scripts, statements, tables, scriptTableRelations, statementTableRelations) = Await.result(query, 1 second)

    // One script, one statement, two tables, and both relation types linking them.
    assert(scripts.size == 1)
    assert(statements.size == 1)
    assert(tables.size == 2)
    assert(scriptTableRelations.size == 2)
    assert(statementTableRelations.size == 2)

    assert(scripts.exists { script =>
      script.name == "fake/script/fileA.sql" &&
        script.scriptType == "SQL"
    })

    assert(tables.exists(_.name == "OUTPUT_TABLE"))
    assert(tables.exists(_.name == "INPUT_TABLE"))

    assert(statements.exists { statement =>
      statement.text.trim == "INSERT INTO output_table SELECT * FROM input_table" &&
        statement.statementType == "INSERT"
    })
  }
}
102 |
--------------------------------------------------------------------------------
/parser/src/test/scala/com/intuit/superglue/pipeline/ConsoleConsumerTest.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline
2 |
3 | import java.io.{ByteArrayOutputStream, PrintStream}
4 | import java.time.LocalDateTime
5 |
6 | import com.intuit.superglue.pipeline.Metadata.{ScriptMetadata, StatementMetadata, StatementMetadataFragment}
7 | import com.intuit.superglue.pipeline.consumers.ConsoleConsumer
8 | import com.intuit.superglue.pipeline.consumers.OutputConsumer.{EndOfStream, Message, StartOfStream}
9 | import com.typesafe.config.{Config, ConfigFactory}
10 | import org.scalatest.FlatSpec
11 | import play.api.libs.json.Json
12 |
13 | import scala.concurrent.ExecutionContext.Implicits.global
14 | import scala.concurrent.Future
15 |
/** Verifies that ConsoleConsumer buffers events and prints valid JSON at end of stream. */
class ConsoleConsumerTest extends FlatSpec {

  "A Console Consumer" should "print the metadata of scripts" in {

    implicit val rootConfig: Config = ConfigFactory.parseString(
      """
        |com.intuit.superglue.pipeline {
        |  outputs.console {
        |    enabled = true
        |    errors-only = true
        |  }
        |}
      """.stripMargin)
    // Capture the consumer's output in memory so it can be parsed below.
    val buffer = new ByteArrayOutputStream()
    implicit val printer: PrintStream = new PrintStream(buffer)
    val consoleConsumer = new ConsoleConsumer()

    // Mock some data to send to the console
    val testTime = LocalDateTime.now()
    val testStatementMetadata = StatementMetadata(
      statementText = "Test statement text",
      statementIndex = 0,
      statementParseStartTime = testTime,
      statementParseEndTime = testTime,
      statementMetadataFragment = StatementMetadataFragment(
        statementParser = "TestStatementParser",
        statementType = "TestStatementType",
        inputObjects = List("TestInputObject"),
        outputObjects = List("TestOutputObject"),
        errors = List(
          new Exception("Test statement error 1"),
          new Exception("Test statement error 2"),
        )
      ),
    )

    val testScriptMetadata = ScriptMetadata(
      scriptName = "One",
      scriptSource = "TestSource",
      scriptKind = "TestKind",
      scriptDialect = None,
      scriptParser = getClass.getName,
      scriptParseStartTime = testTime,
      scriptParseEndTime = testTime,
      statementsMetadata = List(
        testStatementMetadata,
        testStatementMetadata
      ),
      errors = List(
        new Exception("Test script error 1"),
        new Exception("Test script error 2"),
      ),
    )

    // Two scripts: one with errors, one without (exercises both paths).
    val events = List(
      testScriptMetadata,
      testScriptMetadata.copy(
        scriptName = "Two",
        statementsMetadata = List(
          testStatementMetadata.copy(
            statementMetadataFragment = testStatementMetadata.statementMetadataFragment
              .copy(errors = List.empty[Throwable])
          )
        ),
        errors = List.empty[Throwable],
      ),
    )

    // Send the metadata to the reporter
    consoleConsumer.accept(StartOfStream(testTime))
    events.map(Future(_)).map(Message(_)).foreach(consoleConsumer.accept(_))
    consoleConsumer.accept(EndOfStream)

    val outputString = buffer.toString()
    assert(!outputString.isEmpty)

    // Assert that the output parses into json with the expected view structure
    val json = Json.parse(outputString)
    assert((json \ 0 \ "name").isDefined)
    assert((json \ 0 \ "statements").isDefined)
    assert((json \ 0 \ "statements" \ 0 \ "type").isDefined)
    assert((json \ 0 \ "statements" \ 0 \ "inputObjects").isDefined)
    assert((json \ 0 \ "statements" \ 0 \ "outputObjects").isDefined)
  }
}
101 |
--------------------------------------------------------------------------------
/parser/src/test/scala/com/intuit/superglue/pipeline/FsSpec.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline
2 |
3 | import java.nio.file.{FileSystem, Files, Path}
4 |
5 | import com.google.common.jimfs.{Configuration, Jimfs}
6 | import org.scalatest.FlatSpec
7 |
8 | /**
9 | * Filesystem Spec, the supertype of tests which need to mock
10 | * a virtual filesystem.
11 | */
/**
 * Filesystem Spec, the supertype of tests which need to mock
 * a virtual filesystem.
 */
trait FsSpec extends FlatSpec {

  /**
   * Builds an in-memory (Jimfs) filesystem pre-populated with the given files.
   *
   * @param filenames The files to create inside the virtual filesystem.
   */
  protected case class Fixture(filenames: Seq[String]) {
    // Fresh unix-style virtual filesystem, isolated per fixture
    val fs: FileSystem = Jimfs.newFileSystem(Configuration.unix())
    // Every fixture file is resolved against the filesystem root
    val root: Path = fs.getPath("/")
    Files.createDirectories(root)

    // Resolve each requested file, then materialize it (with parent dirs)
    val paths: Seq[Path] = filenames.map(root.resolve)
    paths.foreach { file =>
      Files.createDirectories(file.getParent)
      Files.createFile(file)
    }
  }

  object Fixture {
    /** Runs the given test body against a filesystem built from the given files. */
    def apply(files: String*)(f: FileSystem => Unit): Unit = {
      f(new Fixture(files).fs)
    }
  }
}
42 |
--------------------------------------------------------------------------------
/parser/src/test/scala/com/intuit/superglue/pipeline/MiscellaneousTests.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline
2 |
3 | import com.intuit.superglue.pipeline.Metadata.StatementMetadataFragment
4 | import com.intuit.superglue.pipeline.parsers.NopStatementParser
5 | import org.scalatest.FlatSpec
6 |
class MiscellaneousTests extends FlatSpec {

  "The StringEtc trait" should "provide an etc extension method to shorten a string" in {
    import Implicits.StringEtc
    // etc(22) truncates to 22 characters and appends an ellipsis.
    assert("The quick brown fox jumped over the lazy dog".etc(22).equals("The quick brown fox..."))
  }

  "The NopStatementParser" should "return a dummy metadata object" in {
    val metadata = NopStatementParser.parseStatement("SELECT * FROM table")
    assert(metadata ==
      StatementMetadataFragment(
        statementParser = "com.intuit.superglue.pipeline.parsers.NopStatementParser$",
        statementType = "",
        inputObjects = List.empty[String],
        outputObjects = List.empty[String],
        // Named for consistency with the other arguments (was positional).
        errors = List.empty[Throwable],
      )
    )
  }
}
27 |
--------------------------------------------------------------------------------
/parser/src/test/scala/com/intuit/superglue/pipeline/ParsingPipelineTest.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline
2 |
3 | import com.intuit.superglue.pipeline.parsers.SqlScriptParser
4 | import com.typesafe.config.{ConfigFactory, Config => TypesafeConfig}
5 |
6 | import scala.concurrent.ExecutionContext.Implicits.global
7 | import scala.concurrent.duration._
8 | import scala.concurrent.{Await, Future}
9 | import scala.language.postfixOps
10 |
class ParsingPipelineTest extends ScriptInputSpec with FsSpec {

  "The Pipeline" should "send inputs to parsers according to inputKind configurations" in {
    // Two recognized sql-family inputs, plus two that no enabled parser accepts.
    val testFiles = Seq(
      "fake/script/fileA.sql",
      "fake/script/fileB.hql",
      "nonsense/fileC.xql",
      "nonsense/fileD.blah",
    )

    Fixture(testFiles: _*) { implicit fs =>
      // NOTE: sqlXray declares input-kinds but no "enabled" flag.
      implicit val rootConfig: TypesafeConfig = ConfigFactory.parseString(
        """
          |com.intuit.superglue.pipeline.parsers {
          |  sql {
          |    enabled = true
          |    input-kinds = ["sql"]
          |  }
          |  sqlHive {
          |    enabled = true
          |    input-kinds = ["sql_hive"]
          |  }
          |  sqlXray {
          |    input-kinds = ["sql_xray"]
          |  }
          |}
        """.stripMargin)

      // Hardcoded "inputs" standing in for real script files
      val provider = TestScriptProvider(List(
        TestScriptInput("fake/script/fileA.sql", "sql", None, "create table output like input including projections"),
        TestScriptInput("fake/script/fileB.hql", "sql_hive", Some("HIVE"), "some hive sql script"),
        TestScriptInput("nonsense/fileC.xql", "sql_xray", None, "some nonexistant sql strain script"),
        TestScriptInput("nonsense/fileD.blah", "nonsense", None, "not even a sql file"),
      ))

      // Feed the pipeline from a Source backed by the hardcoded provider
      val processor = new ParsingPipeline(new Source(provider))

      // Outputs record which parser handled them; only fileA is expected through.
      val futureOutputs = processor.stream().toSeq
      val expectedOutputs = Seq(
        ("fake/script/fileA.sql", new SqlScriptParser().parserName),
      )
      val outputs = Await.result(Future.sequence(futureOutputs), 1 second)
      assert(outputs.length == expectedOutputs.length)
      outputs.zip(expectedOutputs).foreach { case (actual, (expectedName, expectedParser)) =>
        assert(actual.scriptName == expectedName)
        assert(actual.scriptParser == expectedParser)
      }
    }
  }
}
67 |
--------------------------------------------------------------------------------
/parser/src/test/scala/com/intuit/superglue/pipeline/ScriptFileInputTest.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline
2 |
3 | import java.io.{BufferedReader, InputStreamReader}
4 | import java.nio.charset.Charset
5 | import java.nio.file.Files
6 | import java.util.stream.Collectors
7 |
8 | import com.intuit.superglue.pipeline.producers.ScriptFileInput
9 |
class ScriptFileInputTest extends FsSpec {

  "A ScriptFileInput" should "open and read text from a file" in {
    // Write known contents into a file on the virtual filesystem.
    val fixture = Fixture(List("/test/file.txt"))
    val testFile = fixture.root.resolve("/test/file.txt")
    val expectedText = "The quick brown fox jumped over the lazy dog"
    Files.write(testFile, expectedText.getBytes(Charset.defaultCharset()))

    // Read the same file back through a ScriptFileInput.
    val input = ScriptFileInput(testFile, "test.txt", "TXT", None)
    val testContents = input.readInputStream { stream =>
      new BufferedReader(new InputStreamReader(stream)).lines().collect(Collectors.joining("\n"))
    }
    assert(input.source.equals("FILE"))
    assert(testContents.isSuccess)
    assert(testContents.get.equals(expectedText))
  }
}
26 |
--------------------------------------------------------------------------------
/parser/src/test/scala/com/intuit/superglue/pipeline/ScriptInputSpec.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline
2 |
3 | import java.io.{ByteArrayInputStream, InputStream}
4 |
5 | import com.intuit.superglue.pipeline.producers.{ScriptInput, ScriptProvider}
6 | import org.scalatest.FlatSpec
7 |
8 | import scala.util.Try
9 |
trait ScriptInputSpec extends FlatSpec {

  /** An in-memory ScriptInput whose stream contents come from the testScript string. */
  protected case class TestScriptInput(
    name: String,
    kind: String,
    dialect: Option[String],
    testScript: String,
  ) extends ScriptInput {
    override def source: String = "TEST"
    override def readInputStream[R](f: InputStream => R): Try[R] = {
      // Serve the script text as a byte stream; Try captures any reader failure.
      Try(f(new ByteArrayInputStream(testScript.getBytes())))
    }
  }

  /** A ScriptProvider that streams a fixed, in-memory list of inputs. */
  protected case class TestScriptProvider(inputs: List[ScriptInput]) extends ScriptProvider {
    override def stream(): Iterator[ScriptInput] = inputs.iterator
  }
}
28 |
--------------------------------------------------------------------------------
/parser/src/test/scala/com/intuit/superglue/pipeline/SimpleStatementSplitterTest.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline
2 |
3 | import com.intuit.superglue.pipeline.parsers.SimpleStatementSplitter
4 |
5 | import scala.collection.JavaConverters._
6 | import org.scalatest.FlatSpec
7 |
class SimpleStatementSplitterTest extends FlatSpec {

  /** Splits the given script and returns the statements as a Scala list. */
  private def split(script: String): List[String] =
    SimpleStatementSplitter.splitStatements(script).asScala.toList

  "A simple statement splitter" should "find one statement in a script with no semicolons" in {
    assert(split("SELECT * from customers") == List("SELECT * from customers"))
  }

  it should "remove a semicolon at the end of a single-statement script" in {
    assert(split("SELECT * from customers ; ") == List("SELECT * from customers"))
  }

  it should "split two statements and remove a trailing semicolon" in {
    val script =
      """CREATE TABLE products;
        |INSERT INTO products
        |SELECT * from prototype_products;
      """.stripMargin
    val expected = List(
      "CREATE TABLE products",
      """INSERT INTO products
        |SELECT * from prototype_products
      """.stripMargin.trim
    )
    assert(split(script) == expected)
  }

  it should "ignore semicolons that appear in single-quotes" in {
    // The semicolon inside the quoted literal must not split the statement.
    assert(split("SELECT name FROM customers WHERE name = 'semicolon;man';") ==
      List("SELECT name FROM customers WHERE name = 'semicolon;man'"))
  }
}
47 |
--------------------------------------------------------------------------------
/parser/src/test/scala/com/intuit/superglue/pipeline/SourceTest.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.pipeline
2 |
3 | import com.intuit.superglue.pipeline.producers.ScriptFileInput
4 | import com.typesafe.config.{ConfigFactory, Config => TypesafeConfig}
5 |
class SourceTest extends FsSpec {

  "A parser Source" should "use FileInputConfigs to set up FileProviders" in {
    val testFiles = Seq(
      "/path/to/fileA",
      "/path/to/fileB",
      "/path/to/fileC",
    )

    // Build a virtual filesystem containing the three files (see FsSpec).
    Fixture(testFiles: _*) { implicit fs =>
      // First entry carries an explicit kind; the second declares none.
      implicit val rootConfig: TypesafeConfig = ConfigFactory.parseString(
        """
          |com.intuit.superglue.pipeline.inputs.files = [
          |  { base = "/"
          |    includes = [ "glob:**/*A*" ]
          |    kind = "kindA" },
          |  { base = "/"
          |    includes = [ "glob:**/*B*" ] }
          |]
        """.stripMargin)

      // The Source picks up its file-target configs from the implicit rootConfig
      val source = new Source()

      // Only fileA — matched by an include pattern with a kind — is expected.
      val actualInputs = source.stream().toList
      val expectedInputs = List(
        ScriptFileInput(fs.getPath("/path/to/fileA"), "path/to/fileA", "kindA", None),
      )
      assert(actualInputs == expectedInputs)
    }
  }
}
40 |
--------------------------------------------------------------------------------
/service/build.gradle:
--------------------------------------------------------------------------------
plugins {
    // Apply the scala plugin to add support for Scala
    id "scala"
    // Runs ScalaTest suites from the Gradle test task
    id "com.github.maiflai.scalatest" version "0.23"
}

repositories {
    // Use jcenter for resolving your dependencies.
    // You can declare any Maven/Ivy/file repository here.
    jcenter()
    mavenCentral()
}

// Scala binary version; selects the artifact suffix for the dependencies below
def scala_minor_version = "2.12"

dependencies {
    // Data-access layer of this multi-project build (see settings.gradle)
    implementation project(":dao")

    // Automatic resource management
    implementation "io.tmos:arm4s_${scala_minor_version}:1.1.0"

    // Elasticsearch client
    implementation "com.sksamuel.elastic4s:elastic4s-http_${scala_minor_version}:6.5.7"

    // Json serialization
    implementation "com.typesafe.play:play-json_${scala_minor_version}:2.6.10"

    // Slick. Needed for 'MappedTo' macro in DAO entities
    implementation "com.typesafe.slick:slick_${scala_minor_version}:3.3.0"

    // Google guava for caching
    implementation "com.google.guava:guava:27.1-jre"
}
34 |
--------------------------------------------------------------------------------
/service/src/main/resources/reference.conf:
--------------------------------------------------------------------------------
# Default settings for the Elasticsearch service client.
com.intuit.superglue.elastic {
  hostname = "localhost"
  port = 9200
  # Batch size used when writing documents (exact usage defined by the service)
  batch-size = 500

  # Alias and index names; each may be overridden by an environment variable
  alias = "lineage"
  alias = ${?SUPERGLUE_INDEX_ALIAS}
  index = "lineage1"
  index = ${?SUPERGLUE_INDEX_NAME}
}
--------------------------------------------------------------------------------
/service/src/main/scala/com/intuit/superglue/elastic/package.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue
2 |
3 | import com.typesafe.config.{Config => TypesafeConfig}
4 | import pureconfig.generic.auto._
5 |
package object elastic {

  /**
   * Connection and indexing settings for the Elasticsearch client.
   * Loaded from Typesafe config via pureconfig (see the companion apply).
   */
  case class ElasticsearchConfig(
    hostname: String,
    port: Int,
    batchSize: Int,
    index: String,
    alias: String,
    indexType: String = "_doc",
  )

  object ElasticsearchConfig {
    // Matches hostnames that already carry an http:// or https:// scheme
    private val schemePattern = "^https?://.*".r

    /**
     * Loads an ElasticsearchConfig from the given config, prefixing the
     * hostname with "http://" when it has no scheme. Returns None when
     * the config cannot be loaded.
     */
    def apply(config: TypesafeConfig): Option[ElasticsearchConfig] = {
      val loaded = pureconfig.loadConfig[ElasticsearchConfig](config).toOption
      loaded.map { conf =>
        conf.hostname match {
          case schemePattern() => conf
          case _ => conf.copy(hostname = s"http://${conf.hostname}")
        }
      }
    }
  }
}
28 |
--------------------------------------------------------------------------------
/service/src/main/scala/com/intuit/superglue/lineage/LineageCacheService.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.lineage
2 |
3 | import scala.language.postfixOps
4 | import pureconfig.generic.auto._
5 | import com.intuit.superglue.dao.model.{Direction, _}
6 | import com.google.common.cache._
7 | import com.intuit.superglue.dao.SuperglueRepository
8 | import com.intuit.superglue.dao.model.PrimaryKeys.TablePK
9 |
class LineageCacheService(val superglueRepository: SuperglueRepository) {

  /** Cache entries are keyed by table primary key plus lineage direction. */
  private case class CacheKey(tablePk: TablePK, direction: Direction)

  // Guava Cache
  // As the cache grows toward the maximum, entries less likely to be reused are evicted.
  private val lineageCache = CacheBuilder.newBuilder()
    .maximumSize(100000L)
    .build[CacheKey, Set[LineageView]]

  /** Returns the cached lineage views for a table/direction pair, if present. */
  def getCachedLineageView(table: TablePK, direction: Direction): Option[Set[LineageView]] = {
    Option(lineageCache.getIfPresent(CacheKey(table, direction)))
  }

  private def addLineageViewToCache(table: TablePK, direction: Direction, value: Set[LineageView]): Unit = {
    lineageCache.put(CacheKey(table, direction), value)
  }

  /**
   * Caches the given lineage views grouped per table. Tables that produced
   * no lineage in the given direction are cached with an empty set, so
   * later lookups for them hit the cache as well.
   */
  def addLineageViewsToCache(tableNames: Set[TablePK], direction: Direction, value: Set[LineageView]): Unit = {
    // Group the views by the table they belong to in the requested direction
    val groupedViews: Map[TablePK, Set[LineageView]] = direction match {
      case Output => value.groupBy(_.outputTableId)
      case Input => value.groupBy(_.inputTableId)
    }

    // Tables with no lineage for the specified direction map to empty sets
    // (adding nothing when every table has lineage, same as before).
    val missingTables = tableNames -- groupedViews.keys
    val entries = groupedViews ++ missingTables.map(_ -> Set.empty[LineageView])

    entries.foreach { case (table, views) =>
      addLineageViewToCache(table, direction, views)
    }
  }

  /** Evicts the cache entry for a single table/direction pair. */
  def invalidateKey(table: TablePK, direction: Direction): Unit = {
    lineageCache.invalidate(CacheKey(table, direction))
  }

  /** Evicts every cached entry. */
  def invalidateAll(): Unit = {
    lineageCache.invalidateAll()
  }
}
57 |
--------------------------------------------------------------------------------
/service/src/main/scala/com/intuit/superglue/lineage/model/Graph.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.lineage.model
2 |
/** An immutable lineage graph: a set of nodes and the links between them. */
case class Graph(
  nodes: Set[Node],
  links: Set[Link],
)

object Graph {
  /** A graph with no nodes and no links. */
  def empty: Graph = Graph(Set.empty[Node], Set.empty[Link])
}
11 |
--------------------------------------------------------------------------------
/service/src/main/scala/com/intuit/superglue/lineage/model/Link.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.lineage.model
2 |
/** A directed edge in the lineage graph, from sourceNode to destinationNode. */
case class Link(
  sourceNode: Node,
  destinationNode: Node,
)
7 |
--------------------------------------------------------------------------------
/service/src/main/scala/com/intuit/superglue/lineage/model/Node.scala:
--------------------------------------------------------------------------------
1 | package com.intuit.superglue.lineage.model
2 |
3 | import com.intuit.superglue.dao.model.PrimaryKeys.TablePK
4 |
/**
 * A vertex in the lineage graph. Every node exposes a stable numeric id.
 */
sealed trait Node {
  // Fixed: the original `def id: Long,` had a trailing comma after the
  // member, which is a Scala syntax error (trailing commas are only legal
  // inside argument/parameter lists, not after definitions in a body).
  def id: Long
}

object Node {
  /** A node representing a table, identified by its primary key. */
  case class TableNode(pk: TablePK, name: String, group: String = "table") extends Node {
    override def id: Long = pk.value
  }
}
14 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | /*
2 | * This file was generated by the Gradle 'init' task.
3 | *
4 | * The settings file is used to specify which projects to include in your build.
5 | *
6 | * Detailed information about configuring a multi-project build in Gradle can be found
7 | * in the user guide at https://docs.gradle.org/5.1.1/userguide/multi_project_builds.html
8 | */
9 |
rootProject.name = 'superglue'

// Subprojects that make up the superglue multi-project build
include ':api'
include ':cli'
include ':dao'
include ':parser'
include ':service'
17 |
--------------------------------------------------------------------------------