├── .DS_Store ├── .github └── workflows │ └── nodeci.yml ├── .gitignore ├── README.md ├── __mocks__ ├── dataPrompt.js └── modePrompt.js ├── __tests__ ├── formatGQLSchema.test.ts ├── initTopiQLFolder.test.ts └── parseKafkaSchema.test.ts ├── cli-test ├── package-lock.json ├── package.json └── refHolder │ └── referenceConfig.js ├── data └── testDataFolder │ ├── avscSample.avsc │ ├── expAvVarSample.js │ ├── expAvroSample.js │ ├── passengerInfo.avsc │ ├── testSchema.avsc │ └── tripStatus.avsc ├── jest.config.js ├── kafka ├── docker │ └── docker-compose.yml ├── package-lock.json ├── package.json ├── public │ ├── consumer │ │ └── consumer.js │ ├── kconfig.js │ ├── passengerType.js │ ├── producer │ │ ├── producer.js │ │ └── producerFunc.js │ └── statusType.js ├── src │ ├── consumer │ │ └── consumer.ts │ ├── interfaces.d.ts │ ├── kconfig.ts │ ├── passengerType.ts │ ├── producer │ │ ├── producer.ts │ │ └── producerFunc.ts │ └── statusType.ts └── tsconfig.json ├── package-lock.json ├── package.json ├── testpkg ├── .gitignore ├── cli.js ├── makeInquire.js ├── package-lock.json ├── package.json ├── startInquire.js └── tools │ ├── buildGQLTool.js │ └── initTool.js ├── tsconfig.json └── website ├── .DS_Store ├── .gitignore ├── __test__ ├── puppeteer.test.js └── screenshot │ └── puppeteerScreenShot.jpg ├── client ├── .DS_Store ├── App.jsx ├── actions │ ├── actionTypes.js │ └── actions.js ├── assets │ ├── .DS_Store │ ├── buttonCopyText.png │ ├── buttonGreenCircle.png │ ├── buttonRedCircle.png │ ├── buttonYellowCircle.png │ ├── homepageGIFLogo.gif │ ├── homepageLogo.png │ ├── inquireBackground.jpg │ ├── linkedinBlueGreen.png │ ├── linkedinRed.png │ ├── linkedinYellow.png │ ├── logoGreen.png │ ├── logoRed.png │ ├── logoWhite.png │ ├── logoYellow.png │ ├── profileAnna.png │ ├── profileCece.png │ ├── profileHan.png │ ├── profileYing.png │ └── profiles.ai ├── components │ ├── converter │ │ ├── avroInput.jsx │ │ ├── buttonContainer.jsx │ │ ├── converterContainer.jsx │ │ ├── convertingBox.jsx │ │ ├── convertingDescription.jsx │ │ └── graphQLOutput.jsx │ ├── documentation │ │ ├── DocContainer.jsx │ │ ├── DocContent.jsx │ │ ├── DocSidebar.jsx │ │ ├── DocSnip.jsx │ │ └── sections │ │ │ ├── ConfigDocs.jsx │ │ │ ├── DocsConclusion.jsx │ │ │ ├── GenGQLDocs.jsx │ │ │ ├── InitDocs.jsx │ │ │ ├── PreReqDocs.jsx │ │ │ ├── QueryDocs.jsx │ │ │ └── SetupDocs.jsx │ ├── home │ │ ├── descriptionBox.jsx │ │ ├── descriptionImage.jsx │ │ └── homeContainer.jsx │ ├── navBar │ │ ├── navBar.jsx │ │ └── navButton.jsx │ └── team │ │ ├── teamContainer.jsx │ │ └── teammateProfile.jsx ├── index.jsx ├── reducers │ ├── conversionFuncs.js │ ├── index.js │ ├── schemaReducer.js │ └── webSessionReducer.js ├── store.js └── stylesheets │ ├── _converter.scss │ ├── _documentation.scss │ ├── _home.scss │ ├── _navBar.scss │ ├── _team.scss │ ├── _text.scss │ ├── _variables.scss │ └── styles.scss ├── index.html ├── package-lock.json ├── package.json ├── server └── server.js └── webpack.config.js /.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/inquire/9316d4700c53e01da24a923f9dda37e6f14e349a/.DS_Store -------------------------------------------------------------------------------- /.github/workflows/nodeci.yml: -------------------------------------------------------------------------------- 1 | name: testing 2 | 3 | on: 4 | push: 5 | branches: [ ci-testing ] 6 | pull_request: 7 | branches: [ dev ] 8 | 9 | jobs: 10 | build: 11 | 12 | runs-on: macos-11 13 | 14 | 
strategy: 15 | matrix: 16 | node-version: [12.x, 14.x] 17 | 18 | steps: 19 | - uses: actions/checkout@v2 20 | - name: Use Node.js ${{ matrix.node-version }} 21 | uses: actions/setup-node@v2 22 | with: 23 | node-version: ${{ matrix.node-version }} 24 | cache: 'npm' 25 | - run: npm ci 26 | - run: npm test 27 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | 9 | # Diagnostic reports (https://nodejs.org/api/report.html) 10 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 11 | 12 | # Runtime data 13 | pids 14 | *.pid 15 | *.seed 16 | *.pid.lock 17 | 18 | # Directory for instrumented libs generated by jscoverage/JSCover 19 | lib-cov 20 | 21 | # Coverage directory used by tools like istanbul 22 | coverage 23 | *.lcov 24 | 25 | # nyc test coverage 26 | .nyc_output 27 | 28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 29 | .grunt 30 | 31 | # Bower dependency directory (https://bower.io/) 32 | bower_components 33 | 34 | # node-waf configuration 35 | .lock-wscript 36 | 37 | # Compiled binary addons (https://nodejs.org/api/addons.html) 38 | build/Release 39 | 40 | # Dependency directories 41 | node_modules/ 42 | jspm_packages/ 43 | 44 | # TypeScript v1 declaration files 45 | typings/ 46 | 47 | # TypeScript cache 48 | *.tsbuildinfo 49 | 50 | # Optional npm cache directory 51 | .npm 52 | 53 | # Optional eslint cache 54 | .eslintcache 55 | 56 | # Microbundle cache 57 | .rpt2_cache/ 58 | .rts2_cache_cjs/ 59 | .rts2_cache_es/ 60 | .rts2_cache_umd/ 61 | 62 | # Optional REPL history 63 | .node_repl_history 64 | 65 | # Output of 'npm pack' 66 | *.tgz 67 | 68 | # Yarn Integrity file 69 | .yarn-integrity 70 | 71 | # dotenv environment variables file 72 | .env 73 | .env.test 74 | 75 | # parcel-bundler cache (https://parceljs.org/) 76 | .cache 77 | 78 | # Next.js build output 79 | .next 80 | 81 | # Nuxt.js build / generate output 82 | .nuxt 83 | dist 84 | 85 | # Gatsby files 86 | .cache/ 87 | # Comment in the public line in if your project uses Gatsby and *not* Next.js 88 | # https://nextjs.org/blog/next-9-1#public-directory-support 89 | # public 90 | 91 | # vuepress build output 92 | .vuepress/dist 93 | 94 | # Serverless directories 95 | .serverless/ 96 | 97 | # FuseBox cache 98 | .fusebox/ 99 | 100 | # DynamoDB Local files 101 | .dynamodb/ 102 | 103 | # TernJS port file 104 | .tern-port 105 | 106 | # VS Code settings 107 | *.vscode 108 | 109 | # Environment variables and passcodes 110 | .env 111 | 112 | 113 | #.DS_Store 114 | .DS_Store -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # inquire 2 | 3 | An NPM Package offering GraphQL integration for Kafka Topics 4 | -------------------------------------------------------------------------------- /__mocks__/dataPrompt.js: -------------------------------------------------------------------------------- 1 | const dataPrompt = () => { 2 | return Promise.resolve('./__mocks__/mockUser'); 3 | }; 4 | exports.dataPrompt = dataPrompt; 5 | -------------------------------------------------------------------------------- /__mocks__/modePrompt.js: -------------------------------------------------------------------------------- 1 | const modePrompt = () => { 2 | return 
Promise.resolve('1'); 3 | }; 4 | exports.modePrompt = modePrompt; 5 | -------------------------------------------------------------------------------- /__tests__/formatGQLSchema.test.ts: -------------------------------------------------------------------------------- 1 | const { formatGQLSchema } = require('../testpkg/tools/buildGQLTool.js'); 2 | 3 | const mockParsedKafkaTrip = [ 4 | [ 5 | 'Trip', 6 | { name: 'id', type: 'string' }, 7 | { name: 'vehicleId', type: 'string' }, 8 | ], 9 | ]; 10 | 11 | let gqlSchema1 = ''; 12 | beforeEach(() => { 13 | return (gqlSchema1 = formatGQLSchema(mockParsedKafkaTrip)); 14 | }); 15 | 16 | describe('converting to graphql schema', () => { 17 | describe('data type of the converted graphql schema', () => { 18 | test('datatype is string', () => { 19 | expect(typeof gqlSchema1).toBe('string'); 20 | }); 21 | test('schema begins with keyword type', () => { 22 | expect(gqlSchema1.slice(0, 4)).toStrictEqual('type'); 23 | }); 24 | }); 25 | test('stringified schema result to equal expected', () => { 26 | expect(JSON.stringify(gqlSchema1)).toEqual( 27 | '"type Trip { \\n id: String \\n vehicleId: String \\n}\\n"' 28 | ); 29 | }); 30 | }); 31 | -------------------------------------------------------------------------------- /__tests__/initTopiQLFolder.test.ts: -------------------------------------------------------------------------------- 1 | export {}; 2 | const modePrompt = jest.fn(() => { 3 | return Promise.resolve('1'); 4 | }); 5 | const dataPrompt = jest.fn(() => { 6 | return Promise.resolve('./__mocks__/mockUser'); 7 | }); 8 | const { initInquire } = require('../testpkg/startInquire'); 9 | const fs = require('fs'); 10 | const readline = require('readline'); 11 | jest.mock('fs'); 12 | jest.mock('readline'); 13 | 14 | beforeEach(() => { 15 | return jest.resetAllMocks(); 16 | }); 17 | describe('startInquire process', () => { 18 | test('modePrompt and dataPrompt are called', () => { 19 | return initInquire().then(() => { 20 | expect(modePrompt).toHaveBeenCalledTimes(1); 21 | expect(dataPrompt).toHaveBeenCalledTimes(1); 22 | }); 23 | }); 24 | }); 25 | -------------------------------------------------------------------------------- /__tests__/parseKafkaSchema.test.ts: -------------------------------------------------------------------------------- 1 | const { parseKafkaSchema } = require('../testpkg/tools/buildGQLTool.js'); 2 | 3 | const mockTripData = `{ 4 | "type": "record", 5 | "name": "Trip", 6 | "fields": [ 7 | { 8 | "name": "id", 9 | "type": "string" 10 | }, 11 | { 12 | "name": "vehicleId", 13 | "type": "string" 14 | } 15 | ] 16 | }`; 17 | 18 | let parsedAvroSchema = ''; 19 | 20 | beforeEach(() => { 21 | return (parsedAvroSchema = parseKafkaSchema(mockTripData)); 22 | }); 23 | 24 | describe('parsing avro schema', () => { 25 | describe('data type of the parsed avro schema', () => { 26 | test('datatype is array', () => { 27 | expect(Array.isArray(parsedAvroSchema)).toBeTruthy(); 28 | }); 29 | test('parsed topic entry has a length of 3', () => { 30 | expect(parsedAvroSchema[0].length).toBe(3); 31 | }); 32 | }); 33 | describe('topic name and fields', () => { 34 | test('topic name is Trip', () => { 35 | expect(parsedAvroSchema[0][0]).toEqual('Trip'); 36 | }); 37 | test('type of topic field 1 results in an object', () => { 38 | expect(typeof parsedAvroSchema[0][1]).toBe('object'); 39 | }); 40 | test('type of topic field 1 has a key id', () => { 41 | expect(parsedAvroSchema[0][1]).toStrictEqual({ 42 | name: 'id', 43 | type: 'string', 44 | }); 45 | }); 46 | test('type of topic field 2 results in an object', () => { 47 | expect(typeof parsedAvroSchema[0][2]).toBe('object'); 48 | }); 49 | test('type of topic field 2 has a key vehicleId', () => { 50 | expect(parsedAvroSchema[0][2]).toStrictEqual({ 51 | name: 'vehicleId', 52 | type: 'string', 53 | }); 54 | }); 55 | }); 56 | }); 57 | -------------------------------------------------------------------------------- /cli-test/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cli-test", 3 | "version": "1.0.0", 4 | "lockfileVersion": 1, 5 | "requires": true, 6 | "dependencies": { 7 | "topiql": { 8 | "version": "1.0.1", 9 | "resolved": "https://registry.npmjs.org/topiql/-/topiql-1.0.1.tgz", 10 | "integrity": "sha512-+yOdmpR6m8JTqcvqCeiQPFOGaQfZiY0wng1XDoBr+FAb5meoqkUM+9B/kCiH0f2XLWKsGVVNc6QMn4CNNWG9RA==" 11 | } 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /cli-test/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cli-test", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "topiql": "topiql", 8 | "test": "echo \"Error: no test specified\" && exit 1" 9 | }, 10 | "keywords": [], 11 | "author": "", 12 | "license": "ISC", 13 | "dependencies": { 14 | "topiql": "^1.0.1" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /cli-test/refHolder/referenceConfig.js: -------------------------------------------------------------------------------- 1 | const dotenv = require('dotenv'); // Allows us to safely store and reference credentials in a .env file that is not uploaded to github 2 | const path = require('path'); 3 | 4 | const envFile = path.resolve(__dirname, '../../../.env') // .env is for test & development; it should be excluded in deployment 5 | dotenv.config({ path: envFile }); 6 | 7 | const username = process.env.DEMO_KAFKA_CLUSTER_USER; 8 | const password = process.env.DEMO_KAFKA_CLUSTER_PW; 9 | const broker = process.env.DEMO_KAFKA_BROKER; 10 | 11 | const sasl = username && password ? 
{ username, password, mechanism: 'plain' } : null 12 | const ssl = !!sasl 13 | 14 | const MODE = { 15 | // ALL is to read all avsc files in the directory to be transformed into GQL schema 16 | ALL: 1, 17 | // SELECT is to read ONLY files in the 'targets' to be transformed into GQL Schema 18 | SELECT: 2 19 | }; 20 | 21 | module.exports = { 22 | mode: MODE.ALL, 23 | // please fill one topic per a AVRO schema file in targets with corresponding orders 24 | topics: ['avscTopic', 'hidden', 'han', 'cece', 'testy', 'tripStatus'], 25 | // If SELECT mode, please fill the file name you desire to transform into GQL schema with extension of file; e.g) 'tripStatus.avsc' 26 | targets: ['avscSample.avsc', 'expAvVarSample.js', 'expAvroSample.js', 'passengerInfo.avsc', 'testSchema.avsc', 'tripStatus.avsc'], 27 | clientId: 'kafQL', 28 | brokers: [broker], 29 | ssl, 30 | sasl, 31 | connectionTimeout: 3000, 32 | authenticationTimeout: 1000, 33 | reauthenticationThreshold: 10000, 34 | schemaFolder: path.resolve(__dirname, '../../../data/testDataFolder/'), 35 | }; -------------------------------------------------------------------------------- /data/testDataFolder/avscSample.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "Trip", 4 | "namespace": "com.bakdata.quick.avro", 5 | "fields": [ 6 | { 7 | "name": "id", 8 | "type": "string" 9 | }, 10 | { 11 | "name": "vehicleId", 12 | "type": "string" 13 | }, 14 | { 15 | "name": "route", 16 | "type": [ 17 | "null", 18 | { 19 | "type": "array", 20 | "items": [ 21 | "null", 22 | { 23 | "type": "record", 24 | "name": "Status", 25 | "fields": [ 26 | { 27 | "name": "statusId", 28 | "type": "string" 29 | }, 30 | { 31 | "name": "tripId", 32 | "type": "string" 33 | }, 34 | { 35 | "name": "vehicleId", 36 | "type": "string" 37 | }, 38 | { 39 | "name": "position", 40 | "type": { 41 | "type": "record", 42 | "name": "Position", 43 | "fields": [ 44 | { 45 | "name": "lat", 46 | "type": "float" 47 | }, 48 | { 49 | "name": "lon", 50 | "type": "float" 51 | } 52 | ] 53 | } 54 | }, 55 | { 56 | "name": "batteryLevel", 57 | "type": "int" 58 | }, 59 | { 60 | "name": "distance", 61 | "type": "int" 62 | }, 63 | { 64 | "name": "timestamp", 65 | "type": "string" 66 | } 67 | ] 68 | } 69 | ] 70 | } 71 | ] 72 | } 73 | ] 74 | } -------------------------------------------------------------------------------- /data/testDataFolder/expAvVarSample.js: -------------------------------------------------------------------------------- 1 | const avro = require ('avsc'); 2 | 3 | /* The purpose of this comment is to exist in a space where it should be 4 | completely ignored. Rough! We want to grab the data after this. 5 | */ 6 | 7 | const schema = { 8 | "fields": [ 9 | { 10 | "name": "category", 11 | "type": { 12 | "name": "categoryType", 13 | "symbols": [ 14 | "DOG", 15 | "CAT" 16 | ], 17 | "type": "enum" 18 | } 19 | }, 20 | { 21 | "name": "noise", 22 | "type": "string" 23 | } 24 | ], 25 | "name": "animals", 26 | "type": "record" 27 | }; 28 | 29 | const throwawayVar = "I should be ignored when extracting the above schema"; 30 | 31 | module.exports = avro.Type.forSchema(schema); 32 | -------------------------------------------------------------------------------- /data/testDataFolder/expAvroSample.js: -------------------------------------------------------------------------------- 1 | let i = 2; 2 | const avro = require ('avsc'); 3 | 4 | /* The purpose of this comment is to exist in a space where it should be 5 | completely ignored. 
Rough! We want to grab the data after this. 6 | */ 7 | 8 | module.exports = avro.Type.forSchema({ 9 | "fields": [ 10 | { 11 | "name": "category", 12 | "type": { 13 | "name": "categoryType", 14 | "symbols": [ 15 | "DOG", 16 | "CAT" 17 | ], 18 | "type": "enum" 19 | } 20 | }, 21 | { 22 | "name": "noise", 23 | "type": "string" 24 | } 25 | ], 26 | "name": "animals", 27 | "type": "record" 28 | }); 29 | 30 | 31 | const throwawayVar = "I should be ignored when extracting the above schema"; -------------------------------------------------------------------------------- /data/testDataFolder/passengerInfo.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "fields": [ 3 | { 4 | "name": "name", 5 | "type": { 6 | "name": "nameType", 7 | "symbols": [ 8 | "Carla", 9 | "Joseph", 10 | "Megan", 11 | "Roland", 12 | "Stacey", 13 | "Maria", 14 | "Henry", 15 | "Peter" 16 | ], 17 | "type": "enum" 18 | } 19 | }, 20 | { 21 | "name": "street", 22 | "type": { 23 | "name": "emailType", 24 | "symbols": [ 25 | "CherryLane", 26 | "FifthAvenue", 27 | "FourteenthStreet", 28 | "PerlmanRoad", 29 | "BroadStreet", 30 | "SecondAvenue", 31 | "BleekerStreet", 32 | "LexingtonAvenue" 33 | ], 34 | "type": "enum" 35 | } 36 | } 37 | ], 38 | "name": "Passenger", 39 | "type": "record" 40 | } -------------------------------------------------------------------------------- /data/testDataFolder/testSchema.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "fields": [ 3 | { 4 | "name": "statusId", 5 | "type": "string" 6 | }, 7 | { 8 | "name": "tripId", 9 | "type": { 10 | "name": "tripIdType", 11 | "symbols": [ 12 | "trip1", 13 | "trip2" 14 | ], 15 | "type": "enum" 16 | } 17 | }, 18 | { 19 | "name": "vehicleId", 20 | "type": { 21 | "name": "vehicleIdType", 22 | "symbols": [ 23 | "car1", 24 | "car2" 25 | ], 26 | "type": "enum" 27 | } 28 | }, 29 | { 30 | "name": "position", 31 | "type": { 32 | "fields": [ 33 | { 34 | "name": "lat", 35 | "type": "float" 36 | }, 37 | { 38 | "name": "lon", 39 | "type": "float" 40 | } 41 | ], 42 | "name": "Position", 43 | "type": "record" 44 | } 45 | }, 46 | { 47 | "name": "batteryLevel", 48 | "type": "int" 49 | }, 50 | { 51 | "name": "distance", 52 | "type": "int" 53 | }, 54 | { 55 | "name": "timestamp", 56 | "type": "string" 57 | } 58 | ], 59 | "name": "status", 60 | "type": "record" 61 | } -------------------------------------------------------------------------------- /data/testDataFolder/tripStatus.avsc: -------------------------------------------------------------------------------- 1 | { 2 | "fields": [ 3 | { 4 | "name": "statusId", 5 | "type": "string" 6 | }, 7 | { 8 | "name": "tripId", 9 | "type": { 10 | "name": "tripIdType", 11 | "symbols": [ 12 | "trip1", 13 | "trip2" 14 | ], 15 | "type": "enum" 16 | } 17 | }, 18 | { 19 | "name": "vehicleId", 20 | "type": { 21 | "name": "vehicleIdType", 22 | "symbols": [ 23 | "car1", 24 | "car2" 25 | ], 26 | "type": "enum" 27 | } 28 | }, 29 | { 30 | "name": "position", 31 | "type": { 32 | "fields": [ 33 | { 34 | "name": "lat", 35 | "type": "float" 36 | }, 37 | { 38 | "name": "lon", 39 | "type": "float" 40 | } 41 | ], 42 | "name": "Position", 43 | "type": "record" 44 | } 45 | }, 46 | { 47 | "name": "batteryLevel", 48 | "type": "int" 49 | }, 50 | { 51 | "name": "distance", 52 | "type": "int" 53 | }, 54 | { 55 | "name": "timestamp", 56 | "type": "string" 57 | } 58 | ], 59 | "name": "Status", 60 | "type": "record" 61 | } 
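The .avsc files above are plain Avro (JSON) schemas. As a quick illustration of how such a file is consumed, here is a minimal, hypothetical sketch (script location and file paths are assumptions) that uses the avsc package already listed in kafka/package.json to load tripStatus.avsc and validate a hand-built message against it:

const avro = require('avsc');
const fs = require('fs');
const path = require('path');

// Parse the Avro schema file into an avsc Type (assumes this script sits at the repo root).
const schemaPath = path.resolve(__dirname, 'data/testDataFolder/tripStatus.avsc');
const statusType = avro.Type.forSchema(JSON.parse(fs.readFileSync(schemaPath, 'utf8')));

// isValid() checks the value against the record's fields, including the enum symbols above.
console.log(statusType.isValid({
  statusId: 'status1',
  tripId: 'trip1',
  vehicleId: 'car1',
  position: { lat: 40.5, lon: -70.3 },
  batteryLevel: 90,
  distance: 12,
  timestamp: new Date().toString(),
})); // true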
-------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | preset: 'ts-jest', 3 | testEnvironment: 'node', 4 | }; 5 | -------------------------------------------------------------------------------- /kafka/docker/docker-compose.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # Use this file in order to run a local Kafka instance 4 | # 1. Open Docker on desktop 5 | # 2. From a directory containing this YAML file, type in terminal: docker-compose up -d 6 | # 3. Create a topic by typing in terminal (with an appropriate topic name, e.g. purchases): 7 | # docker compose exec broker \ 8 | # kafka-topics --create \ 9 | # --topic purchases \ 10 | # --bootstrap-server localhost:9092 \ 11 | # --replication-factor 1 \ 12 | # --partitions 1 13 | # 4. Remember to configure the Kafka broker address as: const broker = "localhost:9092"; 14 | # 5. You are now ready to produce to and consume from topics 15 | # More info on YAML setup: https://developer.confluent.io/get-started/nodejs/#kafka-setup 16 | 17 | version: '3' 18 | services: 19 | zookeeper: 20 | image: confluentinc/cp-zookeeper:7.0.0 21 | hostname: zookeeper 22 | container_name: zookeeper 23 | environment: 24 | ZOOKEEPER_CLIENT_PORT: 2181 25 | ZOOKEEPER_TICK_TIME: 2000 26 | 27 | broker: 28 | image: confluentinc/cp-kafka:7.0.0 29 | container_name: broker 30 | ports: 31 | - "9092:9092" 32 | depends_on: 33 | - zookeeper 34 | environment: 35 | KAFKA_BROKER_ID: 1 36 | KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181' 37 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_INTERNAL:PLAINTEXT 38 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://localhost:9092,PLAINTEXT_INTERNAL://broker:29092 39 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 40 | KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 41 | KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 -------------------------------------------------------------------------------- /kafka/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "kafka-instance", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "start:producer": "node public/producer/producer.js", 8 | "start:consumer": "node public/consumer/consumer.js", 9 | "start:all": "concurrently \"node public/producer/producer.js\" \"node public/consumer/consumer.js\"" 10 | }, 11 | "author": "inquire", 12 | "license": "ISC", 13 | "dependencies": { 14 | "avsc": "^5.6.2", 15 | "concurrently": "^6.4.0", 16 | "express": "^4.17.1", 17 | "kafkajs": "^1.15.0", 18 | "path": "^0.12.7" 19 | }, 20 | "devDependencies": { 21 | "@types/node": "^17.0.5", 22 | "dotenv": "^10.0.0", 23 | "typescript": "^4.5.2" 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /kafka/public/consumer/consumer.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { 3 | function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } 4 | return new (P || (P = Promise))(function (resolve, reject) { 5 | function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } 6 | function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } 7 | function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } 8 | step((generator = generator.apply(thisArg, _arguments || [])).next()); 9 | }); 10 | }; 11 | Object.defineProperty(exports, "__esModule", { value: true }); 12 | const { Kafka } = require('kafkajs'); // NPM Package: Javascript compatible Kafka 13 | const config = require('../kconfig.js'); // Information about Kafka Cluster and Topics 14 | // This Kafka instance is hosted on the Confluent Cloud, using the credentials in kafkaConfig.js. 15 | // Topics can be created online through confluent cloud portal 16 | const kafka = new Kafka(config); 17 | // Initiates a new consumer for every topic in config 18 | for (let i = 0; i < config.topics.length; i++) { 19 | const topicName = config.topics[i]; 20 | try { 21 | const topicName = config.topics[i]; 22 | const consumer = kafka.consumer({ groupId: `${topicName}-group` }); 23 | consumer.connect(); 24 | consumer.subscribe({ topic: `${topicName}`, fromBeginning: false }); 25 | consumer.run({ 26 | eachMessage: ({ message }) => __awaiter(void 0, void 0, void 0, function* () { 27 | // If topic and partition are needed, expand async function arguments to include: { topic, partition, message } 28 | console.log(`Consumer: Message Read - ${message.value}`); 29 | }), 30 | }); 31 | } 32 | catch (err) { 33 | console.log(`Consumer: Failed to read - ${topicName}`); 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /kafka/public/kconfig.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | Object.defineProperty(exports, "__esModule", { value: true }); 3 | const dotenv = require('dotenv'); // Allows us to safely store and reference credentials in a .env file that is not uploaded to github 4 | dotenv.config({ path: './../.env' }); 5 | const username = process.env.DEMO_KAFKA_CLUSTER_USER; 6 | const password = process.env.DEMO_KAFKA_CLUSTER_PW; 7 | const broker = process.env.DEMO_KAFKA_BROKER; 8 | // const broker = "localhost:9092"; // If running a local instance, the broker variable may look similar to this. Uncomment for local docker instance 9 | const sasl = username && password ? 
{ username, password, mechanism: 'plain' } : null; 10 | const ssl = !!sasl; 11 | const kafkaSettings = { 12 | topics: ['passengerInfo', 'tripStatus'], 13 | clientId: 'kafQL', 14 | brokers: [broker], 15 | ssl, 16 | sasl, 17 | connectionTimeout: 3000, 18 | authenticationTimeout: 1000, 19 | reauthenticationThreshold: 10000, 20 | }; 21 | module.exports = kafkaSettings; 22 | -------------------------------------------------------------------------------- /kafka/public/passengerType.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | Object.defineProperty(exports, "__esModule", { value: true }); 3 | const avro = require('avsc'); 4 | module.exports = avro.Type.forSchema({ 5 | "type": "record", 6 | "name": "Passenger", 7 | "fields": [ 8 | { 9 | "name": "name", 10 | "type": { "type": "enum", "name": "nameType", "symbols": [ 11 | "Carla", 12 | "Joseph", 13 | "Megan", 14 | "Roland", 15 | "Stacey", 16 | "Maria", 17 | "Henry", 18 | "Peter" 19 | ] } 20 | }, 21 | { 22 | "name": "street", 23 | "type": { "type": "enum", "name": "emailType", "symbols": [ 24 | "CherryLane", 25 | "FifthAvenue", 26 | "FourteenthStreet", 27 | "PerlmanRoad", 28 | "BroadStreet", 29 | "SecondAvenue", 30 | "BleekerStreet", 31 | "LexingtonAvenue" 32 | ] } 33 | } 34 | ] 35 | }); 36 | -------------------------------------------------------------------------------- /kafka/public/producer/producer.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { 3 | function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } 4 | return new (P || (P = Promise))(function (resolve, reject) { 5 | function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } 6 | function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } 7 | function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } 8 | step((generator = generator.apply(thisArg, _arguments || [])).next()); 9 | }); 10 | }; 11 | Object.defineProperty(exports, "__esModule", { value: true }); 12 | const { Kafka } = require('kafkajs'); // NPM Package: Javascript compatible Kafka 13 | const config = require('../kconfig'); // Information about Kafka Cluster and Topics 14 | const { queueTripInfo, passengerInfo } = require('./producerFunc'); 15 | // This Kafka instance is hosted on the Confluent Cloud or locally, using the credentials in kconfig.js. 
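// Note: kafkajs accepts message values only as strings or Buffers, which is why the objects
// returned by queueTripInfo() and passengerInfo() are JSON.stringify'd in producer.send() below.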
16 | // Topics can be created online through confluent cloud portal 17 | const kafka = new Kafka(config); 18 | const producer = kafka.producer(); 19 | const runProducers = () => __awaiter(void 0, void 0, void 0, function* () { 20 | const firstTopic = config.topics[0]; 21 | const secondTopic = config.topics[1]; 22 | const firstMessage = queueTripInfo(); 23 | const secondMessage = passengerInfo(); 24 | try { 25 | yield producer.connect(); 26 | yield producer.send({ 27 | topic: firstTopic, 28 | messages: [ 29 | { key: 'status', value: JSON.stringify(firstMessage), headers: '' }, 30 | ], 31 | }); 32 | yield producer.send({ 33 | topic: secondTopic, 34 | messages: [ 35 | { key: 'passengerInfo', value: JSON.stringify(secondMessage), headers: '' }, 36 | ], 37 | }); 38 | console.log(`Producer: Write success - ${firstTopic}, ${secondTopic}`); 39 | yield producer.disconnect(); 40 | } 41 | catch (err) { 42 | console.log(`Producer: Failed to write - ${err}`); 43 | } 44 | }); 45 | setInterval(() => { 46 | runProducers(); 47 | }, 4000); 48 | -------------------------------------------------------------------------------- /kafka/public/producer/producerFunc.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | Object.defineProperty(exports, "__esModule", { value: true }); 3 | // GENERATE TRIP STATUS 4 | function queueTripInfo() { 5 | const tripId = getRandomTripId(); 6 | const statusId = getStatusId(tripId); 7 | const vehicleId = getVehicleId(tripId); 8 | const position = getPosition(); 9 | const batteryLevel = batteryFunc(tripId); 10 | const distance = distanceFunc(tripId); 11 | const now = new Date(); 12 | const timestamp = now.toString(); 13 | const tripInfo = { 14 | statusId, 15 | tripId, 16 | vehicleId, 17 | position, 18 | batteryLevel, 19 | distance, 20 | timestamp, 21 | }; 22 | return tripInfo; 23 | } 24 | function getStatusId(tripId) { 25 | if (tripId === 'trip1') 26 | return 'status1'; 27 | if (tripId === 'trip2') 28 | return 'status2'; 29 | else 30 | return 'noStatusId'; 31 | } 32 | function getRandomTripId() { 33 | const trips = ['trip1', 'trip2']; 34 | return trips[Math.floor(Math.random() * trips.length)]; 35 | } 36 | function getVehicleId(tripId) { 37 | if (tripId === 'trip1') 38 | return 'car1'; 39 | if (tripId === 'trip2') 40 | return 'car2'; 41 | else 42 | return 'noTripId'; 43 | } 44 | function getPosition() { 45 | const lat = 40 + Math.random(); 46 | const lon = -(70 + Math.random()); 47 | const position = { 48 | lat: lat, 49 | lon: lon, 50 | }; 51 | return position; 52 | } 53 | function getBattery() { 54 | const cache = {}; 55 | function innerFunc(tripId) { 56 | if (cache[tripId]) { 57 | cache[tripId] = cache[tripId] - 1; 58 | return cache[tripId]; 59 | } 60 | else { 61 | cache[tripId] = Math.floor(Math.random() * 10) + 85; 62 | return cache[tripId]; 63 | } 64 | } 65 | ; 66 | return innerFunc; 67 | } 68 | const batteryFunc = getBattery(); 69 | function getDistance() { 70 | const cache = {}; 71 | function innerFunc(tripId) { 72 | if (cache[tripId]) { 73 | cache[tripId] = cache[tripId] + Math.floor(Math.random() * 10); 74 | return cache[tripId]; 75 | } 76 | else { 77 | cache[tripId] = Math.floor(Math.random() * 10); 78 | return cache[tripId]; 79 | } 80 | } 81 | ; 82 | return innerFunc; 83 | } 84 | const distanceFunc = getDistance(); 85 | // GENERATE PASSENGER INFO 86 | function passengerInfo() { 87 | const name = getRandomPassenger(); 88 | const street = getPassengerAddress(name); 89 | const passengerInfo = { 90 | name, 91 | street, 92 
| }; 93 | return passengerInfo; 94 | } 95 | const passengerNames = [ 96 | "Carla", 97 | "Joseph", 98 | "Megan", 99 | "Roland", 100 | "Stacey", 101 | "Maria", 102 | "Henry", 103 | "Peter" 104 | ]; 105 | const addressBook = { 106 | "Carla": "CherryLane", 107 | "Joseph": "FifthAvenue", 108 | "Megan": "FourteenthStreet", 109 | "Roland": "PerlmanRoad", 110 | "Stacey": "BroadStreet", 111 | "Maria": "SecondAvenue", 112 | "Henry": "BleekerStreet", 113 | "Peter": "LexingtonAvenue" 114 | }; 115 | function getRandomPassenger() { 116 | const index = Math.floor(Math.random() * 8); 117 | return passengerNames[index]; 118 | } 119 | function getPassengerAddress(name) { 120 | return addressBook[name]; 121 | } 122 | module.exports = { queueTripInfo, passengerInfo }; 123 | -------------------------------------------------------------------------------- /kafka/public/statusType.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | Object.defineProperty(exports, "__esModule", { value: true }); 3 | const avro = require('avsc'); 4 | module.exports = avro.Type.forSchema({ 5 | type: 'record', 6 | name: 'Status', 7 | fields: [ 8 | { 9 | name: 'statusId', 10 | type: 'string', 11 | }, 12 | { 13 | name: 'tripId', 14 | type: { type: 'enum', symbols: ['trip1', 'trip2'] }, 15 | }, 16 | { 17 | name: 'vehicleId', 18 | type: { type: 'enum', symbols: ['car1', 'car2'] }, 19 | }, 20 | { 21 | name: 'position', 22 | type: { 23 | type: 'record', 24 | name: 'Position', 25 | fields: [ 26 | { 27 | name: 'lat', 28 | type: 'float', 29 | }, 30 | { 31 | name: 'lon', 32 | type: 'float', 33 | }, 34 | ], 35 | }, 36 | }, 37 | { 38 | name: 'batteryLevel', 39 | type: 'int', 40 | }, 41 | { 42 | name: 'distance', 43 | type: 'int', 44 | }, 45 | { 46 | name: 'timestamp', 47 | type: 'string', 48 | }, 49 | ], 50 | }); 51 | -------------------------------------------------------------------------------- /kafka/src/consumer/consumer.ts: -------------------------------------------------------------------------------- 1 | export {}; // This line of code prevents TS error 'Cannot redeclare block-scoped variable' in unrelated files 2 | 3 | const { Kafka } = require('kafkajs'); // NPM Package: Javascript compatible Kafka 4 | const config = require('../kconfig.js'); // Information about Kafka Cluster and Topics 5 | 6 | // This Kafka instance is hosted on the Confluent Cloud, using the credentials in kafkaConfig.js. 
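// Each topic gets its own consumer in a dedicated consumer group (`${topicName}-group`), so every
// consumer receives that topic's full message stream rather than sharing partitions within one group.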
7 | // Topics can be created online through confluent cloud portal 8 | 9 | const kafka = new Kafka(config); 10 | 11 | // Initiates a new consumer for every topic in config 12 | for (let i = 0; i < config.topics.length; i++) { 13 | const topicName : string = config.topics[i]; 14 | try { 15 | const topicName = config.topics[i]; 16 | const consumer = kafka.consumer({ groupId: `${topicName}-group` }); 17 | consumer.connect(); 18 | consumer.subscribe({ topic: `${topicName}`, fromBeginning: false }); 19 | consumer.run({ 20 | eachMessage: async ({ message } : { message: any}) => { 21 | // If topic and partition are needed, expand async function arguments to include: { topic, partition, message } 22 | console.log(`Consumer: Message Read - ${message.value}`); 23 | }, 24 | }); 25 | } catch (err) { 26 | console.log(`Consumer: Failed to read - ${topicName}`); 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /kafka/src/interfaces.d.ts: -------------------------------------------------------------------------------- 1 | interface Position { 2 | lat: number; 3 | lon: number; 4 | } 5 | 6 | interface TripInfo { 7 | statusId: string; 8 | tripId: string; 9 | vehicleId: string; 10 | position: Position; 11 | batteryLevel: number; 12 | distance: number; 13 | timestamp: string; 14 | } 15 | 16 | interface PassengerInfo { 17 | name: string; 18 | street: string; 19 | } 20 | 21 | interface KafkaSettingsCloud { 22 | topics: string[]; 23 | clientId: string; 24 | brokers: (string | undefined)[]; 25 | ssl: boolean; 26 | sasl: SASLSetting | null; 27 | connectionTimeout: number; 28 | authenticationTimeout: number; 29 | reauthenticationThreshold: number; 30 | } 31 | 32 | interface KafkaSettingsDocker { 33 | topics: string[]; 34 | clientId: string; 35 | brokers: (string | undefined)[]; 36 | connectionTimeout: number; 37 | authenticationTimeout: number; 38 | reauthenticationThreshold: number; 39 | } 40 | 41 | interface SASLSetting { 42 | username: string; 43 | password: string; 44 | mechanism: string; 45 | } 46 | 47 | -------------------------------------------------------------------------------- /kafka/src/kconfig.ts: -------------------------------------------------------------------------------- 1 | export {}; // This line of code prevents TS error 'Cannot redeclare block-scoped variable' in unrelated files 2 | const dotenv = require('dotenv'); // Allows us to safely store and reference credentials in a .env file that is not uploaded to github 3 | dotenv.config({ path: './../.env' }); 4 | 5 | const username = process.env.DEMO_KAFKA_CLUSTER_USER; 6 | const password = process.env.DEMO_KAFKA_CLUSTER_PW; 7 | const broker = process.env.DEMO_KAFKA_BROKER; 8 | // const broker = "localhost:9092"; // If running a local instance, the broker variable may look similar to this. Uncomment for local docker instance 9 | 10 | const sasl = username && password ? 
{ username, password, mechanism: 'plain' } : null 11 | const ssl = !!sasl 12 | 13 | const kafkaSettings : KafkaSettingsCloud = { // If running a local instance, set to "KafkaSettingsDocker" 14 | topics: ['passengerInfo', 'tripStatus'], 15 | clientId: 'kafQL', 16 | brokers: [broker], 17 | ssl, // If running a local instance, comment out 18 | sasl, 19 | connectionTimeout: 3000, 20 | authenticationTimeout: 1000, 21 | reauthenticationThreshold: 10000, 22 | }; 23 | 24 | module.exports = kafkaSettings; -------------------------------------------------------------------------------- /kafka/src/passengerType.ts: -------------------------------------------------------------------------------- 1 | export {}; // This line of code prevents TS error 'Cannot redeclare block-scoped variable' in unrelated files 2 | const avro = require('avsc'); 3 | module.exports = avro.Type.forSchema({ 4 | "type": "record", 5 | "name": "Passenger", 6 | "fields": [ 7 | { 8 | "name": "name", 9 | "type": { "type": "enum", "name": "nameType", "symbols": [ 10 | "Carla", 11 | "Joseph", 12 | "Megan", 13 | "Roland", 14 | "Stacey", 15 | "Maria", 16 | "Henry", 17 | "Peter" 18 | ]} 19 | }, 20 | { 21 | "name": "street", 22 | "type": { "type": "enum", "name": "emailType", "symbols": [ 23 | "CherryLane", 24 | "FifthAvenue", 25 | "FourteenthStreet", 26 | "PerlmanRoad", 27 | "BroadStreet", 28 | "SecondAvenue", 29 | "BleekerStreet", 30 | "LexingtonAvenue" 31 | ]} 32 | } 33 | ] 34 | }); 35 | -------------------------------------------------------------------------------- /kafka/src/producer/producer.ts: -------------------------------------------------------------------------------- 1 | export {}; // This line of code prevents TS error 'Cannot redeclare block-scoped variable' in unrelated files 2 | 3 | const { Kafka } = require('kafkajs'); // NPM Package: Javascript compatible Kafka 4 | const config = require('../kconfig'); // Information about Kafka Cluster and Topics 5 | const { queueTripInfo, passengerInfo } = require('./producerFunc'); 6 | 7 | // This Kafka instance is hosted on the Confluent Cloud or locally, using the credentials in kconfig.js. 
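// The TypeScript sources under kafka/src compile into kafka/public (see "outDir" in
// kafka/tsconfig.json); the npm scripts in kafka/package.json run that compiled output.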
8 | // Topics can be created online through confluent cloud portal 9 | 10 | const kafka = new Kafka(config); 11 | const producer = kafka.producer(); 12 | 13 | const runProducers = async () => { 14 | const firstTopic : string = config.topics[0]; 15 | const secondTopic : string = config.topics[1]; 16 | const firstMessage : TripInfo = queueTripInfo(); 17 | const secondMessage : PassengerInfo = passengerInfo(); 18 | try { 19 | await producer.connect(); 20 | 21 | await producer.send({ 22 | topic: firstTopic, 23 | messages: [ 24 | { key: 'status', value: JSON.stringify(firstMessage), headers: '' }, 25 | ], 26 | }); 27 | await producer.send({ 28 | topic: secondTopic, 29 | messages: [ 30 | { key: 'passengerInfo', value: JSON.stringify(secondMessage), headers: '' }, 31 | ], 32 | }); 33 | console.log(`Producer: Write success - ${firstTopic}, ${secondTopic}`); 34 | await producer.disconnect(); 35 | } catch (err) { 36 | console.log(`Producer: Failed to write - ${err}`); 37 | } 38 | 39 | }; 40 | 41 | setInterval(() => { 42 | runProducers(); 43 | }, 4000); 44 | -------------------------------------------------------------------------------- /kafka/src/producer/producerFunc.ts: -------------------------------------------------------------------------------- 1 | export {}; // This line of code prevents TS error 'Cannot redeclare block-scoped variable' in unrelated files 2 | 3 | // GENERATE TRIP STATUS 4 | function queueTripInfo() : TripInfo { 5 | const tripId : string = getRandomTripId(); 6 | const statusId : string = getStatusId(tripId); 7 | const vehicleId : string = getVehicleId(tripId); 8 | const position : Position = getPosition(); 9 | const batteryLevel : number = batteryFunc(tripId); 10 | const distance : number = distanceFunc(tripId); 11 | const now : Date = new Date(); 12 | const timestamp : string = now.toString(); 13 | 14 | const tripInfo : TripInfo = { 15 | statusId, 16 | tripId, 17 | vehicleId, 18 | position, 19 | batteryLevel, 20 | distance, 21 | timestamp, 22 | } 23 | 24 | return tripInfo; 25 | } 26 | 27 | function getStatusId(tripId : string) : string { 28 | if (tripId === 'trip1') return 'status1'; 29 | if (tripId === 'trip2') return 'status2'; 30 | else return 'noStatusId' 31 | } 32 | 33 | function getRandomTripId() : string { 34 | const trips = ['trip1', 'trip2']; 35 | return trips[Math.floor(Math.random() * trips.length)]; 36 | } 37 | 38 | function getVehicleId(tripId : string) : string { 39 | if (tripId === 'trip1') return 'car1'; 40 | if (tripId === 'trip2') return 'car2'; 41 | else return 'noTripId' 42 | } 43 | 44 | function getPosition() : Position { 45 | const lat = 40 + Math.random(); 46 | const lon = -(70 + Math.random()); 47 | const position : Position = { 48 | lat: lat, 49 | lon: lon, 50 | } 51 | return position; 52 | } 53 | 54 | 55 | function getBattery() : Function { 56 | const cache : {[key : string] : number} = {}; 57 | function innerFunc(tripId : string) { 58 | if (cache[tripId]) { 59 | cache[tripId] = cache[tripId] - 1; 60 | return cache[tripId]; 61 | } else { 62 | cache[tripId] = Math.floor(Math.random() * 10) + 85; 63 | return cache[tripId]; 64 | } 65 | }; 66 | return innerFunc 67 | } 68 | const batteryFunc = getBattery(); 69 | 70 | function getDistance() : Function { 71 | const cache : {[key : string] : number} = {}; 72 | function innerFunc(tripId : string) { 73 | if (cache[tripId]) { 74 | cache[tripId] = cache[tripId] + Math.floor(Math.random() * 10); 75 | return cache[tripId]; 76 | } else { 77 | cache[tripId] = Math.floor(Math.random() * 10); 78 | return 
cache[tripId]; 79 | } 80 | }; 81 | return innerFunc; 82 | } 83 | const distanceFunc = getDistance(); 84 | 85 | // GENERATE PASSENGER INFO 86 | function passengerInfo() : PassengerInfo { 87 | const name : string = getRandomPassenger(); 88 | const street : string = getPassengerAddress(name); 89 | 90 | const passengerInfo : PassengerInfo = { 91 | name, 92 | street, 93 | } 94 | return passengerInfo; 95 | } 96 | 97 | const passengerNames : string[] = [ 98 | "Carla", 99 | "Joseph", 100 | "Megan", 101 | "Roland", 102 | "Stacey", 103 | "Maria", 104 | "Henry", 105 | "Peter" 106 | ] 107 | 108 | const addressBook : {[key: string] : string} = { 109 | "Carla": "CherryLane", 110 | "Joseph" : "FifthAvenue", 111 | "Megan" : "FourteenthStreet", 112 | "Roland" : "PerlmanRoad", 113 | "Stacey" : "BroadStreet", 114 | "Maria" : "SecondAvenue", 115 | "Henry" : "BleekerStreet", 116 | "Peter" : "LexingtonAvenue" 117 | } 118 | 119 | function getRandomPassenger() : string { 120 | const index = Math.floor(Math.random() * 8) 121 | return passengerNames[index]; 122 | } 123 | function getPassengerAddress(name : string) : string { 124 | return addressBook[name]; 125 | } 126 | 127 | module.exports = { queueTripInfo, passengerInfo }; 128 | -------------------------------------------------------------------------------- /kafka/src/statusType.ts: -------------------------------------------------------------------------------- 1 | export {}; // This line of code prevents TS error 'Cannot redeclare block-scoped variable' in unrelated files 2 | const avro = require('avsc'); 3 | module.exports = avro.Type.forSchema({ 4 | type: 'record', 5 | name: 'Status', 6 | fields: [ 7 | { 8 | name: 'statusId', 9 | type: 'string', 10 | }, 11 | { 12 | name: 'tripId', 13 | type: { type: 'enum', symbols: ['trip1', 'trip2'] }, 14 | }, 15 | { 16 | name: 'vehicleId', 17 | type: { type: 'enum', symbols: ['car1', 'car2'] }, 18 | }, 19 | { 20 | name: 'position', 21 | type: { 22 | type: 'record', 23 | name: 'Position', 24 | fields: [ 25 | { 26 | name: 'lat', 27 | type: 'float', 28 | }, 29 | { 30 | name: 'lon', 31 | type: 'float', 32 | }, 33 | ], 34 | }, 35 | }, 36 | { 37 | name: 'batteryLevel', 38 | type: 'int', 39 | }, 40 | { 41 | name: 'distance', 42 | type: 'int', 43 | }, 44 | { 45 | name: 'timestamp', 46 | type: 'string', 47 | }, 48 | ], 49 | }); 50 | -------------------------------------------------------------------------------- /kafka/tsconfig.json: -------------------------------------------------------------------------------- 1 | // To use TS: 2 | // Install as a development dependency: npm i typescript --save-dev 3 | // Initialize TS project and create tsconfig.json: npx tsc --init 4 | // Compile code: npx tsc index.ts 5 | 6 | { 7 | "compilerOptions": { 8 | /* Visit https://aka.ms/tsconfig.json to read more about this file */ 9 | 10 | /* Projects */ 11 | // "incremental": true, /* Enable incremental compilation */ 12 | // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ 13 | // "tsBuildInfoFile": "./", /* Specify the folder for .tsbuildinfo incremental compilation files. */ 14 | // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects */ 15 | // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ 16 | // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. 
*/ 17 | 18 | /* Language and Environment */ 19 | "target": "es2016", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ 20 | // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ 21 | // "jsx": "preserve", /* Specify what JSX code is generated. */ 22 | // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */ 23 | // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ 24 | // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h' */ 25 | // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ 26 | // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using `jsx: react-jsx*`.` */ 27 | // "reactNamespace": "", /* Specify the object invoked for `createElement`. This only applies when targeting `react` JSX emit. */ 28 | // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ 29 | // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ 30 | 31 | /* Modules */ 32 | "module": "commonjs", /* Specify what module code is generated. */ 33 | "rootDir": "./src", /* Specify the root folder within your source files. */ 34 | // "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */ 35 | // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ 36 | // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ 37 | // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ 38 | // "typeRoots": [], /* Specify multiple folders that act like `./node_modules/@types`. */ 39 | // "types": [], /* Specify type package names to be included without being referenced in a source file. */ 40 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ 41 | // "resolveJsonModule": true, /* Enable importing .json files */ 42 | // "noResolve": true, /* Disallow `import`s, `require`s or ``s from expanding the number of files TypeScript should add to a project. */ 43 | 44 | /* JavaScript Support */ 45 | // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the `checkJS` option to get errors from these files. */ 46 | // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ 47 | // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from `node_modules`. Only applicable with `allowJs`. */ 48 | 49 | /* Emit */ 50 | // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ 51 | // "declarationMap": true, /* Create sourcemaps for d.ts files. */ 52 | // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ 53 | // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ 54 | // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If `declaration` is true, also designates a file that bundles all .d.ts output. */ 55 | "outDir": "./public", /* Specify an output folder for all emitted files. 
*/ 56 | // "removeComments": true, /* Disable emitting comments. */ 57 | // "noEmit": true, /* Disable emitting files from a compilation. */ 58 | // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ 59 | // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types */ 60 | // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ 61 | // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ 62 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ 63 | // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ 64 | // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ 65 | // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ 66 | // "newLine": "crlf", /* Set the newline character for emitting files. */ 67 | // "stripInternal": true, /* Disable emitting declarations that have `@internal` in their JSDoc comments. */ 68 | // "noEmitHelpers": true, /* Disable generating custom helper functions like `__extends` in compiled output. */ 69 | // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ 70 | // "preserveConstEnums": true, /* Disable erasing `const enum` declarations in generated code. */ 71 | // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ 72 | // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ 73 | 74 | /* Interop Constraints */ 75 | // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ 76 | // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ 77 | "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables `allowSyntheticDefaultImports` for type compatibility. */ 78 | // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ 79 | "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ 80 | 81 | /* Type Checking */ 82 | "strict": true, /* Enable all strict type-checking options. */ 83 | // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied `any` type.. */ 84 | // "strictNullChecks": true, /* When type checking, take into account `null` and `undefined`. */ 85 | // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ 86 | // "strictBindCallApply": true, /* Check that the arguments for `bind`, `call`, and `apply` methods match the original function. */ 87 | // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ 88 | // "noImplicitThis": true, /* Enable error reporting when `this` is given the type `any`. */ 89 | // "useUnknownInCatchVariables": true, /* Type catch clause variables as 'unknown' instead of 'any'. */ 90 | // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. 
*/ 91 | "noUnusedLocals": false, /* Enable error reporting when a local variables aren't read. */ 92 | "noUnusedParameters": true, /* Raise an error when a function parameter isn't read */ 93 | // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ 94 | // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ 95 | // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ 96 | // "noUncheckedIndexedAccess": true, /* Include 'undefined' in index signature results */ 97 | // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ 98 | // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type */ 99 | // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ 100 | // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ 101 | 102 | /* Completeness */ 103 | // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ 104 | "skipLibCheck": true /* Skip type checking all .d.ts files. */ 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "inquire", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "start:apollo": "node server/server.js", 8 | "test": "export NODE_ENV=test; jest" 9 | }, 10 | "author": "inquire", 11 | "license": "ISC", 12 | "dependencies": { 13 | "@graphql-tools/schema": "^8.3.1", 14 | "apollo-server-express": "^3.5.0", 15 | "concurrently": "^6.4.0", 16 | "express": "^4.17.1", 17 | "graphql": "^15.7.2", 18 | "graphql-subscriptions": "^2.0.0", 19 | "http": "^0.0.1-security", 20 | "iterall": "^1.3.0", 21 | "kafkajs": "^1.15.0", 22 | "puppeteer": "^19.8.0", 23 | "subscriptions-transport-ws": "^0.11.0" 24 | }, 25 | "devDependencies": { 26 | "@types/jest": "^27.0.3", 27 | "dotenv": "^10.0.0", 28 | "jest": "^27.4.3", 29 | "mock-fs": "^5.1.2", 30 | "ts-jest": "^27.0.7", 31 | "typescript": "^4.5.2" 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /testpkg/.gitignore: -------------------------------------------------------------------------------- 1 | #absolute path to GQL generation 2 | testpkg/pathStore.json -------------------------------------------------------------------------------- /testpkg/cli.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | const yargs = require('yargs'); 3 | const path = require('path'); 4 | const fs = require('fs'); 5 | 6 | //goal is to get one string argument from this, send it to initInquire function in testpkg/startInquire.js 7 | const startInquire = require('./startInquire'); 8 | 9 | //take one argument, relative path wherein to create inquire folder 10 | const builder = command => { 11 | command 12 | .positional("absPath", { 13 | describe: "absolute path to destination folder for inquire" 14 | }) 15 | } 16 | 17 | const initHandler = ({absPath}) => { 18 | console.log("cli.js init command executing..."); 19 | fs.writeFileSync(path.resolve(__dirname, 'pathStore.json'), JSON.stringify(absPath)); 20 | startInquire.initInquire(absPath); 21 | } 22 | 23 | 
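// `build` re-reads the destination saved by `init` (pathStore.json) and generates the files
// assembled in makeInquire.js: typeDefs.js, resolvers.js, asyncIterator.js and server.js.
// Typical flow (the `topiql` bin name is an assumption based on cli-test/package.json):
//   npx topiql init /abs/path/to/app
//   npx topiql build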
23 | const buildHandler = () => {
24 |   const makeInquire = require('./makeInquire');
25 |   const storedPath = path.resolve(__dirname, 'pathStore.json');
26 |   if (fs.existsSync(storedPath)) {
27 |     // makeInquire reads the stored destination path from pathStore.json itself
28 |     makeInquire.writeGraphQLSchema();
29 |     makeInquire.writeResolver();
30 |     makeInquire.writeAsyncIterator();
31 |     makeInquire.writeServer();
32 |   } else {
33 |     console.log("no config file found");
34 |   }
35 | }
36 | 
37 | yargs.command("init <absPath>", false, builder, initHandler).parse();
38 | yargs.command("build", false, builder, buildHandler).parse();
--------------------------------------------------------------------------------
/testpkg/makeInquire.js:
--------------------------------------------------------------------------------
 1 | const fs = require('fs');
 2 | const path = require('path');
 3 | const buildGQLTool = require('./tools/buildGQLTool.js');
 4 | 
 5 | // pathStore.json is generated when the user initializes a destination folder for GraphQL schemas
 6 | const storedPath = JSON.parse(
 7 |   fs.readFileSync(path.resolve(__dirname, './pathStore.json'))
 8 | );
 9 | const configPath = `${storedPath}/inquire/config.js`;
10 | const config = require(configPath);
11 | 
12 | const topics = config.topics;
13 | const targets = config.targets;
14 | const mode = config.mode;
15 | const schemaFolder = config.schemaFolder;
16 | 
17 | const typeDefsPath = `${storedPath}/inquire/typeDefs.js`;
18 | const resolversPath = `${storedPath}/inquire/resolvers.js`;
19 | const asyncIteratorPath = `${storedPath}/inquire/asyncIterator.js`;
20 | const serverPath = `${storedPath}/server.js`;
21 | 
22 | /**
23 |  * Returns the completed GraphQL schema built from the targeted Avro schema files,
24 |  * using functions from buildGQLTool.js according to the mode the user selected.
25 |  *
26 |  * @returns {String} completed graphql schema to be written as typeDefs.js
27 |  */
28 | const toGraphQL = () => {
29 |   let formattedData = ``;
30 |   const filenames = fs.readdirSync(schemaFolder);
31 |   const topicsTypesZip = [];
32 |   let targetZip;
33 |   let currTopic;
34 | 
35 |   // in SELECT mode, targetZip is used so the result doesn't depend on the order the files are read in
36 |   if (mode === 2) {
37 |     targetZip = buildGQLTool.zipTargets(topics, targets);
38 |   }
39 | 
40 |   filenames.forEach((filename, topicsIdx) => {
41 |     try {
42 |       if (mode === 2 && targetZip.has(filename)) {
43 |         currTopic = targetZip.get(filename);
44 |       } else if (mode === 2) {
45 |         // in SELECT mode, skip any file the user didn't select
46 |         return;
47 |       } else {
48 |         currTopic = topics[topicsIdx];
49 |       }
50 |       const tmpRead = fs.readFileSync(schemaFolder + '/' + filename);
51 |       const innerData = buildGQLTool.getInnerData(tmpRead);
52 |       const topicType = buildGQLTool.zipTopicTypes(currTopic, innerData);
53 |       topicsTypesZip.push(topicType);
54 |       const parsedData = buildGQLTool.parseKafkaSchema(innerData);
55 |       formattedData += buildGQLTool.formatGQLSchema(parsedData);
56 |     } catch (err) {
57 |       console.log(`ERR: while reading ${filename} - ${err}`);
58 |     }
59 |   });
60 | 
61 |   const completeTypedefData = buildGQLTool.completeTypeDef(
62 |     formattedData,
63 |     topicsTypesZip
64 |   );
65 | 
66 |   return completeTypedefData;
67 | };
68 | 
69 | /**
70 |  * Builds the resolver file, iterating over all the topics the user specified
71 |  *
72 |  * @returns {String} completed contents for the resolver file to be written as resolvers.js
73 |  */
74 | const makeResolver = () => {
75 |   let subscriptions = ``;
76 | 
77 |   for (const topic of topics) {
78 |     subscriptions += `
79 |     ${topic}: {
80 |       subscribe: () => kafkaEventToAsyncIterator('${topic}'),
81 |     },`;
82 |   }
83 | 
84 |   let result = `const kafkaEventToAsyncIterator = require('./asyncIterator.js')
85 | 
86 | // GraphQL Resolvers
87 | module.exports = {
88 |   Subscription: {${subscriptions}
89 |   },
90 |   Query: {
91 |     exampleQuery: () => "Add Result Here"
92 |   }
93 | }
94 | `;
95 |   return result;
96 | };
97 | 
98 | /**
99 |  * @returns {String} complete contents to be written as asyncIterator.js
100 |  */
101 | const makeAsyncIterator = () => {
102 |   return `const { $$asyncIterator } = require ('iterall');
103 | const { Kafka } = require('kafkajs'); // NPM Package: Javascript compatible Kafka
104 | const config = require('./config.js'); // Information about Kafka Cluster and Topics
105 | const client = new Kafka(config);
106 | 
107 | // Helper function to initiate consumers
108 | const getConsumer = async (topic) => {
109 |   try {
110 |     const consumer = client.consumer({ groupId: \`\${topic}-group-\${Math.random() * 100}\` });
111 |     await consumer.connect();
112 |     await consumer.subscribe({ topic: \`\${topic}\`, fromBeginning: false });
113 |     return consumer;
114 |   } catch (err) {console.log(err)}
115 | }
116 | 
117 | // Function returns an async iterator tied to a Kafka topic
118 | const kafkaEventToAsyncIterator = async (topicName) => {
119 |   let promiseResolve;
120 |   const consumer = await getConsumer(topicName);
121 |   try {
122 |     await consumer.run({
123 |       eachMessage: ({ topic, partition, message }) => {
124 |         let parsedMessage = {[topicName]: JSON.parse(message.value)}
125 |         if (promiseResolve && topicName == topic) {
126 |           promiseResolve(parsedMessage);
127 |         }
128 |       }
129 |     });
130 |   } catch (err) {console.log(err)}
131 |   return {
132 |     next() {
133 |       return new Promise(resolve => {
134 |         promiseResolve = resolve;
135 |       }).then(value => { return {done: false, value} }
136 |       );
137 |     },
138 |     return() {
139 |       return Promise.resolve({ done: true, value: undefined });
140 |     },
141 |     throw(e) {
142 |       return Promise.reject(e);
143 |     },
144 |     [$$asyncIterator]() {
145 |       return this;
146 |     },
147 |   };
148 | };
149 | 
150 | module.exports = kafkaEventToAsyncIterator;
151 | `;
152 | };
153 | 
154 | /**
155 |  * @returns complete contents to be written as server.js
156 |  */
157 | const makeServer = () => {
158 |   let result = `// Apollo docs describing how to swap apollo server:
159 | // https://www.apollographql.com/docs/apollo-server/integrations/middleware/#swapping-out-apollo-server
160 | // Once server is swapped, Apollo docs to use subscriptions:
161 | // https://www.apollographql.com/docs/apollo-server/data/subscriptions/#enabling-subscriptions
162 | 
163 | const express = require('express');
164 | const { createServer } = require('http');
165 | const { execute, subscribe } = require('graphql');
166 | 
167 | const { ApolloServer } = require('apollo-server-express');
168 | const { SubscriptionServer } = require('subscriptions-transport-ws');
169 | const { makeExecutableSchema } = require('@graphql-tools/schema');
170 | 
171 | // Import schema and resolvers from files.
172 | const typeDefs = require('./inquire/typeDefs.js');
173 | const resolvers = require('./inquire/resolvers.js');
174 | 
175 | // Server start must be wrapped in an async function
176 | (async function () {
177 |   const app = express();
178 | 
179 |   const httpServer = createServer(app);
180 | 
181 |   const schema = makeExecutableSchema({
182 |     typeDefs,
183 |     resolvers,
184 |   });
185 | 
186 |   const subscriptionServer = SubscriptionServer.create(
187 |     { schema, execute, subscribe },
188 |     { server: httpServer, path: '/graphql' }
189 |   );
190 | 
191 |   const server = new ApolloServer({
192 |     schema,
193 |     plugins: [{
194 |       async serverWillStart() {
195 |         return {
196 |           async drainServer() {
197 |             subscriptionServer.close();
198 |           }
199 |         };
200 |       }
201 |     }],
202 |   });
203 |   await server.start();
204 |   server.applyMiddleware({ app });
205 | 
206 |   const PORT = 3000;
207 |   httpServer.listen(PORT, () =>
208 |     console.log(\`Server is now running on http://localhost:\${PORT}/graphql\`)
209 |   );
210 | })();
211 | `;
212 |   return result;
213 | };
214 | 
215 | /**
216 |  * File-writing functions, kept separate to make them easy to test
217 |  */
218 | const writeGraphQLSchema = () => {
219 |   const graphQLData = toGraphQL();
220 |   fs.writeFileSync(typeDefsPath, graphQLData);
221 | };
222 | 
223 | const writeResolver = () => {
224 |   const resolverData = makeResolver();
225 |   fs.writeFileSync(resolversPath, resolverData);
226 | };
227 | 
228 | const writeAsyncIterator = () => {
229 |   const asyncIteratorData = makeAsyncIterator();
230 |   fs.writeFileSync(asyncIteratorPath, asyncIteratorData);
231 | };
232 | 
233 | const writeServer = () => {
234 |   const serverData = makeServer();
235 |   fs.writeFileSync(serverPath, serverData);
236 | };
237 | 
238 | module.exports = {
239 |   toGraphQL,
240 |   makeResolver,
241 |   makeAsyncIterator,
242 |   makeServer,
243 |   writeGraphQLSchema,
244 |   writeResolver,
245 |   writeAsyncIterator,
246 |   writeServer,
247 | };
248 | 
--------------------------------------------------------------------------------
/testpkg/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 |   "name": "inquire",
3 |   "version": "1.0.1",
4 |   "lockfileVersion": 1
5 | }
6 | 
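For reference, the generation pipeline above (getInnerData -> zipTopicTypes -> parseKafkaSchema -> formatGQLSchema -> completeTypeDef, driven by toGraphQL in makeInquire.js) turns an Avro record into a GraphQL type. Below is a sketch using the same Trip record that the website's puppeteer test feeds the demo converter; exact whitespace in the real output differs slightly:

    // Avro input (an .avsc record)
    { "type": "record", "name": "Trip", "fields": [
        { "name": "id", "type": "string" },
        { "name": "vehicleId", "type": "string" } ] }

    // fragment that formatGQLSchema appends to typeDefs.js
    type Trip {
      id: String
      vehicleId: String
    }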
--------------------------------------------------------------------------------
/testpkg/package.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "name": "inquire",
 3 |   "version": "1.0.1",
 4 |   "description": "Generates fully-typed GraphQL subscription schema for Apache Kafka based on Avro data.",
 5 |   "main": "startInquire.js",
 6 |   "scripts": {
 7 |     "test": "echo \"Error: no test specified\" && exit 1"
 8 |   },
 9 |   "keywords": [
10 |     "kafka",
11 |     "apachekafka",
12 |     "graphql",
13 |     "schema",
14 |     "avro",
15 |     "apollo"
16 |   ],
17 |   "bin": "cli.js",
18 |   "author": "Anna Falvello, Cecily Jansen, Han Bin Jo, Ying Liu",
19 |   "license": "MIT",
20 |   "repository": {
21 |     "type": "git",
22 |     "url": "https://github.com/oslabs-beta/inquire.git"
23 |   },
24 |   "bugs": {
25 |     "url": "https://github.com/oslabs-beta/inquire/issues"
26 |   }
27 | }
28 | 
--------------------------------------------------------------------------------
/testpkg/startInquire.js:
--------------------------------------------------------------------------------
 1 | const fs = require('fs');
 2 | const path = require('path');
 3 | const initTool = require('./tools/initTool');
 4 | 
 5 | 
 6 | 
 7 | // makes an inquire directory inside the user's destination (server) folder
 8 | 
 9 | let pickedMode;
10 | let dataFolder;
11 | 
12 | const readline = require('readline');
13 | 
14 | const rl = readline.createInterface({
15 |   input: process.stdin,
16 |   output: process.stdout,
17 | });
18 | 
19 | const modePrompt = () => {
20 |   return new Promise((resolve, reject) => {
21 |     rl.question(
22 |       '\n' +
23 |         'Choose MODE:\n' +
24 |         '1: Use all files in data folder to create GQL schema\n' +
25 |         '2: Manually specify files when filling out configuration\n' +
26 |         'Enter 1 OR 2: ',
27 |       (mode) => {
28 |         pickedMode = mode;
29 |         resolve();
30 |       }
31 |     );
32 |   });
33 | };
34 | 
35 | const dataPrompt = () => {
36 |   return new Promise((resolve, reject) => {
37 |     rl.question(
38 |       'Enter absolute path to folder containing schema file(s): \n',
39 |       (folderPath) => {
40 |         dataFolder = folderPath;
41 |         resolve();
42 |       }
43 |     );
44 |   });
45 | };
46 | 
47 | const initInquire = async (absPath) => {
48 |   await modePrompt();
49 |   await dataPrompt();
50 |   rl.on('close', async () => {
51 |     const targets = await initTool.createTargets(pickedMode, dataFolder);
52 |     const config = await initTool.createConfig(targets, pickedMode, dataFolder);
53 |     if (!fs.existsSync(`${absPath}/inquire`)) {
54 |       fs.mkdirSync(`${absPath}/inquire`);
55 |     }
56 |     fs.writeFileSync(`${absPath}/inquire/config.js`, config);
57 |   });
58 |   rl.close();
59 | };
60 | 
61 | // After this file runs, the user fills out the generated configuration file;
62 | // the build command then reads that configuration and writes its output to the inquire folder created here.
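// Illustrative init session (the values below are hypothetical):
//   Choose MODE: ... Enter 1 OR 2: 1
//   Enter absolute path to folder containing schema file(s): /Users/me/my-app/avro
//   -> writes <absPath>/inquire/config.js, pre-filled with mode, targets, and the schema folder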
63 | // initInquire();
64 | 
65 | module.exports = { initInquire };
66 | 
--------------------------------------------------------------------------------
/testpkg/tools/buildGQLTool.js:
--------------------------------------------------------------------------------
 1 | const fs = require('fs');
 2 | 
 3 | /**
 4 |  * An AVRO schema can be written in different formats and with different file extensions;
 5 |  * getInnerData trims away the outer structure of the file
 6 |  *
 7 |  * @param {String} fileData raw file contents read from an AVRO schema file
 8 |  * @returns trimmed file contents needed to create the GQL schema
 9 |  */
10 | const getInnerData = (fileData) => {
11 |   fileData = fileData.toString('utf-8');
12 |   try {
13 |     //check if the current file contains "avro.Type.forSchema("
14 |     const expAvroRGX = /avro\.Type\.forSchema\(/g;
15 |     if (expAvroRGX.test(fileData)) {
16 |       let extractedData;
17 |       //find the schema object, or its variable name, between the parentheses
18 |       extractedData = fileData
19 |         .toString()
20 |         .match(/(?<=avro\.Type\.forSchema\()[\s\S]*?(?=\);)/)[0]
21 |         .trim();
22 | 
23 |       if (extractedData[0] !== '{') {
24 |         //extract the variable's value following its assignment
25 |         const varDefRegex = new RegExp('(?<=' + extractedData + ' =' + ')[\\s\\S]*(?=};)'); // find variable definition
26 |         extractedData = fileData.match(varDefRegex).join('') + '}';
27 |       }
28 |       fileData = extractedData
29 |     }
30 |     return fileData;
31 |   } catch (err) {
32 |     console.log(`Error: while getting inner data of kafka stream - ${err}`)
33 |     return
34 |   }
35 | }
36 | 
37 | /**
38 |  * Couples each target filename with its topic name so they can be looked up while iterating
39 |  * over all files in the AVRO schema folder. The filename (key) is looked up during iteration,
40 |  * and once the file is found its topic name is used as the topic.
41 |  *
42 |  * @param {String[]} topics topic names specified in config.js
43 |  * @param {String[]} targets names of AVSC files in config.js
44 |  * @returns {Map} map whose keys are filenames and whose values are the corresponding topic names
45 |  */
46 | const zipTargets = (topics, targets) => {
47 |   const zipMap = new Map();
48 |   if (!Array.isArray(topics) || !topics.length) {
49 |     console.log("ERR: Your 'topics' in configuration isn't an array or is empty - please review your configuration")
50 |     return
51 |   } else if (!Array.isArray(targets) || !targets.length) {
52 |     console.log("ERR: Your 'targets' in configuration isn't an array or is empty - Did you mean to use 'ALL-MODE'?")
53 |     return
54 |   } else if (topics.length !== targets.length) {
55 |     console.log("ERR: there must be one topic for each kafka schema file - please review your configuration");
56 |     return
57 |   }
58 |   for (let i = 0; i < targets.length; i++) {
59 |     zipMap.set(targets[i], topics[i])
60 |   }
61 | 
62 |   return zipMap
63 | }
64 | 
65 | /**
66 |  * Retrieves the topic type from the file data and matches it with the corresponding topic name
67 |  *
68 |  * @param {String} topic topic name of the AVRO schema currently being processed
69 |  * @param {String} fileData trimmed file data of the currently scanned AVRO schema file
70 |  * @returns {[String, String]} returns the matched topic and topic type
71 |  */
72 | const zipTopicTypes = (topic, fileData) => {
73 |   try {
74 |     const data = JSON.parse(fileData)
75 |     return [topic, data.name]
76 |   } catch (err) {
77 |     console.log(`Err: ZipTopicTypes in buildGQLTool on ${topic} - ${err}`)
78 |     return
79 |   }
80 | }
81 | 
82 | /**
83 |  * Recursively retrieves GQL schema data from an AVRO schema. Because the properties of an
84 |  * AVRO schema can be written in any order, the function calls itself recursively as needed,
85 |  * collecting data with a backtracking algorithm.
86 |  *
87 |  * @param {String} fileData trimmed file data of the currently scanned AVRO schema file
88 |  * @returns AVRO data as a deeply nested Array to be processed in formatGQLSchema
89 |  */
90 | const parseKafkaSchema = (fileData) => {
91 |   try {
92 |     let res = [];
93 | 
94 |     function backtrack(newObj) {
95 |       let tmpArr = [];
96 |       if (Array.isArray(newObj)) { // the array check is the base case
97 |         for (let k = 0; k < newObj.length; k++) {
98 |           if (typeof newObj[k] === 'object') { // only object elements matter when an array is found
99 |             backtrack(newObj[k]);
100 |             return;
101 |           }
102 |         }
103 |       } else {
104 |         if (newObj.type) {
105 |           // only interested when the current layer's type is record or enum
106 |           if (newObj.type === 'record' || newObj.type === 'enum') {
107 |             if (newObj.name) {
108 |               tmpArr.push(newObj.name); // then save the name of the current layer
109 |               if (newObj.fields) {
110 |                 for (let j = 0; j < newObj.fields.length; j++) { // retrieve each entry in the fields array
111 |                   let tmpFieldEle = newObj.fields[j];
112 |                   /* if a field's 'type' property is itself an object, there are deeper layers
113 |                      of AVRO schema data to be transformed into graphql schema; therefore
114 |                      we recursively search deeper */
115 |                   if (typeof tmpFieldEle.type === 'object') {
116 |                     backtrack(tmpFieldEle.type);
117 |                   }
118 |                   tmpArr.push(tmpFieldEle); // keep collecting the current layer's data
119 |                 }
120 |               } else if (newObj.symbols) {
121 |                 tmpArr.push(newObj.symbols);
122 |               } else {
123 |                 console.log('Syntax error with kafka stream producer schema: missing both fields and symbols');
124 |               }
125 |             } else {
126 |               console.log('Syntax error with kafka stream producer schema: missing both name and items');
127 |             }
128 |           } else {
129 |             if (newObj.items) {
130 |               backtrack(newObj.items);
131 |               return;
132 |             }
133 |           }
134 |         }
135 |         res.push(tmpArr);
136 |       }
137 |     }
138 |     backtrack(JSON.parse(fileData));
139 |     return (res);
140 |   } catch (err) {
141 |     console.log(
142 |       'Error: there was an issue finding, reading, or parsing the schema'
143 |     );
144 |     return
145 |   }
146 | };
147 | 
148 | /**
149 |  * Formats the parsed AVRO data into GraphQL schema text by iterating over the output of parseKafkaSchema
150 |  *
151 |  * @param {Array[]} newData AVRO data as a deeply nested Array from the parseKafkaSchema function
152 |  * @returns formatted data in the form of a graphQL schema
153 |  */
154 | const formatGQLSchema = (newData) => {
155 |   try {
156 |     let result = ``;
157 |     for (let i = newData.length - 1; i >= 0; i--) {
158 |       let toAppend = '';
159 |       let prefix = 'type';
160 | 
161 |       if (Array.isArray(newData[i][1])) {
162 |         prefix = 'enum';
163 |       }
164 |       toAppend += `${prefix} ${newData[i][0]} { \n`;
165 | 
166 |       for (let j = 1; j < newData[i].length; j++) {
167 |         const currProp = newData[i][j];
168 |         if (prefix !== 'enum') {
169 | 
170 |           const typeDef = String(currProp.type);
171 |           let currType = `${typeDef[0].toUpperCase().concat(typeDef.slice(1))}`; // capitalize first letter
172 | 
173 |           // if it starts with 'Null', the type is an array filled with instances of a custom type
174 |           if (currType.startsWith('Null')) {
175 |             currType = `[${currProp.type[1].items[1].name}]`;
176 |             toAppend += ` ${currProp.name}: ${currType} \n`;
177 |           // if currType starts with '[object', the type is a single custom type
178 |           } else if (currType.startsWith('[object')) {
179 |             currType = `${currProp.type.name}`;
180 |             toAppend += ` ${currProp.name}: ${currType} \n`;
181 |           } else {
182 |             toAppend += ` ${currProp.name}: ${currType} \n`;
183 |           }
184 |         } else {
185 |           //iterate through the values in the array and add them to toAppend
186 |           for (let k = 0; k < newData[i][j].length; k++) {
187 |             toAppend += ` ${newData[i][j][k]}\n`;
188 |           }
189 |         }
190 |       }
191 |       toAppend += '}\n';
192 |       result += toAppend;
193 |     }
194 |     return result
195 | 
196 |   } catch (err) {
197 |     console.log(`Error: while formatting final data for graphQL schema - ${err}`)
198 |     return
199 |   }
200 | };
201 | 
202 | /**
203 |  * Wraps the processed GQL schema data with a Subscription type to produce the final
204 |  * form of the graphql schema
205 |  *
206 |  * @param {String} formattedData final graphql schema portion
207 |  * @param {Array.<[String, String]>} topicsTypesZip collected returns from the zipTopicTypes function
208 |  * @returns complete form of the graphql schema file
209 |  */
210 | const completeTypeDef = (formattedData, topicsTypesZip) => {
211 |   let subs = ``;
212 |   for (const topicType of topicsTypesZip) {
213 |     subs += ` ${topicType[0]}: ${topicType[1]}
214 | `;
215 |   }
216 | 
217 |   let result = `const { gql } = require('apollo-server-express');
218 | 
219 | module.exports = gql\`
220 | type Query {
221 |   exampleQuery: String!
222 | }
223 | type Subscription {
224 | ${subs}}\n`;
225 | 
226 |   result += formattedData
227 |   result += '`;';
228 |   return result
229 | }
230 | 
231 | module.exports = {
232 |   parseKafkaSchema,
233 |   formatGQLSchema,
234 |   getInnerData,
235 |   completeTypeDef,
236 |   zipTargets,
237 |   zipTopicTypes
238 | }
--------------------------------------------------------------------------------
/testpkg/tools/initTool.js:
--------------------------------------------------------------------------------
 1 | const path = require('path')
 2 | const fs = require('fs')
 3 | 
 4 | /**
 5 |  * Helps configure the user's initial setup, specifically by creating the targets list
 6 |  *
 7 |  * @param {String} mode the number representing the selected mode
 8 |  * @param {*} dataFolder the targeted AVRO schema folder
 9 |  * @returns string representation of an array of file names in the AVRO schema folder
10 |  */
11 | const createTargets = (mode, dataFolder) => {
12 |   let targets = ``
13 |   if (mode === '1') {
14 |     const filenames = fs.readdirSync(dataFolder);
15 |     targets += '['
16 |     let i = 0
17 |     while (i < filenames.length - 1) {
18 |       targets += `'${filenames[i]}', `
19 |       i += 1;
20 |     }
21 |     targets += `'${filenames[i]}']`
22 |     return targets
23 |   } else if (mode === '2') {
24 |     targets += `[]`
25 |     return targets
26 |   }
27 | }
28 | 
29 | 
30 | /**
31 |  * Makes a config.js with boilerplate to be filled in by the user
32 |  *
33 |  * @param {*} targetArr string representation of an array of file names in the AVRO schema folder
34 |  * @param {*} mode the number representing the selected mode
35 |  * @param {*} dataFolder the targeted AVRO schema folder
36 |  * @returns complete contents for the configuration to be written as config.js
37 |  */
38 | const createConfig = (targetArr, mode, dataFolder) => {
39 |   let currMode;
40 |   switch (mode) {
41 |     case "1":
42 |       currMode = "ALL";
43 |       break;
44 |     case "2":
45 |       currMode = "SELECT";
46 |       break;
47 |     default:
48 |       throw "Please select mode 1 or 2";
49 |   }
50 |   const result = `// User Configuration File for Kafka - GraphQL connection using inquire library
51 | const path = require('path');
52 | //input username and password for Confluent Cloud
53 | const username = ''
54 | const password = ''
55 | 
56 | const sasl = username && password ? { username, password, mechanism: 'plain' } : null
57 | const ssl = !!sasl
58 | const MODE = {
59 |   // ALL reads every avsc file in the directory and transforms it into GQL schema
60 |   ALL: 1,
61 |   // SELECT reads ONLY the files listed in 'targets' and transforms them into GQL schema
62 |   SELECT: 2
63 | };
64 | 
65 | module.exports = {
66 |   mode: MODE.${currMode},
67 |   // please fill in one topic per AVRO schema file in targets, in corresponding order
68 |   topics: [],
69 |   // in SELECT mode, list each file name (with extension) you want transformed into GQL schema, e.g. 'tripStatus.avsc'
70 |   targets: ${targetArr},
71 |   //input Kafka client ID and brokers
72 |   clientId: '',
73 |   // please fill in the broker(s) from your kafka stream, e.g. ['pkc-lzvrd.us-xxxxx.gcp.confluent.cloud:xxxx']
74 |   brokers: [],
75 |   ssl,
76 |   sasl,
77 |   connectionTimeout: 3000,
78 |   authenticationTimeout: 1000,
79 |   reauthenticationThreshold: 10000,
80 |   schemaFolder: '${dataFolder}',
81 | };`;
82 |   return result;
83 | }
84 | 
85 | module.exports = {
86 |   createTargets,
87 |   createConfig
88 | }
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "compilerOptions": {
 3 |     "target": "es6",
 4 |     "module": "commonjs",
 5 |     "strict": true,
 6 |     "noImplicitAny": true,
 7 |     "esModuleInterop": true
 8 |   }
 9 | }
10 | 
--------------------------------------------------------------------------------
/website/.DS_Store:
--------------------------------------------------------------------------------
 https://raw.githubusercontent.com/oslabs-beta/inquire/9316d4700c53e01da24a923f9dda37e6f14e349a/website/.DS_Store
--------------------------------------------------------------------------------
/website/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules/
2 | nodemon.json
3 | .DS_STORE
4 | 
--------------------------------------------------------------------------------
/website/__test__/puppeteer.test.js:
--------------------------------------------------------------------------------
 1 | const puppeteer = require('puppeteer');
 2 | const APP = `http://localhost:${process.env.PORT || 8080}/`;
 3 | jest.setTimeout(100000);
 4 | 
 5 | describe('Nav bar and converter functionality', () => {
 6 |   let browser;
 7 |   let page;
 8 | 
 9 |   const avroSchema = `{
10 |     "type": "record",
11 |     "name": "Trip",
12 |     "fields": [
13 |       {
14 |         "name": "id",
15 |         "type": "string"
16 |       },
17 |       {
18 |         "name": "vehicleId",
19 |         "type": "string"
20 |       }
21 |     ]
22 |   }`;
23 | 
24 |   beforeAll(async () => {
25 |     try {
26 |       browser = await puppeteer.launch({
27 |         headless: false,
28 |         slowMo: 30,
29 |         defaultViewport: {
30 |           width: 1280,
31 |           height: 720,
32 |         },
33 |       });
34 |       page = await browser.newPage();
35 |       await page.goto(APP);
36 |       await page.screenshot({
37 |         path: __dirname + '/screenshot/puppeteerScreenShot.jpg', // resolved relative to this test file, not a machine-specific absolute path
38 |       });
39 |     } catch (err) {
40 |       console.log(err);
41 |     }
42 |   });
43 | 
44 |   test('click on nav bar', async () => {
45 |     try {
46 |       await page.waitForTimeout(2000);
47 |       await page.waitForSelector('#docs');
48 |       await page.focus('#docs');
49 |       await page.click('#docs');
50 |       await page.waitForTimeout(500);
51 | 
52 |       await page.waitForSelector('#team');
53 |       await page.focus('#team');
54 |       await page.click('#team');
55 |       await page.waitForTimeout(800);
56 |     } catch (error) {
57 |       console.error(error.message);
58 |     }
59 |   });
60 | 
61 |   test('converter functionality', async () => {
62 |     try {
63 |       await page.waitForSelector('#demo');
64 |       await page.focus('#demo');
65 |       await page.click('#demo');
66 |       await page.waitForTimeout(1000);
67 | 
68 |       await page.waitForSelector('#avroInput');
69 |       await page.focus('#avroInput');
70 |       await page.evaluate(
71 |         () => (document.getElementById('avroInput').value = '')
72 |       );
73 |       await page.keyboard.type(avroSchema);
74 |       await page.waitForTimeout(1000);
75 |       await page.waitForSelector('#generate');
76 |       await page.focus('#generate');
77 |       await page.click('#generate');
78 |       await page.waitForTimeout(1500);
79 | 
80 |       await page.waitForSelector('#clear');
81 |       await page.focus('#clear');
82 |       await page.click('#clear');
83 |       await page.waitForTimeout(1000);
84 |     } catch (error) {
85 |       console.error(error.message);
86 |     }
87 |   });
88 | 
89 |   afterAll(async () => {
90 |     try {
91 |       await browser.close();
92 |     } catch (err) {
93 |       console.log(err);
94 |     }
95 |   });
96 | });
97 | 
--------------------------------------------------------------------------------
/website/__test__/screenshot/puppeteerScreenShot.jpg:
--------------------------------------------------------------------------------
 https://raw.githubusercontent.com/oslabs-beta/inquire/9316d4700c53e01da24a923f9dda37e6f14e349a/website/__test__/screenshot/puppeteerScreenShot.jpg
--------------------------------------------------------------------------------
/website/client/.DS_Store:
--------------------------------------------------------------------------------
 https://raw.githubusercontent.com/oslabs-beta/inquire/9316d4700c53e01da24a923f9dda37e6f14e349a/website/client/.DS_Store
--------------------------------------------------------------------------------
/website/client/App.jsx:
--------------------------------------------------------------------------------
 1 | /**
 2 |  * ************************************
 3 |  *
 4 |  * @module App.jsx
 5 |  *
 6 |  * ************************************
 7 |  */
 8 | 
 9 | import React, { Component } from 'react';
10 | import { connect } from 'react-redux';
11 | import styles from './stylesheets/styles.scss';
12 | 
13 | import NavBar from './components/navBar/navBar.jsx';
14 | import HomeContainer from './components/home/homeContainer.jsx';
15 | import ConverterContainer from './components/converter/converterContainer.jsx';
16 | import TeamContainer from './components/team/teamContainer.jsx';
17 | import DocContainer from './components/documentation/DocContainer.jsx';
18 | 
19 | const mapDispatchToProps = (dispatch) => ({
20 |   //
21 | });
22 | 
23 | const mapStateToProps = (state) => ({
24 |   // add pertinent state here
25 |   currPage: state.webSession.currPage,
26 | });
27 | 
28 | class App extends Component {
29 |   constructor(props) {
30 |     super(props);
31 |   }
32 | 
33 |   render() {
34 |     const displayComponents = [];
35 |     displayComponents.push(<NavBar />); // the nav bar renders on every page
36 |     switch (this.props.currPage) {
37 |       case 'home':
38 |         displayComponents.push(<HomeContainer />);
39 |         break;
40 |       case 'demo':
41 |         displayComponents.push(<ConverterContainer />);
42 |         break;
43 |       case 'team':
44 |         displayComponents.push(<TeamContainer />);
45 |         break;
46 |       case 'docs':
47 |         displayComponents.push(<DocContainer />);
48 |     }
49 |     return <div>{displayComponents}</div>
; 50 | } 51 | } 52 | 53 | export default connect(mapStateToProps, mapDispatchToProps)(App); 54 | -------------------------------------------------------------------------------- /website/client/actions/actionTypes.js: -------------------------------------------------------------------------------- 1 | /** 2 | * ************************************ 3 | * 4 | * @module actionTypes.js 5 | * @description Action Type Constants 6 | * 7 | * ************************************ 8 | */ 9 | 10 | // add action type constants i.e.: 11 | // export const ACTION_DESCRIPTION = "ACTION_DESCRIPTION"; 12 | 13 | // Schema generation actions 14 | export const CONNECT_KAFKA = 'CONNECT_KAFKA'; 15 | export const MAKE_GRAPHQL = 'MAKE_GRAPHQL'; 16 | export const CLEAR_AVRO = 'CLEAR_AVRO'; 17 | export const ADD_AVRO = 'ADD_AVRO'; 18 | export const UPDATE_GRAPHQL = 'UPDATE_GRAPHQL'; 19 | 20 | // Web session actions 21 | export const CHANGE_PAGE = 'CHANGE_PAGE'; -------------------------------------------------------------------------------- /website/client/actions/actions.js: -------------------------------------------------------------------------------- 1 | /** 2 | * ************************************ 3 | * 4 | * @module actions.js 5 | * @description Action Creators 6 | * 7 | * ************************************ 8 | */ 9 | 10 | // import actionType constants 11 | import * as types from './actionTypes'; 12 | 13 | // Schema generation action generators 14 | 15 | export const connectKafkaActionCreator = () => ({ 16 | type: types.CONNECT_KAFKA, 17 | }); 18 | 19 | export const makeGraphQLActionCreator = () => ({ 20 | type: types.MAKE_GRAPHQL, 21 | }); 22 | 23 | export const clearAvroActionCreator = () => ({ 24 | type: types.CLEAR_AVRO, 25 | payload: '', 26 | }); 27 | 28 | export const addAvroActionCreator = (avroText) => ({ 29 | type: types.ADD_AVRO, 30 | payload: avroText, 31 | }); 32 | 33 | export const updateGraphQLActionCreator = (graphQLText) => ({ 34 | type: types.UPDATE_GRAPHQL, 35 | payload: graphQLText, 36 | }); 37 | 38 | // Web session action generators 39 | export const changePageActionCreator = (newPage) => ({ 40 | type: types.CHANGE_PAGE, 41 | payload: newPage, 42 | }); 43 | -------------------------------------------------------------------------------- /website/client/assets/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/inquire/9316d4700c53e01da24a923f9dda37e6f14e349a/website/client/assets/.DS_Store -------------------------------------------------------------------------------- /website/client/assets/buttonCopyText.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/inquire/9316d4700c53e01da24a923f9dda37e6f14e349a/website/client/assets/buttonCopyText.png -------------------------------------------------------------------------------- /website/client/assets/buttonGreenCircle.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/inquire/9316d4700c53e01da24a923f9dda37e6f14e349a/website/client/assets/buttonGreenCircle.png -------------------------------------------------------------------------------- /website/client/assets/buttonRedCircle.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/inquire/9316d4700c53e01da24a923f9dda37e6f14e349a/website/client/assets/buttonRedCircle.png 
-------------------------------------------------------------------------------- /website/client/assets/buttonYellowCircle.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/inquire/9316d4700c53e01da24a923f9dda37e6f14e349a/website/client/assets/buttonYellowCircle.png -------------------------------------------------------------------------------- /website/client/assets/homepageGIFLogo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/inquire/9316d4700c53e01da24a923f9dda37e6f14e349a/website/client/assets/homepageGIFLogo.gif -------------------------------------------------------------------------------- /website/client/assets/homepageLogo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/inquire/9316d4700c53e01da24a923f9dda37e6f14e349a/website/client/assets/homepageLogo.png -------------------------------------------------------------------------------- /website/client/assets/inquireBackground.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/inquire/9316d4700c53e01da24a923f9dda37e6f14e349a/website/client/assets/inquireBackground.jpg -------------------------------------------------------------------------------- /website/client/assets/linkedinBlueGreen.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/inquire/9316d4700c53e01da24a923f9dda37e6f14e349a/website/client/assets/linkedinBlueGreen.png -------------------------------------------------------------------------------- /website/client/assets/linkedinRed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/inquire/9316d4700c53e01da24a923f9dda37e6f14e349a/website/client/assets/linkedinRed.png -------------------------------------------------------------------------------- /website/client/assets/linkedinYellow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/inquire/9316d4700c53e01da24a923f9dda37e6f14e349a/website/client/assets/linkedinYellow.png -------------------------------------------------------------------------------- /website/client/assets/logoGreen.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/inquire/9316d4700c53e01da24a923f9dda37e6f14e349a/website/client/assets/logoGreen.png -------------------------------------------------------------------------------- /website/client/assets/logoRed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/inquire/9316d4700c53e01da24a923f9dda37e6f14e349a/website/client/assets/logoRed.png -------------------------------------------------------------------------------- /website/client/assets/logoWhite.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/inquire/9316d4700c53e01da24a923f9dda37e6f14e349a/website/client/assets/logoWhite.png -------------------------------------------------------------------------------- /website/client/assets/logoYellow.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/inquire/9316d4700c53e01da24a923f9dda37e6f14e349a/website/client/assets/logoYellow.png -------------------------------------------------------------------------------- /website/client/assets/profileAnna.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/inquire/9316d4700c53e01da24a923f9dda37e6f14e349a/website/client/assets/profileAnna.png -------------------------------------------------------------------------------- /website/client/assets/profileCece.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/inquire/9316d4700c53e01da24a923f9dda37e6f14e349a/website/client/assets/profileCece.png -------------------------------------------------------------------------------- /website/client/assets/profileHan.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/inquire/9316d4700c53e01da24a923f9dda37e6f14e349a/website/client/assets/profileHan.png -------------------------------------------------------------------------------- /website/client/assets/profileYing.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/inquire/9316d4700c53e01da24a923f9dda37e6f14e349a/website/client/assets/profileYing.png -------------------------------------------------------------------------------- /website/client/assets/profiles.ai: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oslabs-beta/inquire/9316d4700c53e01da24a923f9dda37e6f14e349a/website/client/assets/profiles.ai -------------------------------------------------------------------------------- /website/client/components/converter/avroInput.jsx: -------------------------------------------------------------------------------- 1 | import React, { Component } from 'react'; 2 | import { connect } from 'react-redux'; 3 | import { addAvroActionCreator } from '../../actions/actions.js'; 4 | import buttonCopyText from '../../assets/buttonCopyText.png'; 5 | 6 | const mapDispatchToProps = (dispatch) => ({ 7 | addAvro: (text) => dispatch(addAvroActionCreator(text)), 8 | }); 9 | 10 | const mapStateToProps = (state) => ({ 11 | avroText: state.schemas.avroText, 12 | }); 13 | 14 | class AvroInput extends Component { 15 | constructor(props) { 16 | super(props); 17 | } 18 | 19 | render() { 20 | return ( 21 |
22 |         <div>
23 |           <img
24 |             src={buttonCopyText}
25 |             alt="my-logo"
26 |             onClick={() => {
27 |               navigator.clipboard.writeText(this.props.avroText);
28 |             }}
29 |           />
30 | 