├── client ├── src │ ├── boot │ │ ├── .gitkeep │ │ ├── cognito.ts │ │ └── axios.ts │ ├── assets │ │ └── logos │ │ │ ├── 1159-teal-gradient.png │ │ │ └── 1159-gunmetal-solid.png │ ├── layouts │ │ └── MainLayout.vue │ ├── stores │ │ ├── ui-store.ts │ │ ├── auth-store.ts │ │ ├── index.ts │ │ ├── message-store.ts │ │ └── kb-store.ts │ ├── router │ │ ├── routes.ts │ │ └── index.ts │ ├── pages │ │ ├── ErrorNotFound.vue │ │ ├── KbIndex.vue │ │ └── KbHelp.vue │ ├── css │ │ ├── quasar.variables.scss │ │ └── app.scss │ ├── components │ │ ├── KbHeader.vue │ │ ├── KbFooter.vue │ │ ├── KbChat.vue │ │ └── KbRightDrawer.vue │ └── App.vue ├── .prettierrc ├── public │ ├── diagram.png │ ├── favicon.ico │ └── icons │ │ ├── role.png │ │ ├── policy.png │ │ ├── function.png │ │ ├── favicon-16x16.png │ │ └── favicon-32x32.png ├── tsconfig.vue-tsc.json ├── .eslintignore ├── .editorconfig ├── .npmrc ├── tsconfig.json ├── .vscode │ ├── extensions.json │ └── settings.json ├── README.md ├── .gitignore ├── postcss.config.cjs ├── index.html ├── package.json ├── .eslintrc.cjs └── quasar.config.js ├── .gitattributes ├── .npmignore ├── sample_data ├── Treasure Island.pdf ├── Catcher in the Rye.pdf └── A Tale of Two Cities.pdf ├── .gitignore ├── jest.config.js ├── lambdas ├── sample_data │ ├── package.json │ ├── tsconfig.json │ └── index.ts ├── copy_site │ ├── package.json │ ├── tsconfig.json │ └── index.ts ├── web_api │ ├── tsconfig.json │ ├── package.json │ └── index.ts ├── create_index │ ├── package.json │ ├── index.ts │ └── tsconfig.json └── create_index_provider │ ├── package.json │ ├── index.ts │ └── tsconfig.json ├── test └── kb-1159.test.ts ├── tsconfig.json ├── bin └── nebula.ts ├── package.json ├── cdk.json ├── README.md ├── lib └── nebula-stack.ts └── LICENSE /client/src/boot/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /client/.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "singleQuote": true, 3 | "semi": true 4 | } 5 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | *.ts 2 | !*.d.ts 3 | 4 | # CDK asset staging directory 5 | .cdk.staging 6 | cdk.out 7 | -------------------------------------------------------------------------------- /client/public/diagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/1159-accelerators/nebula/HEAD/client/public/diagram.png -------------------------------------------------------------------------------- /client/public/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/1159-accelerators/nebula/HEAD/client/public/favicon.ico -------------------------------------------------------------------------------- /client/public/icons/role.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/1159-accelerators/nebula/HEAD/client/public/icons/role.png 
-------------------------------------------------------------------------------- /client/public/icons/policy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/1159-accelerators/nebula/HEAD/client/public/icons/policy.png -------------------------------------------------------------------------------- /sample_data/Treasure Island.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/1159-accelerators/nebula/HEAD/sample_data/Treasure Island.pdf -------------------------------------------------------------------------------- /client/public/icons/function.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/1159-accelerators/nebula/HEAD/client/public/icons/function.png -------------------------------------------------------------------------------- /sample_data/Catcher in the Rye.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/1159-accelerators/nebula/HEAD/sample_data/Catcher in the Rye.pdf -------------------------------------------------------------------------------- /sample_data/A Tale of Two Cities.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/1159-accelerators/nebula/HEAD/sample_data/A Tale of Two Cities.pdf -------------------------------------------------------------------------------- /client/public/icons/favicon-16x16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/1159-accelerators/nebula/HEAD/client/public/icons/favicon-16x16.png -------------------------------------------------------------------------------- /client/public/icons/favicon-32x32.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/1159-accelerators/nebula/HEAD/client/public/icons/favicon-32x32.png -------------------------------------------------------------------------------- /client/tsconfig.vue-tsc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.json", 3 | "compilerOptions": { 4 | "skipLibCheck": true 5 | } 6 | } -------------------------------------------------------------------------------- /client/src/assets/logos/1159-teal-gradient.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/1159-accelerators/nebula/HEAD/client/src/assets/logos/1159-teal-gradient.png -------------------------------------------------------------------------------- /client/.eslintignore: -------------------------------------------------------------------------------- 1 | /dist 2 | /src-capacitor 3 | /src-cordova 4 | /.quasar 5 | /node_modules 6 | .eslintrc.cjs 7 | /quasar.config.*.temporary.compiled* 8 | -------------------------------------------------------------------------------- /client/src/assets/logos/1159-gunmetal-solid.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/1159-accelerators/nebula/HEAD/client/src/assets/logos/1159-gunmetal-solid.png -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.js 2 | !jest.config.js 
3 | *.d.ts 4 | node_modules 5 | 6 | # CDK asset staging directory 7 | .cdk.staging 8 | cdk.out 9 | .DS_Store 10 | dist 11 | client/public/config.json 12 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | testEnvironment: 'node', 3 | roots: ['<rootDir>/test'], 4 | testMatch: ['**/*.test.ts'], 5 | transform: { 6 | '^.+\\.tsx?$': 'ts-jest' 7 | } 8 | }; 9 | -------------------------------------------------------------------------------- /client/.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | charset = utf-8 5 | indent_style = space 6 | indent_size = 2 7 | end_of_line = lf 8 | insert_final_newline = true 9 | trim_trailing_whitespace = true 10 | -------------------------------------------------------------------------------- /client/.npmrc: -------------------------------------------------------------------------------- 1 | # pnpm-related options 2 | shamefully-hoist=true 3 | strict-peer-dependencies=false 4 | # to get the latest compatible packages when creating the project https://github.com/pnpm/pnpm/issues/6463 5 | resolution-mode=highest 6 | -------------------------------------------------------------------------------- /client/src/layouts/MainLayout.vue: -------------------------------------------------------------------------------- 1 | 7 | 8 | 12 | -------------------------------------------------------------------------------- /client/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@quasar/app-vite/tsconfig-preset", 3 | "compilerOptions": { 4 | "baseUrl": "."
5 | }, 6 | "exclude": [ 7 | "./dist", 8 | "./.quasar", 9 | "./node_modules", 10 | "./src-capacitor", 11 | "./src-cordova", 12 | "./quasar.config.*.temporary.compiled*" 13 | ] 14 | } -------------------------------------------------------------------------------- /client/.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": [ 3 | "dbaeumer.vscode-eslint", 4 | "esbenp.prettier-vscode", 5 | "editorconfig.editorconfig", 6 | "vue.volar", 7 | "wayou.vscode-todo-highlight" 8 | ], 9 | "unwantedRecommendations": [ 10 | "octref.vetur", 11 | "hookyqr.beautify", 12 | "dbaeumer.jshint", 13 | "ms-vscode.vscode-typescript-tslint-plugin" 14 | ] 15 | } -------------------------------------------------------------------------------- /lambdas/sample_data/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sample_data", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "@aws-sdk/client-bedrock-agent": "^3.624.0", 13 | "@aws-sdk/client-s3": "^3.624.0" 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /client/src/stores/ui-store.ts: -------------------------------------------------------------------------------- 1 | import { defineStore } from 'pinia'; 2 | 3 | export const useUiStore = defineStore('ui', { 4 | state: () => ({ 5 | rightDrawer: false, 6 | waiting: false 7 | }), 8 | // getters: { 9 | // doubleCount: (state) => state.counter * 2, 10 | // }, 11 | actions: { 12 | toggleRightDrawer() { 13 | this.rightDrawer = !this.rightDrawer; 14 | }, 15 | }, 16 | }); 17 | -------------------------------------------------------------------------------- /lambdas/copy_site/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "copy_site", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "@aws-sdk/client-s3": "^3.624.0", 13 | "@aws-sdk/client-sns": "^3.637.0", 14 | "aws-lambda": "^1.0.7" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /client/.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "editor.bracketPairColorization.enabled": true, 3 | "editor.guides.bracketPairs": true, 4 | "editor.formatOnSave": true, 5 | "editor.defaultFormatter": "esbenp.prettier-vscode", 6 | "editor.codeActionsOnSave": [ 7 | "source.fixAll.eslint" 8 | ], 9 | "eslint.validate": [ 10 | "javascript", 11 | "javascriptreact", 12 | "typescript", 13 | "vue" 14 | ], 15 | "typescript.tsdk": "node_modules/typescript/lib" 16 | } -------------------------------------------------------------------------------- /lambdas/copy_site/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es2020", 4 | "strict": true, 5 | "preserveConstEnums": true, 6 | "noEmit": true, 7 | "sourceMap": true, 8 | "module":"commonjs", 9 | "moduleResolution":"node", 10 | "esModuleInterop": true, 11 | "skipLibCheck": true, 12 | "forceConsistentCasingInFileNames": true, 13 | "isolatedModules": true, 
14 | }, 15 | "exclude": ["node_modules", "**/*.test.ts"] 16 | } -------------------------------------------------------------------------------- /lambdas/web_api/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es2020", 4 | "strict": true, 5 | "preserveConstEnums": true, 6 | "noEmit": true, 7 | "sourceMap": true, 8 | "module":"commonjs", 9 | "moduleResolution":"node", 10 | "esModuleInterop": true, 11 | "skipLibCheck": true, 12 | "forceConsistentCasingInFileNames": true, 13 | "isolatedModules": true, 14 | }, 15 | "exclude": ["node_modules", "**/*.test.ts"] 16 | } -------------------------------------------------------------------------------- /lambdas/sample_data/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es2020", 4 | "strict": true, 5 | "preserveConstEnums": true, 6 | "noEmit": true, 7 | "sourceMap": true, 8 | "module":"commonjs", 9 | "moduleResolution":"node", 10 | "esModuleInterop": true, 11 | "skipLibCheck": true, 12 | "forceConsistentCasingInFileNames": true, 13 | "isolatedModules": true, 14 | }, 15 | "exclude": ["node_modules", "**/*.test.ts"] 16 | } -------------------------------------------------------------------------------- /lambdas/web_api/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "web_api", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "@aws-sdk/client-bedrock-agent": "^3.623.0", 13 | "@aws-sdk/client-bedrock-agent-runtime": "^3.625.0", 14 | "@aws-sdk/client-s3": "^3.623.0", 15 | "aws-lambda": "^1.0.7" 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /lambdas/create_index/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "create_index", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "devDependencies": { 12 | "@types/aws-lambda": "^8.10.142" 13 | }, 14 | "dependencies": { 15 | "@aws-sdk/credential-provider-node": "^3.622.0", 16 | "@opensearch-project/opensearch": "^2.11.0" 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /lambdas/create_index_provider/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "create_index", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "devDependencies": { 12 | "@types/aws-lambda": "^8.10.142" 13 | }, 14 | "dependencies": { 15 | "@aws-sdk/credential-provider-node": "^3.622.0", 16 | "@opensearch-project/opensearch": "^2.11.0" 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /client/src/router/routes.ts: -------------------------------------------------------------------------------- 1 | import { RouteRecordRaw } from 'vue-router'; 2 | 3 | const routes: RouteRecordRaw[] = [ 4 | { 5 | path: '/', 6 | component: () => import('layouts/MainLayout.vue'), 7 
| children: [ 8 | { path: '', component: () => import('pages/KbIndex.vue'), name: 'index' }, 9 | { path: '/info', component: () => import('pages/KbHelp.vue'), name: 'help' }, 10 | ], 11 | }, 12 | 13 | // Always leave this as last one, 14 | // but you can also remove it 15 | { 16 | path: '/:catchAll(.*)*', 17 | component: () => import('pages/ErrorNotFound.vue'), 18 | }, 19 | ]; 20 | 21 | export default routes; 22 | -------------------------------------------------------------------------------- /test/kb-1159.test.ts: -------------------------------------------------------------------------------- 1 | // import * as cdk from 'aws-cdk-lib'; 2 | // import { Template } from 'aws-cdk-lib/assertions'; 3 | // import * as Kb1159 from '../lib/kb-1159-stack'; 4 | 5 | // example test. To run these tests, uncomment this file along with the 6 | // example resource in lib/kb-1159-stack.ts 7 | test('SQS Queue Created', () => { 8 | // const app = new cdk.App(); 9 | // // WHEN 10 | // const stack = new Kb1159.Kb1159Stack(app, 'MyTestStack'); 11 | // // THEN 12 | // const template = Template.fromStack(stack); 13 | 14 | // template.hasResourceProperties('AWS::SQS::Queue', { 15 | // VisibilityTimeout: 300 16 | // }); 17 | }); 18 | -------------------------------------------------------------------------------- /client/src/pages/ErrorNotFound.vue: -------------------------------------------------------------------------------- 1 | 24 | 25 | 30 | -------------------------------------------------------------------------------- /client/README.md: -------------------------------------------------------------------------------- 1 | # Knowledgebase Demo - 11:59 (knowledgebase-demo) 2 | 3 | Quickstart template for Gen AI on AWS 4 | 5 | ## Install the dependencies 6 | ```bash 7 | yarn 8 | # or 9 | npm install 10 | ``` 11 | 12 | ### Start the app in development mode (hot-code reloading, error reporting, etc.) 13 | ```bash 14 | quasar dev 15 | ``` 16 | 17 | 18 | ### Lint the files 19 | ```bash 20 | yarn lint 21 | # or 22 | npm run lint 23 | ``` 24 | 25 | 26 | ### Format the files 27 | ```bash 28 | yarn format 29 | # or 30 | npm run format 31 | ``` 32 | 33 | 34 | 35 | ### Build the app for production 36 | ```bash 37 | quasar build 38 | ``` 39 | 40 | ### Customize the configuration 41 | See [Configuring quasar.config.js](https://v2.quasar.dev/quasar-cli-vite/quasar-config-js). 
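
### Provide a local runtime config
At boot the app fetches `/config.json` for its Cognito settings and API base URL (see `src/boot/cognito.ts` and `src/boot/axios.ts`). The file is gitignored, so for local development you can create a placeholder yourself. This is only a sketch: the keys come from the boot files, the pool IDs and API URL are hypothetical placeholders, and the nested `Auth.Cognito` shape assumes the Amplify v6 resources-config format.
```bash
# Sketch only: placeholder values; the nested Auth/Cognito shape assumes Amplify v6.
cat > public/config.json <<'EOF'
{
  "baseUrl": "https://example.execute-api.us-east-1.amazonaws.com/prod",
  "cognitoOptions": {
    "Auth": {
      "Cognito": {
        "userPoolId": "us-east-1_EXAMPLE",
        "userPoolClientId": "exampleclientid123"
      }
    }
  }
}
EOF
```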
42 | -------------------------------------------------------------------------------- /client/.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | .thumbs.db 3 | node_modules 4 | 5 | # Quasar core related directories 6 | .quasar 7 | /dist 8 | /quasar.config.*.temporary.compiled* 9 | 10 | # Cordova related directories and files 11 | /src-cordova/node_modules 12 | /src-cordova/platforms 13 | /src-cordova/plugins 14 | /src-cordova/www 15 | 16 | # Capacitor related directories and files 17 | /src-capacitor/www 18 | /src-capacitor/node_modules 19 | 20 | # BEX related directories and files 21 | /src-bex/www 22 | /src-bex/js/core 23 | 24 | # Log files 25 | npm-debug.log* 26 | yarn-debug.log* 27 | yarn-error.log* 28 | 29 | # Editor directories and files 30 | .idea 31 | *.suo 32 | *.ntvs* 33 | *.njsproj 34 | *.sln 35 | 36 | # local .env files 37 | .env.local* 38 | 39 | /public/config.json 40 | -------------------------------------------------------------------------------- /client/src/boot/cognito.ts: -------------------------------------------------------------------------------- 1 | import { boot } from 'quasar/wrappers'; 2 | import { Amplify } from 'aws-amplify'; 3 | 4 | const getCognitoOptions = async () => { 5 | const { cognitoOptions } = await fetch('/config.json').then((response) => response.json()) 6 | return cognitoOptions 7 | } 8 | // Be careful when using SSR for cross-request state pollution 9 | // due to creating a Singleton instance here; 10 | // If any client changes this (global) instance, it might be a 11 | // good idea to move this instance creation inside of the 12 | // "export default () => {}" function below (which runs individually 13 | // for each client) 14 | 15 | export default boot(async () => { 16 | const options = await getCognitoOptions() 17 | Amplify.configure(options) 18 | }); 19 | 20 | export { Amplify }; 21 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2020", 4 | "module": "commonjs", 5 | "lib": [ 6 | "es2020", 7 | "dom" 8 | ], 9 | "declaration": true, 10 | "strict": true, 11 | "noImplicitAny": true, 12 | "strictNullChecks": true, 13 | "noImplicitThis": true, 14 | "alwaysStrict": true, 15 | "noUnusedLocals": false, 16 | "noUnusedParameters": false, 17 | "noImplicitReturns": true, 18 | "noFallthroughCasesInSwitch": false, 19 | "inlineSourceMap": true, 20 | "inlineSources": true, 21 | "experimentalDecorators": true, 22 | "strictPropertyInitialization": false, 23 | "typeRoots": [ 24 | "./node_modules/@types" 25 | ] 26 | }, 27 | "exclude": [ 28 | "node_modules", 29 | "cdk.out" 30 | ] 31 | } 32 | -------------------------------------------------------------------------------- /client/src/stores/auth-store.ts: -------------------------------------------------------------------------------- 1 | import { defineStore } from 'pinia'; 2 | //import { toRefs } from 'vue'; 3 | //import { useAuthenticator } from '@aws-amplify/ui-vue'; 4 | import { fetchAuthSession } from 'aws-amplify/auth'; 5 | 6 | //const authenticator = useAuthenticator(); 7 | 8 | export const useAuthStore = defineStore('auth', { 9 | state: () => ({ 10 | idToken: '' as string | undefined 11 | }), 12 | // getters: { 13 | // doubleCount: (state) => state.counter * 2, 14 | // }, 15 | actions: { 16 | // signOut() { 17 | // authenticator.signOut(); 18 | // this.$reset(); 19 | // }, 
20 | async setToken() { 21 | try { 22 | const session = await fetchAuthSession(); 23 | this.idToken = session.tokens?.idToken?.toString(); 24 | } catch (err) { 25 | console.log(err); 26 | } 27 | }, 28 | }, 29 | }); 30 | -------------------------------------------------------------------------------- /client/src/css/quasar.variables.scss: -------------------------------------------------------------------------------- 1 | // Quasar SCSS (& Sass) Variables 2 | // -------------------------------------------------- 3 | // To customize the look and feel of this app, you can override 4 | // the Sass/SCSS variables found in Quasar's source Sass/SCSS files. 5 | 6 | // Check documentation for full list of Quasar variables 7 | 8 | // Your own variables (that are declared here) and Quasar's own 9 | // ones will be available out of the box in your .vue/.scss/.sass files 10 | 11 | // It's highly recommended to change the default colors 12 | // to match your app's branding. 13 | // Tip: Use the "Theme Builder" on Quasar's documentation website. 14 | 15 | $primary: #397f90; 16 | $secondary: #dfdedd; 17 | $accent: #9c27b0; 18 | 19 | $dark: #1d1d1d; 20 | $dark-page: #121212; 21 | 22 | $positive: #21ba45; 23 | $negative: #c10015; 24 | $info: #31ccec; 25 | $warning: #f2c037; 26 | 27 | $charcoal: #47545f; 28 | $gunmetal: #2a2d34; 29 | -------------------------------------------------------------------------------- /client/postcss.config.cjs: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | // https://github.com/michael-ciniawsky/postcss-load-config 3 | 4 | module.exports = { 5 | plugins: [ 6 | // https://github.com/postcss/autoprefixer 7 | require('autoprefixer')({ 8 | overrideBrowserslist: [ 9 | 'last 4 Chrome versions', 10 | 'last 4 Firefox versions', 11 | 'last 4 Edge versions', 12 | 'last 4 Safari versions', 13 | 'last 4 Android versions', 14 | 'last 4 ChromeAndroid versions', 15 | 'last 4 FirefoxAndroid versions', 16 | 'last 4 iOS versions' 17 | ] 18 | }) 19 | 20 | // https://github.com/elchininet/postcss-rtlcss 21 | // If you want to support RTL css, then 22 | // 1. yarn/npm install postcss-rtlcss 23 | // 2. optionally set quasar.config.js > framework > lang to an RTL language 24 | // 3. uncomment the following line: 25 | // require('postcss-rtlcss') 26 | ] 27 | } 28 | -------------------------------------------------------------------------------- /client/src/stores/index.ts: -------------------------------------------------------------------------------- 1 | import { store } from 'quasar/wrappers' 2 | import { createPinia } from 'pinia' 3 | import { Router } from 'vue-router'; 4 | 5 | /* 6 | * When adding new properties to stores, you should also 7 | * extend the `PiniaCustomProperties` interface. 8 | * @see https://pinia.vuejs.org/core-concepts/plugins.html#typing-new-store-properties 9 | */ 10 | declare module 'pinia' { 11 | export interface PiniaCustomProperties { 12 | readonly router: Router; 13 | } 14 | } 15 | 16 | /* 17 | * If not building with SSR mode, you can 18 | * directly export the Store instantiation; 19 | * 20 | * The function below can be async too; either use 21 | * async/await or return a Promise which resolves 22 | * with the Store instance. 
23 | */ 24 | 25 | export default store((/* { ssrContext } */) => { 26 | const pinia = createPinia() 27 | 28 | // You can add Pinia plugins here 29 | // pinia.use(SomePiniaPlugin) 30 | 31 | return pinia 32 | }) 33 | -------------------------------------------------------------------------------- /client/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | <%= productName %> 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 23 | 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /client/src/boot/axios.ts: -------------------------------------------------------------------------------- 1 | import { boot } from 'quasar/wrappers'; 2 | import axios, { AxiosInstance } from 'axios'; 3 | import { Notify } from 'quasar'; 4 | 5 | declare module '@vue/runtime-core' { 6 | interface ComponentCustomProperties { 7 | $axios: AxiosInstance; 8 | $api: AxiosInstance; 9 | } 10 | } 11 | 12 | const getBaseUrl = async () => { 13 | const { baseUrl } = await fetch('/config.json').then((response) => 14 | response.json() 15 | ); 16 | return baseUrl; 17 | }; 18 | 19 | const api = axios.create(); 20 | 21 | export default boot(async ({ app }) => { 22 | app.config.globalProperties.$axios = axios; 23 | 24 | app.config.globalProperties.$api = api; 25 | 26 | api.defaults.baseURL = await getBaseUrl(); 27 | api.interceptors.response.use( 28 | (response) => response, 29 | (error) => { 30 | Notify.create({ 31 | type: 'negative', 32 | position: 'top', 33 | message: 'Something went wrong', 34 | }); 35 | console.log(error); 36 | } 37 | ); 38 | }); 39 | 40 | export { api }; 41 | -------------------------------------------------------------------------------- /bin/nebula.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import "source-map-support/register"; 3 | import * as cdk from "aws-cdk-lib"; 4 | import { NebulaStack } from "../lib/nebula-stack"; 5 | 6 | const app = new cdk.App(); 7 | new NebulaStack(app, "NebulaStack", { 8 | synthesizer: new cdk.DefaultStackSynthesizer({ 9 | generateBootstrapVersionRule: false 10 | }) 11 | /* If you don't specify 'env', this stack will be environment-agnostic. 12 | * Account/Region-dependent features and context lookups will not work, 13 | * but a single synthesized template can be deployed anywhere. */ 14 | 15 | /* Uncomment the next line to specialize this stack for the AWS Account 16 | * and Region that are implied by the current CLI configuration. */ 17 | // env: { account: process.env.CDK_DEFAULT_ACCOUNT, region: process.env.CDK_DEFAULT_REGION }, 18 | 19 | /* Uncomment the next line if you know exactly what Account and Region you 20 | * want to deploy the stack to. 
*/ 21 | // env: { account: '123456789012', region: 'us-east-1' }, 22 | 23 | /* For more information, see https://docs.aws.amazon.com/cdk/latest/guide/environments.html */ 24 | }); 25 | -------------------------------------------------------------------------------- /client/src/router/index.ts: -------------------------------------------------------------------------------- 1 | import { route } from 'quasar/wrappers'; 2 | import { 3 | createMemoryHistory, 4 | createRouter, 5 | createWebHashHistory, 6 | createWebHistory, 7 | } from 'vue-router'; 8 | 9 | import routes from './routes'; 10 | 11 | /* 12 | * If not building with SSR mode, you can 13 | * directly export the Router instantiation; 14 | * 15 | * The function below can be async too; either use 16 | * async/await or return a Promise which resolves 17 | * with the Router instance. 18 | */ 19 | 20 | export default route(function (/* { store, ssrContext } */) { 21 | const createHistory = process.env.SERVER 22 | ? createMemoryHistory 23 | : (process.env.VUE_ROUTER_MODE === 'history' ? createWebHistory : createWebHashHistory); 24 | 25 | const Router = createRouter({ 26 | scrollBehavior: () => ({ left: 0, top: 0 }), 27 | routes, 28 | 29 | // Leave this as is and make changes in quasar.conf.js instead! 30 | // quasar.conf.js -> build -> vueRouterMode 31 | // quasar.conf.js -> build -> publicPath 32 | history: createHistory(process.env.VUE_ROUTER_BASE), 33 | }); 34 | 35 | return Router; 36 | }); 37 | -------------------------------------------------------------------------------- /client/src/components/KbHeader.vue: -------------------------------------------------------------------------------- 1 | 18 | 36 | -------------------------------------------------------------------------------- /client/src/App.vue: -------------------------------------------------------------------------------- 1 | 27 | 28 | 39 | -------------------------------------------------------------------------------- /client/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "nebula-client", 3 | "version": "0.0.1", 4 | "description": "Quickstart template for Gen AI on AWS", 5 | "productName": "Nebula - 11:59", 6 | "author": "Justin Coker ", 7 | "private": true, 8 | "scripts": { 9 | "lint": "eslint --ext .js,.ts,.vue ./", 10 | "format": "prettier --write \"**/*.{js,ts,vue,scss,html,md,json}\" --ignore-path .gitignore", 11 | "test": "echo \"No test specified\" && exit 0", 12 | "dev": "quasar dev", 13 | "build": "quasar build" 14 | }, 15 | "dependencies": { 16 | "@aws-amplify/ui-vue": "^4.2.10", 17 | "@quasar/extras": "^1.16.17", 18 | "aws-amplify": "^6.4.4", 19 | "axios": "^1.2.1", 20 | "pinia": "^2.0.11", 21 | "quasar": "^2.17.7", 22 | "vue": "^3.4.18", 23 | "vue-router": "4.4.0" 24 | }, 25 | "devDependencies": { 26 | "@quasar/app-vite": "^1.11.0", 27 | "@quasar/cli": "^2.4.1", 28 | "@types/node": "^12.20.21", 29 | "@typescript-eslint/eslint-plugin": "^7.16.0", 30 | "@typescript-eslint/parser": "^7.16.0", 31 | "autoprefixer": "^10.4.2", 32 | "eslint": "^8.57.0", 33 | "eslint-config-prettier": "^8.1.0", 34 | "eslint-plugin-vue": "^9.0.0", 35 | "prettier": "^2.5.1", 36 | "typescript": "~5.5.4", 37 | "vite-plugin-checker": "^0.7.2", 38 | "vue-tsc": "^2.0.29" 39 | }, 40 | "engines": { 41 | "node": "^20 || ^18 || ^16", 42 | "npm": ">= 6.13.4", 43 | "yarn": ">= 1.21.1" 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /client/src/components/KbFooter.vue: 
-------------------------------------------------------------------------------- 1 | 34 | 52 | -------------------------------------------------------------------------------- /client/src/components/KbChat.vue: -------------------------------------------------------------------------------- 1 | 21 | 67 | -------------------------------------------------------------------------------- /client/src/pages/KbIndex.vue: -------------------------------------------------------------------------------- 1 | 39 | 40 | 63 | -------------------------------------------------------------------------------- /client/src/stores/message-store.ts: -------------------------------------------------------------------------------- 1 | import { defineStore } from 'pinia'; 2 | import { useUiStore } from './ui-store'; 3 | import { api } from 'boot/axios'; 4 | import { useAuthStore } from './auth-store'; 5 | import { scroll } from 'quasar'; 6 | 7 | const authStore = useAuthStore(); 8 | const uiStore = useUiStore(); 9 | const { getScrollTarget, setVerticalScrollPosition } = scroll; 10 | 11 | export const useMessageStore = defineStore('message', { 12 | state: () => ({ 13 | messages: [] as Message[], 14 | sessionId: undefined as string | undefined, 15 | questionInput: '', 16 | questionInputRef: null as HTMLDivElement | null, 17 | scrollPlaceholderRef: null as HTMLDivElement | null, 18 | }), 19 | // getters: { 20 | // doubleCount: (state) => state.counter * 2, 21 | // }, 22 | actions: { 23 | async focusInput() { 24 | if (this.questionInputRef) { 25 | this.questionInputRef.focus(); 26 | } 27 | }, 28 | async addMessage() { 29 | uiStore.waiting = true; 30 | const staticInput = JSON.parse(JSON.stringify(this.questionInput)); 31 | this.questionInput = ''; 32 | this.messages.push({ sender: 'me', text: staticInput }); 33 | this.scrollToElement(); 34 | 35 | const payload = { 36 | question: staticInput, 37 | sessionId: this.sessionId 38 | }; 39 | 40 | try { 41 | const response = await api.post('/chat', payload, { 42 | headers: { Authorization: `Bearer ${authStore.idToken}` }, 43 | }); 44 | this.messages.push({ sender: 'aws', text: response.data.data.answer, references: response.data.data.citations[0].retrievedReferences }); 45 | //this.sessionId = response.data.data.sessionId; 46 | } catch (err) { 47 | console.log(err); 48 | } 49 | uiStore.waiting = false; 50 | this.scrollToElement(); 51 | this.questionInputRef?.focus(); 52 | }, 53 | scrollToElement() { 54 | if (this.scrollPlaceholderRef) { 55 | const target = getScrollTarget(this.scrollPlaceholderRef); 56 | const offset = this.scrollPlaceholderRef.offsetTop; 57 | const duration = 1000; 58 | setVerticalScrollPosition(target, offset, duration); 59 | } 60 | }, 61 | }, 62 | }); 63 | 64 | interface Message { 65 | sender: 'me' | 'aws'; 66 | text: string; 67 | references?: Reference[] 68 | } 69 | 70 | export interface Reference { 71 | content: { 72 | text: string; 73 | }; 74 | location: { 75 | s3Location: { 76 | uri: string; 77 | }; 78 | type: string; 79 | }; 80 | } 81 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "nebula", 3 | "version": "1.4.0", 4 | "bin": { 5 | "nebula": "bin/nebula.js" 6 | }, 7 | "scripts": { 8 | "prebuild": "rm -rf dist", 9 | "build": "npm run compile && npm run package && npm run synth && npm run upload", 10 | "compile": "npm run compile-create-index && npm run compile-web-api && npm run compile-copy-site && npm 
run compile-data && npm run compile-client", 11 | "compile-create-index": "esbuild lambdas/create_index/index.ts --bundle --minify --sourcemap --platform=node --target=es2020 --outfile=dist/create_index.js", 12 | "compile-web-api": "esbuild lambdas/web_api/index.ts --bundle --minify --sourcemap --platform=node --target=es2020 --outfile=dist/web_api.js", 13 | "compile-copy-site": "esbuild lambdas/copy_site/index.ts --bundle --minify --sourcemap --platform=node --target=es2020 --outfile=dist/copy_site.js", 14 | "compile-data": "esbuild lambdas/sample_data/index.ts --bundle --minify --sourcemap --platform=node --target=es2020 --outfile=dist/sample_data.js", 15 | "compile-client": "npm run build --prefix client", 16 | "package": "npm run package-create-index && npm run package-web-api && npm run package-copy-site && npm run package-data", 17 | "package-create-index": "zip -j dist/create_index.zip dist/create_index.js*", 18 | "package-web-api": "zip -j dist/web_api.zip dist/web_api.js*", 19 | "package-copy-site": "zip -j dist/copy_site.zip dist/copy_site.js*", 20 | "package-data": "zip -j dist/sample_data.zip dist/sample_data.js*", 21 | "synth": "cdk synth --quiet", 22 | "upload": "npm run upload-lambdas && npm run upload-site && npm run upload-iac && npm run upload-data", 23 | "upload-iac": "aws s3 cp cdk.out/NebulaStack.template.json s3://1159-public-assets/nebula/$npm_package_version/template.json --profile shared-services", 24 | "upload-lambdas": "aws s3 cp dist/ s3://1159-public-assets/nebula/$npm_package_version/lambdas/ --recursive --exclude '*' --include '*.zip' --profile shared-services", 25 | "upload-site": "aws s3 sync client/dist/spa s3://1159-public-assets/nebula/$npm_package_version/site/ --exclude '.DS_Store' --exclude 'config.json' --profile shared-services", 26 | "upload-data": "aws s3 sync sample_data s3://1159-public-assets/nebula/$npm_package_version/sample_data/ --exclude '.DS_Store' --profile shared-services", 27 | "watch": "tsc -w", 28 | "test": "jest", 29 | "cdk": "cdk" 30 | }, 31 | "devDependencies": { 32 | "@types/aws-lambda": "^8.10.142", 33 | "@types/jest": "^29.5.12", 34 | "@types/node": "20.14.9", 35 | "aws-cdk": "2.150.0", 36 | "esbuild": "^0.23.0", 37 | "jest": "^29.7.0", 38 | "ts-jest": "^29.1.5", 39 | "ts-node": "^10.9.2", 40 | "typescript": "~5.5.3" 41 | }, 42 | "dependencies": { 43 | "aws-cdk-lib": "2.150.0", 44 | "constructs": "^10.0.0", 45 | "source-map-support": "^0.5.21" 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /client/src/stores/kb-store.ts: -------------------------------------------------------------------------------- 1 | import { defineStore } from 'pinia'; 2 | import { api } from 'boot/axios'; 3 | import { useAuthStore } from './auth-store'; 4 | 5 | const authStore = useAuthStore(); 6 | 7 | export const useKbStore = defineStore('kb', { 8 | state: () => ({ 9 | bucket: { 10 | docs: [], 11 | count: 0, 12 | }, 13 | dataSource: { 14 | createdAt: '', 15 | dataDeletionPolicy: '', 16 | dataSourceConfiguration: { 17 | s3Configuration: { 18 | bucketArn: '', 19 | bucketOwnerAccountId: '', 20 | }, 21 | type: '', 22 | }, 23 | dataSourceId: '', 24 | knowledgeBaseId: '', 25 | name: '', 26 | status: '', 27 | updatedAt: '', 28 | }, 29 | knowledgeBase: { 30 | createdAt: '', 31 | knowledgeBaseArn: '', 32 | knowledgeBaseConfiguration: { 33 | type: '', 34 | vectorKnowledgeBaseConfiguration: { 35 | embeddingModelArn: '', 36 | }, 37 | }, 38 | knowledgeBaseId: '', 39 | name: '', 40 | roleArn: '', 41 | status: '', 
42 | updatedAt: '', 43 | storageConfiguration: { 44 | type: '', 45 | opensearchServerlessConfiguration: { 46 | collectionArn: '', 47 | fieldMapping: { 48 | metadataField: '', 49 | textField: '', 50 | vectorField: '', 51 | }, 52 | vectorIndexName: '', 53 | }, 54 | }, 55 | }, 56 | }), 57 | // getters: { 58 | // doubleCount: (state) => state.counter * 2, 59 | // }, 60 | getters: { 61 | bucketName(state) { 62 | const stringArray = 63 | state.dataSource.dataSourceConfiguration.s3Configuration.bucketArn.split( 64 | ':::' 65 | ); 66 | return stringArray[1]; 67 | }, 68 | storageType(state) { 69 | if ( 70 | state.knowledgeBase.storageConfiguration.type === 71 | 'OPENSEARCH_SERVERLESS' 72 | ) { 73 | return 'OpenSearch Serverless'; 74 | } else { 75 | return 'Unknown'; 76 | } 77 | }, 78 | }, 79 | actions: { 80 | async getDocsInfo() { 81 | try { 82 | const response = await api.get('/docs', { 83 | headers: { Authorization: `Bearer ${authStore.idToken}` }, 84 | }); 85 | this.bucket = response.data.data; 86 | } catch (error) { 87 | console.error(error); 88 | } 89 | }, 90 | async getKbInfo() { 91 | try { 92 | const response = await api.get('/kb', { 93 | headers: { Authorization: `Bearer ${authStore.idToken}` }, 94 | }); 95 | this.dataSource = response.data.data.dataSource; 96 | this.knowledgeBase = response.data.data.knowledgeBase; 97 | } catch (error) { 98 | console.error(error); 99 | } 100 | }, 101 | }, 102 | }); 103 | -------------------------------------------------------------------------------- /lambdas/create_index_provider/index.ts: -------------------------------------------------------------------------------- 1 | import { Context } from "aws-lambda"; 2 | import { defaultProvider } from "@aws-sdk/credential-provider-node"; // V3 SDK. 3 | import { Client } from "@opensearch-project/opensearch"; 4 | import { AwsSigv4Signer } from "@opensearch-project/opensearch/aws"; 5 | 6 | let region; 7 | if (process.env.REGION) { 8 | region = process.env.REGION; 9 | } else { 10 | throw new Error("REGION environmental variable not set"); 11 | } 12 | 13 | const endpoint = process.env.ENDPOINT; 14 | 15 | const client = new Client({ 16 | ...AwsSigv4Signer({ 17 | region: region, 18 | service: "aoss", 19 | getCredentials: () => { 20 | const credentialsProvider = defaultProvider(); 21 | return credentialsProvider(); 22 | }, 23 | }), 24 | node: endpoint, // OpenSearch domain URL 25 | }); 26 | 27 | type CloudFormationEvent = { 28 | RequestType: "Create" | "Delete" | "Update"; 29 | RequestId: string; 30 | ResourceType: string; 31 | LogicalResourceId: string; 32 | StackId: string; 33 | OldResourceProperties: { [key: string]: string }; 34 | ResourceProperties: { [key: string]: string }; 35 | PhysicalResourceId: string; 36 | }; 37 | 38 | type ResponseObject = { 39 | PhysicalResourceId: string; 40 | NoEcho: boolean; 41 | Data: { [key: string]: string }; 42 | }; 43 | 44 | export const handler = async ( 45 | event: CloudFormationEvent, 46 | _context: Context 47 | ): Promise => { 48 | console.log("REQUEST RECEIVED:\n" + JSON.stringify(event)); 49 | 50 | const requestType = event.RequestType; 51 | 52 | if (requestType === "Update" || requestType === "Delete") { 53 | return { 54 | PhysicalResourceId: event.PhysicalResourceId, 55 | NoEcho: false, 56 | Data: {}, 57 | }; 58 | } else { 59 | const settings = { 60 | settings: { 61 | index: { 62 | number_of_shards: 2, 63 | number_of_replicas: 0, 64 | }, 65 | }, 66 | mappings: { 67 | properties: { 68 | BEDROCK_METADATA: { type: "text", index: false }, 69 | BEDROCK_TEXT_CHUNK: { type: 
"text" }, 70 | "kb-vector-1159": { 71 | type: "knn_vector", 72 | dimension: 1024, 73 | method: { 74 | engine: "faiss", 75 | space_type: "l2", 76 | name: "hnsw", 77 | parameters: {}, 78 | }, 79 | }, 80 | }, 81 | }, 82 | }; 83 | 84 | try { 85 | await client.indices.create({ 86 | index: "kb-index-1159", 87 | body: settings, 88 | }); 89 | return { 90 | PhysicalResourceId: "kb-index-1159", 91 | NoEcho: false, 92 | Data: { Status: "index created" }, 93 | }; 94 | } catch (err) { 95 | console.log(err); 96 | throw new Error("Error:" + err); 97 | } 98 | } 99 | }; 100 | -------------------------------------------------------------------------------- /client/src/css/app.scss: -------------------------------------------------------------------------------- 1 | // app global css in SCSS form 2 | .text-gunmetal { 3 | color: $gunmetal !important; 4 | } 5 | .bg-gunmetal { 6 | background: $gunmetal !important; 7 | } 8 | 9 | .text-charcoal { 10 | color: $charcoal !important; 11 | } 12 | .bg-charcoal { 13 | background: $charcoal !important; 14 | } 15 | 16 | .kb-container { 17 | width: 100%; 18 | } 19 | 20 | .kb-page { 21 | padding-top: 8px; 22 | padding-right: 20px; 23 | padding-bottom: 8px; 24 | padding-left: 20px; 25 | } 26 | 27 | .kb-info-page { 28 | padding-top: 8px; 29 | padding-right: 20px; 30 | padding-bottom: 40px; 31 | padding-left: 20px; 32 | } 33 | 34 | .kb-footer { 35 | padding: 20px 8px; 36 | } 37 | 38 | .kb-selected-input, 39 | .q-field__native { 40 | color: $charcoal !important; 41 | } 42 | 43 | .kb-selected-input .q-field__append { 44 | align-self: flex-end; 45 | } 46 | 47 | @media (min-width: $breakpoint-md-max) { 48 | .kb-container { 49 | max-width: 900px; 50 | } 51 | } 52 | 53 | [data-amplify-authenticator] { 54 | --amplify-components-authenticator-router-box-shadow: 0; 55 | --amplify-components-authenticator-form-padding: var(--amplify-space-medium) 56 | var(--amplify-space-xl) var(--amplify-space-xl); 57 | --amplify-components-button-primary-background-color: var($primary); 58 | --amplify-components-fieldcontrol-focus-box-shadow: 0; 59 | --amplify-components-tabs-item-active-border-color: var( 60 | --amplify-colors-neutral-100 61 | ); 62 | --amplify-components-tabs-item-color: var(--amplify-colors-neutral-80); 63 | --amplify-components-tabs-item-active-color: var(--amplify-colors-purple-100); 64 | --amplify-components-button-link-color: var($primary); 65 | --amplify-components-button-link-active-background-color: transparent; 66 | --amplify-components-button-link-focus-background-color: transparent; 67 | --amplify-components-button-link-hover-background-color: transparent; 68 | --amplify-components-fieldcontrol-outline-color: var($primary); 69 | } 70 | 71 | body.mobile [data-amplify-authenticator] { 72 | --amplify-components-authenticator-router-border-width: 0; 73 | } 74 | 75 | body.desktop [data-amplify-authenticator] { 76 | --amplify-components-authenticator-router-border-width: 1px; 77 | } 78 | 79 | .amplify-button--primary { 80 | background: $primary; 81 | } 82 | 83 | .amplify-input { 84 | outline-width: 2px !important; 85 | outline-style: solid !important; 86 | outline-color: #ffffff !important; 87 | } 88 | 89 | .amplify-input:focus { 90 | outline-color: $primary !important; 91 | border: none !important; 92 | } 93 | 94 | #scroll-placeholder { 95 | height: 1px; 96 | } 97 | 98 | .q-message-text:last-child { 99 | max-height: 0 !important; 100 | } 101 | 102 | .kb-message-sent-name { 103 | text-align: right; 104 | } 105 | 106 | .info-h1 { 107 | color: $primary; 108 | font-size: 2.4em; 
109 | font-weight: 700; 110 | } 111 | 112 | .info-h2 { 113 | color: $primary; 114 | font-size: 2.2em; 115 | font-weight: 700; 116 | border-width: 0 0 1px 0; 117 | border-color: $secondary; 118 | border-style: solid; 119 | } 120 | 121 | .info-h3 { 122 | color: $gunmetal; 123 | font-size: 1.2em; 124 | font-weight: 700; 125 | } 126 | -------------------------------------------------------------------------------- /client/.eslintrc.cjs: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | // https://eslint.org/docs/user-guide/configuring#configuration-cascading-and-hierarchy 3 | // This option interrupts the configuration hierarchy at this file 4 | // Remove this if you have an higher level ESLint config file (it usually happens into a monorepos) 5 | root: true, 6 | 7 | // https://eslint.vuejs.org/user-guide/#how-to-use-a-custom-parser 8 | // Must use parserOptions instead of "parser" to allow vue-eslint-parser to keep working 9 | // `parser: 'vue-eslint-parser'` is already included with any 'plugin:vue/**' config and should be omitted 10 | parserOptions: { 11 | parser: require.resolve('@typescript-eslint/parser'), 12 | extraFileExtensions: [ '.vue' ] 13 | }, 14 | 15 | env: { 16 | browser: true, 17 | es2021: true, 18 | node: true 19 | }, 20 | 21 | // Rules order is important, please avoid shuffling them 22 | extends: [ 23 | // Base ESLint recommended rules 24 | // 'eslint:recommended', 25 | 26 | // https://github.com/typescript-eslint/typescript-eslint/tree/master/packages/eslint-plugin#usage 27 | // ESLint typescript rules 28 | 'plugin:@typescript-eslint/recommended', 29 | 30 | // Uncomment any of the lines below to choose desired strictness, 31 | // but leave only one uncommented! 32 | // See https://eslint.vuejs.org/rules/#available-rules 33 | 'plugin:vue/vue3-essential', // Priority A: Essential (Error Prevention) 34 | // 'plugin:vue/vue3-strongly-recommended', // Priority B: Strongly Recommended (Improving Readability) 35 | // 'plugin:vue/vue3-recommended', // Priority C: Recommended (Minimizing Arbitrary Choices and Cognitive Overhead) 36 | 37 | // https://github.com/prettier/eslint-config-prettier#installation 38 | // usage with Prettier, provided by 'eslint-config-prettier'. 
39 | 'prettier' 40 | ], 41 | 42 | plugins: [ 43 | // required to apply rules which need type information 44 | '@typescript-eslint', 45 | 46 | // https://eslint.vuejs.org/user-guide/#why-doesn-t-it-work-on-vue-files 47 | // required to lint *.vue files 48 | 'vue' 49 | 50 | // https://github.com/typescript-eslint/typescript-eslint/issues/389#issuecomment-509292674 51 | // Prettier has not been included as plugin to avoid performance impact 52 | // add it as an extension for your IDE 53 | 54 | ], 55 | 56 | globals: { 57 | ga: 'readonly', // Google Analytics 58 | cordova: 'readonly', 59 | __statics: 'readonly', 60 | __QUASAR_SSR__: 'readonly', 61 | __QUASAR_SSR_SERVER__: 'readonly', 62 | __QUASAR_SSR_CLIENT__: 'readonly', 63 | __QUASAR_SSR_PWA__: 'readonly', 64 | process: 'readonly', 65 | Capacitor: 'readonly', 66 | chrome: 'readonly' 67 | }, 68 | 69 | // add your custom rules here 70 | rules: { 71 | 72 | 'prefer-promise-reject-errors': 'off', 73 | 74 | quotes: ['warn', 'single', { avoidEscape: true }], 75 | 76 | // this rule, if on, would require explicit return type on the `render` function 77 | '@typescript-eslint/explicit-function-return-type': 'off', 78 | 79 | // in plain CommonJS modules, you can't use `import foo = require('foo')` to pass this rule, so it has to be disabled 80 | '@typescript-eslint/no-var-requires': 'off', 81 | 82 | // The core 'no-unused-vars' rules (in the eslint:recommended ruleset) 83 | // does not work with type definitions 84 | 'no-unused-vars': 'off', 85 | 86 | // allow debugger during development only 87 | 'no-debugger': process.env.NODE_ENV === 'production' ? 'error' : 'off' 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "npx ts-node --prefer-ts-exts bin/nebula.ts", 3 | "watch": { 4 | "include": [ 5 | "**" 6 | ], 7 | "exclude": [ 8 | "README.md", 9 | "cdk*.json", 10 | "**/*.d.ts", 11 | "**/*.js", 12 | "tsconfig.json", 13 | "package*.json", 14 | "yarn.lock", 15 | "node_modules", 16 | "test" 17 | ] 18 | }, 19 | "context": { 20 | "@aws-cdk/aws-lambda:recognizeLayerVersion": true, 21 | "@aws-cdk/core:checkSecretUsage": true, 22 | "@aws-cdk/core:target-partitions": [ 23 | "aws", 24 | "aws-cn" 25 | ], 26 | "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true, 27 | "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true, 28 | "@aws-cdk/aws-ecs:arnFormatIncludesClusterName": true, 29 | "@aws-cdk/aws-iam:minimizePolicies": true, 30 | "@aws-cdk/core:validateSnapshotRemovalPolicy": true, 31 | "@aws-cdk/aws-codepipeline:crossAccountKeyAliasStackSafeResourceName": true, 32 | "@aws-cdk/aws-s3:createDefaultLoggingPolicy": true, 33 | "@aws-cdk/aws-sns-subscriptions:restrictSqsDescryption": true, 34 | "@aws-cdk/aws-apigateway:disableCloudWatchRole": true, 35 | "@aws-cdk/core:enablePartitionLiterals": true, 36 | "@aws-cdk/aws-events:eventsTargetQueueSameAccount": true, 37 | "@aws-cdk/aws-ecs:disableExplicitDeploymentControllerForCircuitBreaker": true, 38 | "@aws-cdk/aws-iam:importedRoleStackSafeDefaultPolicyName": true, 39 | "@aws-cdk/aws-s3:serverAccessLogsUseBucketPolicy": true, 40 | "@aws-cdk/aws-route53-patters:useCertificate": true, 41 | "@aws-cdk/customresources:installLatestAwsSdkDefault": false, 42 | "@aws-cdk/aws-rds:databaseProxyUniqueResourceName": true, 43 | "@aws-cdk/aws-codedeploy:removeAlarmsFromDeploymentGroup": true, 44 | "@aws-cdk/aws-apigateway:authorizerChangeDeploymentLogicalId": 
true, 45 | "@aws-cdk/aws-ec2:launchTemplateDefaultUserData": true, 46 | "@aws-cdk/aws-secretsmanager:useAttachedSecretResourcePolicyForSecretTargetAttachments": true, 47 | "@aws-cdk/aws-redshift:columnId": true, 48 | "@aws-cdk/aws-stepfunctions-tasks:enableEmrServicePolicyV2": true, 49 | "@aws-cdk/aws-ec2:restrictDefaultSecurityGroup": true, 50 | "@aws-cdk/aws-apigateway:requestValidatorUniqueId": true, 51 | "@aws-cdk/aws-kms:aliasNameRef": true, 52 | "@aws-cdk/aws-autoscaling:generateLaunchTemplateInsteadOfLaunchConfig": true, 53 | "@aws-cdk/core:includePrefixInUniqueNameGeneration": true, 54 | "@aws-cdk/aws-efs:denyAnonymousAccess": true, 55 | "@aws-cdk/aws-opensearchservice:enableOpensearchMultiAzWithStandby": true, 56 | "@aws-cdk/aws-lambda-nodejs:useLatestRuntimeVersion": true, 57 | "@aws-cdk/aws-efs:mountTargetOrderInsensitiveLogicalId": true, 58 | "@aws-cdk/aws-rds:auroraClusterChangeScopeOfInstanceParameterGroupWithEachParameters": true, 59 | "@aws-cdk/aws-appsync:useArnForSourceApiAssociationIdentifier": true, 60 | "@aws-cdk/aws-rds:preventRenderingDeprecatedCredentials": true, 61 | "@aws-cdk/aws-codepipeline-actions:useNewDefaultBranchForCodeCommitSource": true, 62 | "@aws-cdk/aws-cloudwatch-actions:changeLambdaPermissionLogicalIdForLambdaAction": true, 63 | "@aws-cdk/aws-codepipeline:crossAccountKeysDefaultValueToFalse": true, 64 | "@aws-cdk/aws-codepipeline:defaultPipelineTypeToV2": true, 65 | "@aws-cdk/aws-kms:reduceCrossAccountRegionPolicyScope": true, 66 | "@aws-cdk/aws-eks:nodegroupNameAttribute": true, 67 | "@aws-cdk/aws-ec2:ebsDefaultGp3Volume": true, 68 | "@aws-cdk/aws-ecs:removeDefaultDeploymentAlarm": true, 69 | "@aws-cdk/custom-resources:logApiResponseDataPropertyTrueDefault": false, 70 | "@aws-cdk/aws-stepfunctions-tasks:ecsReduceRunTaskPermissions": true 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /client/src/components/KbRightDrawer.vue: -------------------------------------------------------------------------------- 1 | 93 | 104 | -------------------------------------------------------------------------------- /lambdas/sample_data/index.ts: -------------------------------------------------------------------------------- 1 | import { Context } from "aws-lambda"; 2 | import * as https from "node:https"; 3 | import { 4 | S3Client, 5 | ListObjectsCommand, 6 | CopyObjectCommand, 7 | } from "@aws-sdk/client-s3"; 8 | import { 9 | BedrockAgentClient, 10 | StartIngestionJobCommand, 11 | } from "@aws-sdk/client-bedrock-agent"; 12 | 13 | const s3Client = new S3Client({}); 14 | const listObjectsCommand = new ListObjectsCommand({ 15 | Bucket: process.env.SOURCE_BUCKET, 16 | Prefix: `nebula/${process.env.VERSION}/sample_data/`, 17 | }); 18 | 19 | const bedrockAgentClient = new BedrockAgentClient({}); 20 | const startIngestionJobCommand = new StartIngestionJobCommand({ 21 | knowledgeBaseId: process.env.KB_ID, 22 | dataSourceId: process.env.DATA_SOURCE_ID, 23 | }); 24 | 25 | type CloudFormationEvent = { 26 | RequestType: "Create" | "Delete" | "Update"; 27 | RequestId: string; 28 | ResponseURL: string; 29 | ResourceType: string; 30 | LogicalResourceId: string; 31 | StackId: string; 32 | }; 33 | 34 | type ResponseStatus = "FAILED" | "SUCCESS"; 35 | 36 | type ResponseData = { 37 | Status?: string; 38 | Error?: string; 39 | }; 40 | 41 | export const handler = async (event: CloudFormationEvent, context: Context) => { 42 | console.log("REQUEST RECEIVED:\n" + JSON.stringify(event)); 43 | 44 | let responseStatus: ResponseStatus = 
"FAILED"; 45 | let responseData: ResponseData = {}; 46 | 47 | // For Delete requests, immediately send a SUCCESS response. 48 | if (event.RequestType === "Delete" || event.ResourceType === "Update") { 49 | await sendResponse(event, context, "SUCCESS", { Status: "Deleting" }); 50 | return; 51 | } 52 | 53 | try { 54 | const sourceObjects = await s3Client.send(listObjectsCommand); 55 | console.log(sourceObjects) 56 | const keys = sourceObjects.Contents?.map((object) => ({ 57 | newKey: object.Key?.slice(25), 58 | oldKey: object.Key, 59 | })); 60 | 61 | if (keys?.length) { 62 | for (const key of keys) { 63 | const send = await s3Client.send( 64 | new CopyObjectCommand({ 65 | Bucket: process.env.DOCS_BUCKET, 66 | Key: key.newKey, 67 | CopySource: `${process.env.SOURCE_BUCKET}/${key.oldKey}`, 68 | }) 69 | ); 70 | } 71 | } 72 | 73 | await bedrockAgentClient.send(startIngestionJobCommand); 74 | responseStatus = "SUCCESS"; 75 | responseData = { Status: "Sample data copied" }; 76 | } catch (err) { 77 | responseData = { Error: "Something went wrong" }; 78 | console.log(responseData.Error + ":\n", err); 79 | } 80 | 81 | await sendResponse(event, context, responseStatus, responseData); 82 | }; 83 | 84 | function sendResponse( 85 | event: CloudFormationEvent, 86 | context: Context, 87 | responseStatus: ResponseStatus, 88 | responseData: ResponseData 89 | ) { 90 | return new Promise((resolve, reject) => { 91 | let responseBody = JSON.stringify({ 92 | Status: responseStatus, 93 | Reason: 94 | "See the details in CloudWatch Log Stream: " + context.logStreamName, 95 | PhysicalResourceId: context.logStreamName, 96 | StackId: event.StackId, 97 | RequestId: event.RequestId, 98 | LogicalResourceId: event.LogicalResourceId, 99 | NoEcho: false, 100 | Data: responseData, 101 | }); 102 | 103 | console.log("Response body:\n", responseBody); 104 | 105 | //var parsedUrl = url.parse(event.ResponseURL); 106 | //let parsedUrl = new URL(event.ResponseURL); 107 | let options = { 108 | method: "PUT", 109 | headers: { 110 | "Content-Type": "", 111 | "Content-Length": responseBody.length, 112 | }, 113 | }; 114 | 115 | var request = https.request(event.ResponseURL, options, (response) => { 116 | console.log("Status code: " + response.statusCode); 117 | resolve(context.done()); 118 | }); 119 | 120 | request.on("error", function (error) { 121 | console.log("send(..) failed executing https.request(..): " + error); 122 | reject(context.done(error)); 123 | }); 124 | 125 | request.write(responseBody); 126 | request.end(); 127 | }); 128 | } 129 | -------------------------------------------------------------------------------- /lambdas/create_index/index.ts: -------------------------------------------------------------------------------- 1 | import { Context } from "aws-lambda"; 2 | import { defaultProvider } from "@aws-sdk/credential-provider-node"; // V3 SDK. 
3 | import { Client } from "@opensearch-project/opensearch"; 4 | import { AwsSigv4Signer } from "@opensearch-project/opensearch/aws"; 5 | import * as https from "node:https"; 6 | 7 | let region: string; 8 | 9 | const endpoint = process.env.ENDPOINT; 10 | 11 | type CloudFormationEvent = { 12 | RequestType: "Create" | "Delete" | "Update"; 13 | RequestId: string; 14 | ResponseURL: string; 15 | ResourceType: string; 16 | LogicalResourceId: string; 17 | StackId: string; 18 | }; 19 | 20 | type ResponseStatus = "FAILED" | "SUCCESS"; 21 | 22 | type ResponseData = { 23 | Status?: string; 24 | Error?: string; 25 | }; 26 | 27 | export const handler = async (event: CloudFormationEvent, context: Context) => { 28 | console.log("REQUEST RECEIVED:\n" + JSON.stringify(event)); 29 | 30 | let responseStatus: ResponseStatus = "FAILED"; 31 | let responseData: ResponseData = {}; 32 | 33 | if (process.env.REGION) { 34 | region = process.env.REGION; 35 | } else { 36 | await sendResponse(event, context, responseStatus, { Error: "REGION variable not set" }); 37 | } 38 | 39 | const client = new Client({ 40 | ...AwsSigv4Signer({ 41 | region: region, 42 | service: "aoss", 43 | getCredentials: () => { 44 | const credentialsProvider = defaultProvider(); 45 | return credentialsProvider(); 46 | }, 47 | }), 48 | node: endpoint, // OpenSearch domain URL 49 | }); 50 | 51 | // For Delete requests, immediately send a SUCCESS response. 52 | if (event.RequestType === "Delete" || event.ResourceType === "Update") { 53 | await sendResponse(event, context, "SUCCESS", { Status: "Deleting" }); 54 | return; 55 | } 56 | 57 | const settings = { 58 | settings: { 59 | index: { 60 | number_of_shards: 2, 61 | number_of_replicas: 0, 62 | }, 63 | }, 64 | mappings: { 65 | properties: { 66 | BEDROCK_METADATA: { type: "text", index: false }, 67 | BEDROCK_TEXT_CHUNK: { type: "text" }, 68 | "nebula-vector": { 69 | type: "knn_vector", 70 | dimension: 1024, 71 | method: { 72 | engine: "faiss", 73 | space_type: "l2", 74 | name: "hnsw", 75 | parameters: {}, 76 | }, 77 | }, 78 | }, 79 | }, 80 | }; 81 | 82 | try { 83 | await client.indices.create({ 84 | index: "nebula-index", 85 | body: settings, 86 | }); 87 | await new Promise(resolve => setTimeout(resolve, 30000)); 88 | // setTimeout(() => { 89 | // console.log("Pausing for index access policy creation") 90 | // }, 60000) 91 | responseStatus = "SUCCESS"; 92 | responseData = { Status: "Index created" }; 93 | } catch (err) { 94 | responseData = { Error: "Index was not created" }; 95 | console.log(responseData.Error + ":\n", err); 96 | } 97 | 98 | await sendResponse(event, context, responseStatus, responseData); 99 | }; 100 | 101 | function sendResponse( 102 | event: CloudFormationEvent, 103 | context: Context, 104 | responseStatus: ResponseStatus, 105 | responseData: ResponseData 106 | ) { 107 | return new Promise((resolve, reject) => { 108 | let responseBody = JSON.stringify({ 109 | Status: responseStatus, 110 | Reason: 111 | "See the details in CloudWatch Log Stream: " + context.logStreamName, 112 | PhysicalResourceId: context.logStreamName, 113 | StackId: event.StackId, 114 | RequestId: event.RequestId, 115 | LogicalResourceId: event.LogicalResourceId, 116 | NoEcho: false, 117 | Data: responseData, 118 | }); 119 | 120 | console.log("Response body:\n", responseBody); 121 | 122 | //var parsedUrl = url.parse(event.ResponseURL); 123 | //let parsedUrl = new URL(event.ResponseURL); 124 | let options = { 125 | method: "PUT", 126 | headers: { 127 | "Content-Type": "", 128 | "Content-Length": 
responseBody.length,
129 |       },
130 |     };
131 | 
132 |     var request = https.request(event.ResponseURL, options, (response) => {
133 |       console.log("Status code: " + response.statusCode);
134 |       resolve(context.done());
135 |     });
136 | 
137 |     request.on("error", function (error) {
138 |       console.log("send(..) failed executing https.request(..): " + error);
139 |       reject(context.done(error));
140 |     });
141 | 
142 |     request.write(responseBody);
143 |     request.end();
144 |   });
145 | }
146 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Nebula Knowledge Base
2 | Welcome to the Nebula Knowledge Base, which provides contextual chat capabilities to help you query and understand your enterprise documents effectively.
3 | With deployment made simple through CloudFormation, you can get started in just a few clicks.
4 | It is fully contained within your environment, ensuring that your data remains secure at all times.
5 | 
6 | 
7 | ## Deploying the Stack
8 | The application can be deployed by simply clicking `Deploy the Stack` below. This link will take you directly to the CloudFormation console with the proper template preloaded.
9 | 
10 | Due to origin limitations around lambda zip files delivered using S3, Nebula can only be deployed in the following regions. If you need to deploy to a region that isn't on the list, just let us know at
11 | accelerators@1159.ai.
12 | 
13 | - US-EAST-1
14 | - US-WEST-2
15 | - CA-CENTRAL-1
16 | 
17 | Because Nebula uses Bedrock Knowledge Bases under the hood, it's currently limited to the following file types:
18 | - Plain text (txt)
19 | - Markdown (md)
20 | - HyperText Markup Language (html)
21 | - Microsoft Word document (doc/docx)
22 | - Comma-separated values (csv)
23 | - Microsoft Excel spreadsheet (xls/xlsx)
24 | - Portable Document Format (pdf)
25 | 
26 | Once the stack creation is complete, the output `WebUrl` can be found in the Outputs tab within CloudFormation.
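If you'd rather look the URL up programmatically, a minimal sketch using the AWS SDK for JavaScript v3 (assuming the default stack name `Nebula-1159` used by the deploy link, and credentials for the target account) would be something like:

```ts
import {
  CloudFormationClient,
  DescribeStacksCommand,
} from "@aws-sdk/client-cloudformation";

async function getWebUrl(stackName = "Nebula-1159"): Promise<string | undefined> {
  const client = new CloudFormationClient({});
  // Describe the deployed stack and pull the WebUrl output from it
  const { Stacks } = await client.send(
    new DescribeStacksCommand({ StackName: stackName })
  );
  return Stacks?.[0]?.Outputs?.find((o) => o.OutputKey === "WebUrl")?.OutputValue;
}

getWebUrl().then((url) => console.log(url));
```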
Following the `WebUrl` link should take you to the application, where you can
27 | log in with the temporary password that was sent to the email address provided during deployment.
28 | 
29 | **Note: You must have access to the chosen models BEFORE deploying this application.** To gain access to the necessary models, please reference
30 | [Manage access to Amazon Bedrock foundation models](https://docs.aws.amazon.com/bedrock/latest/userguide/model-access.html) in the Bedrock User Guide.
31 | 
32 | **[Deploy the Stack](https://console.aws.amazon.com/cloudformation/home?#/stacks/new?stackName=Nebula-1159&templateURL=https://1159-public-assets.s3.amazonaws.com/nebula/1.4.0/template.json)**
33 | 
34 | #### CloudFormation Parameters
35 | ##### User Email
36 | This will be used to create the initial Cognito user, and a temporary password
37 | will be sent to this address during the deployment.
38 | 
39 | ##### Upload Sample Documents
40 | Don't worry, it's just a couple of documents about [11:59](https://1159.ai), so storage consumption
41 | should not be an issue.
42 | 
43 | ##### Embedding Model
44 | These are the embedding models available to choose from:
45 | - amazon.titan-embed-text-v1
46 | - **amazon.titan-embed-text-v2:0**
47 | - cohere.embed-english-v3
48 | - cohere.embed-multilingual-v3
49 | 
50 | ##### Foundation Model
51 | These are the LLMs available to choose from:
52 | - amazon.titan-text-premier-v1:0
53 | - anthropic.claude-v2
54 | - anthropic.claude-v2:1
55 | - **anthropic.claude-3-sonnet-20240229-v1:0**
56 | - anthropic.claude-3-haiku-20240307-v1:0
57 | - anthropic.claude-instant-v1
58 | 
59 | ## Component Details
60 | #### Backend
61 | - [API Gateway](https://aws.amazon.com/api-gateway/)
62 | - [Bedrock](https://aws.amazon.com/bedrock/)
63 | - [CloudFront](https://aws.amazon.com/cloudfront/)
64 | - [Cognito](https://aws.amazon.com/cognito/)
65 | - [Lambda](https://aws.amazon.com/lambda/)
66 | - [OpenSearch](https://aws.amazon.com/opensearch-service/)
67 | - [S3](https://aws.amazon.com/s3/)
68 | 
69 | #### Frontend
70 | - [Quasar Framework](https://quasar.dev/)
71 | - [Vue.js](https://vuejs.org/)
72 | 
73 | #### Architecture
74 | ![Architecture Diagram](/client/public/diagram.png "Architecture Diagram")
75 | 
76 | ## Building Your Own
77 | #### Install the Prerequisites
78 | - [AWS CDK](https://docs.aws.amazon.com/cdk/v2/guide/getting_started.html)
79 | - [AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html)
80 | - [Node.js](https://nodejs.org/en/download/package-manager)
81 | 
82 | #### Create an AWS CLI Profile
83 | Nebula uses an AWS profile to authenticate to AWS; by default, the profile is named `shared-services`.
84 | If your profile has a different name, you must change all references to `shared-services` in
85 | the top-level `package.json` file.
86 | 
87 | #### Create or Set the Source Bucket
88 | We designed Nebula to be deployable using nothing but a CloudFormation link, and because of that,
89 | all artifacts are built and then uploaded to an S3 bucket for distribution. We're using a bucket called
90 | `1159-public-assets`, which you will need to change to your own bucket. References to the bucket
91 | are located in the following files:
92 | - package.json
93 | - lib/nebula-stack.ts
94 | 
95 | #### Build the App
96 | After you've installed the prerequisites, set your AWS profile, and created the source bucket, you can simply run
97 | `npm run build` to build the application.
This is a cascading command that kicks off multiple other tasks in the `package.json` file. 98 | 1. Compiles the TypeScript lambdas using [esbuild](https://esbuild.github.io/) 99 | 2. Compiles the Quasar client 100 | 3. Archives the lambdas into zip files 101 | 4. Synthesizes the CDK project to CloudFormation 102 | 5. Uploads the CF templates, client assets, lambdas, and sample data to the source bucket 103 | 104 | After the build is complete, you should find the final CloudFormation file in your source bucket 105 | at `nebula/{version}/template.json` 106 | -------------------------------------------------------------------------------- /lambdas/copy_site/index.ts: -------------------------------------------------------------------------------- 1 | import { Context } from "aws-lambda"; 2 | import * as https from "node:https"; 3 | import { 4 | S3Client, 5 | ListObjectsCommand, 6 | CopyObjectCommand, 7 | PutObjectCommand, 8 | } from "@aws-sdk/client-s3"; 9 | 10 | import { SNSClient, PublishCommand } from "@aws-sdk/client-sns"; 11 | 12 | const s3Client = new S3Client({}); 13 | const snsClient = new SNSClient({}); 14 | 15 | const listObjectsCommand = new ListObjectsCommand({ 16 | Bucket: process.env.SOURCE_BUCKET, 17 | Prefix: `nebula/${process.env.VERSION}/site`, 18 | }); 19 | 20 | const publishCommand = new PublishCommand({ 21 | TopicArn: process.env.TOPIC_ARN, 22 | Subject: "Accelerator Deployment", 23 | Message: JSON.stringify({ 24 | accelerator: "KB", 25 | user_email: process.env.USER_EMAIL, 26 | }), 27 | }); 28 | 29 | type CloudFormationEvent = { 30 | RequestType: "Create" | "Delete" | "Update"; 31 | RequestId: string; 32 | ResponseURL: string; 33 | ResourceType: string; 34 | LogicalResourceId: string; 35 | StackId: string; 36 | }; 37 | 38 | type ResponseStatus = "FAILED" | "SUCCESS"; 39 | 40 | type ResponseData = { 41 | Status?: string; 42 | Error?: string; 43 | }; 44 | 45 | export const handler = async (event: CloudFormationEvent, context: Context) => { 46 | console.log("REQUEST RECEIVED:\n" + JSON.stringify(event)); 47 | 48 | let responseStatus: ResponseStatus = "FAILED"; 49 | let responseData: ResponseData = {}; 50 | 51 | // For Delete requests, immediately send a SUCCESS response. 
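  // (CloudFormation only needs an acknowledgement here so that stack deletion
  // can proceed; the copied site assets are simply left in the web bucket.)
  //
  // On Create, the handler publishes a deployment notification to the SNS
  // topic, copies the pre-built Quasar site from the public source bucket
  // (nebula/{VERSION}/site) into the web bucket, and writes a config.json
  // containing the API base URL plus the Cognito/Amplify settings that the
  // client loads at runtime.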
52 | if (event.RequestType === "Delete" || event.ResourceType === "Update") { 53 | await sendResponse(event, context, "SUCCESS", { Status: "Deleting" }); 54 | return; 55 | } 56 | 57 | try { 58 | await snsClient.send(publishCommand); 59 | const sourceObjects = await s3Client.send(listObjectsCommand); 60 | const keys = sourceObjects.Contents?.map((object) => ({ 61 | newKey: object.Key?.slice(18), 62 | oldKey: object.Key, 63 | })); 64 | 65 | if (keys?.length) { 66 | for (const key of keys) { 67 | const send = await s3Client.send( 68 | new CopyObjectCommand({ 69 | Bucket: process.env.WEB_BUCKET, 70 | Key: key.newKey, 71 | CopySource: `${process.env.SOURCE_BUCKET}/${key.oldKey}`, 72 | }) 73 | ); 74 | } 75 | } 76 | const putConfig = await s3Client.send( 77 | new PutObjectCommand({ 78 | Key: "config.json", 79 | Body: JSON.stringify({ 80 | baseUrl: process.env.API_URL, 81 | cognitoOptions: { 82 | Auth: { 83 | Cognito: { 84 | userPoolId: process.env.USER_POOL_ID, 85 | userPoolClientId: process.env.USER_POOL_CLIENT_ID, 86 | loginWith: { 87 | email: true, 88 | }, 89 | signUpVerificationMethod: "code", 90 | userAttributes: { 91 | email: { 92 | required: true, 93 | }, 94 | }, 95 | allowGuestAccess: false, 96 | passwordFormat: { 97 | minLength: 12, 98 | requireLowercase: true, 99 | requireUppercase: true, 100 | requireNumbers: true, 101 | requireSpecialCharacters: true, 102 | }, 103 | }, 104 | }, 105 | }, 106 | }), 107 | Bucket: process.env.WEB_BUCKET, 108 | }) 109 | ); 110 | responseStatus = "SUCCESS"; 111 | responseData = { Status: "Objects copied" }; 112 | } catch (err) { 113 | responseData = { Error: "Something went wrong" }; 114 | console.log(responseData.Error + ":\n", err); 115 | } 116 | 117 | await sendResponse(event, context, responseStatus, responseData); 118 | }; 119 | 120 | function sendResponse( 121 | event: CloudFormationEvent, 122 | context: Context, 123 | responseStatus: ResponseStatus, 124 | responseData: ResponseData 125 | ) { 126 | return new Promise((resolve, reject) => { 127 | let responseBody = JSON.stringify({ 128 | Status: responseStatus, 129 | Reason: 130 | "See the details in CloudWatch Log Stream: " + context.logStreamName, 131 | PhysicalResourceId: context.logStreamName, 132 | StackId: event.StackId, 133 | RequestId: event.RequestId, 134 | LogicalResourceId: event.LogicalResourceId, 135 | NoEcho: false, 136 | Data: responseData, 137 | }); 138 | 139 | console.log("Response body:\n", responseBody); 140 | 141 | //var parsedUrl = url.parse(event.ResponseURL); 142 | //let parsedUrl = new URL(event.ResponseURL); 143 | let options = { 144 | method: "PUT", 145 | headers: { 146 | "Content-Type": "", 147 | "Content-Length": responseBody.length, 148 | }, 149 | }; 150 | 151 | var request = https.request(event.ResponseURL, options, (response) => { 152 | console.log("Status code: " + response.statusCode); 153 | resolve(context.done()); 154 | }); 155 | 156 | request.on("error", function (error) { 157 | console.log("send(..) 
failed executing https.request(..): " + error); 158 | reject(context.done(error)); 159 | }); 160 | 161 | request.write(responseBody); 162 | request.end(); 163 | }); 164 | } 165 | -------------------------------------------------------------------------------- /lambdas/web_api/index.ts: -------------------------------------------------------------------------------- 1 | import { APIGatewayProxyEvent, APIGatewayProxyResult } from "aws-lambda"; 2 | import { S3Client, ListObjectsCommand } from "@aws-sdk/client-s3"; 3 | import { 4 | BedrockAgentClient, 5 | GetKnowledgeBaseCommand, 6 | GetDataSourceCommand, 7 | DataSource, 8 | KnowledgeBase, 9 | } from "@aws-sdk/client-bedrock-agent"; 10 | import { 11 | BedrockAgentRuntimeClient, 12 | RetrieveAndGenerateCommand, 13 | Citation, 14 | } from "@aws-sdk/client-bedrock-agent-runtime"; 15 | 16 | const s3Client = new S3Client({}); 17 | const s3ListObjectsInput = { 18 | Bucket: process.env.DOCS_BUCKET, 19 | }; 20 | const s3ListObjectsCommand = new ListObjectsCommand(s3ListObjectsInput); 21 | 22 | const bedrockAgentClient = new BedrockAgentClient({}); 23 | const bedrockGetKnowledgeBaseCommand = new GetKnowledgeBaseCommand({ 24 | knowledgeBaseId: process.env.KB_ID, 25 | }); 26 | 27 | const bedrockGetDataSourceCommand = new GetDataSourceCommand({ 28 | knowledgeBaseId: process.env.KB_ID, 29 | dataSourceId: process.env.DATA_SOURCE_ID, 30 | }); 31 | 32 | const bedrockAgentRuntimeClient = new BedrockAgentRuntimeClient({}); 33 | 34 | type ResponseBody = { 35 | data?: { 36 | docs?: (string | undefined)[] | undefined; 37 | count?: number; 38 | knowledgeBase?: KnowledgeBase; 39 | dataSource?: DataSource; 40 | answer?: string; 41 | sessionId?: string; 42 | citations?: Citation[]; 43 | }; 44 | error?: { 45 | message?: string; 46 | detail?: unknown; 47 | }; 48 | }; 49 | 50 | const buildResponse = (body: ResponseBody, statusCode = 200) => { 51 | return { 52 | statusCode: statusCode, 53 | headers: { 54 | "Access-Control-Allow-Headers": "Content-Type,Authorization", 55 | "Access-Control-Allow-Origin": "*", 56 | "Access-Control-Allow-Methods": "OPTIONS,POST,GET", 57 | }, 58 | body: JSON.stringify(body), 59 | }; 60 | }; 61 | 62 | export const handler = async ( 63 | event: APIGatewayProxyEvent 64 | ): Promise => { 65 | let response; 66 | 67 | if (event.path === "/docs" && event.httpMethod === "GET") { 68 | try { 69 | const s3Response = await s3Client.send(s3ListObjectsCommand); 70 | response = buildResponse({ 71 | data: { 72 | docs: s3Response.Contents?.map((doc) => doc.Key), 73 | count: s3Response.Contents?.length, 74 | }, 75 | }); 76 | } catch (err) { 77 | console.log(err); 78 | response = buildResponse( 79 | { error: { message: "something went wrong", detail: err } }, 80 | 500 81 | ); 82 | } 83 | } else if (event.path === "/kb" && event.httpMethod === "GET") { 84 | try { 85 | const bedrockKbResponse = await bedrockAgentClient.send( 86 | bedrockGetKnowledgeBaseCommand 87 | ); 88 | 89 | const bedrockDataSourceResponse = await bedrockAgentClient.send( 90 | bedrockGetDataSourceCommand 91 | ); 92 | response = buildResponse({ 93 | data: { 94 | dataSource: bedrockDataSourceResponse.dataSource, 95 | knowledgeBase: bedrockKbResponse.knowledgeBase, 96 | }, 97 | }); 98 | } catch (err) { 99 | console.log(err); 100 | response = buildResponse( 101 | { error: { message: "something went wrong", detail: err } }, 102 | 500 103 | ); 104 | } 105 | } else if ( 106 | event.path === "/chat" && 107 | event.httpMethod === "POST" && 108 | event.body && 109 | event.body !== "" 110 | ) { 111 | 
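    // POST /chat: the request body is expected to carry { question, sessionId }.
    // The question is sent to Bedrock's RetrieveAndGenerate API against the
    // knowledge base using hybrid search, with the retrieval and inference
    // settings taken from the SOURCE_CHUNKS, TEMPERATURE, TOP_P and MAX_TOKENS
    // environment variables. The generated answer, the session id, and the
    // supporting citations are returned to the client.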
const body = JSON.parse(event.body); 112 | try { 113 | const query = await bedrockAgentRuntimeClient.send( 114 | new RetrieveAndGenerateCommand({ 115 | sessionId: body.sessionId, 116 | input: { 117 | text: body.question, // required 118 | }, 119 | retrieveAndGenerateConfiguration: { 120 | type: "KNOWLEDGE_BASE", // required 121 | knowledgeBaseConfiguration: { 122 | knowledgeBaseId: process.env.KB_ID, 123 | modelArn: process.env.FOUNDATION_MODEL_ARN, 124 | retrievalConfiguration: { 125 | // KnowledgeBaseRetrievalConfiguration 126 | vectorSearchConfiguration: { 127 | // KnowledgeBaseVectorSearchConfiguration 128 | numberOfResults: Number(process.env.SOURCE_CHUNKS), 129 | overrideSearchType: "HYBRID", 130 | }, 131 | }, 132 | generationConfiguration: { 133 | // promptTemplate: { // PromptTemplate 134 | // textPromptTemplate: "STRING_VALUE", 135 | // }, 136 | inferenceConfig: { 137 | textInferenceConfig: { 138 | temperature: Number(process.env.TEMPERATURE), 139 | topP: Number(process.env.TOP_P), 140 | maxTokens: Number(process.env.MAX_TOKENS), 141 | }, 142 | }, 143 | }, 144 | }, 145 | }, 146 | }) 147 | ); 148 | response = buildResponse({ 149 | data: { 150 | answer: query.output?.text, 151 | sessionId: query.sessionId, 152 | citations: query.citations, 153 | }, 154 | }); 155 | } catch (err) { 156 | console.log(err); 157 | response = buildResponse( 158 | { error: { message: "something went wrong", detail: err } }, 159 | 500 160 | ); 161 | } 162 | } else { 163 | response = buildResponse({ error: { message: "Invalid operaton" } }); 164 | } 165 | 166 | return response; 167 | }; 168 | -------------------------------------------------------------------------------- /client/quasar.config.js: -------------------------------------------------------------------------------- 1 | /* eslint-env node */ 2 | 3 | /* 4 | * This file runs in a Node context (it's NOT transpiled by Babel), so use only 5 | * the ES6 features that are supported by your Node version. 
https://node.green/ 6 | */ 7 | 8 | // Configuration for your app 9 | // https://v2.quasar.dev/quasar-cli-vite/quasar-config-js 10 | 11 | 12 | const { configure } = require('quasar/wrappers'); 13 | 14 | 15 | module.exports = configure(function (/* ctx */) { 16 | return { 17 | boot: [ 18 | 'axios', 19 | 'cognito' 20 | ], 21 | css: [ 22 | 'app.scss' 23 | ], 24 | 25 | // https://github.com/quasarframework/quasar/tree/dev/extras 26 | extras: [ 27 | 'line-awesome', 28 | 'themify' 29 | ], 30 | 31 | // Full list of options: https://v2.quasar.dev/quasar-cli-vite/quasar-config-js#build 32 | build: { 33 | target: { 34 | browser: [ 'es2019', 'edge88', 'firefox78', 'chrome87', 'safari13.1' ], 35 | node: 'node20' 36 | }, 37 | 38 | vueRouterMode: 'hash', // available values: 'hash', 'history' 39 | // vueRouterBase, 40 | // vueDevtools, 41 | // vueOptionsAPI: false, 42 | 43 | // rebuildCache: true, // rebuilds Vite/linter/etc cache on startup 44 | 45 | // publicPath: '/', 46 | // analyze: true, 47 | // env: {}, 48 | // rawDefine: {} 49 | // ignorePublicFolder: true, 50 | // minify: false, 51 | // polyfillModulePreload: true, 52 | // distDir 53 | 54 | // extendViteConf (viteConf) {}, 55 | // viteVuePluginOptions: {}, 56 | 57 | vitePlugins: [ 58 | ['vite-plugin-checker', { 59 | vueTsc: { 60 | tsconfigPath: 'tsconfig.vue-tsc.json' 61 | }, 62 | eslint: { 63 | lintCommand: 'eslint "./**/*.{js,ts,mjs,cjs,vue}"' 64 | } 65 | }, { server: false }] 66 | ] 67 | }, 68 | 69 | // Full list of options: https://v2.quasar.dev/quasar-cli-vite/quasar-config-js#devServer 70 | devServer: { 71 | // https: true 72 | open: true // opens browser window automatically 73 | }, 74 | 75 | // https://v2.quasar.dev/quasar-cli-vite/quasar-config-js#framework 76 | framework: { 77 | config: {}, 78 | 79 | iconSet: 'line-awesome', // Quasar icon set 80 | // lang: 'en-US', // Quasar language pack 81 | 82 | // For special cases outside of where the auto-import strategy can have an impact 83 | // (like functional components as one of the examples), 84 | // you can manually specify Quasar components/directives to be available everywhere: 85 | // 86 | // components: [], 87 | // directives: [], 88 | 89 | // Quasar plugins 90 | plugins: [ 91 | 'Notify', 92 | 'Screen' 93 | ] 94 | }, 95 | 96 | // animations: 'all', // --- includes all animations 97 | // https://v2.quasar.dev/options/animations 98 | animations: [], 99 | 100 | // https://v2.quasar.dev/quasar-cli-vite/quasar-config-js#sourcefiles 101 | // sourceFiles: { 102 | // rootComponent: 'src/App.vue', 103 | // router: 'src/router/index', 104 | // store: 'src/store/index', 105 | // registerServiceWorker: 'src-pwa/register-service-worker', 106 | // serviceWorker: 'src-pwa/custom-service-worker', 107 | // pwaManifestFile: 'src-pwa/manifest.json', 108 | // electronMain: 'src-electron/electron-main', 109 | // electronPreload: 'src-electron/electron-preload' 110 | // }, 111 | 112 | // https://v2.quasar.dev/quasar-cli-vite/developing-ssr/configuring-ssr 113 | ssr: { 114 | // ssrPwaHtmlFilename: 'offline.html', // do NOT use index.html as name! 
115 | // will mess up SSR 116 | 117 | // extendSSRWebserverConf (esbuildConf) {}, 118 | // extendPackageJson (json) {}, 119 | 120 | pwa: false, 121 | 122 | // manualStoreHydration: true, 123 | // manualPostHydrationTrigger: true, 124 | 125 | prodPort: 3000, // The default port that the production server should use 126 | // (gets superseded if process.env.PORT is specified at runtime) 127 | 128 | middlewares: [ 129 | 'render' // keep this as last one 130 | ] 131 | }, 132 | 133 | // https://v2.quasar.dev/quasar-cli-vite/developing-pwa/configuring-pwa 134 | pwa: { 135 | workboxMode: 'generateSW', // or 'injectManifest' 136 | injectPwaMetaTags: true, 137 | swFilename: 'sw.js', 138 | manifestFilename: 'manifest.json', 139 | useCredentialsForManifestTag: false, 140 | // useFilenameHashes: true, 141 | // extendGenerateSWOptions (cfg) {} 142 | // extendInjectManifestOptions (cfg) {}, 143 | // extendManifestJson (json) {} 144 | // extendPWACustomSWConf (esbuildConf) {} 145 | }, 146 | 147 | // Full list of options: https://v2.quasar.dev/quasar-cli-vite/developing-cordova-apps/configuring-cordova 148 | cordova: { 149 | // noIosLegacyBuildFlag: true, // uncomment only if you know what you are doing 150 | }, 151 | 152 | // Full list of options: https://v2.quasar.dev/quasar-cli-vite/developing-capacitor-apps/configuring-capacitor 153 | capacitor: { 154 | hideSplashscreen: true 155 | }, 156 | 157 | // Full list of options: https://v2.quasar.dev/quasar-cli-vite/developing-electron-apps/configuring-electron 158 | electron: { 159 | // extendElectronMainConf (esbuildConf) 160 | // extendElectronPreloadConf (esbuildConf) 161 | 162 | inspectPort: 5858, 163 | 164 | bundler: 'packager', // 'packager' or 'builder' 165 | 166 | packager: { 167 | // https://github.com/electron-userland/electron-packager/blob/master/docs/api.md#options 168 | 169 | // OS X / Mac App Store 170 | // appBundleId: '', 171 | // appCategoryType: '', 172 | // osxSign: '', 173 | // protocol: 'myapp://path', 174 | 175 | // Windows only 176 | // win32metadata: { ... } 177 | }, 178 | 179 | builder: { 180 | // https://www.electron.build/configuration/configuration 181 | 182 | appId: 'knowledgebase-demo' 183 | } 184 | }, 185 | 186 | // Full list of options: https://v2.quasar.dev/quasar-cli-vite/developing-browser-extensions/configuring-bex 187 | bex: { 188 | contentScripts: [ 189 | 'my-content-script' 190 | ], 191 | 192 | // extendBexScriptsConf (esbuildConf) {} 193 | // extendBexManifestJson (json) {} 194 | } 195 | } 196 | }); 197 | -------------------------------------------------------------------------------- /client/src/pages/KbHelp.vue: -------------------------------------------------------------------------------- 1 | 55 | 56 | 146 | -------------------------------------------------------------------------------- /lambdas/create_index/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | /* Visit https://aka.ms/tsconfig to read more about this file */ 4 | 5 | /* Projects */ 6 | // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ 7 | // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ 8 | // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ 9 | // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. 
*/ 10 | // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ 11 | // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ 12 | 13 | /* Language and Environment */ 14 | "target": "es2020", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ 15 | // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ 16 | // "jsx": "preserve", /* Specify what JSX code is generated. */ 17 | // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */ 18 | // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ 19 | // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ 20 | // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ 21 | // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ 22 | // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ 23 | // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ 24 | // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ 25 | // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ 26 | 27 | /* Modules */ 28 | "module": "commonjs", /* Specify what module code is generated. */ 29 | // "rootDir": "./", /* Specify the root folder within your source files. */ 30 | // "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */ 31 | // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ 32 | // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ 33 | // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ 34 | // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ 35 | // "types": [], /* Specify type package names to be included without being referenced in a source file. */ 36 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ 37 | // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ 38 | // "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */ 39 | // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */ 40 | // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */ 41 | // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */ 42 | // "resolveJsonModule": true, /* Enable importing .json files. */ 43 | // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. 
*/ 44 | // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ 45 | 46 | /* JavaScript Support */ 47 | // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ 48 | // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ 49 | // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ 50 | 51 | /* Emit */ 52 | // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ 53 | // "declarationMap": true, /* Create sourcemaps for d.ts files. */ 54 | // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ 55 | // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ 56 | // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ 57 | // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ 58 | // "outDir": "./", /* Specify an output folder for all emitted files. */ 59 | // "removeComments": true, /* Disable emitting comments. */ 60 | // "noEmit": true, /* Disable emitting files from a compilation. */ 61 | // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ 62 | // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ 63 | // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ 64 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ 65 | // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ 66 | // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ 67 | // "newLine": "crlf", /* Set the newline character for emitting files. */ 68 | // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ 69 | // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ 70 | // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ 71 | // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ 72 | // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ 73 | 74 | /* Interop Constraints */ 75 | // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ 76 | // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */ 77 | // "isolatedDeclarations": true, /* Require sufficient annotation on exports so other tools can trivially generate declaration files. */ 78 | // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ 79 | "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. 
*/ 80 | // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ 81 | "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ 82 | 83 | /* Type Checking */ 84 | "strict": true, /* Enable all strict type-checking options. */ 85 | // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ 86 | // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ 87 | // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ 88 | // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ 89 | // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ 90 | // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ 91 | // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ 92 | // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ 93 | // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ 94 | // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ 95 | // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ 96 | // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ 97 | // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ 98 | // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ 99 | // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ 100 | // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ 101 | // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ 102 | // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ 103 | 104 | /* Completeness */ 105 | // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ 106 | "skipLibCheck": true /* Skip type checking all .d.ts files. */ 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /lambdas/create_index_provider/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | /* Visit https://aka.ms/tsconfig to read more about this file */ 4 | 5 | /* Projects */ 6 | // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ 7 | // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ 8 | // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ 9 | // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ 10 | // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. 
*/ 11 | // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ 12 | 13 | /* Language and Environment */ 14 | "target": "es2020", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ 15 | // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ 16 | // "jsx": "preserve", /* Specify what JSX code is generated. */ 17 | // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */ 18 | // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ 19 | // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ 20 | // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ 21 | // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ 22 | // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ 23 | // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ 24 | // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ 25 | // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ 26 | 27 | /* Modules */ 28 | "module": "commonjs", /* Specify what module code is generated. */ 29 | // "rootDir": "./", /* Specify the root folder within your source files. */ 30 | // "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */ 31 | // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ 32 | // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ 33 | // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ 34 | // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ 35 | // "types": [], /* Specify type package names to be included without being referenced in a source file. */ 36 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ 37 | // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ 38 | // "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */ 39 | // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */ 40 | // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */ 41 | // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */ 42 | // "resolveJsonModule": true, /* Enable importing .json files. */ 43 | // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */ 44 | // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. 
*/ 45 | 46 | /* JavaScript Support */ 47 | // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ 48 | // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ 49 | // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ 50 | 51 | /* Emit */ 52 | // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ 53 | // "declarationMap": true, /* Create sourcemaps for d.ts files. */ 54 | // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ 55 | // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ 56 | // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ 57 | // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ 58 | // "outDir": "./", /* Specify an output folder for all emitted files. */ 59 | // "removeComments": true, /* Disable emitting comments. */ 60 | // "noEmit": true, /* Disable emitting files from a compilation. */ 61 | // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ 62 | // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ 63 | // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ 64 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ 65 | // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ 66 | // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ 67 | // "newLine": "crlf", /* Set the newline character for emitting files. */ 68 | // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ 69 | // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ 70 | // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ 71 | // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ 72 | // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ 73 | 74 | /* Interop Constraints */ 75 | // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ 76 | // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */ 77 | // "isolatedDeclarations": true, /* Require sufficient annotation on exports so other tools can trivially generate declaration files. */ 78 | // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ 79 | "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ 80 | // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. 
*/ 81 | "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ 82 | 83 | /* Type Checking */ 84 | "strict": true, /* Enable all strict type-checking options. */ 85 | // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ 86 | // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ 87 | // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ 88 | // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ 89 | // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ 90 | // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ 91 | // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ 92 | // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ 93 | // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ 94 | // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ 95 | // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ 96 | // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ 97 | // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ 98 | // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ 99 | // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ 100 | // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ 101 | // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ 102 | // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ 103 | 104 | /* Completeness */ 105 | // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ 106 | "skipLibCheck": true /* Skip type checking all .d.ts files. 
*/ 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /lib/nebula-stack.ts: -------------------------------------------------------------------------------- 1 | import { Construct, DependencyGroup } from "constructs"; 2 | import { 3 | RemovalPolicy, 4 | Stack, 5 | StackProps, 6 | aws_s3 as s3, 7 | aws_cognito as cognito, 8 | aws_opensearchserverless as opensearchserverless, 9 | aws_iam as iam, 10 | Aws, 11 | Duration, 12 | CfnParameter, 13 | custom_resources as cr, 14 | aws_lambda as lambda, 15 | aws_apigateway as apigateway, 16 | aws_bedrock as bedrock, 17 | aws_cloudfront as cloudfront, 18 | aws_cloudformation as cloudformation, 19 | CustomResource, 20 | Fn, 21 | CfnCondition, 22 | CfnOutput, 23 | } from "aws-cdk-lib"; 24 | // import * as sqs from 'aws-cdk-lib/aws-sqs'; 25 | 26 | export class NebulaStack extends Stack { 27 | constructor(scope: Construct, id: string, props?: StackProps) { 28 | super(scope, id, props); 29 | 30 | const publicBucket = s3.Bucket.fromBucketArn( 31 | this, 32 | "PublicBucket", 33 | `arn:aws:s3:::1159-public-assets-${Aws.REGION}` 34 | ); 35 | 36 | const userEmailParam = new CfnParameter(this, "UserEmailParam", { 37 | type: "String", 38 | noEcho: false, 39 | description: 40 | "Will be used to create your Cognito account. You will receive an invitation email at this address", 41 | allowedPattern: "[^\\s@]+@[^\\s@]+\\.[^\\s@]+", 42 | constraintDescription: "Must enter a valid email address", 43 | minLength: 5, 44 | }); 45 | 46 | const embeddingModelParam = new CfnParameter(this, "EmbeddingModelParam", { 47 | type: "String", 48 | default: "amazon.titan-embed-text-v2:0", 49 | description: 50 | "This model will be used to create embeddings from the document repository", 51 | allowedValues: [ 52 | "amazon.titan-embed-text-v1", 53 | "amazon.titan-embed-text-v2:0", 54 | "cohere.embed-english-v3", 55 | "cohere.embed-multilingual-v3", 56 | ], 57 | }); 58 | 59 | const foundationModelParam = new CfnParameter( 60 | this, 61 | "FoundationModelParam", 62 | { 63 | type: "String", 64 | default: "anthropic.claude-3-5-sonnet-20241022-v2:0", 65 | description: "Base model for the conversational interface", 66 | allowedValues: [ 67 | "amazon.titan-text-premier-v1:0", 68 | "anthropic.claude-v2", 69 | "anthropic.claude-v2:1", 70 | "anthropic.claude-3-sonnet-20240229-v1:0", 71 | "anthropic.claude-3-5-sonnet-20240620-v1:0", 72 | "anthropic.claude-3-5-sonnet-20241022-v2:0", 73 | "anthropic.claude-3-haiku-20240307-v1:0", 74 | "anthropic.claude-3-5-haiku-20241022-v1:0", 75 | ], 76 | } 77 | ); 78 | 79 | const uploadParam = new CfnParameter(this, "UploadParam", { 80 | type: "String", 81 | default: "YES", 82 | description: 83 | "Uploads sample documents to your bucket. 
Must answer YES or NO", 84 | allowedValues: ["YES", "NO"], 85 | }); 86 | 87 | this.templateOptions.metadata = { 88 | "AWS::CloudFormation::Interface": { 89 | ParameterGroups: [ 90 | { 91 | Label: { default: "General" }, 92 | Parameters: [userEmailParam.logicalId, uploadParam.logicalId], 93 | }, 94 | { 95 | Label: { default: "Models" }, 96 | Parameters: [ 97 | embeddingModelParam.logicalId, 98 | foundationModelParam.logicalId, 99 | ], 100 | }, 101 | ], 102 | ParameterLabels: { 103 | [userEmailParam.logicalId]: { 104 | default: "What is your email address?", 105 | }, 106 | [uploadParam.logicalId]: { 107 | default: "Upload sample documents?", 108 | }, 109 | [embeddingModelParam.logicalId]: { 110 | default: "Embedding Model?", 111 | }, 112 | [foundationModelParam.logicalId]: { 113 | default: "Foundation Model?", 114 | }, 115 | }, 116 | }, 117 | }; 118 | 119 | const nebulaDocsBucket = new s3.Bucket(this, "NebulaDocsBucket", { 120 | blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, 121 | autoDeleteObjects: false, 122 | encryption: s3.BucketEncryption.S3_MANAGED, 123 | enforceSSL: true, 124 | versioned: false, 125 | removalPolicy: RemovalPolicy.RETAIN, 126 | }); 127 | const corsRule: s3.CorsRule = { 128 | allowedMethods: [s3.HttpMethods.GET, s3.HttpMethods.HEAD], 129 | allowedOrigins: ["*"], 130 | allowedHeaders: ["*"], 131 | maxAge: 300, 132 | }; 133 | 134 | const nebulaWebBucket = new s3.Bucket(this, "NebulaWebBucket", { 135 | blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, 136 | autoDeleteObjects: false, 137 | encryption: s3.BucketEncryption.S3_MANAGED, 138 | enforceSSL: true, 139 | versioned: false, 140 | removalPolicy: RemovalPolicy.RETAIN, 141 | cors: [corsRule], 142 | }); 143 | 144 | const nebulaUserPool = new cognito.UserPool(this, "NebulaUserPool", { 145 | deletionProtection: false, 146 | mfa: cognito.Mfa.OFF, 147 | passwordPolicy: { 148 | minLength: 12, 149 | requireLowercase: true, 150 | requireUppercase: true, 151 | requireDigits: true, 152 | requireSymbols: true, 153 | tempPasswordValidity: Duration.days(3), 154 | }, 155 | signInCaseSensitive: false, 156 | email: cognito.UserPoolEmail.withCognito(), 157 | accountRecovery: cognito.AccountRecovery.EMAIL_ONLY, 158 | enableSmsRole: false, 159 | selfSignUpEnabled: true, 160 | signInAliases: { 161 | email: true, 162 | phone: false, 163 | preferredUsername: false, 164 | username: false, 165 | }, 166 | autoVerify: { email: true, phone: false }, 167 | userPoolName: "NebulaUserPool", 168 | removalPolicy: RemovalPolicy.DESTROY, 169 | }); 170 | 171 | const nebulaUser = new cognito.CfnUserPoolUser(this, "NebulaUser", { 172 | userPoolId: nebulaUserPool.userPoolId, 173 | desiredDeliveryMediums: ["EMAIL"], 174 | username: userEmailParam.valueAsString, 175 | }); 176 | 177 | const nebulaUserPoolClient = nebulaUserPool.addClient("NebulaUserPoolClient", { 178 | authFlows: { 179 | userSrp: true, 180 | }, 181 | accessTokenValidity: Duration.minutes(180), 182 | authSessionValidity: Duration.minutes(5), 183 | enableTokenRevocation: true, 184 | generateSecret: false, 185 | idTokenValidity: Duration.minutes(180), 186 | preventUserExistenceErrors: true, 187 | refreshTokenValidity: Duration.days(30), 188 | userPoolClientName: "web", 189 | }); 190 | 191 | const nebulaCollection = new opensearchserverless.CfnCollection( 192 | this, 193 | "NebulaCollection", 194 | { 195 | name: "nebula-collection", 196 | description: "Collection for Nebula Knowledge Base", 197 | standbyReplicas: "DISABLED", 198 | type: "VECTORSEARCH", 199 | } 200 | ); 201 | 202 | const 
nebulaCollectionEncryptionPolicy = 203 | new opensearchserverless.CfnSecurityPolicy( 204 | this, 205 | "NebulaCollectionEncryptionPolicy", 206 | { 207 | policy: JSON.stringify({ 208 | Rules: [ 209 | { 210 | Resource: ["collection/nebula-collection"], 211 | ResourceType: "collection", 212 | }, 213 | ], 214 | AWSOwnedKey: true, 215 | }), 216 | type: "encryption", 217 | description: "Encryption policy for Nebula Knowledge Base Collection", 218 | name: "nebula-encryption-policy", 219 | } 220 | ); 221 | 222 | const nebulaCollectionNetworkPolicy = 223 | new opensearchserverless.CfnSecurityPolicy( 224 | this, 225 | "NebulaCollectionNetworkPolicy", 226 | { 227 | policy: JSON.stringify([ 228 | { 229 | Rules: [ 230 | { 231 | Resource: ["collection/nebula-collection"], 232 | ResourceType: "dashboard", 233 | }, 234 | { 235 | Resource: ["collection/nebula-collection"], 236 | ResourceType: "collection", 237 | }, 238 | ], 239 | AllowFromPublic: true, 240 | }, 241 | ]), 242 | type: "network", 243 | description: "Network policy for Knowledge Base Collection", 244 | name: "nebula-network-policy", 245 | } 246 | ); 247 | 248 | const nebulaCollectionAccessPolicy = new opensearchserverless.CfnAccessPolicy( 249 | this, 250 | "NebulaCollectionAccessPolicy", 251 | { 252 | policy: JSON.stringify([ 253 | { 254 | Rules: [ 255 | { 256 | Resource: ["collection/nebula-collection"], 257 | Permission: [ 258 | "aoss:DescribeCollectionItems", 259 | "aoss:CreateCollectionItems", 260 | "aoss:UpdateCollectionItems", 261 | ], 262 | ResourceType: "collection", 263 | }, 264 | { 265 | Resource: ["index/nebula-collection/*"], 266 | Permission: [ 267 | "aoss:UpdateIndex", 268 | "aoss:DescribeIndex", 269 | "aoss:ReadDocument", 270 | "aoss:WriteDocument", 271 | "aoss:CreateIndex", 272 | ], 273 | ResourceType: "index", 274 | }, 275 | ], 276 | Principal: [ 277 | `arn:aws:iam::${Aws.ACCOUNT_ID}:role/service-role/NebulaBedrockRole`, 278 | `arn:aws:iam::${Aws.ACCOUNT_ID}:role/service-role/NebulaCreateIndexRole`, 279 | ], 280 | }, 281 | ]), 282 | type: "data", 283 | description: "Access policy for Knowledge Base Collection", 284 | name: "nebula-access-policy", 285 | } 286 | ); 287 | 288 | const NebulaCollectionDepencyGroup = new DependencyGroup(); 289 | NebulaCollectionDepencyGroup.add(nebulaCollectionEncryptionPolicy); 290 | NebulaCollectionDepencyGroup.add(nebulaCollectionNetworkPolicy); 291 | NebulaCollectionDepencyGroup.add(nebulaCollectionAccessPolicy); 292 | 293 | nebulaCollection.node.addDependency(NebulaCollectionDepencyGroup); 294 | 295 | nebulaCollection.applyRemovalPolicy(RemovalPolicy.DESTROY); 296 | 297 | const nebulaCreateIndexPolicy = new iam.ManagedPolicy( 298 | this, 299 | "NebulaCreateIndexPolicy", 300 | { 301 | managedPolicyName: "NebulaCreateIndexPolicy", 302 | path: "/service-role/", 303 | document: new iam.PolicyDocument({ 304 | statements: [ 305 | new iam.PolicyStatement({ 306 | actions: [ 307 | "logs:CreateLogGroup", 308 | "logs:CreateLogStream", 309 | "logs:PutLogEvents", 310 | "cloudformation:SignalResource", 311 | "cloudformation:DescribeStackResource", 312 | ], 313 | resources: ["*"], 314 | }), 315 | new iam.PolicyStatement({ 316 | actions: ["aoss:APIAccessAll"], 317 | resources: [nebulaCollection.attrArn], 318 | }), 319 | ], 320 | }), 321 | } 322 | ); 323 | 324 | const nebulaCreateIndexRole = new iam.Role(this, "NebulaCreateIndexRole", { 325 | roleName: "NebulaCreateIndexRole", 326 | path: "/service-role/", 327 | assumedBy: new iam.ServicePrincipal("lambda.amazonaws.com"), 328 | managedPolicies: 
[nebulaCreateIndexPolicy], 329 | }); 330 | 331 | const nebulaCreateIndexFunction = new lambda.Function( 332 | this, 333 | "NebulaCreateIndexFunction", 334 | { 335 | runtime: lambda.Runtime.NODEJS_20_X, 336 | code: lambda.Code.fromBucket( 337 | publicBucket, 338 | `nebula/${process.env.npm_package_version}/lambdas/create_index.zip` 339 | ), 340 | handler: "create_index.handler", 341 | functionName: "NebulaCreateIndexFunction", 342 | role: nebulaCreateIndexRole, 343 | environment: { 344 | REGION: `${Aws.REGION}`, 345 | ENDPOINT: nebulaCollection.attrCollectionEndpoint, 346 | }, 347 | timeout: Duration.seconds(600), 348 | } 349 | ); 350 | 351 | const nebulaCreateIndexCr = new CustomResource(this, "NebulaCreateIndexCr", { 352 | serviceToken: nebulaCreateIndexFunction.functionArn, 353 | removalPolicy: RemovalPolicy.RETAIN, 354 | }); 355 | 356 | const nebulaBedrockPolicy = new iam.ManagedPolicy(this, "NebulaBedrockPolicy", { 357 | managedPolicyName: "NebulaBedrockPolicy", 358 | path: "/service-role/", 359 | document: new iam.PolicyDocument({ 360 | statements: [ 361 | new iam.PolicyStatement({ 362 | actions: ["aoss:APIAccessAll"], 363 | resources: [nebulaCollection.attrArn], 364 | }), 365 | new iam.PolicyStatement({ 366 | actions: ["bedrock:InvokeModel"], 367 | resources: [ 368 | `arn:aws:bedrock:${Aws.REGION}::foundation-model/amazon.titan-embed-text-v1`, 369 | `arn:aws:bedrock:${Aws.REGION}::foundation-model/amazon.titan-embed-text-v2:0`, 370 | `arn:aws:bedrock:${Aws.REGION}::foundation-model/cohere.embed-english-v3`, 371 | `arn:aws:bedrock:${Aws.REGION}::foundation-model/cohere.embed-multilingual-v3`, 372 | `arn:aws:bedrock:${Aws.REGION}::foundation-model/amazon.titan-text-premier-v1:0`, 373 | `arn:aws:bedrock:${Aws.REGION}::foundation-model/anthropic.claude-v2`, 374 | `arn:aws:bedrock:${Aws.REGION}::foundation-model/anthropic.claude-v2:1`, 375 | `arn:aws:bedrock:${Aws.REGION}::foundation-model/anthropic.claude-3-sonnet-20240229-v1:0`, 376 | `arn:aws:bedrock:${Aws.REGION}::foundation-model/anthropic.claude-3-haiku-20240307-v1:0`, 377 | `arn:aws:bedrock:${Aws.REGION}::foundation-model/anthropic.claude-instant-v1`, 378 | ], 379 | }), 380 | new iam.PolicyStatement({ 381 | actions: ["s3:ListBucket", "s3:GetObject"], 382 | resources: [nebulaDocsBucket.bucketArn, `${nebulaDocsBucket.bucketArn}/*`], 383 | }), 384 | new iam.PolicyStatement({ 385 | actions: [ 386 | "bedrock:RetrieveAndGenerate", 387 | "bedrock:ListFoundationModels", 388 | "bedrock:ListCustomModels", 389 | "bedrock:Retrieve", 390 | ], 391 | resources: ["*"], 392 | }), 393 | ], 394 | }), 395 | }); 396 | 397 | const nebulaBedrockRole = new iam.Role(this, "NebulaBedrockRole", { 398 | roleName: "NebulaBedrockRole", 399 | path: "/service-role/", 400 | assumedBy: new iam.ServicePrincipal("bedrock.amazonaws.com"), 401 | managedPolicies: [nebulaBedrockPolicy], 402 | }); 403 | 404 | const nebulaKb = new bedrock.CfnKnowledgeBase(this, "NebulaKB", { 405 | knowledgeBaseConfiguration: { 406 | type: "VECTOR", 407 | vectorKnowledgeBaseConfiguration: { 408 | embeddingModelArn: `arn:aws:bedrock:${Aws.REGION}::foundation-model/${embeddingModelParam.valueAsString}`, 409 | }, 410 | }, 411 | name: "nebula-kb", 412 | roleArn: nebulaBedrockRole.roleArn, 413 | storageConfiguration: { 414 | opensearchServerlessConfiguration: { 415 | collectionArn: nebulaCollection.attrArn, 416 | fieldMapping: { 417 | metadataField: "BEDROCK_METADATA", 418 | textField: "BEDROCK_TEXT_CHUNK", 419 | vectorField: "nebula-vector", 420 | }, 421 | vectorIndexName: "nebula-index", 422 | 
}, 423 | type: "OPENSEARCH_SERVERLESS", 424 | }, 425 | }); 426 | 427 | const nebulaDependencyGroup = new DependencyGroup(); 428 | nebulaDependencyGroup.add(nebulaCollection); 429 | nebulaDependencyGroup.add(nebulaBedrockRole); 430 | nebulaDependencyGroup.add(nebulaCreateIndexCr); 431 | 432 | nebulaKb.node.addDependency(nebulaDependencyGroup); 433 | 434 | const nebulaDataSource = new bedrock.CfnDataSource(this, "NebulaDataSource", { 435 | name: "nebula-source", 436 | knowledgeBaseId: nebulaKb.attrKnowledgeBaseId, 437 | dataDeletionPolicy: "DELETE", 438 | dataSourceConfiguration: { 439 | type: "S3", 440 | s3Configuration: { 441 | bucketArn: nebulaDocsBucket.bucketArn, 442 | bucketOwnerAccountId: `${Aws.ACCOUNT_ID}`, 443 | }, 444 | }, 445 | }); 446 | 447 | const nebulaWebApiPolicy = new iam.ManagedPolicy(this, "NebulaWebApiPolicy", { 448 | managedPolicyName: "NebulaWebApiPolicy", 449 | path: "/service-role/", 450 | document: new iam.PolicyDocument({ 451 | statements: [ 452 | new iam.PolicyStatement({ 453 | actions: [ 454 | "logs:CreateLogGroup", 455 | "logs:CreateLogStream", 456 | "logs:PutLogEvents", 457 | ], 458 | resources: ["*"], 459 | }), 460 | new iam.PolicyStatement({ 461 | actions: ["s3:ListBucket"], 462 | resources: [nebulaDocsBucket.bucketArn], 463 | }), 464 | new iam.PolicyStatement({ 465 | actions: [ 466 | "bedrock:GetAgentKnowledgeBase", 467 | "bedrock:GetKnowledgeBase", 468 | "bedrock:GetDataSource", 469 | ], 470 | resources: [nebulaKb.attrKnowledgeBaseArn], 471 | }), 472 | new iam.PolicyStatement({ 473 | actions: [ 474 | "bedrock:RetrieveAndGenerate", 475 | "bedrock:Retrieve", 476 | "bedrock:InvokeModel", 477 | ], 478 | resources: ["*"], 479 | }), 480 | ], 481 | }), 482 | }); 483 | 484 | const nebulaWebApiRole = new iam.Role(this, "NebulaWebApiRole", { 485 | roleName: "NebulaWebApiRole", 486 | path: "/service-role/", 487 | assumedBy: new iam.ServicePrincipal("lambda.amazonaws.com"), 488 | managedPolicies: [nebulaWebApiPolicy], 489 | }); 490 | 491 | const nebulaWebApiFunction = new lambda.Function(this, "NebulaWebApiFunction", { 492 | runtime: lambda.Runtime.NODEJS_20_X, 493 | code: lambda.Code.fromBucket( 494 | publicBucket, 495 | `nebula/${process.env.npm_package_version}/lambdas/web_api.zip` 496 | ), 497 | handler: "web_api.handler", 498 | functionName: "NebulaWebApiFunction", 499 | role: nebulaWebApiRole, 500 | environment: { 501 | DOCS_BUCKET: nebulaDocsBucket.bucketName, 502 | KB_ID: nebulaKb.attrKnowledgeBaseId, 503 | DATA_SOURCE_ID: nebulaDataSource.attrDataSourceId, 504 | FOUNDATION_MODEL_ARN: `arn:aws:bedrock:${Aws.REGION}::foundation-model/${foundationModelParam.valueAsString}`, 505 | SOURCE_CHUNKS: "25", 506 | TEMPERATURE: "0.3", 507 | TOP_P: "0.9", 508 | MAX_TOKENS: "2048", 509 | }, 510 | timeout: Duration.seconds(30), 511 | }); 512 | 513 | nebulaWebApiFunction.applyRemovalPolicy(RemovalPolicy.DESTROY); 514 | 515 | const nebulaApiAuthorizer = new apigateway.CognitoUserPoolsAuthorizer( 516 | this, 517 | "NebulaApiAuthorizer", 518 | { 519 | cognitoUserPools: [nebulaUserPool], 520 | authorizerName: "NebulaAuthorizer", 521 | } 522 | ); 523 | 524 | const nebulaApi = new apigateway.LambdaRestApi(this, "NebulaApi", { 525 | restApiName: "NebulaApi", 526 | handler: nebulaWebApiFunction, 527 | retainDeployments: false, 528 | defaultCorsPreflightOptions: { 529 | allowOrigins: apigateway.Cors.ALL_ORIGINS, 530 | allowMethods: apigateway.Cors.ALL_METHODS, 531 | allowHeaders: apigateway.Cors.DEFAULT_HEADERS, 532 | }, 533 | defaultMethodOptions: { 534 | authorizer: nebulaApiAuthorizer, 535 | 
authorizationType: apigateway.AuthorizationType.COGNITO, 536 | }, 537 | deploy: false, 538 | proxy: true, 539 | }); 540 | 541 | nebulaApiAuthorizer._attachToApi(nebulaApi); 542 | 543 | const nebulaApiDeployment = new apigateway.Deployment(this, "NebulaApiDeployment", { 544 | api: nebulaApi, 545 | }); 546 | 547 | const nebulaApiStage = new apigateway.Stage(this, "NebulaApiStage", { 548 | deployment: nebulaApiDeployment, 549 | stageName: "prod", 550 | }); 551 | 552 | nebulaApi.addGatewayResponse("NebulaApiUnauthorizedResponse", { 553 | type: apigateway.ResponseType.UNAUTHORIZED, 554 | statusCode: "401", 555 | responseHeaders: { 556 | "Access-Control-Allow-Origin": "'*'", 557 | "Access-Control-Allow-Methods": "'*'", 558 | "Access-Control-Allow-Headers": "'Content-Type, Authorization'", 559 | //"Access-Control-Allow-Credentials": 'true' 560 | }, 561 | templates: { 562 | "application/json": '{"message":$context.error.messageString}', 563 | }, 564 | }); 565 | 566 | const nebulaSampleDataPolicy = new iam.ManagedPolicy( 567 | this, 568 | "NebulaSampleDataPolicy", 569 | { 570 | managedPolicyName: "NebulaSampleDataPolicy", 571 | path: "/service-role/", 572 | document: new iam.PolicyDocument({ 573 | statements: [ 574 | new iam.PolicyStatement({ 575 | actions: [ 576 | "logs:CreateLogGroup", 577 | "logs:CreateLogStream", 578 | "logs:PutLogEvents", 579 | "cloudformation:SignalResource", 580 | "cloudformation:DescribeStackResource", 581 | ], 582 | resources: ["*"], 583 | }), 584 | new iam.PolicyStatement({ 585 | actions: ["s3:ListBucket", "s3:GetObject"], 586 | resources: [ 587 | publicBucket.bucketArn, 588 | `${publicBucket.bucketArn}/*`, 589 | ], 590 | }), 591 | new iam.PolicyStatement({ 592 | actions: ["s3:PutObject"], 593 | resources: [`${nebulaDocsBucket.bucketArn}/*`], 594 | }), 595 | new iam.PolicyStatement({ 596 | actions: ["bedrock:StartIngestionJob"], 597 | resources: [nebulaKb.attrKnowledgeBaseArn], 598 | }), 599 | ], 600 | }), 601 | } 602 | ); 603 | 604 | const nebulaSampleDataRole = new iam.Role(this, "NebulaSampleDataRole", { 605 | roleName: "NebulaSampleDataRole", 606 | path: "/service-role/", 607 | assumedBy: new iam.ServicePrincipal("lambda.amazonaws.com"), 608 | managedPolicies: [nebulaSampleDataPolicy], 609 | }); 610 | 611 | const nebulaSampleDataFunction = new lambda.Function( 612 | this, 613 | "NebulaSampleDataFunction", 614 | { 615 | runtime: lambda.Runtime.NODEJS_20_X, 616 | code: lambda.Code.fromBucket( 617 | publicBucket, 618 | `nebula/${process.env.npm_package_version}/lambdas/sample_data.zip` 619 | ), 620 | handler: "sample_data.handler", 621 | functionName: "NebulaSampleDataFunction", 622 | role: nebulaSampleDataRole, 623 | environment: { 624 | VERSION: `${process.env.npm_package_version}`, 625 | DOCS_BUCKET: nebulaDocsBucket.bucketName, 626 | KB_ID: nebulaKb.attrKnowledgeBaseId, 627 | DATA_SOURCE_ID: nebulaDataSource.attrDataSourceId, 628 | SOURCE_BUCKET: publicBucket.bucketName 629 | }, 630 | timeout: Duration.seconds(120), 631 | } 632 | ); 633 | 634 | const nebulaSampleDataCr = new CustomResource(this, "NebulaSampleDataCr", { 635 | serviceToken: nebulaSampleDataFunction.functionArn, 636 | removalPolicy: RemovalPolicy.RETAIN, 637 | }); 638 | 639 | const nebulaSampleDataCondition = new CfnCondition( 640 | this, 641 | "NebulaSampleDataCondition", 642 | { 643 | expression: Fn.conditionEquals(uploadParam.valueAsString, "YES"), 644 | } 645 | ); 646 | 647 | const nebulaSampleDataPolicyCfn = nebulaSampleDataPolicy.node 648 | .defaultChild as iam.CfnManagedPolicy; 649 | 
nebulaSampleDataPolicyCfn.cfnOptions.condition = nebulaSampleDataCondition; 650 | 651 | const nebulaSampleDataRoleCfn = nebulaSampleDataRole.node 652 | .defaultChild as iam.CfnRole; 653 | nebulaSampleDataRoleCfn.cfnOptions.condition = nebulaSampleDataCondition; 654 | 655 | const nebulaSampleDataFunctionCfn = nebulaSampleDataFunction.node 656 | .defaultChild as lambda.CfnFunction; 657 | nebulaSampleDataFunctionCfn.cfnOptions.condition = nebulaSampleDataCondition; 658 | 659 | const nebulaSampleDataCrCfn = nebulaSampleDataCr.node 660 | .defaultChild as cloudformation.CfnCustomResource; 661 | nebulaSampleDataCrCfn.cfnOptions.condition = nebulaSampleDataCondition; 662 | 663 | const nebulaOai = new cloudfront.OriginAccessIdentity(this, "NebulaOai"); 664 | 665 | const nebulaDistro = new cloudfront.CloudFrontWebDistribution( 666 | this, 667 | "NebulaDistro", 668 | { 669 | originConfigs: [ 670 | { 671 | s3OriginSource: { 672 | s3BucketSource: nebulaWebBucket, 673 | originAccessIdentity: nebulaOai, 674 | }, 675 | behaviors: [ 676 | { isDefaultBehavior: true }, 677 | { 678 | allowedMethods: cloudfront.CloudFrontAllowedMethods.GET_HEAD, 679 | pathPattern: "/*", 680 | }, 681 | ], 682 | }, 683 | ], 684 | defaultRootObject: "index.html", 685 | enabled: true, 686 | httpVersion: cloudfront.HttpVersion.HTTP2_AND_3, 687 | viewerCertificate: 688 | cloudfront.ViewerCertificate.fromCloudFrontDefaultCertificate(), 689 | } 690 | ); 691 | 692 | const nebulaCopySitePolicy = new iam.ManagedPolicy(this, "NebulaCopySitePolicy", { 693 | managedPolicyName: "NebulaCopySitePolicy", 694 | path: "/service-role/", 695 | document: new iam.PolicyDocument({ 696 | statements: [ 697 | new iam.PolicyStatement({ 698 | actions: [ 699 | "logs:CreateLogGroup", 700 | "logs:CreateLogStream", 701 | "logs:PutLogEvents", 702 | "cloudformation:SignalResource", 703 | "cloudformation:DescribeStackResource", 704 | ], 705 | resources: ["*"], 706 | }), 707 | new iam.PolicyStatement({ 708 | actions: ["s3:ListBucket", "s3:GetObject"], 709 | resources: [publicBucket.bucketArn, `${publicBucket.bucketArn}/*`], 710 | }), 711 | new iam.PolicyStatement({ 712 | actions: ["s3:PutObject"], 713 | resources: [`${nebulaWebBucket.bucketArn}/*`], 714 | }), 715 | new iam. 
PolicyStatement({ 716 | actions: ["sns:Publish"], 717 | resources: [`arn:aws:sns:${Aws.REGION}:844603932797:1159-accelerators-topic`] 718 | }) 719 | ], 720 | }), 721 | }); 722 | 723 | const nebulaCopySiteRole = new iam.Role(this, "NebulaCopySiteRole", { 724 | roleName: "NebulaCopySiteRole", 725 | path: "/service-role/", 726 | assumedBy: new iam.ServicePrincipal("lambda.amazonaws.com"), 727 | managedPolicies: [nebulaCopySitePolicy], 728 | }); 729 | 730 | const nebulaCopySiteFunction = new lambda.Function(this, "NebulaCopySiteFunction", { 731 | runtime: lambda.Runtime.NODEJS_20_X, 732 | code: lambda.Code.fromBucket( 733 | publicBucket, 734 | `nebula/${process.env.npm_package_version}/lambdas/copy_site.zip` 735 | ), 736 | handler: "copy_site.handler", 737 | functionName: "NebulaCopySiteFunction", 738 | role: nebulaCopySiteRole, 739 | environment: { 740 | VERSION: `${process.env.npm_package_version}`, 741 | WEB_BUCKET: nebulaWebBucket.bucketName, 742 | API_URL: nebulaApiStage.urlForPath(), 743 | USER_POOL_ID: nebulaUserPool.userPoolId, 744 | USER_POOL_CLIENT_ID: nebulaUserPoolClient.userPoolClientId, 745 | SOURCE_BUCKET: publicBucket.bucketName, 746 | USER_EMAIL: userEmailParam.valueAsString, 747 | TOPIC_ARN: `arn:aws:sns:${Aws.REGION}:844603932797:1159-accelerators-topic` 748 | }, 749 | timeout: Duration.seconds(120), 750 | }); 751 | 752 | const nebulaCopySiteCr = new CustomResource(this, "NebulaCopySiteCr", { 753 | serviceToken: nebulaCopySiteFunction.functionArn, 754 | removalPolicy: RemovalPolicy.RETAIN, 755 | }); 756 | 757 | const nebulaDistroOutput = new CfnOutput(this, "WebUrl", { 758 | description: "CloudFront Web URL for the demo application", 759 | value: nebulaDistro.distributionDomainName, 760 | }); 761 | } 762 | } 763 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/> 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | Preamble 9 | 10 | The GNU General Public License is a free, copyleft license for 11 | software and other kinds of works. 12 | 13 | The licenses for most software and other practical works are designed 14 | to take away your freedom to share and change the works. By contrast, 15 | the GNU General Public License is intended to guarantee your freedom to 16 | share and change all versions of a program--to make sure it remains free 17 | software for all its users. We, the Free Software Foundation, use the 18 | GNU General Public License for most of our software; it applies also to 19 | any other work released this way by its authors. You can apply it to 20 | your programs, too. 21 | 22 | When we speak of free software, we are referring to freedom, not 23 | price. Our General Public Licenses are designed to make sure that you 24 | have the freedom to distribute copies of free software (and charge for 25 | them if you wish), that you receive source code or can get it if you 26 | want it, that you can change the software or use pieces of it in new 27 | free programs, and that you know you can do these things. 28 | 29 | To protect your rights, we need to prevent others from denying you 30 | these rights or asking you to surrender the rights. 
Therefore, you have 31 | certain responsibilities if you distribute copies of the software, or if 32 | you modify it: responsibilities to respect the freedom of others. 33 | 34 | For example, if you distribute copies of such a program, whether 35 | gratis or for a fee, you must pass on to the recipients the same 36 | freedoms that you received. You must make sure that they, too, receive 37 | or can get the source code. And you must show them these terms so they 38 | know their rights. 39 | 40 | Developers that use the GNU GPL protect your rights with two steps: 41 | (1) assert copyright on the software, and (2) offer you this License 42 | giving you legal permission to copy, distribute and/or modify it. 43 | 44 | For the developers' and authors' protection, the GPL clearly explains 45 | that there is no warranty for this free software. For both users' and 46 | authors' sake, the GPL requires that modified versions be marked as 47 | changed, so that their problems will not be attributed erroneously to 48 | authors of previous versions. 49 | 50 | Some devices are designed to deny users access to install or run 51 | modified versions of the software inside them, although the manufacturer 52 | can do so. This is fundamentally incompatible with the aim of 53 | protecting users' freedom to change the software. The systematic 54 | pattern of such abuse occurs in the area of products for individuals to 55 | use, which is precisely where it is most unacceptable. Therefore, we 56 | have designed this version of the GPL to prohibit the practice for those 57 | products. If such problems arise substantially in other domains, we 58 | stand ready to extend this provision to those domains in future versions 59 | of the GPL, as needed to protect the freedom of users. 60 | 61 | Finally, every program is threatened constantly by software patents. 62 | States should not allow patents to restrict development and use of 63 | software on general-purpose computers, but in those that do, we wish to 64 | avoid the special danger that patents applied to a free program could 65 | make it effectively proprietary. To prevent this, the GPL assures that 66 | patents cannot be used to render the program non-free. 67 | 68 | The precise terms and conditions for copying, distribution and 69 | modification follow. 70 | 71 | TERMS AND CONDITIONS 72 | 73 | 0. Definitions. 74 | 75 | "This License" refers to version 3 of the GNU General Public License. 76 | 77 | "Copyright" also means copyright-like laws that apply to other kinds of 78 | works, such as semiconductor masks. 79 | 80 | "The Program" refers to any copyrightable work licensed under this 81 | License. Each licensee is addressed as "you". "Licensees" and 82 | "recipients" may be individuals or organizations. 83 | 84 | To "modify" a work means to copy from or adapt all or part of the work 85 | in a fashion requiring copyright permission, other than the making of an 86 | exact copy. The resulting work is called a "modified version" of the 87 | earlier work or a work "based on" the earlier work. 88 | 89 | A "covered work" means either the unmodified Program or a work based 90 | on the Program. 91 | 92 | To "propagate" a work means to do anything with it that, without 93 | permission, would make you directly or secondarily liable for 94 | infringement under applicable copyright law, except executing it on a 95 | computer or modifying a private copy. 
Propagation includes copying, 96 | distribution (with or without modification), making available to the 97 | public, and in some countries other activities as well. 98 | 99 | To "convey" a work means any kind of propagation that enables other 100 | parties to make or receive copies. Mere interaction with a user through 101 | a computer network, with no transfer of a copy, is not conveying. 102 | 103 | An interactive user interface displays "Appropriate Legal Notices" 104 | to the extent that it includes a convenient and prominently visible 105 | feature that (1) displays an appropriate copyright notice, and (2) 106 | tells the user that there is no warranty for the work (except to the 107 | extent that warranties are provided), that licensees may convey the 108 | work under this License, and how to view a copy of this License. If 109 | the interface presents a list of user commands or options, such as a 110 | menu, a prominent item in the list meets this criterion. 111 | 112 | 1. Source Code. 113 | 114 | The "source code" for a work means the preferred form of the work 115 | for making modifications to it. "Object code" means any non-source 116 | form of a work. 117 | 118 | A "Standard Interface" means an interface that either is an official 119 | standard defined by a recognized standards body, or, in the case of 120 | interfaces specified for a particular programming language, one that 121 | is widely used among developers working in that language. 122 | 123 | The "System Libraries" of an executable work include anything, other 124 | than the work as a whole, that (a) is included in the normal form of 125 | packaging a Major Component, but which is not part of that Major 126 | Component, and (b) serves only to enable use of the work with that 127 | Major Component, or to implement a Standard Interface for which an 128 | implementation is available to the public in source code form. A 129 | "Major Component", in this context, means a major essential component 130 | (kernel, window system, and so on) of the specific operating system 131 | (if any) on which the executable work runs, or a compiler used to 132 | produce the work, or an object code interpreter used to run it. 133 | 134 | The "Corresponding Source" for a work in object code form means all 135 | the source code needed to generate, install, and (for an executable 136 | work) run the object code and to modify the work, including scripts to 137 | control those activities. However, it does not include the work's 138 | System Libraries, or general-purpose tools or generally available free 139 | programs which are used unmodified in performing those activities but 140 | which are not part of the work. For example, Corresponding Source 141 | includes interface definition files associated with source files for 142 | the work, and the source code for shared libraries and dynamically 143 | linked subprograms that the work is specifically designed to require, 144 | such as by intimate data communication or control flow between those 145 | subprograms and other parts of the work. 146 | 147 | The Corresponding Source need not include anything that users 148 | can regenerate automatically from other parts of the Corresponding 149 | Source. 150 | 151 | The Corresponding Source for a work in source code form is that 152 | same work. 153 | 154 | 2. Basic Permissions. 155 | 156 | All rights granted under this License are granted for the term of 157 | copyright on the Program, and are irrevocable provided the stated 158 | conditions are met. 
This License explicitly affirms your unlimited 159 | permission to run the unmodified Program. The output from running a 160 | covered work is covered by this License only if the output, given its 161 | content, constitutes a covered work. This License acknowledges your 162 | rights of fair use or other equivalent, as provided by copyright law. 163 | 164 | You may make, run and propagate covered works that you do not 165 | convey, without conditions so long as your license otherwise remains 166 | in force. You may convey covered works to others for the sole purpose 167 | of having them make modifications exclusively for you, or provide you 168 | with facilities for running those works, provided that you comply with 169 | the terms of this License in conveying all material for which you do 170 | not control copyright. Those thus making or running the covered works 171 | for you must do so exclusively on your behalf, under your direction 172 | and control, on terms that prohibit them from making any copies of 173 | your copyrighted material outside their relationship with you. 174 | 175 | Conveying under any other circumstances is permitted solely under 176 | the conditions stated below. Sublicensing is not allowed; section 10 177 | makes it unnecessary. 178 | 179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 180 | 181 | No covered work shall be deemed part of an effective technological 182 | measure under any applicable law fulfilling obligations under article 183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 184 | similar laws prohibiting or restricting circumvention of such 185 | measures. 186 | 187 | When you convey a covered work, you waive any legal power to forbid 188 | circumvention of technological measures to the extent such circumvention 189 | is effected by exercising rights under this License with respect to 190 | the covered work, and you disclaim any intention to limit operation or 191 | modification of the work as a means of enforcing, against the work's 192 | users, your or third parties' legal rights to forbid circumvention of 193 | technological measures. 194 | 195 | 4. Conveying Verbatim Copies. 196 | 197 | You may convey verbatim copies of the Program's source code as you 198 | receive it, in any medium, provided that you conspicuously and 199 | appropriately publish on each copy an appropriate copyright notice; 200 | keep intact all notices stating that this License and any 201 | non-permissive terms added in accord with section 7 apply to the code; 202 | keep intact all notices of the absence of any warranty; and give all 203 | recipients a copy of this License along with the Program. 204 | 205 | You may charge any price or no price for each copy that you convey, 206 | and you may offer support or warranty protection for a fee. 207 | 208 | 5. Conveying Modified Source Versions. 209 | 210 | You may convey a work based on the Program, or the modifications to 211 | produce it from the Program, in the form of source code under the 212 | terms of section 4, provided that you also meet all of these conditions: 213 | 214 | a) The work must carry prominent notices stating that you modified 215 | it, and giving a relevant date. 216 | 217 | b) The work must carry prominent notices stating that it is 218 | released under this License and any conditions added under section 219 | 7. This requirement modifies the requirement in section 4 to 220 | "keep intact all notices". 
221 | 222 | c) You must license the entire work, as a whole, under this 223 | License to anyone who comes into possession of a copy. This 224 | License will therefore apply, along with any applicable section 7 225 | additional terms, to the whole of the work, and all its parts, 226 | regardless of how they are packaged. This License gives no 227 | permission to license the work in any other way, but it does not 228 | invalidate such permission if you have separately received it. 229 | 230 | d) If the work has interactive user interfaces, each must display 231 | Appropriate Legal Notices; however, if the Program has interactive 232 | interfaces that do not display Appropriate Legal Notices, your 233 | work need not make them do so. 234 | 235 | A compilation of a covered work with other separate and independent 236 | works, which are not by their nature extensions of the covered work, 237 | and which are not combined with it such as to form a larger program, 238 | in or on a volume of a storage or distribution medium, is called an 239 | "aggregate" if the compilation and its resulting copyright are not 240 | used to limit the access or legal rights of the compilation's users 241 | beyond what the individual works permit. Inclusion of a covered work 242 | in an aggregate does not cause this License to apply to the other 243 | parts of the aggregate. 244 | 245 | 6. Conveying Non-Source Forms. 246 | 247 | You may convey a covered work in object code form under the terms 248 | of sections 4 and 5, provided that you also convey the 249 | machine-readable Corresponding Source under the terms of this License, 250 | in one of these ways: 251 | 252 | a) Convey the object code in, or embodied in, a physical product 253 | (including a physical distribution medium), accompanied by the 254 | Corresponding Source fixed on a durable physical medium 255 | customarily used for software interchange. 256 | 257 | b) Convey the object code in, or embodied in, a physical product 258 | (including a physical distribution medium), accompanied by a 259 | written offer, valid for at least three years and valid for as 260 | long as you offer spare parts or customer support for that product 261 | model, to give anyone who possesses the object code either (1) a 262 | copy of the Corresponding Source for all the software in the 263 | product that is covered by this License, on a durable physical 264 | medium customarily used for software interchange, for a price no 265 | more than your reasonable cost of physically performing this 266 | conveying of source, or (2) access to copy the 267 | Corresponding Source from a network server at no charge. 268 | 269 | c) Convey individual copies of the object code with a copy of the 270 | written offer to provide the Corresponding Source. This 271 | alternative is allowed only occasionally and noncommercially, and 272 | only if you received the object code with such an offer, in accord 273 | with subsection 6b. 274 | 275 | d) Convey the object code by offering access from a designated 276 | place (gratis or for a charge), and offer equivalent access to the 277 | Corresponding Source in the same way through the same place at no 278 | further charge. You need not require recipients to copy the 279 | Corresponding Source along with the object code. 
If the place to 280 | copy the object code is a network server, the Corresponding Source 281 | may be on a different server (operated by you or a third party) 282 | that supports equivalent copying facilities, provided you maintain 283 | clear directions next to the object code saying where to find the 284 | Corresponding Source. Regardless of what server hosts the 285 | Corresponding Source, you remain obligated to ensure that it is 286 | available for as long as needed to satisfy these requirements. 287 | 288 | e) Convey the object code using peer-to-peer transmission, provided 289 | you inform other peers where the object code and Corresponding 290 | Source of the work are being offered to the general public at no 291 | charge under subsection 6d. 292 | 293 | A separable portion of the object code, whose source code is excluded 294 | from the Corresponding Source as a System Library, need not be 295 | included in conveying the object code work. 296 | 297 | A "User Product" is either (1) a "consumer product", which means any 298 | tangible personal property which is normally used for personal, family, 299 | or household purposes, or (2) anything designed or sold for incorporation 300 | into a dwelling. In determining whether a product is a consumer product, 301 | doubtful cases shall be resolved in favor of coverage. For a particular 302 | product received by a particular user, "normally used" refers to a 303 | typical or common use of that class of product, regardless of the status 304 | of the particular user or of the way in which the particular user 305 | actually uses, or expects or is expected to use, the product. A product 306 | is a consumer product regardless of whether the product has substantial 307 | commercial, industrial or non-consumer uses, unless such uses represent 308 | the only significant mode of use of the product. 309 | 310 | "Installation Information" for a User Product means any methods, 311 | procedures, authorization keys, or other information required to install 312 | and execute modified versions of a covered work in that User Product from 313 | a modified version of its Corresponding Source. The information must 314 | suffice to ensure that the continued functioning of the modified object 315 | code is in no case prevented or interfered with solely because 316 | modification has been made. 317 | 318 | If you convey an object code work under this section in, or with, or 319 | specifically for use in, a User Product, and the conveying occurs as 320 | part of a transaction in which the right of possession and use of the 321 | User Product is transferred to the recipient in perpetuity or for a 322 | fixed term (regardless of how the transaction is characterized), the 323 | Corresponding Source conveyed under this section must be accompanied 324 | by the Installation Information. But this requirement does not apply 325 | if neither you nor any third party retains the ability to install 326 | modified object code on the User Product (for example, the work has 327 | been installed in ROM). 328 | 329 | The requirement to provide Installation Information does not include a 330 | requirement to continue to provide support service, warranty, or updates 331 | for a work that has been modified or installed by the recipient, or for 332 | the User Product in which it has been modified or installed. 
Access to a 333 | network may be denied when the modification itself materially and 334 | adversely affects the operation of the network or violates the rules and 335 | protocols for communication across the network. 336 | 337 | Corresponding Source conveyed, and Installation Information provided, 338 | in accord with this section must be in a format that is publicly 339 | documented (and with an implementation available to the public in 340 | source code form), and must require no special password or key for 341 | unpacking, reading or copying. 342 | 343 | 7. Additional Terms. 344 | 345 | "Additional permissions" are terms that supplement the terms of this 346 | License by making exceptions from one or more of its conditions. 347 | Additional permissions that are applicable to the entire Program shall 348 | be treated as though they were included in this License, to the extent 349 | that they are valid under applicable law. If additional permissions 350 | apply only to part of the Program, that part may be used separately 351 | under those permissions, but the entire Program remains governed by 352 | this License without regard to the additional permissions. 353 | 354 | When you convey a copy of a covered work, you may at your option 355 | remove any additional permissions from that copy, or from any part of 356 | it. (Additional permissions may be written to require their own 357 | removal in certain cases when you modify the work.) You may place 358 | additional permissions on material, added by you to a covered work, 359 | for which you have or can give appropriate copyright permission. 360 | 361 | Notwithstanding any other provision of this License, for material you 362 | add to a covered work, you may (if authorized by the copyright holders of 363 | that material) supplement the terms of this License with terms: 364 | 365 | a) Disclaiming warranty or limiting liability differently from the 366 | terms of sections 15 and 16 of this License; or 367 | 368 | b) Requiring preservation of specified reasonable legal notices or 369 | author attributions in that material or in the Appropriate Legal 370 | Notices displayed by works containing it; or 371 | 372 | c) Prohibiting misrepresentation of the origin of that material, or 373 | requiring that modified versions of such material be marked in 374 | reasonable ways as different from the original version; or 375 | 376 | d) Limiting the use for publicity purposes of names of licensors or 377 | authors of the material; or 378 | 379 | e) Declining to grant rights under trademark law for use of some 380 | trade names, trademarks, or service marks; or 381 | 382 | f) Requiring indemnification of licensors and authors of that 383 | material by anyone who conveys the material (or modified versions of 384 | it) with contractual assumptions of liability to the recipient, for 385 | any liability that these contractual assumptions directly impose on 386 | those licensors and authors. 387 | 388 | All other non-permissive additional terms are considered "further 389 | restrictions" within the meaning of section 10. If the Program as you 390 | received it, or any part of it, contains a notice stating that it is 391 | governed by this License along with a term that is a further 392 | restriction, you may remove that term. 
If a license document contains 393 | a further restriction but permits relicensing or conveying under this 394 | License, you may add to a covered work material governed by the terms 395 | of that license document, provided that the further restriction does 396 | not survive such relicensing or conveying. 397 | 398 | If you add terms to a covered work in accord with this section, you 399 | must place, in the relevant source files, a statement of the 400 | additional terms that apply to those files, or a notice indicating 401 | where to find the applicable terms. 402 | 403 | Additional terms, permissive or non-permissive, may be stated in the 404 | form of a separately written license, or stated as exceptions; 405 | the above requirements apply either way. 406 | 407 | 8. Termination. 408 | 409 | You may not propagate or modify a covered work except as expressly 410 | provided under this License. Any attempt otherwise to propagate or 411 | modify it is void, and will automatically terminate your rights under 412 | this License (including any patent licenses granted under the third 413 | paragraph of section 11). 414 | 415 | However, if you cease all violation of this License, then your 416 | license from a particular copyright holder is reinstated (a) 417 | provisionally, unless and until the copyright holder explicitly and 418 | finally terminates your license, and (b) permanently, if the copyright 419 | holder fails to notify you of the violation by some reasonable means 420 | prior to 60 days after the cessation. 421 | 422 | Moreover, your license from a particular copyright holder is 423 | reinstated permanently if the copyright holder notifies you of the 424 | violation by some reasonable means, this is the first time you have 425 | received notice of violation of this License (for any work) from that 426 | copyright holder, and you cure the violation prior to 30 days after 427 | your receipt of the notice. 428 | 429 | Termination of your rights under this section does not terminate the 430 | licenses of parties who have received copies or rights from you under 431 | this License. If your rights have been terminated and not permanently 432 | reinstated, you do not qualify to receive new licenses for the same 433 | material under section 10. 434 | 435 | 9. Acceptance Not Required for Having Copies. 436 | 437 | You are not required to accept this License in order to receive or 438 | run a copy of the Program. Ancillary propagation of a covered work 439 | occurring solely as a consequence of using peer-to-peer transmission 440 | to receive a copy likewise does not require acceptance. However, 441 | nothing other than this License grants you permission to propagate or 442 | modify any covered work. These actions infringe copyright if you do 443 | not accept this License. Therefore, by modifying or propagating a 444 | covered work, you indicate your acceptance of this License to do so. 445 | 446 | 10. Automatic Licensing of Downstream Recipients. 447 | 448 | Each time you convey a covered work, the recipient automatically 449 | receives a license from the original licensors, to run, modify and 450 | propagate that work, subject to this License. You are not responsible 451 | for enforcing compliance by third parties with this License. 452 | 453 | An "entity transaction" is a transaction transferring control of an 454 | organization, or substantially all assets of one, or subdividing an 455 | organization, or merging organizations. 
If propagation of a covered 456 | work results from an entity transaction, each party to that 457 | transaction who receives a copy of the work also receives whatever 458 | licenses to the work the party's predecessor in interest had or could 459 | give under the previous paragraph, plus a right to possession of the 460 | Corresponding Source of the work from the predecessor in interest, if 461 | the predecessor has it or can get it with reasonable efforts. 462 | 463 | You may not impose any further restrictions on the exercise of the 464 | rights granted or affirmed under this License. For example, you may 465 | not impose a license fee, royalty, or other charge for exercise of 466 | rights granted under this License, and you may not initiate litigation 467 | (including a cross-claim or counterclaim in a lawsuit) alleging that 468 | any patent claim is infringed by making, using, selling, offering for 469 | sale, or importing the Program or any portion of it. 470 | 471 | 11. Patents. 472 | 473 | A "contributor" is a copyright holder who authorizes use under this 474 | License of the Program or a work on which the Program is based. The 475 | work thus licensed is called the contributor's "contributor version". 476 | 477 | A contributor's "essential patent claims" are all patent claims 478 | owned or controlled by the contributor, whether already acquired or 479 | hereafter acquired, that would be infringed by some manner, permitted 480 | by this License, of making, using, or selling its contributor version, 481 | but do not include claims that would be infringed only as a 482 | consequence of further modification of the contributor version. For 483 | purposes of this definition, "control" includes the right to grant 484 | patent sublicenses in a manner consistent with the requirements of 485 | this License. 486 | 487 | Each contributor grants you a non-exclusive, worldwide, royalty-free 488 | patent license under the contributor's essential patent claims, to 489 | make, use, sell, offer for sale, import and otherwise run, modify and 490 | propagate the contents of its contributor version. 491 | 492 | In the following three paragraphs, a "patent license" is any express 493 | agreement or commitment, however denominated, not to enforce a patent 494 | (such as an express permission to practice a patent or covenant not to 495 | sue for patent infringement). To "grant" such a patent license to a 496 | party means to make such an agreement or commitment not to enforce a 497 | patent against the party. 498 | 499 | If you convey a covered work, knowingly relying on a patent license, 500 | and the Corresponding Source of the work is not available for anyone 501 | to copy, free of charge and under the terms of this License, through a 502 | publicly available network server or other readily accessible means, 503 | then you must either (1) cause the Corresponding Source to be so 504 | available, or (2) arrange to deprive yourself of the benefit of the 505 | patent license for this particular work, or (3) arrange, in a manner 506 | consistent with the requirements of this License, to extend the patent 507 | license to downstream recipients. "Knowingly relying" means you have 508 | actual knowledge that, but for the patent license, your conveying the 509 | covered work in a country, or your recipient's use of the covered work 510 | in a country, would infringe one or more identifiable patents in that 511 | country that you have reason to believe are valid. 
512 | 513 | If, pursuant to or in connection with a single transaction or 514 | arrangement, you convey, or propagate by procuring conveyance of, a 515 | covered work, and grant a patent license to some of the parties 516 | receiving the covered work authorizing them to use, propagate, modify 517 | or convey a specific copy of the covered work, then the patent license 518 | you grant is automatically extended to all recipients of the covered 519 | work and works based on it. 520 | 521 | A patent license is "discriminatory" if it does not include within 522 | the scope of its coverage, prohibits the exercise of, or is 523 | conditioned on the non-exercise of one or more of the rights that are 524 | specifically granted under this License. You may not convey a covered 525 | work if you are a party to an arrangement with a third party that is 526 | in the business of distributing software, under which you make payment 527 | to the third party based on the extent of your activity of conveying 528 | the work, and under which the third party grants, to any of the 529 | parties who would receive the covered work from you, a discriminatory 530 | patent license (a) in connection with copies of the covered work 531 | conveyed by you (or copies made from those copies), or (b) primarily 532 | for and in connection with specific products or compilations that 533 | contain the covered work, unless you entered into that arrangement, 534 | or that patent license was granted, prior to 28 March 2007. 535 | 536 | Nothing in this License shall be construed as excluding or limiting 537 | any implied license or other defenses to infringement that may 538 | otherwise be available to you under applicable patent law. 539 | 540 | 12. No Surrender of Others' Freedom. 541 | 542 | If conditions are imposed on you (whether by court order, agreement or 543 | otherwise) that contradict the conditions of this License, they do not 544 | excuse you from the conditions of this License. If you cannot convey a 545 | covered work so as to satisfy simultaneously your obligations under this 546 | License and any other pertinent obligations, then as a consequence you may 547 | not convey it at all. For example, if you agree to terms that obligate you 548 | to collect a royalty for further conveying from those to whom you convey 549 | the Program, the only way you could satisfy both those terms and this 550 | License would be to refrain entirely from conveying the Program. 551 | 552 | 13. Use with the GNU Affero General Public License. 553 | 554 | Notwithstanding any other provision of this License, you have 555 | permission to link or combine any covered work with a work licensed 556 | under version 3 of the GNU Affero General Public License into a single 557 | combined work, and to convey the resulting work. The terms of this 558 | License will continue to apply to the part which is the covered work, 559 | but the special requirements of the GNU Affero General Public License, 560 | section 13, concerning interaction through a network will apply to the 561 | combination as such. 562 | 563 | 14. Revised Versions of this License. 564 | 565 | The Free Software Foundation may publish revised and/or new versions of 566 | the GNU General Public License from time to time. Such new versions will 567 | be similar in spirit to the present version, but may differ in detail to 568 | address new problems or concerns. 569 | 570 | Each version is given a distinguishing version number. 
If the 571 | Program specifies that a certain numbered version of the GNU General 572 | Public License "or any later version" applies to it, you have the 573 | option of following the terms and conditions either of that numbered 574 | version or of any later version published by the Free Software 575 | Foundation. If the Program does not specify a version number of the 576 | GNU General Public License, you may choose any version ever published 577 | by the Free Software Foundation. 578 | 579 | If the Program specifies that a proxy can decide which future 580 | versions of the GNU General Public License can be used, that proxy's 581 | public statement of acceptance of a version permanently authorizes you 582 | to choose that version for the Program. 583 | 584 | Later license versions may give you additional or different 585 | permissions. However, no additional obligations are imposed on any 586 | author or copyright holder as a result of your choosing to follow a 587 | later version. 588 | 589 | 15. Disclaimer of Warranty. 590 | 591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 599 | 600 | 16. Limitation of Liability. 601 | 602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 610 | SUCH DAMAGES. 611 | 612 | 17. Interpretation of Sections 15 and 16. 613 | 614 | If the disclaimer of warranty and limitation of liability provided 615 | above cannot be given local legal effect according to their terms, 616 | reviewing courts shall apply local law that most closely approximates 617 | an absolute waiver of all civil liability in connection with the 618 | Program, unless a warranty or assumption of liability accompanies a 619 | copy of the Program in return for a fee. 620 | 621 | END OF TERMS AND CONDITIONS 622 | 623 | How to Apply These Terms to Your New Programs 624 | 625 | If you develop a new program, and you want it to be of the greatest 626 | possible use to the public, the best way to achieve this is to make it 627 | free software which everyone can redistribute and change under these terms. 628 | 629 | To do so, attach the following notices to the program. It is safest 630 | to attach them to the start of each source file to most effectively 631 | state the exclusion of warranty; and each file should have at least 632 | the "copyright" line and a pointer to where the full notice is found. 
633 | 634 | <one line to give the program's name and a brief idea of what it does.> 635 | Copyright (C) <year> <name of author> 636 | 637 | This program is free software: you can redistribute it and/or modify 638 | it under the terms of the GNU General Public License as published by 639 | the Free Software Foundation, either version 3 of the License, or 640 | (at your option) any later version. 641 | 642 | This program is distributed in the hope that it will be useful, 643 | but WITHOUT ANY WARRANTY; without even the implied warranty of 644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 645 | GNU General Public License for more details. 646 | 647 | You should have received a copy of the GNU General Public License 648 | along with this program. If not, see <https://www.gnu.org/licenses/>. 649 | 650 | Also add information on how to contact you by electronic and paper mail. 651 | 652 | If the program does terminal interaction, make it output a short 653 | notice like this when it starts in an interactive mode: 654 | 655 | <program> Copyright (C) <year> <name of author> 656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 657 | This is free software, and you are welcome to redistribute it 658 | under certain conditions; type `show c' for details. 659 | 660 | The hypothetical commands `show w' and `show c' should show the appropriate 661 | parts of the General Public License. Of course, your program's commands 662 | might be different; for a GUI interface, you would use an "about box". 663 | 664 | You should also get your employer (if you work as a programmer) or school, 665 | if any, to sign a "copyright disclaimer" for the program, if necessary. 666 | For more information on this, and how to apply and follow the GNU GPL, see 667 | <https://www.gnu.org/licenses/>. 668 | 669 | The GNU General Public License does not permit incorporating your program 670 | into proprietary programs. If your program is a subroutine library, you 671 | may consider it more useful to permit linking proprietary applications with 672 | the library. If this is what you want to do, use the GNU Lesser General 673 | Public License instead of this License. But first, please read 674 | <https://www.gnu.org/licenses/why-not-lgpl.html>. 675 | --------------------------------------------------------------------------------