├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── assets
├── AWS_Quant_Real_Time_Installation.drawio.png
├── AWS_Quant_Real_Time_Operational_View.drawio.png
├── Architecture.drawio
├── aws-cdk.png
├── cdk.webp
└── installation
│ ├── dynamodb_table_items.png
│ ├── secret_value.png
│ └── secrets_manager.png
├── aws-quant-infra
├── README.md
├── deployment
│ ├── cdk
│ │ ├── README.md
│ │ ├── bin
│ │ │ └── cdk.ts
│ │ ├── cdk.context.json
│ │ ├── cdk.json
│ │ ├── deployment.sh
│ │ ├── jest.config.js
│ │ ├── lib
│ │ │ ├── appConfig.ts
│ │ │ ├── batch.ts
│ │ │ ├── database.ts
│ │ │ ├── environment.ts
│ │ │ ├── lambda.ts
│ │ │ ├── pipeline.ts
│ │ │ ├── secrets.ts
│ │ │ └── shared
│ │ │ │ ├── custom-resources
│ │ │ │ └── codecommit-bootstrap
│ │ │ │ │ ├── Dockerfile
│ │ │ │ │ └── index.py
│ │ │ │ └── ssm-manager.ts
│ │ ├── outputs.json
│ │ ├── package-lock.json
│ │ ├── package.json
│ │ └── tsconfig.json
│ ├── cloud9
│ │ └── dev_environment.yaml
│ └── grafana
│ │ ├── grafana.tfvars
│ │ ├── main.tf
│ │ └── vars.tf
├── instructions.md
├── package-lock.json
├── package.json
├── requirements.txt
├── scripts
│ ├── create_system_event.py
│ ├── deploy_portfolio.py
│ └── sp500.txt
└── src
│ ├── batch
│ ├── Dockerfile
│ └── python
│ │ ├── __init__.py
│ │ ├── portfolio_tracker.py
│ │ ├── subscribe_market_data.py
│ │ └── test_docker.py
│ ├── bpipe-testing
│ ├── Dockerfile
│ ├── Miniconda3-latest-Linux-x86_64.sh
│ ├── bpipe.py
│ ├── bpipe_native_example.py
│ ├── bpipe_scratch.py
│ ├── config_env.sh
│ ├── stream_data.py
│ ├── test_market_data.py
│ └── test_portfolio.py
│ ├── config
│ ├── portfolio_tracker_cfg.json
│ ├── portfolio_tracker_cfg_BPIPE.json
│ └── portfolio_tracker_cfg_IEX.json
│ ├── lambda
│ ├── Dockerfile
│ └── python
│ │ ├── __init__.py
│ │ ├── handle_portfolio_update
│ │ ├── __init__.py
│ │ └── lambda_function.py
│ │ ├── intraday_close
│ │ ├── __init__.py
│ │ └── lambda_function.py
│ │ ├── intraday_momentum
│ │ ├── __init__.py
│ │ └── lambda_function.py
│ │ ├── schedule_listener
│ │ ├── __init__.py
│ │ └── lambda_function.py
│ │ ├── system_event_listener
│ │ ├── __init__.py
│ │ └── lambda_function.py
│ │ └── test_custom_layer
│ │ ├── __init__.py
│ │ └── lambda_function.py
│ ├── shared
│ └── python
│ │ ├── aws_quant_infra.py
│ │ ├── aws_quant_market_data.py
│ │ ├── aws_quant_risk.py
│ │ ├── run_quant.py
│ │ └── test_quant.py
│ └── utils
│ ├── nyse-ticker-list.csv
│ ├── portfolio-test_ptf_50.json
│ ├── portfolio_generator.py
│ └── resize_root.sh
├── bandit.toml
├── deployment.sh
├── grafana-infra
├── README.md
├── bin
│ └── cdk-grafana.ts
├── cdk.context.json
├── cdk.json
├── img
│ ├── Network.drawio
│ └── diagram01.png
├── jest.config.js
├── lib
│ └── cdk-grafana-stack.ts
├── package-lock.json
├── package.json
├── test
│ └── grafana.test.ts
└── tsconfig.json
└── portfolio-test_ptf_50.json
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | ## Code of Conduct
2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
4 | opensource-codeofconduct@amazon.com with any additional questions or comments.
5 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing Guidelines
2 |
3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional
4 | documentation, we greatly value feedback and contributions from our community.
5 |
6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary
7 | information to effectively respond to your bug report or contribution.
8 |
9 |
10 | ## Reporting Bugs/Feature Requests
11 |
12 | We welcome you to use the GitHub issue tracker to report bugs or suggest features.
13 |
14 | When filing an issue, please check existing open, or recently closed, issues to make sure somebody else hasn't already
15 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful:
16 |
17 | * A reproducible test case or series of steps
18 | * The version of our code being used
19 | * Any modifications you've made relevant to the bug
20 | * Anything unusual about your environment or deployment
21 |
22 |
23 | ## Contributing via Pull Requests
24 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that:
25 |
26 | 1. You are working against the latest source on the *main* branch.
27 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already.
28 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted.
29 |
30 | To send us a pull request, please follow the steps below (a typical command-line flow is sketched after the list):
31 |
32 | 1. Fork the repository.
33 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change.
34 | 3. Ensure local tests pass.
35 | 4. Commit to your fork using clear commit messages.
36 | 5. Send us a pull request, answering any default questions in the pull request interface.
37 | 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation.
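A typical command-line flow looks something like this (a sketch; the fork URL and branch name are placeholders):

```sh
# clone your fork and create a focused branch
git clone https://github.com/<your-username>/quant-trading.git
cd quant-trading
git checkout -b my-fix

# ...edit, run the local tests...

git commit -am "Describe the change"
git push origin my-fix   # then open a pull request from your fork
```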
38 |
39 | GitHub provides additional documentation on [forking a repository](https://help.github.com/articles/fork-a-repo/) and
40 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/).
41 |
42 |
43 | ## Finding contributions to work on
44 | Looking at the existing issues is a great way to find something to contribute to. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any 'help wanted' issues is a great place to start.
45 |
46 |
47 | ## Code of Conduct
48 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
49 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
50 | opensource-codeofconduct@amazon.com with any additional questions or comments.
51 |
52 |
53 | ## Security issue notifications
54 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public GitHub issue.
55 |
56 |
57 | ## Licensing
58 |
59 | See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
60 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT No Attribution
2 |
3 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy of
6 | this software and associated documentation files (the "Software"), to deal in
7 | the Software without restriction, including without limitation the rights to
8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
9 | the Software, and to permit persons to whom the Software is furnished to do so.
10 |
11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
12 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
13 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
14 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
15 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
16 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
17 |
18 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | [![Contributors][contributors-shield]][contributors-url]
5 | [![Forks][forks-shield]][forks-url]
6 | [![Stargazers][stars-shield]][stars-url]
7 | [![Issues][issues-shield]][issues-url]
8 | [![MIT License][license-shield]][license-url]
9 | [![LinkedIn][linkedin-shield]][linkedin-url]
10 |
11 |
12 |
13 |
14 |
15 |
32 |
33 |
34 | ## Table of Contents
35 |
36 | - [About The Project](#about-the-project)
37 | - [Getting Started](#getting-started)
38 | - [Usage](#usage)
39 | - [Roadmap](#roadmap)
40 | - [Contributing](#contributing)
41 | - [License](#license)
42 | - [Contact](#contact)
43 | - [Acknowledgments](#acknowledgments)
44 |
61 |
62 |
63 |
64 |
65 |
66 | ## About The Project
67 |
68 | ![Architecture Diagram of Installation][product-screenshot1]
69 | ![Architecture Diagram of Operational View][product-screenshot2]
70 |
73 | This is a codebase to initialize the underlying infrastructure and stacks needed for real-time quantitative trading, along with the basic application-level logic. As a quant, your job is to focus on quantitative logic, but in practice you also have to worry about the underlying infrastructure and many different layers when deploying solutions: where to run, how to achieve elasticity, and so on. This repository and its event-driven infrastructure aim to give you a quick start and entry point into your own quantitative work and to help alleviate these challenges. The solution takes care of the SDLC, market data durability, market data connectivity, DevOps (elasticity), and management of the underlying infrastructure. We use P&L calculations just as an example, but we'll leave the secret sauce up to you.
74 |
75 | This real-time market portfolio application on AWS is set up through the [AWS CDK](https://aws.amazon.com/cdk/). The deployed CDK infrastructure comes with an example portfolio of the S&P 500 based on intraday momentum. The intraday momentum pattern says that the first half-hour return on the market, measured from the previous day's close, predicts the last half-hour return. This predictability is stronger on more volatile days, on higher-volume days, on recession days, and on major macroeconomic news release days.
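As a rough sketch (not taken from this repository, and assuming regular US equity trading hours), the pattern can be written as a predictive regression of the last half-hour return on the first half-hour return:

```math
r_{\text{last}} = \alpha + \beta \, r_{\text{first}} + \varepsilon,
\qquad
r_{\text{first}} = \frac{P_{10{:}00}}{P_{\text{prev close}}} - 1,
\qquad
r_{\text{last}} = \frac{P_{16{:}00}}{P_{15{:}30}} - 1
```

A significantly positive slope (beta), which tends to be larger on the volatile, high-volume, and news-heavy days described above, is what an intraday momentum strategy trades on.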
76 |
77 |
78 | (back to top)
79 |
80 |
81 |
82 | ## Getting Started
83 |
84 | This project is deployed using the AWS CDK; the steps below walk you through the initial setup and deployment.
85 |
86 | ### Initial Setup
87 |
88 | You will use [AWS Cloud9](https://aws.amazon.com/cloud9/) as the IDE to set up the code and deploy the CDK environment. You can also use a different IDE if you’d prefer.
89 |
90 | 1. Navigate to the **AWS Cloud9** console and press **Create environment**.
91 | 2. Enter a name - **MarketPortfolioEnv**.
92 | 3. Use a **t2.micro** instance type.
93 | 4. Leave all other settings as default and choose **Create**.
94 | 5. After a few minutes, the environment should be created. Under **Cloud9 IDE**, press **Open**.
95 | 6. In the command line at the bottom, clone the [Git repository](https://github.com/aws-samples/quant-trading) using the following command:
96 | ```sh
97 | git clone https://github.com/aws-samples/quant-trading.git
98 | ```
99 |
100 |
101 | ### CDK Deployment
102 |
103 | Now that the environment is set up, let’s deploy the application using the CDK. You’ll need to run a few commands to prepare the CDK environment; after that, the entire application can be spun up through the CDK.
104 |
105 | 1. In the **Cloud9 CLI**, type in the following commands to navigate to the CDK portion of the code and install the necessary dependencies:
106 | ```sh
107 | cd quant-trading/aws-quant-infra/deployment/cdk &&
108 | npm install
109 | ```
110 | 2. Use this command to bootstrap the environment:
111 | ```sh
112 | cdk bootstrap
113 | ```
114 | 3. Run this command to download the required AppConfig Lambda layer:
115 | ```sh
116 | sudo yum install jq -y &&
117 | aws lambda get-layer-version-by-arn --arn arn:aws:lambda:us-east-1:027255383542:layer:AWS-AppConfig-Extension:110 | jq -r '.Content.Location' | xargs curl -o ../../src/lambda/extension.zip
118 | ```
119 | 4. Now, deploy the application with the CDK by entering this command:
120 | ```sh
121 | cdk deploy --all
122 | ```
123 |
124 | *Note*: if the Docker build fails with a “no space left on device” error, run this command:
125 | ```sh
126 | chmod +x ./../../src/utils/resize_root.sh &&
127 | ./../../src/utils/resize_root.sh 50
128 | ```
129 |
130 | *Note*: If the DB stack fails while creating the DynamoDB replica, delete the replica from the DynamoDB console and then redeploy the CDK stack.
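As an alternative to the console (a sketch, assuming the table uses the current global tables version that supports replica updates via `UpdateTable`; match the table name and region to your deployment), the replica can be removed with the AWS CLI before redeploying:

```sh
aws dynamodb update-table \
  --table-name MvpPortfolioMonitoringPortfolioTable \
  --replica-updates 'Delete={RegionName=us-east-2}'
```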
131 |
132 |
133 |
134 | ### Adding API Key
135 |
136 | You can have data come in from either IEX or B-PIPE (Bloomberg Market Data Feed). In this section, you’ll enter the API key in Secrets Manager, which enables the Intraday Momentum application to start working and lets data flow in from the market data feed.
137 |
138 | 1. Navigate to the **AWS Secrets Manager** console.
139 | 2. You should see two secrets created: `api_token_pk_sandbox` and `api_token_pk`.
140 |
141 | ![Secrets Manager Keys][secrets-manager]
142 |
143 | 3. Select `api_token_pk`.
144 | 4. Scroll down to the section that says **Secret value** and towards the right, select **Retrieve secret value**.
145 |
146 | ![Secret Value][secret-value]
147 |
148 | 5. Then, choose **Edit** and paste in your IEX or B-PIPE API key.
149 | 6. Press **Save**.
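If you prefer the command line, the same value can be set with the AWS CLI (a sketch; replace the placeholder with your own key):

```sh
aws secretsmanager put-secret-value \
  --secret-id api_token_pk \
  --secret-string "<YOUR_IEX_OR_BPIPE_API_KEY>"
```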
150 |
151 |
152 | ### Looking at the Results
153 |
154 | You can view the results of the Intraday Momentum application after the trading day ends by going to the DynamoDB table.
155 |
156 |
157 | 1. Navigate to the **AWS DynamoDB** console.
158 | 2. On the left, select **Tables** and then choose the table called `MvpPortfolioMonitoringPortfolioTable`.
159 | 3. Then, press the orange button in the top right that says **Explore table items**.
160 |
161 | ![DynamoDB Table Items][dynamodb-table-items]
162 |
163 | 4. You should then see data populated at the bottom under **Items returned**.
164 |
165 | *Note*: If you don’t see any data, select the orange **Run** button to scan the table and retrieve the data.
166 |
167 | 5. If you’d like to analyze this data further, you can download it in CSV format by selecting **Actions**, then **Download results to CSV**.
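For a quick look from the command line, the same table can also be scanned with the AWS CLI (a sketch using the table name above):

```sh
aws dynamodb scan \
  --table-name MvpPortfolioMonitoringPortfolioTable \
  --max-items 10
```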
168 |
169 |
170 |
171 |
172 | ## Usage
173 |
174 | Add additional screenshots, code examples and demos...
175 |
176 | _For more examples, please refer to the [Documentation](https://example.com)_
177 |
178 | (back to top)
179 |
180 |
181 |
182 |
183 | ## Roadmap
184 |
185 | - [x] Add Changelog
186 | - [x] Add back to top links
187 | - [ ] Add Additional Templates w/ Examples
188 | - [ ] Add "components" document to easily copy & paste sections of the readme
189 | - [ ] Multi-language Support
190 | - [ ] Chinese
191 | - [ ] Spanish
192 |
193 | See the [open issues](https://github.com/aws-samples/quant-trading/issues) for a full list of proposed features (and known issues).
194 |
195 | (back to top)
196 |
197 | ### Built With
198 |
199 | This project is bootstrapped with the AWS CDK.
200 |
201 | * [![CDK][aws-cdk]][cdk-url]
209 |
210 | (back to top)
211 |
212 |
213 | ## Contributing
214 |
215 | Contributions are what make the open source community such an amazing place to learn, inspire, and create. Any contributions you make are **greatly appreciated**.
216 |
217 | If you have a suggestion that would make this better, please fork the repo and create a pull request. You can also simply open an issue with the tag "enhancement".
218 | Don't forget to give the project a star! Thanks again!
219 |
220 | 1. Fork the Project
221 | 2. Create your Feature Branch (`git checkout -b feature/AmazingFeature`)
222 | 3. Commit your Changes (`git commit -m 'Add some AmazingFeature'`)
223 | 4. Push to the Branch (`git push origin feature/AmazingFeature`)
224 | 5. Open a Pull Request
225 |
226 | (back to top)
227 |
228 |
229 |
230 |
231 | ## License
232 |
233 | Distributed under the MIT No Attribution (MIT-0) License. See `LICENSE` for more information.
234 |
235 | (back to top)
236 |
237 |
238 |
239 |
240 | ## Contact
241 |
242 | Your Name - [@your_twitter](https://twitter.com/your_username) - email@example.com
243 |
244 | Project Link: [https://github.com/aws-samples/quant-trading](https://github.com/aws-samples/quant-trading)
245 |
246 | (back to top)
247 |
248 |
249 |
250 |
251 | ## Acknowledgments
252 |
253 | Use this space to list resources you find helpful and would like to give credit to. I've included a few of my favorites to kick things off!
254 |
255 | * [Choose an Open Source License](https://choosealicense.com)
256 | * [GitHub Emoji Cheat Sheet](https://www.webpagefx.com/tools/emoji-cheat-sheet)
257 | * [Malven's Flexbox Cheatsheet](https://flexbox.malven.co/)
258 | * [Malven's Grid Cheatsheet](https://grid.malven.co/)
259 | * [Img Shields](https://shields.io)
260 | * [GitHub Pages](https://pages.github.com)
261 | * [Font Awesome](https://fontawesome.com)
262 | * [React Icons](https://react-icons.github.io/react-icons/search)
263 |
264 | (back to top)
265 |
266 |
267 |
268 |
269 |
270 | [contributors-shield]: https://img.shields.io/github/contributors/aws-samples/quant-trading?style=for-the-badge
271 | [contributors-url]: https://github.com/aws-samples/quant-trading/graphs/contributors
272 | [forks-shield]: https://img.shields.io/github/forks/aws-samples/quant-trading?style=for-the-badge
273 | [forks-url]: https://github.com/aws-samples/quant-trading/network/members
274 | [stars-shield]: https://img.shields.io/github/stars/aws-samples/quant-trading?style=for-the-badge
275 | [stars-url]: https://github.com/aws-samples/quant-trading/stargazers
276 | [issues-shield]: https://img.shields.io/github/issues/aws-samples/quant-trading?style=for-the-badge
277 | [issues-url]: https://github.com/aws-samples/quant-trading/issues
278 | [license-shield]: https://img.shields.io/github/license/aws-samples/quant-trading.svg?style=for-the-badge
279 | [license-url]: https://github.com/aws-samples/quant-trading/blob/main/LICENSE
280 | [linkedin-shield]: https://img.shields.io/badge/-LinkedIn-black.svg?style=for-the-badge&logo=linkedin&colorB=555
281 | [linkedin-url]: https://www.linkedin.com/in/sam-farber/
282 | [product-screenshot1]: assets/AWS_Quant_Real_Time_Installation.drawio.png
283 | [product-screenshot2]: assets/AWS_Quant_Real_Time_Operational_View.drawio.png
284 | [secrets-manager]: assets/installation/secrets_manager.png
285 | [secret-value]: assets/installation/secret_value.png
286 | [dynamodb-table-items]: assets/installation/dynamodb_table_items.png
287 | [aws-cdk]: https://img.shields.io/badge/-AWS%20CDK-orange
288 | [cdk-url]: https://aws.amazon.com/cdk/
303 |
--------------------------------------------------------------------------------
/assets/AWS_Quant_Real_Time_Installation.drawio.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/quant-trading/f1dd9ace0c732cae3d0b103affae3d6e6b2da80f/assets/AWS_Quant_Real_Time_Installation.drawio.png
--------------------------------------------------------------------------------
/assets/AWS_Quant_Real_Time_Operational_View.drawio.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/quant-trading/f1dd9ace0c732cae3d0b103affae3d6e6b2da80f/assets/AWS_Quant_Real_Time_Operational_View.drawio.png
--------------------------------------------------------------------------------
/assets/aws-cdk.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/quant-trading/f1dd9ace0c732cae3d0b103affae3d6e6b2da80f/assets/aws-cdk.png
--------------------------------------------------------------------------------
/assets/cdk.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/quant-trading/f1dd9ace0c732cae3d0b103affae3d6e6b2da80f/assets/cdk.webp
--------------------------------------------------------------------------------
/assets/installation/dynamodb_table_items.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/quant-trading/f1dd9ace0c732cae3d0b103affae3d6e6b2da80f/assets/installation/dynamodb_table_items.png
--------------------------------------------------------------------------------
/assets/installation/secret_value.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/quant-trading/f1dd9ace0c732cae3d0b103affae3d6e6b2da80f/assets/installation/secret_value.png
--------------------------------------------------------------------------------
/assets/installation/secrets_manager.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/quant-trading/f1dd9ace0c732cae3d0b103affae3d6e6b2da80f/assets/installation/secrets_manager.png
--------------------------------------------------------------------------------
/aws-quant-infra/README.md:
--------------------------------------------------------------------------------
1 | # Serverless Framework with JavaScript
2 |
3 | ---
4 |
5 | Example project using the [Serverless Framework](https://serverless.com), JavaScript, AWS Lambda, AWS API Gateway and GitLab Pages.
6 |
7 | ---
8 |
9 | ## Deployment
10 | ---
11 |
12 | ### Secrets
13 |
14 | Secrets are injected into your functions using environment variables. By defining variables in the provider section of the `serverless.yml` you add them to the environment of the deployed function. From there, you can reference them in your functions as well.
15 |
16 | So you would add something like:
17 | ```yml
18 | provider:
19 | environment:
20 | A_VARIABLE: ${env:A_VARIABLE}
21 | ```
22 | to your `serverless.yml`, and then you can add `A_VARIABLE` to your GitLab CI variables and it will get picked up and deployed with your function.
23 |
24 | For local development, we suggest installing something like [dotenv](https://www.npmjs.com/package/dotenv) to manage environment variables.
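For example (a minimal sketch; the value shown is just a placeholder), you could keep local values in a `.env` file and have your handler load them through `dotenv`:

```sh
npm install --save-dev dotenv
echo "A_VARIABLE=some-local-value" >> .env   # your code can then call dotenv's config() at startup
```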
25 |
26 | ### Setting Up AWS
27 |
28 | 1. Create AWS credentials for an IAM user with the following IAM policies attached: `AWSLambdaFullAccess`, `AmazonAPIGatewayAdministrator` and `AWSCloudFormationFullAccess`.
29 | 1. Set the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` variables in the GitLab CI/CD settings. `Settings > CI/CD > Variables`.
30 |
31 | ### Accessing Page
32 |
33 | To view your page go to `Settings > Pages` and click on the given link.
34 |
35 | ## Development
36 |
37 | ### Running Locally
38 |
39 | Install dependencies with:
40 |
41 | ```sh
42 | npm install
43 | ```
44 |
45 | Run backend server with:
46 |
47 | ```sh
48 | npm start
49 | ```
50 |
51 | This runs the serverless function locally using `serverless-offline` plugin.
52 |
53 | Run frontend with:
54 |
55 | ```sh
56 | npm run pages
57 | ```
58 |
59 | The frontend should be available at `http://localhost:8080`
60 |
61 | ### Running Tests
62 | ```sh
63 | npm test
64 | ```
65 |
66 | #### Unit Tests
67 |
68 | For the serverless backend, unit tests live with the src files as `srcFile.test.js`. The unit tests use the `serverless-jest-plugin` and lambda wrapper to simulate events to the functions and validate their outputs.
69 |
70 | #### Feature Tests
71 |
72 | Feature tests live in the folder `featureTests`. Those tests allow us to spin up serverless offline as a service and make requests against it and validate the results of those requests.
73 |
74 | Feature tests double as post-deploy tests when the environment variable `STACK_JSON_FILE` is set to the path of the file generated on deployment (`stack.json`); see `gitlab-ci.yml`.
75 |
76 | A typical feature test will look something like:
77 |
78 | ```javascript
79 | // This helper provides access to the serverless process and an axios instance
80 | // to make requests against the running service.
81 | const { serverlessProcess, serverlessService } = require('./helper.js')
82 |
83 | describe('some_function', () => {
84 | beforeAll(async () => {
85 | // serverlessProcess.start starts serverless offline in a child process
86 | await serverlessProcess.start()
87 | })
88 |
89 | afterAll(() => {
90 | // serverlessProcess.stop kills the child process at the end of the test
91 | serverlessProcess.stop()
92 | })
93 |
94 | it('responds to a request', async () => {
95 | // The axios instance has the base url and port already, so you just have
96 | // to provide a route and any parameters or headers. See the axios project
97 | // for details.
98 | let response = await serverlessService.get('/some_route?param=here')
99 |
100 | expect(response.data.info).toEqual('amazing')
101 | })
102 | });
103 | ```
104 |
105 | ## Additional information
106 |
107 | ### Getting the Endpoint URL
108 |
109 | This project is set up with the `serverless-stack-output` plugin, which is configured to output a JSON file to `./stack.json`. See [this github repo](https://github.com/sbstjn/serverless-stack-output) for more details.
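For example (a sketch, assuming the stack exposes the usual `ServiceEndpoint` output), the endpoint URL can be read from the generated file with `jq`:

```sh
jq -r '.ServiceEndpoint' stack.json
```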
110 |
111 |
--------------------------------------------------------------------------------
/aws-quant-infra/deployment/cdk/README.md:
--------------------------------------------------------------------------------
1 | # Welcome to your CDK TypeScript project!
2 |
3 | This is a blank project for TypeScript development with CDK.
4 |
5 | The `cdk.json` file tells the CDK Toolkit how to execute your app.
6 |
7 | ## Useful commands
8 |
9 | * `npm run build` compile typescript to js
10 | * `npm run watch` watch for changes and compile
11 | * `npm run test` perform the jest unit tests
12 | * `cdk deploy` deploy this stack to your default AWS account/region
13 | * `cdk diff` compare deployed stack with current state
14 | * `cdk synth` emits the synthesized CloudFormation template
15 |
16 |
17 | ## TODO
18 | - Update docker for referencing ml_infra in code
19 | - Scope down IAM roles while maintaining functionality
20 | - Add pipeline for batch + lambda updating
21 | - Incorporate passing projectId to docker for pulling from parameter store
22 | - Test lambda batch images
--------------------------------------------------------------------------------
/aws-quant-infra/deployment/cdk/bin/cdk.ts:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 | import * as cdk from 'aws-cdk-lib';
3 | import { Construct } from 'constructs';
4 | // import * as cdk from '@aws-cdk/core';
5 | import { EnvironmentStack } from '../lib/environment';
6 | import { DatabaseStack } from '../lib/database';
7 | import { BatchStack } from '../lib/batch';
8 | import { LambdaStack } from '../lib/lambda';
9 | import { AppConfigStack } from '../lib/appConfig';
10 | import { SecretsStack } from '../lib/secrets';
11 |
12 | import { AwsSolutionsChecks } from 'cdk-nag'
13 | import { Aspects } from 'aws-cdk-lib';
14 |
15 | const app = new cdk.App();
16 | //Aspects.of(app).add(new AwsSolutionsChecks({ verbose: true }));
17 |
18 | const env = {
19 | // account: process.env.CDK_DEFAULT_ACCOUNT,
20 | // region: app.node.tryGetContext('us-west-2')
21 | region: 'us-west-2'
22 | };
23 |
24 | const envStack = new EnvironmentStack(app, 'EnvStack', {
25 | env: env
26 | });
27 | const dbStack = new DatabaseStack(app, 'DbStack', {
28 | env: env
29 | });
30 | const appConfigStack = new AppConfigStack(app, 'AppConfigStack', {
31 | env: env
32 | });
33 | const secretsStack = new SecretsStack(app, 'SecretsStack', {
34 | env: env,
35 | computePolicy: envStack.computePolicy
36 | });
37 | const batchStack = new BatchStack(app, 'BatchStack', {
38 | env: env,
39 | vpc: envStack.vpc,
40 | computePolicy: envStack.computePolicy
41 | });
42 | batchStack.addDependency(appConfigStack);
43 | batchStack.addDependency(secretsStack);
44 | const lambdaStack = new LambdaStack(app, 'LambdaStack', {
45 | env: env,
46 | portfoliosTable: dbStack.portfolioTable,
47 | portfolioSystemEventsTable: dbStack.systemEventsTable,
48 | computePolicy: envStack.computePolicy
49 | });
50 | lambdaStack.addDependency(appConfigStack);
51 | lambdaStack.addDependency(secretsStack);
52 |
53 |
54 |
--------------------------------------------------------------------------------
/aws-quant-infra/deployment/cdk/cdk.context.json:
--------------------------------------------------------------------------------
1 | {
2 | "deployment_prefix": "Mvp",
3 | "project": "PortfolioMonitoring",
4 | "region": "us-east-1",
5 | "cidr_range": "10.251.0.0/16",
6 | "cidr_subnet_mask": 24,
7 | "portfolio_table_replica_regions": [
8 | "us-east-2"
9 | ],
10 | "remove_tables": true,
11 | "availability-zones:account=XXXXXXXXXXXX:region=us-east-1": [
12 | "us-east-1a",
13 | "us-east-1b",
14 | "us-east-1c",
15 | "us-east-1d",
16 | "us-east-1e",
17 | "us-east-1f"
18 | ],
19 | "jobPortfolioTrackerLogGroup": "/aws/batch/portfolio-tracker/logs",
20 | "jobMarketDataLogGroup": "/aws/batch/market-data/logs",
21 | "availability-zones:account=XXXXXXXXXXXX:region=us-east-1": [
22 | "us-east-1a",
23 | "us-east-1b",
24 | "us-east-1c",
25 | "us-east-1d",
26 | "us-east-1e",
27 | "us-east-1f"
28 | ],
29 | "availability-zones:account=XXXXXXXXXXXX:region=us-east-1": [
30 | "us-east-1a",
31 | "us-east-1b",
32 | "us-east-1c",
33 | "us-east-1d",
34 | "us-east-1e",
35 | "us-east-1f"
36 | ],
37 | "availability-zones:account=XXXXXXXXXXXX:region=us-east-1": [
38 | "us-east-1a",
39 | "us-east-1b",
40 | "us-east-1c",
41 | "us-east-1d",
42 | "us-east-1e",
43 | "us-east-1f"
44 | ],
45 | "availability-zones:account=XXXXXXXXXXXX:region=us-east-1": [
46 | "us-east-1a",
47 | "us-east-1b",
48 | "us-east-1c",
49 | "us-east-1d",
50 | "us-east-1e",
51 | "us-east-1f"
52 | ]
53 | }
54 |
--------------------------------------------------------------------------------
/aws-quant-infra/deployment/cdk/cdk.json:
--------------------------------------------------------------------------------
1 | {
2 | "app": "npx ts-node --prefer-ts-exts bin/cdk.ts",
3 | "context": {
4 | "@aws-cdk/aws-apigateway:usagePlanKeyOrderInsensitiveId": true,
5 | "@aws-cdk/core:stackRelativeExports": true,
6 | "@aws-cdk/aws-rds:lowercaseDbIdentifier": true,
7 | "@aws-cdk/aws-lambda:recognizeVersionProps": true,
8 | "@aws-cdk/aws-lambda:recognizeLayerVersion": true,
9 | "@aws-cdk/aws-cloudfront:defaultSecurityPolicyTLSv1.2_2021": true,
10 | "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true,
11 | "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true,
12 | "@aws-cdk/core:checkSecretUsage": true,
13 | "@aws-cdk/aws-iam:minimizePolicies": true,
14 | "@aws-cdk/core:newStyleStackSynthesis": false,
15 | "@aws-cdk/core:target-partitions": [
16 | "aws",
17 | "aws-cn"
18 | ]
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/aws-quant-infra/deployment/cdk/deployment.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | npm install
4 |
5 | #From the CDK folder
6 | #Requirements: have JQ installed: yum install jq -y or apt install jq -y
7 | aws lambda get-layer-version-by-arn --arn arn:aws:lambda:us-east-2:728743619870:layer:AWS-AppConfig-Extension:77 | jq -r '.Content.Location' | xargs curl -o ../../src/lambda/extension.zip
8 |
9 | cdk bootstrap
10 |
11 | cdk diff --no-color &> changes.txt
12 |
13 | cdk deploy "*" --outputs-file outputs.json
14 |
15 | clone_url=$(cat outputs.json | jq 'with_entries( select(.key | contains("SDLCStack")))'[] | jq -r 'with_entries( select(.key | contains("RepoCloneUrl")))'[])
16 |
17 | git config --global user.name "AwsQuantInitDeploy"
18 | git config --global user.email ""
19 |
20 | git config --global credential.helper '!aws codecommit credential-helper $@'
21 | git config --global credential.UseHttpPath true
22 |
23 | echo $clone_url
--------------------------------------------------------------------------------
/aws-quant-infra/deployment/cdk/jest.config.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | testEnvironment: 'node',
3 | roots: ['<rootDir>/test'],
4 | testMatch: ['**/*.test.ts'],
5 | transform: {
6 | '^.+\\.tsx?$': 'ts-jest'
7 | }
8 | };
9 |
--------------------------------------------------------------------------------
/aws-quant-infra/deployment/cdk/lib/appConfig.ts:
--------------------------------------------------------------------------------
1 | // import * as cdk from '@aws-cdk/core';
2 | import * as cdk from 'aws-cdk-lib';
3 | import { Construct } from 'constructs';
4 | import { Stack, App } from 'aws-cdk-lib';
5 | import { NagSuppressions } from 'cdk-nag';
6 | import {aws_appconfig as appconfig} from 'aws-cdk-lib';
7 | // import * as appconfig from '@aws-cdk/aws-appconfig';
8 | import * as appconfigs from '../../../src/config/portfolio_tracker_cfg.json';
9 | import { SsmManager } from './shared/ssm-manager';
10 |
11 | export class AppConfigStack extends cdk.Stack {
12 | private projectId: string;
13 | private project: string;
14 |
15 | public appConfig: appconfig.CfnApplication;
16 |
17 | constructor(scope: Construct, id: string, props?: cdk.StackProps) {
18 | super(scope, id, props);
19 |
20 | this.project = this.node.tryGetContext('project');
21 | const prefix = this.node.tryGetContext('deployment_prefix');
22 | this.projectId = `${prefix}${this.project}`;
23 |
24 | this.appConfig = this.createAppConfig();
25 | NagSuppressions.addStackSuppressions(this, [ { id: 'AwsSolutions-IAM4', reason: 'lorem ipsum' }]);
26 | NagSuppressions.addStackSuppressions(this, [ { id: 'AwsSolutions-IAM5', reason: 'lorem ipsum' }]);
27 | }
28 |
29 | private createAppConfig(): appconfig.CfnApplication{
30 | const app = this.createApp();
31 | const env = this.createEnvironment(app);
32 | const profile = this.createConfigProfile(app);
33 | const hostedProfile = this.createHostedConfigProfile(app, profile);
34 | const deploymentStrat = this.createDeploymentStrategy();
35 | const deployment = this.createDeployment(app, profile, env, deploymentStrat, hostedProfile.ref);
36 | deployment.addDependsOn(hostedProfile);
37 | new SsmManager(this, `${this.projectId}AppConfigSsmManager`).setParameterValue({
38 | value: this.toJsonString({
39 | Application: app.name,
40 | Configuration: profile.name,
41 | [env.name]: hostedProfile.ref
42 | }),
43 | valueName: 'AppConfigDetails'
44 | });
45 | return app;
46 | }
47 | private createApp(): appconfig.CfnApplication {
48 | return new appconfig.CfnApplication(this, `${this.projectId}AppConfigApp`, {
49 | name: this.project,
50 | description: `${this.project} App configurations`
51 | });
52 | }
53 | private createEnvironment(app: appconfig.CfnApplication): appconfig.CfnEnvironment {
54 | const env = appconfigs.env;
55 |
56 | return new appconfig.CfnEnvironment(this, `${this.projectId}${env}`, {
57 | applicationId: app.ref,
58 | name: `${env}`,
59 | description: `${this.project} ${env} Environment`
60 | });
61 | }
62 | private createConfigProfile(app: appconfig.CfnApplication): appconfig.CfnConfigurationProfile {
63 | return new appconfig.CfnConfigurationProfile(this, `${this.projectId}ConfigProfile`, {
64 | applicationId: app.ref,
65 | name: `${this.project}ConfigProfile`,
66 | locationUri: 'hosted',
67 | description: `${this.project} configuration profile`
68 | });
69 | }
70 | private createHostedConfigProfile(app: appconfig.CfnApplication, configProfile: appconfig.CfnConfigurationProfile): appconfig.CfnHostedConfigurationVersion {
71 | return new appconfig.CfnHostedConfigurationVersion(this, `${this.projectId}HostedConfigProfile`, {
72 | applicationId: app.ref,
73 | configurationProfileId: configProfile.ref,
74 | contentType: 'application/json',
75 | content: this.toJsonString(appconfigs),
76 | });
77 | }
78 | private createDeploymentStrategy(): appconfig.CfnDeploymentStrategy {
79 | const env = appconfigs.env;
80 |
81 | return new appconfig.CfnDeploymentStrategy(this, `${this.projectId}DeploymentStrategy`, {
82 | name: 'Custom.AllAtOnce',
83 | deploymentDurationInMinutes: 0,
84 | growthFactor: 100,
85 | finalBakeTimeInMinutes: 0,
86 | replicateTo: 'NONE',
87 | growthType: 'LINEAR',
88 | description: `${this.project} ${env} configs deployment strategy - All at once deployment (i.e., immediate)`
89 | });
90 | }
91 | private createDeployment(app: appconfig.CfnApplication, configProfile: appconfig.CfnConfigurationProfile, configEnv: appconfig.CfnEnvironment, configDeploymentStrat: appconfig.CfnDeploymentStrategy, version: string): appconfig.CfnDeployment {
92 | return new appconfig.CfnDeployment(this, `${this.projectId}Deployment`, {
93 | applicationId: app.ref,
94 | configurationProfileId: configProfile.ref,
95 | configurationVersion: version,
96 | deploymentStrategyId: configDeploymentStrat.ref,
97 | environmentId: configEnv.ref,
98 | });
99 | }
100 | }
--------------------------------------------------------------------------------
/aws-quant-infra/deployment/cdk/lib/batch.ts:
--------------------------------------------------------------------------------
1 | // import * as cdk from '@aws-cdk/core';
2 | import * as cdk from 'aws-cdk-lib';
3 | import { Construct } from 'constructs';
4 |
5 | // import * as ec2 from '@aws-cdk/aws-ec2';
6 | import { aws_ec2 as ec2 } from 'aws-cdk-lib';
7 | // import { DockerImageAsset } from '@aws-cdk/aws-ecr-assets';
8 | import { DockerImageAsset } from 'aws-cdk-lib/aws-ecr-assets';
9 | // import * as batch from '@aws-cdk/aws-batch';
10 | import { aws_batch as batch } from 'aws-cdk-lib';
11 | import { SsmManager } from './shared/ssm-manager';
12 | // import * as iam from '@aws-cdk/aws-iam';
13 | import { aws_iam as iam } from 'aws-cdk-lib';
14 | import * as path from 'path';
15 | import { NagSuppressions } from 'cdk-nag';
16 | interface BatchProps extends cdk.StackProps {
17 | vpc: ec2.IVpc;
18 | computePolicy: iam.ManagedPolicy;
19 | }
20 |
21 | export class BatchStack extends cdk.Stack {
22 | private ssm: SsmManager;
23 | private prefix: string;
24 | private projectId: string;
25 |
26 | private vpc: ec2.IVpc;
27 | private computePolicy: iam.ManagedPolicy;
28 |
29 | public dockerImage: DockerImageAsset;
30 | private computeEnvironments: batch.CfnComputeEnvironment[] = [];
31 | constructor(scope: Construct, id: string, props: BatchProps) {
32 | super(scope, id, props);
33 | const project = this.node.tryGetContext('project');
34 | this.prefix = this.node.tryGetContext('deployment_prefix');
35 | this.projectId = `${this.prefix}${project}`;
36 | this.ssm = new SsmManager(this, `${this.projectId}EnvSsmManager`);
37 | this.vpc = props.vpc;
38 | this.computePolicy = props.computePolicy;
39 |
40 | this.dockerImage = this.createDockerImage();
41 | this.createComputeEnvironments();
42 | this.createJobQueues();
43 | this.createJobDefinitions();
44 | NagSuppressions.addStackSuppressions(this, [ { id: 'AwsSolutions-IAM4', reason: 'uses AWS managed role - nothing to do' }]);
45 | NagSuppressions.addStackSuppressions(this, [ { id: 'AwsSolutions-IAM5', reason: 'uses AWS managed role - nothing to do' }]);
46 | }
47 |
48 | private createDockerImage(): DockerImageAsset {
49 | const image = new DockerImageAsset(this, `${this.projectId}BatchImage`, {
50 | directory: path.join(__dirname, '../../../src'),
51 | buildArgs: {
52 | SSM_PREFIX: this.prefix,
53 | AWS_REGION: this.region
54 | },
55 | file: 'batch/Dockerfile'
56 | });
57 | this.ssm.setParameterValue({
58 | value: image.imageUri,
59 | valueName: 'BatchImageUri'
60 | });
61 | return image;
62 | }
63 |
64 | private createComputeEnvironments() {
65 | const securityGroup = new ec2.SecurityGroup(this, `${this.projectId}BatchSecurityGroup`, {
66 | vpc: this.vpc,
67 | allowAllOutbound: true,
68 | description: `${this.projectId} Batch Security Group`,
69 | securityGroupName: `${this.projectId}BatchSecurityGroup`
70 | });
71 | const serviceRole = new iam.Role(this, `${this.projectId}BatchServiceRole`, {
72 | roleName: `${this.projectId}BatchServiceRole`,
73 | assumedBy: new iam.ServicePrincipal('batch.amazonaws.com'),
74 | })
75 | serviceRole.addManagedPolicy(iam.ManagedPolicy.fromAwsManagedPolicyName("service-role/AWSBatchServiceRole"));
76 | const instanceRole = new iam.Role(this, `${this.projectId}BatchInstanceRole`, {
77 | roleName: `${this.projectId}BatchInstanceRole`,
78 | assumedBy: new iam.ServicePrincipal('ec2.amazonaws.com'),
79 | managedPolicies: [
80 | iam.ManagedPolicy.fromAwsManagedPolicyName('service-role/AmazonEC2ContainerServiceforEC2Role'),
81 | this.computePolicy
82 | ]
83 | });
84 | instanceRole.addToPolicy(new iam.PolicyStatement({
85 | effect: iam.Effect.ALLOW,
86 | actions: ['sts:AssumeRole'],
87 | resources: ['*']
88 | }));
89 | const instanceProfile = new iam.CfnInstanceProfile(this, `${this.projectId}BatchInstanceProfile`, {
90 | instanceProfileName: `${this.projectId}BatchInstanceProfile`,
91 | roles: [ instanceRole.roleName]
92 | });
93 | const customSubnet = this.node.tryGetContext('custom_subnet_id');
94 | const ec2Od = new batch.CfnComputeEnvironment(this, `${this.projectId}BatchComputeEc2OD`, {
95 | type: 'MANAGED',
96 | state: 'ENABLED',
97 | computeEnvironmentName: `${this.projectId}_ec2_od`,
98 | serviceRole: serviceRole.roleArn,
99 | computeResources: {
100 | minvCpus: 0,
101 | desiredvCpus: 0,
102 | maxvCpus: 128,
103 | instanceTypes: [
104 | "optimal"
105 | ],
106 | type: "EC2",
107 | subnets: customSubnet !== undefined ? [ customSubnet ] : this.vpc.privateSubnets.map(x=>x.subnetId),
108 | allocationStrategy: 'BEST_FIT',
109 | securityGroupIds: [ securityGroup.securityGroupId ],
110 | instanceRole: instanceProfile.attrArn
111 | }
112 | });
113 | const ec2Spot = new batch.CfnComputeEnvironment(this, `${this.projectId}BatchComputeEc2Spot`, {
114 | type: 'MANAGED',
115 | state: 'ENABLED',
116 | computeEnvironmentName: `${this.projectId}_ec2_spot`,
117 | serviceRole: serviceRole.roleArn,
118 | computeResources: {
119 | minvCpus: 0,
120 | desiredvCpus: 0,
121 | maxvCpus: 128,
122 | bidPercentage: 100,
123 | instanceTypes: [
124 | "optimal"
125 | ],
126 | type: "SPOT",
127 | subnets: customSubnet !== undefined ? [ customSubnet ] : this.vpc.privateSubnets.map(x=>x.subnetId),
128 | allocationStrategy: 'SPOT_CAPACITY_OPTIMIZED',
129 | securityGroupIds: [ securityGroup.securityGroupId ],
130 | instanceRole: instanceProfile.attrArn
131 | }
132 | });
133 | const fargate = new batch.CfnComputeEnvironment(this, `${this.projectId}BatchComputeFargate`, {
134 | type: 'MANAGED',
135 | state: 'ENABLED',
136 | computeEnvironmentName: `${this.projectId}_fargate`,
137 | serviceRole: serviceRole.roleArn,
138 | computeResources: {
139 | maxvCpus: 256,
140 | type: "FARGATE",
141 | subnets: customSubnet !== undefined ? [ customSubnet ] : this.vpc.privateSubnets.map(x=>x.subnetId),
142 | securityGroupIds: [ securityGroup.securityGroupId ],
143 | instanceRole: 'dummy-instance-role',
144 | instanceTypes: [ 'dummy-instance-type']
145 | }
146 | });
147 |
148 | fargate.addPropertyDeletionOverride('ComputeResources.InstanceRole');
149 | fargate.addPropertyDeletionOverride('ComputeResources.InstanceTypes');
150 | const fargateSpot = new batch.CfnComputeEnvironment(this, `${this.projectId}BatchComputeFargateSpot`, {
151 | type: 'MANAGED',
152 | state: 'ENABLED',
153 | computeEnvironmentName: `${this.projectId}_fargate_spot`,
154 | serviceRole: serviceRole.roleArn,
155 | computeResources: {
156 | maxvCpus: 256,
157 | type: "FARGATE_SPOT",
158 | subnets: customSubnet !== undefined ? [ customSubnet ] : this.vpc.privateSubnets.map(x=>x.subnetId),
159 | securityGroupIds: [ securityGroup.securityGroupId ],
160 | instanceRole: 'dummy-instance-role',
161 | instanceTypes: [ 'dummy-instance-type']
162 | }
163 | });
164 | fargateSpot.addPropertyDeletionOverride('ComputeResources.InstanceRole');
165 | fargateSpot.addPropertyDeletionOverride('ComputeResources.InstanceTypes');
166 | this.computeEnvironments.push(ec2Od, ec2Spot, fargate, fargateSpot);
167 | this.ssm.setParameterValue({
168 | value: {
169 | 'ec2_od': ec2Od.computeEnvironmentName,
170 | 'ec2_spot': ec2Spot.computeEnvironmentName,
171 | 'fargate_od': fargate.computeEnvironmentName,
172 | 'fargate_spot': fargateSpot.computeEnvironmentName
173 | },
174 | valueName: 'BatchComputeEnvironments'
175 | });
176 | }
177 | private createJobQueues() {
178 | const qEc2 = new batch.CfnJobQueue(this, `${this.projectId}BatchEc2JobQueue`, {
179 | jobQueueName: `${this.projectId}_q_ec2`,
180 | priority: 1,
181 | state: 'ENABLED',
182 | computeEnvironmentOrder: [
183 | {
184 | order: 1,
185 | computeEnvironment: this.computeEnvironments[0].computeEnvironmentName!
186 | },
187 | {
188 | order: 2,
189 | computeEnvironment: this.computeEnvironments[1].computeEnvironmentName!
190 | }
191 | ]
192 | });
193 | qEc2.addDependsOn(this.computeEnvironments[0]);
194 | qEc2.addDependsOn(this.computeEnvironments[1]);
195 | const qFargate = new batch.CfnJobQueue(this, `${this.projectId}BatchFargateJobQueue`, {
196 | jobQueueName: `${this.projectId}_q_fargate`,
197 | priority: 1,
198 | state: 'ENABLED',
199 | computeEnvironmentOrder: [
200 | {
201 | order: 1,
202 | computeEnvironment: this.computeEnvironments[2].computeEnvironmentName!
203 | },
204 | {
205 | order: 2,
206 | computeEnvironment: this.computeEnvironments[3].computeEnvironmentName!
207 | }
208 | ]
209 | });
210 | qFargate.addDependsOn(this.computeEnvironments[2]);
211 | qFargate.addDependsOn(this.computeEnvironments[3]);
212 | this.ssm.setParameterValue({
213 | value: qEc2.jobQueueName!,
214 | valueName: 'BatchEc2JobQueueName'
215 | });
216 | this.ssm.setParameterValue({
217 | value: qFargate.jobQueueName!,
218 | valueName: 'BatchFargateJobQueueName'
219 | });
220 | }
221 | private createJobDefinitions() {
222 | //portfolio_tracker
223 | const portfolioTrackerJob = new batch.CfnJobDefinition(this, `${this.projectId}BatchPortfolioTrackerJobDef`, {
224 | jobDefinitionName: `${this.projectId}_portfolio_tracker`,
225 | type: 'Container',
226 | timeout: {
227 | attemptDurationSeconds: 600000
228 | },
229 | platformCapabilities: [
230 | 'EC2'
231 | ],
232 | containerProperties: {
233 | command: ["/src/portfolio_tracker.py '4cc2ab26e6f7997a5d362bb1ce193005'"],
234 | vcpus: 2,
235 | memory: 8192,
236 | image: this.dockerImage.imageUri
237 | }
238 | });
239 | this.ssm.setParameterValue({
240 | value: portfolioTrackerJob.jobDefinitionName!,
241 | valueName: 'BatchPortfolioTrackerJobDef'
242 | });
243 | //get_market_data_test
244 | const getMarketData = new batch.CfnJobDefinition(this, `${this.projectId}BatchGetMarketDataJobDef`, {
245 | jobDefinitionName: `${this.projectId}_get_market_data`,
246 | type: 'Container',
247 | platformCapabilities: [
248 | 'EC2'
249 | ],
250 | containerProperties: {
251 | command: ["/src/subscribe_market_data.py"],
252 | vcpus: 1,
253 | memory: 4096,
254 | image: this.dockerImage.imageUri
255 | }
256 | });
257 | this.ssm.setParameterValue({
258 | value: getMarketData.jobDefinitionName!,
259 | valueName: 'BatchGetMarketDataJobDef'
260 | });
261 | //test_batch_docker
262 | const testBatchDocker = new batch.CfnJobDefinition(this, `${this.projectId}BatchTestBatchDockerJobDef`, {
263 | jobDefinitionName: `${this.projectId}_test_docker`,
264 | type: 'Container',
265 | platformCapabilities: [
266 | 'EC2'
267 | ],
268 | containerProperties: {
269 | command: ["/src/test_docker.py"],
270 | vcpus: 1,
271 | memory: 2048,
272 | image: this.dockerImage.imageUri
273 | }
274 | });
275 | this.ssm.setParameterValue({
276 | value: testBatchDocker.jobDefinitionName!,
277 | valueName: 'BatchTestBatchDockerJobDef'
278 | });
279 | }
280 | }
--------------------------------------------------------------------------------
/aws-quant-infra/deployment/cdk/lib/database.ts:
--------------------------------------------------------------------------------
1 | // import * as cdk from '@aws-cdk/core';
2 | import * as cdk from 'aws-cdk-lib';
3 | import { Construct } from 'constructs';
4 |
5 | import { SsmManager } from './shared/ssm-manager';
6 | // import * as dynamodb from '@aws-cdk/aws-dynamodb';
7 | import { aws_dynamodb as dynamodb } from 'aws-cdk-lib';
8 | // import * as timestream from '@aws-cdk/aws-timestream';
9 | import { aws_timestream as timestream } from 'aws-cdk-lib';
10 | import { NagSuppressions } from 'cdk-nag';
11 | export class DatabaseStack extends cdk.Stack {
12 | private ssm: SsmManager;
13 | private projectId: string;
14 | private timestreamTables = {
15 | 'market_data_table': 'realtime_data',
16 | 'portfolio_table': 'portfolio_tracker'
17 | };
18 |
19 |
20 | private removalPolicy: cdk.RemovalPolicy;
21 |
22 | public readonly portfolioTable: dynamodb.ITable;
23 | public readonly systemEventsTable: dynamodb.ITable;
24 |
25 | constructor(scope: Construct, id: string, props?: cdk.StackProps) {
26 | super(scope, id, props);
27 | const project = this.node.tryGetContext('project');
28 | const prefix = this.node.tryGetContext('deployment_prefix');
29 | this.projectId = `${prefix}${project}`;
30 | this.ssm = new SsmManager(this, `${this.projectId}DbSsmManager`);
31 |
32 | this.removalPolicy = this.node.tryGetContext('remove_tables') === true ? cdk.RemovalPolicy.DESTROY : cdk.RemovalPolicy.RETAIN;
33 |
34 | this.createPortfolioMapTable();
35 | this.portfolioTable = this.createPortfoliosTable();
36 | this.systemEventsTable = this.createPortfolioSystemEventsTable();
37 | this.createTimestream();
38 | NagSuppressions.addStackSuppressions(this, [ { id: 'AwsSolutions-IAM4', reason: 'uses AWS managed role - nothing to do' }]);
39 | NagSuppressions.addStackSuppressions(this, [ { id: 'AwsSolutions-IAM5', reason: 'uses AWS managed role - nothing to do' }]);
40 | NagSuppressions.addStackSuppressions(this, [ { id: 'AwsSolutions-DDB3', reason: 'uses AWS managed role - nothing to do' }]);
41 | NagSuppressions.addStackSuppressions(this, [ { id: 'AwsSolutions-TS3', reason: 'uses AWS managed role - nothing to do' }]);
42 |
43 |
44 | }
45 |
46 | private createPortfolioMapTable() {
47 | const table = new dynamodb.Table(this, `${this.projectId}PortfolioMap`, {
48 | tableName: `${this.projectId}SymbolTable`,
49 | partitionKey: { name: 'symbol', type: dynamodb.AttributeType.STRING },
50 | billingMode: dynamodb.BillingMode.PAY_PER_REQUEST,
51 | removalPolicy: this.removalPolicy,
52 | });
53 | this.ssm.setParameterValue({
54 | value: table.tableName,
55 | valueName: 'PortfolioMapTable'
56 | });
57 | }
58 |
59 | private createPortfoliosTable(): dynamodb.Table {
60 | const replicaRegions = this.node.tryGetContext('portfolio_table_replica_regions');
61 | const table = new dynamodb.Table(this, `${this.projectId}Portfolios`, {
62 | tableName: `${this.projectId}PortfolioTable`,
63 | partitionKey: { name: 'portf_id', type: dynamodb.AttributeType.STRING },
64 | sortKey: {name: 'portf_create_ts', type: dynamodb.AttributeType.NUMBER },
65 | stream: dynamodb.StreamViewType.NEW_AND_OLD_IMAGES,
66 | billingMode: dynamodb.BillingMode.PAY_PER_REQUEST,
67 | replicationRegions: replicaRegions,
68 | removalPolicy: this.removalPolicy,
69 | });
70 | this.ssm.setParameterValue({
71 | value: table.tableName,
72 | valueName: 'PortfoliosTable'
73 | });
74 | return table;
75 | }
76 | private createPortfolioSystemEventsTable(): dynamodb.Table {
77 | const replicaRegions = this.node.tryGetContext('portfolio_table_replica_regions');
78 | const table = new dynamodb.Table(this, `${this.projectId}SystemEventTable`, {
79 | tableName: `${this.projectId}SystemEventTable`,
80 | partitionKey: { name: 'event_id', type: dynamodb.AttributeType.STRING },
81 | stream: dynamodb.StreamViewType.NEW_AND_OLD_IMAGES,
82 | billingMode: dynamodb.BillingMode.PAY_PER_REQUEST,
83 | replicationRegions: replicaRegions,
84 | removalPolicy: this.removalPolicy,
85 | });
86 | this.ssm.setParameterValue({
87 | value: table.tableName,
88 | valueName: 'SystemEventTable'
89 | });
90 | return table;
91 | }
92 |
93 | private createTimestreamDb(): timestream.CfnDatabase {
94 | const db = new timestream.CfnDatabase(this, `${this.projectId}TimestreamDb`, {
95 | databaseName: `${this.projectId}Timestream`,
96 | });
97 | this.ssm.setParameterValue({
98 | value: db.databaseName!,
99 | valueName: 'TimestreamDb'
100 | });
101 | return db
102 | }
103 | private createTimestreamTable(db: string, table: string): timestream.CfnTable {
104 | const timestreamTable = new timestream.CfnTable(this, `${this.projectId}TimestreamTable${table}`, {
105 | databaseName: db,
106 | tableName: table,
107 | });
108 | return timestreamTable;
109 | }
110 | private createTimestream() {
111 | const timestreamDb = this.createTimestreamDb();
112 | for ( const [_, value] of Object.entries(this.timestreamTables)) {
113 | let timestreamTable = this.createTimestreamTable(timestreamDb.databaseName!, value);
114 | timestreamTable.node.addDependency(timestreamDb);
115 | }
116 | this.ssm.setParameterValue({
117 | value: this.timestreamTables,
118 | valueName: 'TimestreamTables'
119 | });
120 |
121 | }
122 |
123 | }
124 |
--------------------------------------------------------------------------------
/aws-quant-infra/deployment/cdk/lib/environment.ts:
--------------------------------------------------------------------------------
1 | // import * as cdk from '@aws-cdk/core';
2 | import * as cdk from 'aws-cdk-lib';
3 | import { Construct } from 'constructs';
4 |
5 | import { SsmManager } from './shared/ssm-manager';
6 | // import * as ec2 from '@aws-cdk/aws-ec2';
7 | import { aws_ec2 as ec2 } from 'aws-cdk-lib';
8 | // import * as ecr from '@aws-cdk/aws-ecr';
9 | import { aws_ecr as ecr } from 'aws-cdk-lib';
10 | import { aws_codecommit as codeCommit } from 'aws-cdk-lib';
11 | // import * as codeCommit from '@aws-cdk/aws-codecommit';
12 | // import * as iam from '@aws-cdk/aws-iam';
13 | import { aws_iam as iam } from 'aws-cdk-lib';
14 | // import * as lambda from '@aws-cdk/aws-lambda';
15 | import { aws_lambda as lambda } from 'aws-cdk-lib';
16 | // import { PythonFunction } from "@aws-cdk/aws-lambda-python";
17 | import { PythonFunction } from '@aws-cdk/aws-lambda-python-alpha';
18 | // import { AwsCliLayer } from '@aws-cdk/lambda-layer-awscli'
19 | import { AwsCliLayer } from 'aws-cdk-lib/lambda-layer-awscli';
20 | // import * as cr from '@aws-cdk/custom-resources';
21 | import * as cr from 'aws-cdk-lib/custom-resources';
22 | // import * as logs from '@aws-cdk/aws-logs';
23 | import { aws_logs as logs } from 'aws-cdk-lib';
24 | import * as path from 'path';
25 | import { PipelineStack } from './pipeline';
26 | import { NagSuppressions } from 'cdk-nag';
27 |
28 | export class EnvironmentStack extends cdk.Stack {
29 | private ssm: SsmManager;
30 | private projectId: string;
31 | private project: string;
32 | private prefix: string;
33 |
34 | public readonly vpc: ec2.IVpc;
35 | public readonly codeRepo: codeCommit.Repository;
36 | public readonly repo: ecr.IRepository;
37 | public readonly computePolicy: iam.ManagedPolicy;
38 |
39 | // constructor(scope: cdk.Construct, id: string, props?: cdk.StackProps) {
40 | constructor(scope: Construct, id: string, props?: cdk.StackProps) {
41 | super(scope, id, props);
42 | this.project = this.node.tryGetContext('project');
43 | this.prefix = this.node.tryGetContext('deployment_prefix');
44 | this.projectId = `${this.prefix}${this.project}`;
45 | this.ssm = new SsmManager(this, `${this.projectId}EnvSsmManager`);
46 |
47 | this.vpc = this.createVpc();
48 | this.computePolicy = this.createComputePolicy();
49 | this.codeRepo = this.createCodeCommit();
50 |
51 | NagSuppressions.addStackSuppressions(this, [ { id: 'AwsSolutions-IAM4', reason: 'uses AWS managed role - nothing to do' }]);
52 | NagSuppressions.addStackSuppressions(this, [ { id: 'AwsSolutions-IAM5', reason: 'uses AWS managed role - nothing to do' }]);
53 | NagSuppressions.addStackSuppressions(this, [ { id: 'AwsSolutions-CB3', reason: 'uses AWS managed role - nothing to do' }]);
54 | NagSuppressions.addStackSuppressions(this, [ { id: 'AwsSolutions-KMS5', reason: 'uses AWS managed role - nothing to do' }]);
55 | NagSuppressions.addStackSuppressions(this, [ { id: 'AwsSolutions-S1', reason: 'uses AWS managed role - nothing to do' }]);
56 |
57 |
58 | new PipelineStack(this, 'SdlcStack', {
59 | codecommitRepo: this.codeRepo
60 | });
61 | }
62 |
63 | private createVpc() {
64 | const vpc_id = this.node.tryGetContext('custom_vpc_id');
65 | const subnet_id = this.node.tryGetContext('custom_subnet_id');
66 | var vpc;
67 | if (vpc_id !== undefined){
68 | vpc = ec2.Vpc.fromLookup(this, `${this.projectId}Vpc` , {
69 | isDefault: false,
70 | vpcId: vpc_id
71 | });
72 | }
73 | else {
74 | const cidr = this.node.tryGetContext('cidr_range');
75 | const cidrMask = this.node.tryGetContext('cidr_subnet_mask');
76 | vpc = new ec2.Vpc(this, `${this.projectId}Vpc`, {
77 | maxAzs: 2,
78 | cidr: cidr,
79 | subnetConfiguration: [
80 | {
81 | name: `${this.projectId}-public`,
82 | subnetType: ec2.SubnetType.PUBLIC,
83 | cidrMask: cidrMask
84 | },
85 | {
86 | name: `${this.projectId}-private`,
87 | subnetType: ec2.SubnetType.PRIVATE_WITH_EGRESS,
88 | cidrMask: cidrMask
89 | }
90 | ],
91 | natGateways: 2,
92 | gatewayEndpoints: {
93 | S3: {
94 | service: ec2.GatewayVpcEndpointAwsService.S3
95 | },
96 | DynamoDB: {
97 | service: ec2.GatewayVpcEndpointAwsService.DYNAMODB
98 | }
99 | }
100 | });
101 | }
102 | this.ssm.setParameterValue({
103 | value: vpc.vpcId,
104 | valueName: 'VpcId',
105 | });
106 | return vpc;
107 | }
108 |
109 | private createEcr() {
110 | const repo = new ecr.Repository(this, `${this.projectId}Ecr`, {
111 | repositoryName: `${this.projectId}-ecr-repo`.toLowerCase()
112 | });
113 | this.ssm.setParameterValue({
114 | value: repo.repositoryName,
115 | valueName: 'EcrRepoName'
116 | });
117 | return repo;
118 | }
119 |
120 | private createCodeCommit(): codeCommit.Repository {
121 | const repo = new codeCommit.Repository(this, `${this.projectId}CodeCommitRepo`, {
122 | repositoryName: `${this.projectId}-code-repo`,
123 | description: `CodeCommit repository for all ${this.projectId} code`
124 | });
125 | //TODO: Write bootstrap lambda function
126 | // this.bootstrapCodeCommit(repo);
127 | this.ssm.setParameterValue({
128 | value: repo.repositoryName,
129 | valueName: 'CodecommitRepoName'
130 | });
131 | this.ssm.setParameterValue({
132 | value: repo.repositoryCloneUrlHttp,
133 | valueName: 'CodecommitRepoCloneUrl'
134 | });
135 | return repo;
136 | }
137 |
138 | private bootstrapCodeCommit(repo: codeCommit.Repository) {
139 | const lambdaFunction = new lambda.DockerImageFunction(this, `${this.projectId}CodeCommitBootstrap`, {
140 | functionName: `${this.projectId}_codecommit_bootstrap`,
141 | code: lambda.DockerImageCode.fromImageAsset('../../../aws-quant-infra', {
142 | file: 'deployment/cdk/lib/shared/custom-resources/codecommit-bootstrap/Dockerfile'
143 | }),
144 | environment: {
145 | REPO_REGION: this.region,
146 | DEST_REPO: repo.repositoryName
147 | },
148 | timeout: cdk.Duration.minutes(15)
149 | });
150 | repo.grantPullPush(lambdaFunction.role!);
151 | const provider = new cr.Provider(this, `${this.projectId}CodeCommitBootstrapProvider`, {
152 | onEventHandler: lambdaFunction,
153 | logRetention: logs.RetentionDays.ONE_DAY
154 | });
155 | new cdk.CustomResource(this, `${this.projectId}CodeCommitBootstrapCR`, { serviceToken: provider.serviceToken });
156 | }
157 |
158 | private createComputePolicy() {
159 | const regionAccount = `${this.region}:${this.account}`;
160 | const resourcePrefix = `${this.prefix}*${this.project}`;
161 | const policyDocument = {
162 | "Version": "2012-10-17",
163 | "Statement": [
164 | // {
165 | // "Effect": "Allow",
166 | // "Action": [
167 | // // Parameter store
168 | // "ssm:DescribeParameters",
169 | // "ssm:GetParameter",
170 | // // Secrets Manager
171 | // "secretsmanager:GetSecretValue",
172 | // // TimeStream
173 | // "timestream:*",
174 | // // Batch
175 | // "batch:*",
176 | // // Events
177 | // "events:*",
178 | // // AppConfig
179 | // "appconfig:*",
180 | // // DynamoDb
181 | // "dynamodb:*"
182 | // ],
183 | // "Resource": "*",
184 | // },
185 | // WIP scoping down calls to resources created by cdk
186 | {
187 | "Effect": "Allow",
188 | "Action": [
189 | "ssm:DescribeParameters",
190 | "events:*",
191 | "appconfig:*",
192 | "secretsmanager:GetSecretValue" // TODO integrate with python
193 | ],
194 | "Resource": "*"
195 | },
196 | {
197 | "Effect": "Allow",
198 | "Action": [
199 | "ssm:GetParameter",
200 | "ssm:PutParameter"
201 | ],
202 | "Resource": `arn:aws:ssm:${regionAccount}:parameter/${resourcePrefix}*`
203 | },
204 | // {
205 | // "Effect": "Allow",
206 | // "Action": [
207 | // "secretsmanager:GetSecretValue"
208 | // ],
209 | // "Resource": `arn:aws:secretsmanager:${regionAccount}:secret:${resourcePrefix}*`
210 | // },
211 | {
212 | "Effect": "Allow",
213 | "Action": [
214 | "timestream:*"
215 | ],
216 | "Resource": `arn:aws:timestream:${regionAccount}:database/${resourcePrefix}*`
217 | },
218 | {
219 | "Effect": "Allow",
220 | "Action": [
221 | "timestream:DescribeEndpoints",
222 | "timestream:SelectValues",
223 | "timestream:CancelQuery"
224 | ],
225 | "Resource": "*"
226 | },
227 | {
228 | "Effect": "Allow",
229 | "Action": [
230 | "batch:CancelJob",
231 | "batch:SubmitJob",
232 | ],
233 | "Resource": [
234 | `arn:aws:batch:${regionAccount}:job-definition/${resourcePrefix}*`,
235 | `arn:aws:batch:${regionAccount}:job-queue/${resourcePrefix}*`
236 | ]
237 | },
238 | {
239 | "Effect": "Allow",
240 | "Action": [
241 | "batch:ListJobs",
242 | "batch:TerminateJob",
243 | ],
244 | "Resource": "*"
245 | },
246 | {
247 | "Effect": "Allow",
248 | "Action": [
249 | "dynamodb:*"
250 | ],
251 | "Resource": `arn:aws:dynamodb:${regionAccount}:table/${resourcePrefix}*`
252 | }
253 | ]
254 | };
255 | const policy = new iam.ManagedPolicy(this, `${this.projectId}ComputePolicy`, {
256 | managedPolicyName: `${this.projectId}ComputePolicy`,
257 | description: `Policy attached to ${this.projectId} compute resources`,
258 | document: iam.PolicyDocument.fromJson(policyDocument)
259 | });
260 | this.ssm.setParameterValue({
261 | value: policy.managedPolicyName,
262 | valueName: 'ComputeIamPolicyName'
263 | });
264 | return policy;
265 | }
266 | //TODO: Needs to be scoped to least privilege - use ml_infra as baseline for now
267 | private createIamRoles() {
268 | const batchRole = new iam.Role(this, `${this.projectId}BatchRole`, {
269 | assumedBy: new iam.ServicePrincipal('ec2.amazonaws.com')
270 | });
271 | }
272 | }
273 |
--------------------------------------------------------------------------------
/aws-quant-infra/deployment/cdk/lib/lambda.ts:
--------------------------------------------------------------------------------
1 | // import * as cdk from '@aws-cdk/core';
2 | import * as cdk from 'aws-cdk-lib';
3 | import { Construct } from 'constructs';
4 | import { SsmManager } from './shared/ssm-manager';
5 | // import * as iam from '@aws-cdk/aws-iam';
6 | import { aws_iam as iam } from 'aws-cdk-lib';
7 | // import * as dynamodb from '@aws-cdk/aws-dynamodb';
8 | import { aws_dynamodb as dynamodb } from 'aws-cdk-lib';
9 |
10 | // import * as events from '@aws-cdk/aws-events';
11 | import { aws_events as events } from 'aws-cdk-lib';
12 |
13 | import * as targets from 'aws-cdk-lib/aws-events-targets';
14 | // import * as lambda from '@aws-cdk/aws-lambda';
15 | import { aws_lambda as lambda } from 'aws-cdk-lib';
16 |
17 | import * as path from 'path';
18 | import { DynamoEventSource } from 'aws-cdk-lib/aws-lambda-event-sources';
19 | // import { DockerImageAsset } from '@aws-cdk/aws-ecr-assets';
20 | import { DockerImageAsset } from 'aws-cdk-lib/aws-ecr-assets';
21 | import { NagSuppressions } from 'cdk-nag';
22 |
23 | interface LambdaProps extends cdk.StackProps {
24 | portfoliosTable: dynamodb.ITable;
25 | portfolioSystemEventsTable: dynamodb.ITable;
26 | computePolicy: iam.ManagedPolicy;
27 | }
28 |
29 | export class LambdaStack extends cdk.Stack {
30 | private ssm: SsmManager;
31 | private prefix: string;
32 | private projectId: string;
33 |
34 | private portfoliosTable: dynamodb.ITable;
35 | private portfolioSystemEventsTable: dynamodb.ITable;
36 | private computePolicy: iam.ManagedPolicy;
37 |
38 | private tradingEODEventRule: events.Rule;
39 | private tradingSODEventRule: events.Rule;
40 | private intradayEventRule: events.Rule;
41 | private intradayCloseEventRule: events.Rule;
42 |
43 | private dockerPath = path.join(__dirname, '../../../src');
44 | public dockerImage: DockerImageAsset;
45 | private dockerImageTag: string;
46 |
47 | private lambdaRole: iam.Role;
48 |
49 | /**
50 | * // TODO: comment dispose
51 | * handler config should be defined once and then just reused vs. hardcoded
52 | */
53 | private SOD_RULE_PROPS = {
54 | event: events.RuleTargetInput.fromObject({
55 | account: events.EventField.fromPath('$.account'),
56 | region: events.EventField.fromPath('$.region'),
57 | detail: {
58 | event_type:"SOD",
59 | handler:{'Configuration': 'PortfolioMonitoringConfigProfile',
60 | 'Environment': 'dev',
61 | 'Application': 'PortfolioMonitoring'}
62 | }
63 | })
64 | };
65 | private EOD_RULE_PROPS = {
66 | event: events.RuleTargetInput.fromObject({
67 | account: events.EventField.fromPath('$.account'),
68 | region: events.EventField.fromPath('$.region'),
69 | detail: {
70 | event_type:"EOD",
71 | handler:{'Configuration': 'PortfolioMonitoringConfigProfile',
72 | 'Environment': 'dev',
73 | 'Application': 'PortfolioMonitoring'}
74 | }
75 | })
76 | };
77 | private INTRADAY_RULE_PROPS = {
78 | event: events.RuleTargetInput.fromObject({
79 | account: events.EventField.fromPath('$.account'),
80 | region: events.EventField.fromPath('$.region'),
81 | detail: {
82 | event_type:"INTRADAY",
83 | handler:{'Configuration': 'PortfolioMonitoringConfigProfile',
84 | 'Environment': 'dev',
85 | 'Application': 'PortfolioMonitoring'}
86 | }
87 | })
88 | };
89 | private INTRADAY_CLOSE_RULE_PROPS = {
90 | event: events.RuleTargetInput.fromObject({
91 | account: events.EventField.fromPath('$.account'),
92 | region: events.EventField.fromPath('$.region'),
93 | detail: {
94 | event_type:"INTRADAYCLOSE",
95 | handler:{'Configuration': 'PortfolioMonitoringConfigProfile',
96 | 'Environment': 'dev',
97 | 'Application': 'PortfolioMonitoring'}
98 | }
99 | })
100 | };
101 |
102 | constructor(scope: Construct, id: string, props: LambdaProps) {
103 | super(scope, id, props);
104 | const project = this.node.tryGetContext('project');
105 | this.prefix = this.node.tryGetContext('deployment_prefix');
106 | this.projectId = `${this.prefix}${project}`;
107 | this.ssm = new SsmManager(this, `${this.projectId}EnvSsmManager`);
108 | this.portfoliosTable = props.portfoliosTable;
109 | this.portfolioSystemEventsTable = props.portfolioSystemEventsTable;
110 | this.computePolicy = props.computePolicy;
111 |
112 | this.createEvents();
113 | this.dockerImage = this.createDockerImage();
114 | this.lambdaRole = this.createLambaRole();
115 | this.tradingStartStopFunction();
116 | this.portfolioUpdateFunction();
117 | this.systemEventsFunction();
118 | this.testDockerFunction();
119 | this.intradayMomentumFunction();
120 | this.intradayCloseFunction();
121 | NagSuppressions.addStackSuppressions(this, [ { id: 'AwsSolutions-IAM4', reason: 'uses AWS managed role - nothing to do' }]);
122 | NagSuppressions.addStackSuppressions(this, [ { id: 'AwsSolutions-IAM5', reason: 'uses AWS managed role - nothing to do' }]);
123 | }
124 |
125 |
126 | private createEvents() {
127 | this.tradingEODEventRule = new events.Rule(this, `${this.projectId}TradingEODRule`, {
128 | ruleName: `trading_EOD`,
129 | description: 'trading_EOD_cron(05 20 * * ? *)',
130 | enabled: true,
131 | schedule: events.Schedule.expression('cron(05 20 * * ? *)'),
132 | });
133 | this.ssm.setParameterValue({
134 | value: this.tradingEODEventRule.ruleName,
135 | valueName: 'TradingEODEventRule'
136 | });
137 | this.tradingSODEventRule = new events.Rule(this, `${this.projectId}TradingSODRule`, {
138 | ruleName: `trading_SOD`,
139 | description: 'trading_SOD_cron(25 13 * * ? *)',
140 | enabled: true,
141 | schedule: events.Schedule.expression('cron(25 13 * * ? *)'),
142 | });
143 | this.ssm.setParameterValue({
144 | value: this.tradingSODEventRule.ruleName,
145 | valueName: 'TradingSODEventRule'
146 | });
147 | this.intradayEventRule = new events.Rule(this, `${this.projectId}intradayRule`, {
148 | ruleName: `IntradayMomentum`,
149 | description: 'intraday_cron(00 14 ? * MON-FRI *)',
150 | enabled: true,
151 | schedule: events.Schedule.expression('cron(00 14 ? * MON-FRI *)'),
152 | });
153 | this.ssm.setParameterValue({
154 | value: this.intradayEventRule.ruleName,
155 | valueName: 'intradayEventRule'
156 | });
157 | this.intradayCloseEventRule = new events.Rule(this, `${this.projectId}intradayCloseRule`, {
158 | ruleName: `IntradayMomentumClose`,
159 | description: 'intraday_close_cron(00 20 ? * MON-FRI *)',
160 | enabled: true,
161 | schedule: events.Schedule.expression('cron(00 20 ? * MON-FRI *)'),
162 | });
163 | this.ssm.setParameterValue({
164 | value: this.intradayCloseEventRule.ruleName,
165 | valueName: 'intradayCloseEventRule'
166 | });
167 | }
168 |
169 | private createLambaRole(): iam.Role {
170 | const role = new iam.Role(this, `${this.projectId}LambaRole`, {
171 | assumedBy: new iam.ServicePrincipal('lambda.amazonaws.com'),
172 | roleName: `${this.projectId}LambaRole`,
173 | managedPolicies: [
174 | this.computePolicy,
175 | iam.ManagedPolicy.fromAwsManagedPolicyName('service-role/AWSLambdaBasicExecutionRole')
176 | ]
177 | });
178 | this.ssm.setParameterValue({
179 | value: role.roleName,
180 | valueName: 'LambdaRoleName'
181 | });
182 | return role;
183 | }
184 |
185 | private createDockerImage(): DockerImageAsset {
186 | const image = new DockerImageAsset(this, `${this.projectId}LambdaImage`, {
187 | directory: path.join(this.dockerPath),
188 | buildArgs: {
189 | SSM_PREFIX: this.prefix,
190 | AWS_REGION: this.region
191 | },
192 | file: 'lambda/Dockerfile'
193 | });
194 | this.ssm.setParameterValue({
195 | value: image.imageUri,
196 | valueName: 'LambdaImageUri'
197 | });
198 | this.dockerImageTag = image.assetHash;
199 | return image;
200 | }
201 |
202 | private tradingStartStopFunction() {
203 | const lambdaFunction = new lambda.DockerImageFunction(this, `${this.projectId}TradingStartStopLambda`, {
204 | functionName: `${this.projectId}_trading_start_stop`,
205 | code: lambda.DockerImageCode.fromEcr(this.dockerImage.repository, {
206 | cmd: [ '/var/task/schedule_listener/lambda_function.lambda_handler' ],
207 | tag: this.dockerImageTag
208 | }),
209 | role: this.lambdaRole,
210 | memorySize: 128,
211 | timeout: cdk.Duration.minutes(15)
212 | });
213 | this.tradingEODEventRule.addTarget(new targets.LambdaFunction(lambdaFunction, this.EOD_RULE_PROPS));
214 | this.tradingSODEventRule.addTarget(new targets.LambdaFunction(lambdaFunction, this.SOD_RULE_PROPS));
215 | this.ssm.setParameterValue({
216 | value: lambdaFunction.functionName,
217 | valueName: 'TradingStartStopLambdaName'
218 | });
219 |
220 | }
221 | private portfolioUpdateFunction() {
222 | const lambdaFunction = new lambda.DockerImageFunction(this, `${this.projectId}PortfolioUpdateLambda`, {
223 | functionName: `${this.projectId}_portfolio_update`,
224 | code: lambda.DockerImageCode.fromEcr(this.dockerImage.repository, {
225 | cmd: [ '/var/task/handle_portfolio_update/lambda_function.lambda_handler' ],
226 | tag: this.dockerImageTag
227 | }),
228 | role: this.lambdaRole,
229 | memorySize: 128,
230 | timeout: cdk.Duration.minutes(15)
231 | });
232 | lambdaFunction.addEventSource(new DynamoEventSource(this.portfoliosTable, {
233 | startingPosition: lambda.StartingPosition.TRIM_HORIZON,
234 | batchSize: 1,
235 | bisectBatchOnError: false,
236 | enabled: true,
237 | retryAttempts: 0
238 | }));
239 | this.ssm.setParameterValue({
240 | value: lambdaFunction.functionName,
241 | valueName: 'PortfolioUpdateLambdaName'
242 | });
243 |
244 | }
245 | private intradayMomentumFunction() {
246 | const lambdaFunction = new lambda.DockerImageFunction(this, `${this.projectId}IntradayMomentumLambda`, {
247 | functionName: `${this.projectId}_intraday_momentum`,
248 | code: lambda.DockerImageCode.fromEcr(this.dockerImage.repository, {
249 | cmd: [ '/var/task/intraday_momentum/lambda_function.lambda_handler' ],
250 | tag: this.dockerImageTag
251 | }),
252 | role: this.lambdaRole,
253 | memorySize: 128,
254 | timeout: cdk.Duration.minutes(15)
255 | });
256 | this.intradayEventRule.addTarget(new targets.LambdaFunction(lambdaFunction, this.INTRADAY_RULE_PROPS));
257 | this.ssm.setParameterValue({
258 | value: lambdaFunction.functionName,
259 | valueName: 'IntradayMomentumLambdaName'
260 | });
261 |
262 | }
263 | private intradayCloseFunction() {
264 | const lambdaFunction = new lambda.DockerImageFunction(this, `${this.projectId}IntradayCloseLambda`, {
265 | functionName: `${this.projectId}_intraday_close`,
266 | code: lambda.DockerImageCode.fromEcr(this.dockerImage.repository, {
267 | cmd: [ '/var/task/intraday_close/lambda_function.lambda_handler' ],
268 | tag: this.dockerImageTag
269 | }),
270 | role: this.lambdaRole,
271 | memorySize: 128,
272 | timeout: cdk.Duration.minutes(15)
273 | });
274 | this.intradayCloseEventRule.addTarget(new targets.LambdaFunction(lambdaFunction, this.INTRADAY_CLOSE_RULE_PROPS));
275 | this.ssm.setParameterValue({
276 | value: lambdaFunction.functionName,
277 | valueName: 'IntradayCloseLambdaName'
278 | });
279 |
280 | }
281 | private systemEventsFunction() {
282 | const lambdaFunction = new lambda.DockerImageFunction(this, `${this.projectId}PortfolioSystemEventsLambda`, {
283 | functionName: `${this.projectId}_system_events_handler`,
284 | code: lambda.DockerImageCode.fromEcr(this.dockerImage.repository, {
285 | cmd: [ '/var/task/system_event_listener/lambda_function.lambda_handler' ],
286 | tag: this.dockerImageTag
287 | }),
288 | role: this.lambdaRole,
289 | memorySize: 128,
290 | timeout: cdk.Duration.minutes(15)
291 | });
292 | lambdaFunction.addEventSource(new DynamoEventSource(this.portfolioSystemEventsTable, {
293 | startingPosition: lambda.StartingPosition.TRIM_HORIZON,
294 | batchSize: 1,
295 | bisectBatchOnError: false,
296 | enabled: true,
297 | retryAttempts: 0
298 |
299 | }));
300 | this.ssm.setParameterValue({
301 | value: lambdaFunction.functionName,
302 | valueName: 'SystemEventsLambdaName'
303 | });
304 |
305 | }
306 | private testDockerFunction() {
307 | const lambdaFunction = new lambda.DockerImageFunction(this, `${this.projectId}TestDockerLambda`, {
308 | functionName: `${this.projectId}_test_docker_handler`,
309 | code: lambda.DockerImageCode.fromEcr(this.dockerImage.repository, {
310 | cmd: [ '/var/task/test_custom_layer/lambda_function.lambda_handler' ],
311 | tag: this.dockerImageTag
312 | }),
313 | role: this.lambdaRole,
314 | memorySize: 128,
315 | timeout: cdk.Duration.minutes(15)
316 | });
317 | lambdaFunction.addEventSource(new DynamoEventSource(this.portfolioSystemEventsTable, {
318 | startingPosition: lambda.StartingPosition.TRIM_HORIZON,
319 | batchSize: 1,
320 | bisectBatchOnError: false,
321 | enabled: true,
322 | retryAttempts: 0
323 |
324 | }));
325 | lambdaFunction.addEventSource(new DynamoEventSource(this.portfoliosTable, {
326 | startingPosition: lambda.StartingPosition.TRIM_HORIZON,
327 | batchSize: 1,
328 | bisectBatchOnError: false,
329 | enabled: true,
330 | retryAttempts: 0
331 |
332 | }));
333 | this.tradingEODEventRule.addTarget(new targets.LambdaFunction(lambdaFunction, this.EOD_RULE_PROPS));
334 | this.tradingSODEventRule.addTarget(new targets.LambdaFunction(lambdaFunction, this.SOD_RULE_PROPS));
335 | this.ssm.setParameterValue({
336 | value: lambdaFunction.functionName,
337 | valueName: 'TestDockerLambdaName'
338 | });
339 |
340 | }
341 | }
--------------------------------------------------------------------------------
/aws-quant-infra/deployment/cdk/lib/pipeline.ts:
--------------------------------------------------------------------------------
1 | // import * as cdk from '@aws-cdk/core';
2 | import * as cdk from 'aws-cdk-lib';
3 | import { Construct } from 'constructs';
4 | import { SsmManager } from './shared/ssm-manager';
5 | // import * as codePipeline from '@aws-cdk/aws-codepipeline';
6 | import { aws_codepipeline as codePipeline } from 'aws-cdk-lib';
7 | // import * as codePipelineActions from '@aws-cdk/aws-codepipeline-actions';
8 | import * as codePipelineActions from 'aws-cdk-lib/aws-codepipeline-actions';
9 | // import * as codeCommit from '@aws-cdk/aws-codecommit';
10 | import { aws_codecommit as codeCommit } from 'aws-cdk-lib';
11 | // import * as codeBuild from '@aws-cdk/aws-codebuild';
12 | import { aws_codebuild as codeBuild } from 'aws-cdk-lib';
13 | import { NagSuppressions } from 'cdk-nag';
14 | interface PipelineProps extends cdk.NestedStackProps {
15 | codecommitRepo: codeCommit.Repository;
16 | }
17 |
18 | export class PipelineStack extends cdk.NestedStack {
19 | private ssm: SsmManager;
20 | private prefix: string;
21 | private project: string;
22 | private projectId: string;
23 |
24 | private codecommitRepo: codeCommit.Repository;
25 |
26 | constructor(scope: Construct, id: string, props: PipelineProps) {
27 | super(scope, id, props);
28 | this.project = this.node.tryGetContext('project');
29 | this.prefix = this.node.tryGetContext('deployment_prefix');
30 | this.projectId = `${this.prefix}${this.project}`;
31 | this.ssm = new SsmManager(this, `${this.projectId}PipelineSsmManager`);
32 |
33 | this.codecommitRepo = props.codecommitRepo;
34 |
35 | this.createPipeline();
36 | NagSuppressions.addStackSuppressions(this, [ { id: 'AwsSolutions-IAM4', reason: 'uses AWS managed role - nothing to do' }]);
37 | NagSuppressions.addStackSuppressions(this, [ { id: 'AwsSolutions-IAM5', reason: 'uses AWS managed role - nothing to do' }]);
38 | }
39 |
40 | private createPipeline() {
41 | const sourceOutput = new codePipeline.Artifact();
42 | const pipeline = new codePipeline.Pipeline(this, `${this.projectId}SdlcPipeline`, {
43 | pipelineName: `${this.projectId}SDLCPipeline`,
44 | stages: [
45 | this.createSourceStage('Source', sourceOutput),
46 | this.createImageBuildStage('Build', sourceOutput)
47 | ]
48 | });
49 | this.ssm.setParameterValue({
50 | value: pipeline.pipelineName,
51 | valueName: 'SDLCPipeline'
52 | });
53 | }
54 | private createSourceStage(stageName: string, output: codePipeline.Artifact): codePipeline.StageProps {
55 | const codeCommitAction = new codePipelineActions.CodeCommitSourceAction({
56 | actionName: 'CodeCommit_Source',
57 | output: output,
58 | repository: this.codecommitRepo
59 | });
60 | return {
61 | stageName: stageName,
62 | actions: [ codeCommitAction ]
63 | };
64 | }
65 | private createImageBuildStage(
66 | stageName: string,
67 | input: codePipeline.Artifact
68 | ): codePipeline.StageProps {
69 | return {
70 | stageName: stageName,
71 | actions: [
72 | this.createLambdaAction(input),
73 | this.createBatchAction(input)
74 | ]
75 | };
76 | }
77 | private createLambdaAction(
78 | input: codePipeline.Artifact
79 | ): codePipelineActions.CodeBuildAction {
80 | const output = new codePipeline.Artifact();
81 | const lambdaProject = new codeBuild.PipelineProject(this, `${this.projectId}LambdaProject`, {
82 | projectName: `${this.projectId}Lambda`,
83 | buildSpec: this.buildSpec(),
84 | environment: {
85 | buildImage: codeBuild.LinuxBuildImage.AMAZON_LINUX_2,
86 | privileged: true
87 | },
88 | environmentVariables: {
89 | 'DOCKERFILE_PATH': { value: 'lambda/Dockerfile' },
90 | 'SSM_PARAM_NAME': { value: `${this.prefix}-${this.project}-LambdaImageUri` },
91 | 'SSM_PREFIX': { value: this.prefix }
92 | }
93 | });
94 | const lambdaAction = new codePipelineActions.CodeBuildAction({
95 | actionName: 'LambdaBuild_Action',
96 | input: input,
97 | outputs: [ output ],
98 | project: lambdaProject
99 | });
100 | return lambdaAction;
101 | }
102 | private createBatchAction(
103 | input: codePipeline.Artifact
104 | ): codePipelineActions.CodeBuildAction {
105 | const output = new codePipeline.Artifact();
106 | const batchProject = new codeBuild.PipelineProject(this, `${this.projectId}BatchProject`, {
107 | projectName: `${this.projectId}Batch`,
108 | buildSpec: this.buildSpec(),
109 | environment: {
110 | buildImage: codeBuild.LinuxBuildImage.AMAZON_LINUX_2,
111 | privileged: true
112 | },
113 | environmentVariables: {
114 | 'DOCKERFILE_PATH': { value: 'batch/Dockerfile' },
115 | 'SSM_PARAM_NAME': { value: `${this.prefix}-${this.project}-BatchImageUri` },
116 | 'SSM_PREFIX': { value: this.prefix }
117 | }
118 | });
119 | const batchAction = new codePipelineActions.CodeBuildAction({
120 | actionName: 'BatchBuild_Action',
121 | input: input,
122 | outputs: [ output ],
123 | project: batchProject
124 | });
125 | return batchAction;
126 | }
127 | private buildSpec(): codeBuild.BuildSpec {
128 | return codeBuild.BuildSpec.fromObject({
129 | version: '0.2',
130 | phases: {
131 | pre_build: {
132 | commands: [
133 | 'aws --version',
134 | '$(aws ecr get-login --region ${AWS_DEFAULT_REGION} --no-include-email | sed \'s|https://||\')',
135 | 'LABEL=$(aws ssm get-parameter --name "${SSM_PARAM_NAME}" --query Parameter.Value --output text)'
136 | ]
137 | },
138 | build: {
139 | commands: [
140 | 'cd src',
141 | 'docker build -f ${DOCKERFILE_PATH} --build-arg SSM_PREFIX=${SSM_PREFIX} -t local-image .',
142 | 'docker tag local-image ${LABEL}',
143 | ]
144 | },
145 | post_build: {
146 | commands: [
147 | 'docker push ${LABEL}',
148 | ]
149 | }
150 | }
151 | });
152 |
153 | }
154 | }
--------------------------------------------------------------------------------
/aws-quant-infra/deployment/cdk/lib/secrets.ts:
--------------------------------------------------------------------------------
1 | // import * as cdk from '@aws-cdk/core';
2 | import * as cdk from 'aws-cdk-lib';
3 | import { Construct } from 'constructs';
4 |
5 | import { SsmManager } from './shared/ssm-manager';
6 | // import * as iam from '@aws-cdk/aws-iam';
7 | import { aws_iam as iam } from 'aws-cdk-lib';
8 | import { aws_secretsmanager as secretsmanager } from 'aws-cdk-lib';
9 | // import * as secretsmanager from '@aws-cdk/aws-secretsmanager';
10 | import * as appconfigs from '../../../src/config/portfolio_tracker_cfg.json';
11 | import { NagSuppressions } from 'cdk-nag';
12 | interface SecretsProps extends cdk.StackProps {
13 | computePolicy: iam.ManagedPolicy;
14 | }
15 |
16 | export class SecretsStack extends cdk.Stack {
17 | private ssm: SsmManager;
18 | private prefix: string;
19 | private projectId: string;
20 |
21 | private computePolicy: iam.ManagedPolicy;
22 | constructor(scope: Construct, id: string, props: SecretsProps) {
23 | super(scope, id, props);
24 | const project = this.node.tryGetContext('project');
25 | this.prefix = this.node.tryGetContext('deployment_prefix');
26 | this.projectId = `${this.prefix}${project}`;
27 | this.ssm = new SsmManager(this, `${this.projectId}EnvSsmManager`);
28 |
29 | this.computePolicy = props.computePolicy;
30 |
31 | this.createSecrets();
32 | }
33 |
34 | private createSecrets() {
35 | const secretNames = appconfigs.secrets;
36 | secretNames.forEach((name) => {
37 | const secret = new secretsmanager.Secret(this, `${name}Secret`, {
38 | // secretName: `${this.projectId}-${name}`
39 | secretName: `${name}`
40 | });
41 | // this.computePolicy.addStatements(
42 | // new iam.PolicyStatement({
43 | // effect: iam.Effect.ALLOW,
44 | // actions:
45 | // })
46 | // )
47 | });
48 | this.ssm.setParameterValue({
49 | value: secretNames,
50 | valueName: 'SecretNames'
51 | });
52 | NagSuppressions.addStackSuppressions(this, [ { id: 'AwsSolutions-IAM4', reason: 'Secrets Manager is used to keep third-party passwords that have their own rotation schedule, which is invisible to this solution' }]);
53 | NagSuppressions.addStackSuppressions(this, [ { id: 'AwsSolutions-IAM5', reason: 'Secrets Manager is used to keep third-party passwords that have their own rotation schedule, which is invisible to this solution' }]);
54 | }
55 | }
--------------------------------------------------------------------------------
/aws-quant-infra/deployment/cdk/lib/shared/custom-resources/codecommit-bootstrap/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM amazon/aws-lambda-python:3.8
2 |
3 | # WORKDIR /app
4 | # Copy handler function (from the local app directory)
5 | # COPY index.py .
6 | # Install git
7 | RUN yum update -y \
8 | && yum install -y git zip unzip wget tar gzip jq
9 | # Make sure git is installed
10 | RUN git version
11 |
12 | # Install git-remote-codecommit
13 | RUN pip install git-remote-codecommit
14 |
15 | # # Clone aws-quant repo
16 | # RUN git clone --mirror https://github.com/aws-samples/aws-machine-learning-university-accelerated-nlp.git aws-quant-tmp && ls -la aws-quant-tmp
17 |
18 | # Install awscli
19 | RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
20 | RUN unzip awscliv2.zip
21 | RUN ./aws/install
22 |
23 | # Make sure awscli is installed
24 | RUN aws --version
25 |
26 | # Prepare codecommit
27 | RUN git config --global credential.helper '!aws codecommit credential-helper $@'
28 | RUN git config --global credential.UseHttpPath true
29 |
30 | COPY . /var/task/base
31 | COPY deployment/cdk/lib/shared/custom-resources/codecommit-bootstrap/index.py /var/task/
32 | RUN ls -la /var/task/*
33 | # Overwrite the command by providing a different command directly in the template.
34 | CMD ["/var/task/index.handler"]
--------------------------------------------------------------------------------
/aws-quant-infra/deployment/cdk/lib/shared/custom-resources/codecommit-bootstrap/index.py:
--------------------------------------------------------------------------------
1 | import os
2 | # import boto3
3 | # import requests
4 | # import zipfile
5 | # import io
6 | import subprocess
7 | # import shutil
8 | # import urllib3
9 |
10 | # CLONE_REPO = os.environ['CLONE_REPO']
11 | CC_REPO_NAME = os.environ['DEST_REPO']
12 | # KEY = os.environ['AWS_ACCESS_KEY_ID']
13 | # SECRET_KEY = os.environ['AWS_SECRET_ACCESS_KEY']
14 | REGION = os.environ['REPO_REGION']
15 | CC_REPO = f'codecommit::{REGION}://{CC_REPO_NAME}'
16 | # TMP_PATH = "aws-quant-tmp"
17 |
18 | def handler(event, context):
19 | if event['RequestType'] == 'Create':
20 | # Clone empty code commit...
21 | print('cloning empty repo...')
22 | res = subprocess.run(["git", "clone", CC_REPO, "/tmp/aws-quant"], capture_output=True)
23 | print(res.stdout)
24 | print(res.stderr)
25 | # copy baseline repo to code commit dir...
26 | print('moving baseline repo to code commit git...')
27 | res = subprocess.run(["cp", "-a", "/var/task/base/.", "/tmp/aws-quant/"], capture_output=True)
28 | print(res.stdout)
29 | print(res.stderr)
30 | # Push to private repo...
31 | print('pushing changes to code commit...')
32 | res = subprocess.run(["git", "push", CC_REPO, "--all"], cwd='/tmp/aws-quant', capture_output=True)
33 | print(res.stdout)
34 | print(res.stderr)
35 |
36 |
37 | def configureCli():  # NOTE: unused helper; relies on the KEY and SECRET_KEY variables commented out above
38 | print(f'running aws configure commands... {REGION} {KEY} {SECRET_KEY}')
39 | subprocess.run(["aws", "configure", "set", "region" , REGION])
40 | subprocess.run(["aws", "configure", "set", "aws_access_key_id" , KEY])
41 | subprocess.run(["aws", "configure", "set", "aws_secret_access_key" , SECRET_KEY])
42 | print('aws cli configured')
43 |
44 | # def downloadRepo():
45 | # subprocess.run(["rm", "-rf", "/tmp/*"])
46 | # subprocess.run(["cd", "/tmp"])
47 | # subprocess.run(["git", "clone", CLONE_REPO, TMP_PATH])
48 | # r = requests.get(CLONE_REPO)
49 | # z = zipfile.ZipFile(io.BytesIO(r.content))
50 | # z.extractall(TMP_PATH)
51 | # zip = '/tmp/tmp.zip'
52 | # http = urllib3.PoolManager()
53 | # r = http.request('GET', CLONE_REPO)
54 | # with open(zip, 'wb') as out:
55 | # while True:
56 | # data = r.read(64)
57 | # if not data:
58 | # break
59 | # out.write(data)
60 | # shutil.unpack_archive(zip, TMP_PATH)
61 |
62 | # def create_codecommit_repo_commit(repo_name, branch_name, code_folder):
63 | # client = boto3.client('codecommit')
64 | # parent_folder = os.path.join(code_folder, repo_name)
65 | # putFilesList = []
66 | # for (root, folders, files) in os.walk(parent_folder):
67 | # for file in files:
68 | # print(f'Making entry for file: ${file}')
69 | # file_path = os.path.join(root, file)
70 | # with open(file_path, mode='r+b') as file_obj:
71 | # file_content = file_obj.read()
72 | # putFileEntry = {'filePath': str(file_path).replace(parent_folder, ''),
73 | # 'fileContent': file_content}
74 | # putFilesList.append(putFileEntry)
75 |
76 | # response = client.create_commit(repositoryName=repo_name, branchName=branch_name, putFiles=putFilesList)
77 | # print(response)
--------------------------------------------------------------------------------
/aws-quant-infra/deployment/cdk/lib/shared/ssm-manager.ts:
--------------------------------------------------------------------------------
1 | // import * as cdk from '@aws-cdk/core';
2 | import * as cdk from 'aws-cdk-lib';
3 | import { Construct } from 'constructs';
4 | import { aws_iam as iam } from 'aws-cdk-lib';
5 | import { aws_ssm as ssm } from 'aws-cdk-lib';
6 | // import * as iam from '@aws-cdk/aws-iam';
7 | // import * as ssm from '@aws-cdk/aws-ssm';
8 |
9 | export interface setParameterValueProps {
10 | value: string | string[] | object;
11 | valueName: string;
12 | role?: iam.IRole
13 | }
14 |
15 | export interface getParameterValueProps {
16 | valueName: string
17 | }
18 |
19 | export class SsmManager extends Construct {
20 | private projectId: string;
21 |
22 | constructor(scope: Construct, id: string) {
23 | super(scope, id);
24 | const project = this.node.tryGetContext('project');
25 | const prefix = this.node.tryGetContext('deployment_prefix');
26 | this.projectId = `${prefix}-${project}`;
27 | }
28 | public setParameterValue(props: setParameterValueProps) {
29 | var value = props.value;
30 | const valueName = props.valueName;
31 | const role = props.role;
32 | var param: ssm.StringParameter;
33 | if ( Array.isArray(value) ) {
34 | const jsonValue = {
35 | values: value
36 | }
37 | value = JSON.stringify(jsonValue)
38 | }
39 | if ( typeof(value) === 'object'){
40 | value = JSON.stringify(value);
41 | }
42 | param = new ssm.StringParameter(this, `set-${valueName}-param`, {
43 | stringValue: value,
44 | parameterName: `${this.projectId}-${valueName}`,
45 | description: `${this.projectId} parameter`
46 | });
47 |
48 | new cdk.CfnOutput(this, `${valueName}CfnOutput`, {
49 | value: value,
50 | exportName: valueName
51 | });
52 | if (role){
53 | param.grantRead(role);
54 | }
55 | }
56 | public getParameterValue(props: getParameterValueProps): string{
57 | const valueName = props.valueName;
58 | var value = ssm.StringParameter.fromStringParameterAttributes(this, `get-${valueName}-param`, {
59 | parameterName: `${this.projectId}-${valueName}`
60 | }).stringValue;
61 |
62 | // Need to convert if given a json string
63 | if (value.includes(':')){
64 | value = JSON.parse(value);
65 | }
66 | return value
67 | }
68 | }
--------------------------------------------------------------------------------
/aws-quant-infra/deployment/cdk/outputs.json:
--------------------------------------------------------------------------------
1 | {
2 | "AppConfigStack": {
3 | "MvpPortfolioMonitoringAppConfigSsmManagerAppConfigDetailsCfnOutput904A98B6": "{\"Application\":\"PortfolioMonitoring\",\"Configuration\":\"PortfolioMonitoringConfigProfile\",\"dev\":\"1\"}"
4 | },
5 | "DbStack": {
6 | "ExportsOutputFnGetAttMvpPortfolioMonitoringSystemEventTableF4F7186EStreamArn23A545F0": "arn:aws:dynamodb:us-east-1:763406250311:table/MvpPortfolioMonitoringSystemEventTable/stream/2023-02-07T16:10:28.309",
7 | "MvpPortfolioMonitoringDbSsmManagerPortfolioMapTableCfnOutput27860BE4": "MvpPortfolioMonitoringSymbolTable",
8 | "MvpPortfolioMonitoringDbSsmManagerTimestreamTablesCfnOutputE81006F3": "{\"market_data_table\":\"realtime_data\",\"portfolio_table\":\"portfolio_tracker\"}",
9 | "MvpPortfolioMonitoringDbSsmManagerTimestreamDbCfnOutput1E60B91B": "MvpPortfolioMonitoringTimestream",
10 | "MvpPortfolioMonitoringDbSsmManagerSystemEventTableCfnOutput945C9CAD": "MvpPortfolioMonitoringSystemEventTable",
11 | "ExportsOutputFnGetAttMvpPortfolioMonitoringPortfoliosEE6961AAStreamArnDA5B06C8": "arn:aws:dynamodb:us-east-1:763406250311:table/MvpPortfolioMonitoringPortfolioTable/stream/2023-02-07T16:10:28.141",
12 | "MvpPortfolioMonitoringDbSsmManagerPortfoliosTableCfnOutput9B0F40DA": "MvpPortfolioMonitoringPortfolioTable"
13 | },
14 | "EnvStack": {
15 | "MvpPortfolioMonitoringEnvSsmManagerComputeIamPolicyNameCfnOutput1AA9E9A8": "MvpPortfolioMonitoringComputePolicy",
16 | "MvpPortfolioMonitoringEnvSsmManagerCodecommitRepoCloneUrlCfnOutputA1404F14": "https://git-codecommit.us-east-1.amazonaws.com/v1/repos/MvpPortfolioMonitoring-code-repo",
17 | "ExportsOutputRefMvpPortfolioMonitoringVpc00110CB6256AAB10": "vpc-01e86fd53926dacea",
18 | "ExportsOutputRefMvpPortfolioMonitoringComputePolicy8EB89484519526C6": "arn:aws:iam::763406250311:policy/MvpPortfolioMonitoringComputePolicy",
19 | "ExportsOutputRefMvpPortfolioMonitoringVpcMvpPortfolioMonitoringprivateSubnet2Subnet3C311C6749BA2DB0": "subnet-0153bbc030b57e13f",
20 | "MvpPortfolioMonitoringEnvSsmManagerVpcIdCfnOutputEC240D5B": "vpc-01e86fd53926dacea",
21 | "MvpPortfolioMonitoringEnvSsmManagerCodecommitRepoNameCfnOutput8D17DDEE": "MvpPortfolioMonitoring-code-repo",
22 | "ExportsOutputRefMvpPortfolioMonitoringVpcMvpPortfolioMonitoringprivateSubnet1Subnet9BA210F8599B49B9": "subnet-0ded6f5be60045fe4"
23 | },
24 | "SecretsStack": {
25 | "MvpPortfolioMonitoringEnvSsmManagerSecretNamesCfnOutputAD87BDE6": "{\"values\":[\"api_token_pk_sandbox\",\"api_token_pk\"]}"
26 | },
27 | "BatchStack": {
28 | "MvpPortfolioMonitoringEnvSsmManagerBatchEc2JobQueueNameCfnOutput2B29286B": "MvpPortfolioMonitoring_q_ec2",
29 | "MvpPortfolioMonitoringEnvSsmManagerBatchComputeEnvironmentsCfnOutput79265BED": "{\"ec2_od\":\"MvpPortfolioMonitoring_ec2_od\",\"ec2_spot\":\"MvpPortfolioMonitoring_ec2_spot\",\"fargate_od\":\"MvpPortfolioMonitoring_fargate\",\"fargate_spot\":\"MvpPortfolioMonitoring_fargate_spot\"}",
30 | "MvpPortfolioMonitoringEnvSsmManagerBatchImageUriCfnOutput3A45CE6D": "763406250311.dkr.ecr.us-east-1.amazonaws.com/cdk-hnb659fds-container-assets-763406250311-us-east-1:6b481850005179578ff09277d0a3a327e78f19da3caa56dba51504d4388e7bb6",
31 | "MvpPortfolioMonitoringEnvSsmManagerBatchTestBatchDockerJobDefCfnOutput30BCF9B5": "MvpPortfolioMonitoring_test_docker",
32 | "MvpPortfolioMonitoringEnvSsmManagerBatchPortfolioTrackerJobDefCfnOutputA61BE816": "MvpPortfolioMonitoring_portfolio_tracker",
33 | "MvpPortfolioMonitoringEnvSsmManagerBatchFargateJobQueueNameCfnOutputEC7C7A53": "MvpPortfolioMonitoring_q_fargate",
34 | "MvpPortfolioMonitoringEnvSsmManagerBatchGetMarketDataJobDefCfnOutput6A568336": "MvpPortfolioMonitoring_get_market_data"
35 | },
36 | "LambdaStack": {
37 | "MvpPortfolioMonitoringEnvSsmManagerTradingStartStopLambdaNameCfnOutput5B84B11A": "MvpPortfolioMonitoring_trading_start_stop",
38 | "MvpPortfolioMonitoringEnvSsmManagerLambdaImageUriCfnOutput0A55492B": "763406250311.dkr.ecr.us-east-1.amazonaws.com/cdk-hnb659fds-container-assets-763406250311-us-east-1:aba8f4892f777d94f4fa3eb71b0e503f3f72f5402c52602dc5d97cd6096af165",
39 | "MvpPortfolioMonitoringEnvSsmManagerLambdaRoleNameCfnOutput7AAA66AA": "MvpPortfolioMonitoringLambaRole",
40 | "MvpPortfolioMonitoringEnvSsmManagerTradingEODEventRuleCfnOutput6E42D170": "trading_EOD",
41 | "MvpPortfolioMonitoringEnvSsmManagerTradingSODEventRuleCfnOutput65C6F3B8": "trading_SOD",
42 | "MvpPortfolioMonitoringEnvSsmManagerTestDockerLambdaNameCfnOutput400B1DCD": "MvpPortfolioMonitoring_test_docker_handler",
43 | "MvpPortfolioMonitoringEnvSsmManagerPortfolioUpdateLambdaNameCfnOutputDB28FB96": "MvpPortfolioMonitoring_portfolio_update",
44 | "MvpPortfolioMonitoringEnvSsmManagerSystemEventsLambdaNameCfnOutput775675D6": "MvpPortfolioMonitoring_system_events_handler"
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/aws-quant-infra/deployment/cdk/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "cdk",
3 | "version": "0.1.0",
4 | "bin": {
5 | "cdk": "bin/cdk.js"
6 | },
7 | "scripts": {
8 | "build": "tsc",
9 | "watch": "tsc -w",
10 | "test": "jest",
11 | "cdk": "cdk"
12 | },
13 | "devDependencies": {
14 | "@types/jest": "26.0.10",
15 | "@types/node": "10.17.27",
16 | "aws-cdk-lib": "2.59.0",
17 | "constructs": "^10.0.0",
18 | "jest": "26.4.2",
19 | "ts-jest": "26.2.0",
20 | "ts-node": "9.1.1",
21 | "typescript": "3.9.7"
22 | },
23 | "dependencies": {
24 | "@aws-cdk/aws-ec2": "1.187.0",
25 | "@aws-cdk/aws-lambda-python-alpha": "2.59.0-alpha.0",
26 | "aws-cdk-lib": "2.59.0",
27 | "cdk-nag": "^2.26.6"
28 | }
29 | }
30 |
--------------------------------------------------------------------------------
/aws-quant-infra/deployment/cdk/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "ES2018",
4 | "module": "commonjs",
5 | "lib": ["es2018"],
6 | "declaration": true,
7 | "strict": true,
8 | "noImplicitAny": true,
9 | "strictNullChecks": true,
10 | "noImplicitThis": true,
11 | "alwaysStrict": true,
12 | "noUnusedLocals": false,
13 | "noUnusedParameters": false,
14 | "noImplicitReturns": true,
15 | "noFallthroughCasesInSwitch": false,
16 | "inlineSourceMap": true,
17 | "inlineSources": true,
18 | "resolveJsonModule": true,
19 | "experimentalDecorators": true,
20 | "strictPropertyInitialization": false,
21 | "typeRoots": ["./node_modules/@types"]
22 | },
23 | "exclude": ["cdk.out"]
24 | }
25 |
--------------------------------------------------------------------------------
/aws-quant-infra/deployment/cloud9/dev_environment.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | AWSTemplateFormatVersion: '2010-09-09'
3 | Description: AWS CloudFormation template to create a Cloud9 environment for AWS Quant Infra
4 |
5 | Parameters:
6 | C9InstanceType:
7 | Description: Cloud9 instance type
8 | Type: String
9 | Default: m5.large
10 | AllowedValues:
11 | - t3.small
12 | - t3.medium
13 | - m4.large
14 | - m5.large
15 | ConstraintDescription: Must be a valid Cloud9 instance type
16 |
17 | Resources:
18 |
19 | ################## PERMISSIONS AND ROLES #################
20 | C9Role:
21 | Type: AWS::IAM::Role
22 | Properties:
23 | Tags:
24 | - Key: Environment
25 | Value: AWS Example
26 | AssumeRolePolicyDocument:
27 | Version: '2012-10-17'
28 | Statement:
29 | - Effect: Allow
30 | Principal:
31 | Service:
32 | - ec2.amazonaws.com
33 | - ssm.amazonaws.com
34 | Action:
35 | - sts:AssumeRole
36 | ManagedPolicyArns:
37 | - arn:aws:iam::aws:policy/AdministratorAccess
38 | Path: "/"
39 | C9LambdaExecutionRole:
40 | Type: AWS::IAM::Role
41 | Properties:
42 | AssumeRolePolicyDocument:
43 | Version: '2012-10-17'
44 | Statement:
45 | - Effect: Allow
46 | Principal:
47 | Service:
48 | - lambda.amazonaws.com
49 | Action:
50 | - sts:AssumeRole
51 | Path: "/"
52 | Policies:
53 | - PolicyName:
54 | Fn::Join:
55 | - ''
56 | - - C9LambdaPolicy-
57 | - Ref: AWS::Region
58 | PolicyDocument:
59 | Version: '2012-10-17'
60 | Statement:
61 | - Effect: Allow
62 | Action:
63 | - logs:CreateLogGroup
64 | - logs:CreateLogStream
65 | - logs:PutLogEvents
66 | Resource: arn:aws:logs:*:*:*
67 | - Effect: Allow
68 | Action:
69 | - cloudformation:DescribeStacks
70 | - cloudformation:DescribeStackEvents
71 | - cloudformation:DescribeStackResource
72 | - cloudformation:DescribeStackResources
73 | - ec2:DescribeInstances
74 | - ec2:DescribeVolumes
75 | - ec2:AssociateIamInstanceProfile
76 | - ec2:ModifyInstanceAttribute
77 | - ec2:ModifyVolume
78 | - ec2:ReplaceIamInstanceProfileAssociation
79 | - iam:ListInstanceProfiles
80 | - iam:PassRole
81 | Resource: "*"
82 |
83 | ################## LAMBDA BOOTSTRAP FUNCTION ################
84 |
85 | C9BootstrapInstanceLambda:
86 | Description: Bootstrap Cloud9 instance
87 | Type: Custom::C9BootstrapInstanceLambda
88 | DependsOn:
89 | - C9BootstrapInstanceLambdaFunction
90 | - C9Instance
91 | - C9LambdaExecutionRole
92 | Properties:
93 | Tags:
94 | - Key: Environment
95 | Value: AWS Example
96 | ServiceToken:
97 | Fn::GetAtt:
98 | - C9BootstrapInstanceLambdaFunction
99 | - Arn
100 | REGION:
101 | Ref: AWS::Region
102 | StackName:
103 | Ref: AWS::StackName
104 | EnvironmentId:
105 | Ref: C9Instance
106 | LabIdeInstanceProfileArn: !GetAtt C9InstanceProfile.Arn
107 |
108 | C9BootstrapInstanceLambdaFunction:
109 | Type: AWS::Lambda::Function
110 | Properties:
111 | Tags:
112 | - Key: Environment
113 | Value: AWS Example
114 | Handler: index.lambda_handler
115 | Role:
116 | Fn::GetAtt:
117 | - C9LambdaExecutionRole
118 | - Arn
119 | Runtime: python3.7
120 | MemorySize: 256
121 | Timeout: '600'
122 | Code:
123 | ZipFile: |
124 | from __future__ import print_function
125 | import boto3
126 | import json
127 | import os
128 | import time
129 | import traceback
130 | import cfnresponse
131 | def lambda_handler(event, context):
132 | print(f'event: {event}')
133 | print(f'context: {context}')
134 | responseData = {}
135 | if event['RequestType'] == 'Delete':
136 | try:
137 | responseData = {'Success': 'Finished cleanup'}
138 | cfnresponse.send(event, context, cfnresponse.SUCCESS, responseData, 'CustomResourcePhysicalID')
139 | except Exception as e:
140 | responseData = {'Error': traceback.format_exc()}
141 | cfnresponse.send(event, context, cfnresponse.FAILED, responseData, 'CustomResourcePhysicalID')
142 | if event['RequestType'] == 'Create':
143 | try:
144 | # Open AWS clients
145 | ec2 = boto3.client('ec2')
146 | # Get the InstanceId from Cloud9 IDE
147 | print('tag:aws:cloud9:environment : {}'.format(event['ResourceProperties']['EnvironmentId']))
148 | instance = ec2.describe_instances(Filters=[{'Name': 'tag:aws:cloud9:environment','Values': [event['ResourceProperties']['EnvironmentId']]}])['Reservations'][0]['Instances'][0]
149 | print(f'instance: {instance}')
150 | volume_id = instance['BlockDeviceMappings'][0]['Ebs']['VolumeId']
151 | print(f'Volume Id {volume_id}')
152 | ec2.modify_volume(VolumeId=volume_id, Size=25)
153 | print('Changed Volume to 25GB')
154 | # Create the IamInstanceProfile request object
155 | iam_instance_profile = {
156 | 'Arn': event['ResourceProperties']['LabIdeInstanceProfileArn']
157 | }
158 | print(f'iam_instance_profile: {iam_instance_profile}')
159 | # Wait for Instance to become ready before adding Role
160 | instance_state = instance['State']['Name']
161 | while instance_state != 'running':
162 | time.sleep(5)
163 | instance_state = ec2.describe_instances(InstanceIds=[instance['InstanceId']])['Reservations'][0]['Instances'][0]['State']['Name']
164 | print(f'waiting for the instance state to be "running", current instance_state: {instance_state}')
165 | # attach instance profile
166 | print(f'Instance is running , about to associate iam_instance_profile: {iam_instance_profile}')
167 | response = ec2.associate_iam_instance_profile(IamInstanceProfile=iam_instance_profile, InstanceId=instance['InstanceId'])
168 | print(f'response - associate_iam_instance_profile: {response}')
169 | r_ec2 = boto3.resource('ec2')
170 | responseData = {'Success': 'Started bootstrapping for instance: '+instance['InstanceId']}
171 | cfnresponse.send(event, context, cfnresponse.SUCCESS, responseData, 'CustomResourcePhysicalID')
172 | except Exception as e:
173 | responseData = {'Error': traceback.format_exc()}
174 | cfnresponse.send(event, context, cfnresponse.FAILED, responseData, 'CustomResourcePhysicalID')
175 | ################## SSM BOOTSTRAP HANDLER ###############
176 | C9OutputBucket:
177 | Type: AWS::S3::Bucket
178 | DeletionPolicy: Delete
179 |
180 | C9SSMDocument:
181 | Type: AWS::SSM::Document
182 | Properties:
183 | Tags:
184 | - Key: Environment
185 | Value: AWS Example
186 | # SSM document content is provided inline below
187 | DocumentType: Command
188 | Content:
189 | schemaVersion: '2.2'
190 | description: Bootstrap Cloud9 Instance
191 | mainSteps:
192 | - action: aws:runShellScript
193 | name: C9bootstrap
194 | inputs:
195 | runCommand:
196 | - "#!/bin/bash"
197 | - date
198 | - echo '=== Install deps ==='
199 | - sudo yum -y install jq
200 | - echo '=== Update to the latest AWS CLI ==='
201 | - sudo -H -u ec2-user aws --version
202 | - curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
203 | - unzip awscliv2.zip
204 | - sudo ./aws/install
205 | - . /home/ec2-user/.bash_profile
206 | - sudo -H -u ec2-user aws --version
207 | - echo '=== setup AWS configs ==='
208 | - rm -vf /home/ec2-user/.aws/credentials
209 | - export ACCOUNT_ID=$(aws sts get-caller-identity --output text --query Account)
210 | - export AWS_REGION=$(curl -s 169.254.169.254/latest/dynamic/instance-identity/document | jq -r '.region')
211 | - echo "export ACCOUNT_ID=${ACCOUNT_ID}" >> /home/ec2-user/.bash_profile
212 | - echo "export AWS_REGION=${AWS_REGION}" >> /home/ec2-user/.bash_profile
213 | - sudo -H -u ec2-user aws configure set default.region ${AWS_REGION}
214 | - sudo -H -u ec2-user aws configure get default.region
215 | - sudo -H -u ec2-user aws sts get-caller-identity
216 | - echo "Bootstrap completed with return code $?"
217 | C9BootstrapAssociation:
218 | Type: AWS::SSM::Association
219 | DependsOn:
220 | - C9OutputBucket
221 | Properties:
222 | Name: !Ref C9SSMDocument
223 | OutputLocation:
224 | S3Location:
225 | OutputS3BucketName: !Ref C9OutputBucket
226 | OutputS3KeyPrefix: bootstrapoutput
227 | Targets:
228 | - Key: tag:SSMBootstrap
229 | Values:
230 | - Active
231 |
232 | ################## INSTANCE #####################
233 | C9InstanceProfile:
234 | Type: AWS::IAM::InstanceProfile
235 | Properties:
236 | Path: "/"
237 | Roles:
238 | - Ref: C9Role
239 | C9Instance:
240 | Description: "AWS Quant Deployment Space"
241 | DependsOn: C9BootstrapAssociation
242 | Type: AWS::Cloud9::EnvironmentEC2
243 | Properties:
244 | Description: AWS Cloud9 instance for AWS Quant Deployment
245 | AutomaticStopTimeMinutes: 3600
246 | InstanceType:
247 | Ref: C9InstanceType
248 | Name:
249 | Ref: AWS::StackName
250 | Repositories:
251 | - PathComponent: /aws-quant-deployment
252 | RepositoryUrl: https://github.com/aws-samples/asynchronous-messaging-workshop.git
253 | Tags:
254 | -
255 | Key: SSMBootstrap
256 | Value: Active
257 | -
258 | Key: Environment
259 | Value:
260 | Ref: AWS::StackName
261 | Outputs:
262 | Cloud9IDE:
263 | Value:
264 | Fn::Join:
265 | - ''
266 | - - https://
267 | - Ref: AWS::Region
268 | - ".console.aws.amazon.com/cloud9/ide/"
269 | - Ref: C9Instance
270 | - "?region="
271 | - Ref: AWS::Region
--------------------------------------------------------------------------------
/aws-quant-infra/deployment/grafana/grafana.tfvars:
--------------------------------------------------------------------------------
1 | group_ids = ["Admins"]
2 | user_ids = ["USER_ID_HERE"]
3 | grafana_name = "aws_quant_sso"
4 | grafana_role = "GRAFANA_ROLE_ARN"
5 | grafana_local_role = "ADMIN"
--------------------------------------------------------------------------------
/aws-quant-infra/deployment/grafana/main.tf:
--------------------------------------------------------------------------------
1 | provider "aws" {
2 | region = "us-west-2"
3 | profile = "default"
4 | }
5 |
6 | data "aws_ssoadmin_instances" "sso_instance" {}
7 |
8 | resource "aws_identitystore_user" "sso_user" {
9 | identity_store_id = tolist(data.aws_ssoadmin_instances.sso_instance.identity_store_ids)[0]
10 | display_name = "Grafana Admin"
11 | user_name = "grafana_admin"
12 |
13 | name {
14 | family_name = "Grafana"
15 | given_name = "Admin"
16 | }
17 | }
18 |
19 | resource "aws_identitystore_group" "grafana_admin_group" {
20 | identity_store_id = tolist(data.aws_ssoadmin_instances.sso_instance.identity_store_ids)[0]
21 | display_name = "grafana_admin_group"
22 | description = "Grafana admin group"
23 | }
24 |
25 | resource "aws_identitystore_group_membership" "sso_group_user" {
26 | identity_store_id = tolist(data.aws_ssoadmin_instances.sso_instance.identity_store_ids)[0]
27 | group_id = aws_identitystore_group.grafana_admin_group.group_id
28 | member_id = aws_identitystore_user.sso_user.user_id
29 | }
30 |
31 |
32 |
33 |
34 | resource "aws_grafana_role_association" "role_association" {
35 | role = var.grafana_local_role
36 | group_ids = [aws_identitystore_group.grafana_admin_group.group_id]
37 | workspace_id = aws_grafana_workspace.grafana_workspace.id
38 | }
39 |
40 | resource "aws_grafana_workspace" "grafana_workspace" {
41 | name = var.grafana_name
42 | account_access_type = "CURRENT_ACCOUNT"
43 | authentication_providers = ["AWS_SSO", "SAML"]
44 | permission_type = "SERVICE_MANAGED"
45 | role_arn = var.grafana_role
46 | data_sources = ["CLOUDWATCH", "TIMESTREAM"]
47 | }
48 |
--------------------------------------------------------------------------------
/aws-quant-infra/deployment/grafana/vars.tf:
--------------------------------------------------------------------------------
1 | variable "grafana_role" {
2 | type = string
3 | }
4 | variable "grafana_name" {
5 | type = string
6 | }
7 | variable "grafana_local_role" {
8 | type = string
9 | }
10 | variable "group_ids" {
11 | type = list(string)
12 | }
13 |
--------------------------------------------------------------------------------
/aws-quant-infra/instructions.md:
--------------------------------------------------------------------------------
1 | # Pre-requisites
2 | At a minimum you will need Node.js and npm, the AWS CDK v2 CLI, Docker (the Lambda and Batch stacks build container images locally), and AWS credentials for an account and region that have been bootstrapped with `cdk bootstrap`.
3 | 
4 | # Deployment steps
5 | ## Clone the Repo
6 |
7 | * First, you will need to clone the code repo:
8 |
9 | `git clone https://xxxxxx`
10 |
11 | `cd aws-quant-infra/`
12 |
13 | `git checkout cdk-deployment`
14 |
15 | `cd deployment/cdk`
16 |
17 | * To list the available stacks, do:
18 |
19 | `cdk ls`
20 |
21 | * To deploy all the stacks, do:
22 |
23 | `cdk deploy "*"`
24 |
25 | Follow the prompts, entering "y" to confirm and deploy each stack. The full deployment can take a couple of hours to complete.
26 |
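27 | * To deploy a single stack, pass its name as reported by `cdk ls`. The name below, `EnvStack`, is taken from the sample `outputs.json` in this repo, so confirm the exact names in your own `cdk ls` output first:
28 | 
29 | `cdk deploy EnvStack`
30 | 
31 | By default the CDK CLI also deploys any stacks the selected stack depends on; pass `--exclusively` to deploy only the named stack.
32 | 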
--------------------------------------------------------------------------------
/aws-quant-infra/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "serverless-framework-with-js",
3 | "version": "1.0.0",
4 | "description": "Example project using the Serverless Framework, JavaScript, AWS Lambda, AWS API Gateway and GitLab Pages.",
5 | "dependencies": {
6 | "cdk-nag": "^2.26.6",
7 | "serverless": "^1.56.1",
8 | "serverless-jest-plugin": "^0.2.1",
9 | "serverless-offline": "^14.4.0",
10 | "serverless-stack-output": "^0.2.3"
11 | },
12 | "devDependencies": {
13 | "axios": "^1.8.2",
14 | "http-server": "^0.11.1",
15 | "wait-on": "^3.3.0"
16 | },
17 | "scripts": {
18 | "test": "jest",
19 | "deploy": "serverless deploy",
20 | "start": "serverless offline",
21 | "pages": "http-server"
22 | },
23 | "jest": {
24 | "testMatch": [
25 | "**/?(*.)+(spec|test).[jt]s?(x)"
26 | ]
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/aws-quant-infra/requirements.txt:
--------------------------------------------------------------------------------
1 | boto3==1.19.12
2 | botocore==1.22.12
3 | jmespath==0.10.0
4 | numpy==1.21.4
5 | pandas==1.3.4
6 | python-dateutil==2.8.2
7 | pytz==2021.3
8 | s3transfer==0.5.0
9 | six==1.16.0
10 | urllib3==1.26.17
11 |
--------------------------------------------------------------------------------
/aws-quant-infra/scripts/create_system_event.py:
--------------------------------------------------------------------------------
1 | import json
2 | import boto3
3 | from decimal import *
4 | import time
5 | from dateutil.parser import isoparse
6 | import sys
7 | import hashlib
8 | import random
9 | # event_name='intraday_start_all'
10 | # # 'intraday_start_all','intraday_stop','intraday_stop_all'
11 | events = ['intraday_start_all','intraday_stop','intraday_stop_all']
12 | session = boto3.session.Session(profile_name='quant')
13 | dynamodb = session.resource('dynamodb', region_name='us-west-2')
14 |
15 | aws_region='us-west-2'
16 | # app_config = (boto3.client('appconfig',region_name=aws_region). \
17 | # get_configuration(Application='PortfolioMonitoring'
18 | # # ,Environment=config_env
19 | # ,Configuration='PortfolioMonitoringConfigProfile'
20 | # , ClientId=f'{random.randint(-sys.maxsize,sys.maxsize)}').get('Content').read())
21 | # app_config=json.loads(app_config.decode('utf-8'))
22 | portf_system_table = dynamodb.Table('MvpPortfolioMonitoringSystemEventTable')
23 | for event_name in events:
24 | create_ts = int(time.time() * 1000000)
25 | event_id = hashlib.md5((f'{event_name}{create_ts}').encode()).hexdigest()
26 | event_item = {
27 | 'event_id': event_id
28 | , 'event_create_ts': create_ts
29 | , 'event_name': event_name
30 | , 'commands': [{'batch': [{'c4fa6bae-6e6b-4618-af75-a7f967a12cc0': 'stop'}]}]
31 | , 'config': {'stale_threshold': '1m', 'symbol_stripe_size': '1'}
32 | , 'handler_info': {'app_config_dict':{'Configuration': 'PortfolioMonitoringConfigProfile',
33 | 'Environment': 'dev',
34 | 'Application': 'PortfolioMonitoring'}}
35 | }
36 | portf_system_table.put_item(
37 | TableName='MvpPortfolioMonitoringSystemEventTable',
38 | Item=event_item
39 | )
40 |
--------------------------------------------------------------------------------
/aws-quant-infra/scripts/deploy_portfolio.py:
--------------------------------------------------------------------------------
1 | import ast
2 | import boto3
3 | from decimal import *
4 | import time
5 | from dateutil.parser import isoparse
6 | import hashlib
7 | prtf_name='long_short'
8 | session = boto3.session.Session(profile_name='quant')
9 | dynamodb = session.resource('dynamodb', region_name='us-west-2')
10 | portf_table = dynamodb.Table('MvpPortfolioMonitoringPortfolioTable')
11 | create_ts = int(time.time() * 1000000)
12 | portf_id = hashlib.md5((f'{prtf_name}{create_ts}').encode()).hexdigest()
13 |
14 | #load portfolio list from file sp500.txt
15 | with open('aws-quant-infra/scripts/sp500.txt') as list_file:
16 | portfolio_list = ast.literal_eval(list_file.read())
17 |
18 | portf_item_positions = {'positions': []}
19 | for item in portfolio_list:
20 | position = {item: Decimal(0.25)}
21 | portf_item_positions['positions'].append(position)
22 |
23 |
24 | portf_item = {
25 | 'portf_id': portf_id,
26 | 'portf_name': prtf_name,
27 | 'portf_create_ts': create_ts,
28 | # 'positions': [{'GS': Decimal(-1)}, {'AMZN': Decimal(0.25)}, {'MSFT': Decimal(0.25)}, {'XLE': Decimal(0.25)},
29 | # {'QQQ': Decimal(0.25)}]
30 |
31 | # load positions from dict above
32 | 'positions': portf_item_positions['positions']
33 | , 'handler_info': {'deploy': 'batch', 'refresh_sec': 60,'app_config_dict':{
34 | 'Application':'PortfolioMonitoring'
35 | ,'Environment':'dev'#config_env
36 | ,'Configuration':'PortfolioMonitoringConfigProfile'
37 | }}
38 | }
39 | portf_table.put_item(
40 | TableName='MvpPortfolioMonitoringPortfolioTable',
41 | Item=portf_item
42 | )
--------------------------------------------------------------------------------
/aws-quant-infra/scripts/sp500.txt:
--------------------------------------------------------------------------------
1 | [
2 | "AAPL", "MSFT", "AMZN", "GOOGL", "BRK.B", "GOOG", "NVDA", "TSLA",
3 | "XOM", "UNH", "JNJ", "JPM", "V", "META", "PG", "HD", "CVX", "MA",
4 | "LLY", "MRK", "ABBV", "BAC", "PFE", "AVGO", "KO", "PEP", "TMO",
5 | "COST", "WMT", "DIS", "MCD", "CSCO", "ABT", "WFC", "ACN", "DHR",
6 | "ADBE", "CMCSA", "VZ", "CRM", "PM", "NKE", "NFLX", "LIN", "TXN",
7 | "COP", "BMY", "NEE", "QCOM", "RTX", "T", "HON", "CAT", "ORCL",
8 | "AMGN", "UPS", "MS", "LOW", "SBUX", "UNP", "SPGI", "IBM", "AMD",
9 | "GS", "PLD", "INTU", "BA", "ELV", "INTC", "CVS", "DE", "BLK",
10 | "SCHW", "MDT", "LMT", "GILD", "AXP", "AMT", "C", "AMAT", "BKNG",
11 | "TJX", "CB", "CI", "PYPL", "NOW", "ADP", "GE", "ADI", "MDLZ",
12 | "TMUS", "ISRG", "MMC", "SYK", "VRTX", "SLB", "REGN", "MO", "EOG",
13 | "PGR", "DUK", "TGT", "ZTS", "SO", "BDX", "APD", "MU", "FISV",
14 | "EQIX", "AON", "LRCX", "USB", "PNC", "BSX", "TFC", "ITW", "ETN",
15 | "FCX", "MMM", "NOC", "CCI", "CSX", "CME", "MRNA", "EL", "MPC",
16 | "HUM", "ICE", "CL", "WM", "KLAC", "PXD", "NSC", "VLO", "HCA",
17 | "ATVI", "SNPS", "MCK", "GM", "SHW", "DG", "EMR", "F", "PSX", "D",
18 | "GD", "SRE", "CDNS", "MCO", "OXY", "EW", "ORLY", "MET", "AEP",
19 | "NXPI", "JCI", "PSA", "AIG", "MAR", "APH", "A", "GIS", "ROP", "ADM",
20 | "CTVA", "FDX", "ADSK", "AZO", "COF", "FIS", "TRV", "CMG", "NUE",
21 | "KMB", "HES", "CNC", "O", "IQV", "MCHP", "DVN", "CHTR", "MSI",
22 | "DOW", "NEM", "BIIB", "MSCI", "AFL", "SPG", "DXCM", "ROST", "EXC",
23 | "TT", "PH", "AJG", "IDXX", "LHX", "TEL", "SYY", "HLT", "MNST",
24 | "PCAR", "PRU", "WMB", "CTAS", "XEL", "ECL", "STZ", "AMP", "KMI",
25 | "HAL", "DD", "BK", "CARR", "TDG", "YUM", "PAYX", "WELL", "CMI",
26 | "ALL", "FTNT", "MTD", "OTIS", "EA", "CTSH", "ED", "ILMN", "STT",
27 | "ALB", "RMD", "AME", "ROK", "VICI", "WBD", "HSY", "DFS", "DLR",
28 | "KEYS", "ON", "CSGP", "DLTR", "BKR", "KHC", "GPN", "SBAC", "ANET",
29 | "OKE", "ODFL", "DHI", "URI", "PEG", "APTV", "PPG", "KDP", "KR",
30 | "WEC", "CPRT", "AWK", "IFF", "FAST", "ENPH", "CEG", "VRSK", "ES",
31 | "GLW", "WTW", "MTB", "CBRE", "EBAY", "FANG", "EFX", "WBA", "ABC",
32 | "HPQ", "ZBH", "EIX", "ULTA", "IT", "TROW", "CDW", "PCG", "GWW",
33 | "FRC", "LEN", "GEHC", "WY", "RSG", "TSCO", "AVB", "FITB", "HIG",
34 | "DAL", "LYB", "VMC", "ARE", "FTV", "ACGL", "GPC", "BAX", "ANSS",
35 | "LH", "AEE", "FE", "IR", "ETR", "RF", "DTE", "PPL", "RJF", "LUV",
36 | "PFG", "HBAN", "MLM", "EQR", "CFG", "PWR", "EXR", "HPE", "HOLX",
37 | "DOV", "STE", "NDAQ", "VTR", "VRSN", "CTRA", "CAH", "NTRS", "WAT",
38 | "STLD", "WST", "ALGN", "EPAM", "TDY", "LVS", "CHD", "TSN", "MPWR",
39 | "INVH", "MAA", "WAB", "MKC", "CNP", "XYL", "DRI", "BALL", "MRO",
40 | "CMS", "AMCR", "IEX", "TTWO", "FSLR", "SWKS", "AES", "BR", "EXPD",
41 | "SIVB", "KEY", "MOH", "OMC", "PKI", "K", "EXPE", "CAG", "ETSY",
42 | "BBY", "CLX", "MOS", "TRGP", "DGX", "SEDG", "COO", "CINF", "SYF",
43 | "FMC", "CF", "ZBRA", "TER", "SJM", "ATO", "UAL", "INCY", "FDS",
44 | "JBHT", "IRM", "NVR", "J", "PAYC", "AVY", "FLT", "TXT", "GRMN",
45 | "MTCH", "POOL", "LKQ", "APA", "HWM", "PEAK", "NTAP", "TRMB", "ESS",
46 | "VTRS", "PTC", "LW", "WRB", "MKTX", "EVRG", "WDC", "RCL", "IPG",
47 | "KIM", "AKAM", "RE", "IP", "TYL", "LNT", "STX", "MGM", "BRO",
48 | "JKHY", "LDOS", "GEN", "HST", "SNA", "PKG", "HRL", "NDSN", "CPT",
49 | "CBOE", "UDR", "DPZ", "SWK", "TECH", "CRL", "PHM", "CHRW", "BF.B",
50 | "EQT", "CE", "HSIC", "L", "PARA", "QRVO", "MAS", "LYV", "TFX",
51 | "KMX", "CZR", "NI", "CDAY", "TPR", "BWA", "GL", "WYNN", "CCL",
52 | "EMN", "AAL", "FOXA", "BXP", "CPB", "JNPR", "BIO", "BBWI", "REG",
53 | "ALLE", "VFC", "UHS", "WRK", "TAP", "CTLT", "CMA", "RHI", "AAP",
54 | "FFIV", "HII", "PNR", "WHR", "BEN", "ROL", "PNW", "IVZ", "FRT",
55 | "ZION", "XRAY", "NWSA", "SEE", "SBNY", "NRG", "AOS", "OGN", "HAS",
56 | "GNRC", "AIZ", "DXC", "ALK", "NCLH", "MHK", "NWL", "LNC", "RL",
57 | "LUMN", "FOX", "DVA", "DISH", "NWS"
58 | ]
--------------------------------------------------------------------------------
/aws-quant-infra/src/batch/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM amazonlinux:2018.03
2 | ARG SSM_PREFIX
3 | ENV SSM_PREFIX ${SSM_PREFIX}
4 | RUN echo SSM_PREFIX: $SSM_PREFIX
5 | ARG AWS_REGION
6 | ENV AWS_DEFAULT_REGION ${AWS_REGION}
7 | RUN echo AWS_DEFAULT_REGION: ${AWS_DEFAULT_REGION}
8 | ARG PY_VERSION=3.7
9 | ENV conda_version=Anaconda3-2022.10-Linux-x86_64.sh
10 | RUN echo 'we are running some # of cool things'
11 | RUN ulimit -n 1024 && yum -y update
12 | RUN ulimit -n 1024 && yum install -y wget
13 | RUN ulimit -n 1024 && yum install -y git
14 | RUN ulimit -n 1024 && yum install -y unzip
15 | RUN ulimit -n 1024 && yum install -y zip
16 | RUN ulimit -n 1024 && yum install -y bzip2
17 | RUN wget https://repo.continuum.io/archive/${conda_version}
18 | RUN bash ${conda_version} -b
19 | ENV PATH=/root/anaconda3/bin:${PATH}
20 | #ENV PY_PACKAGE_DIR=/root/miniconda3/envs/lambda_layer/lib/python$PY_VERSION/site-packages
21 | ENV CUSTOM_SOURCE_DIR=/src
22 | ENV PYTHON_ENVS=fsibd_python
23 | RUN conda create -n ${PYTHON_ENVS} python=3.7 -y
24 | ENV PATH=/root/anaconda3/envs/${PYTHON_ENVS}/bin:${PATH}
25 | RUN source activate ${PYTHON_ENVS}
26 | RUN conda install -n ${PYTHON_ENVS} pandas -y
27 | RUN conda install -n ${PYTHON_ENVS} fsspec -y
28 | RUN /root/anaconda3/envs/${PYTHON_ENVS}/bin/pip install python-binance
29 | RUN /root/anaconda3/envs/${PYTHON_ENVS}/bin/pip install pyEX
30 | RUN /root/anaconda3/envs/${PYTHON_ENVS}/bin/pip install awswrangler
31 | RUN /root/anaconda3/envs/${PYTHON_ENVS}/bin/pip install xbbg==0.7.7a3
32 | RUN /root/anaconda3/envs/${PYTHON_ENVS}/bin/pip install blpapi==3.19.3 --index-url=https://bcms.bloomberg.com/pip/simple/
33 | RUN conda install -n ${PYTHON_ENVS} numpy -y
34 | RUN conda install -n ${PYTHON_ENVS} boto3 -y
35 | RUN conda install -n ${PYTHON_ENVS} s3fs -y
36 | # RUN conda install -n ${PYTHON_ENVS} -c conda-forge ta-lib -y
37 | ##### RUN conda install -n ${PYTHON_ENVS} hashlib -y
38 | ENV ENTRY_POINT='source activate '${PYTHON_ENVS}
39 | RUN mkdir ${CUSTOM_SOURCE_DIR}
40 | COPY batch/python/ ${CUSTOM_SOURCE_DIR}
41 | COPY shared ${CUSTOM_SOURCE_DIR}/shared
42 | RUN ls -la ${CUSTOM_SOURCE_DIR}/*
43 | ENV PYTHONPATH=${CUSTOM_SOURCE_DIR}
44 | RUN curl "https://s3.amazonaws.com/aws-cli/awscli-bundle.zip" -o "awscli-bundle.zip"
45 | RUN unzip awscli-bundle.zip
46 | RUN ./awscli-bundle/install -i /usr/local/aws -b /usr/local/bin/aws
47 | RUN aws --version
48 | RUN aws configure set region ${AWS_REGION}
49 | ENTRYPOINT ["python3.7"]
50 | #CMD ["conda activate fsibd_python"]
51 | #RUN /root/anaconda3/envs/lambda_layer/bin/pip install pyEX
52 |
--------------------------------------------------------------------------------
/aws-quant-infra/src/batch/python/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/quant-trading/f1dd9ace0c732cae3d0b103affae3d6e6b2da80f/aws-quant-infra/src/batch/python/__init__.py
--------------------------------------------------------------------------------
/aws-quant-infra/src/batch/python/portfolio_tracker.py:
--------------------------------------------------------------------------------
1 | import pyEX
2 | print("import pyEX:",pyEX.__file__)
3 | import binance
4 | print("import binance:",binance.__file__)
5 | import sys
6 | print("sys.path:",'\n'.join(sys.path))
7 |
8 | sys.path.append('/src/shared/python')
9 | sys.path.append('/Users/samfarbe/Documents/SA/Initiatives/AWSQuant/aws-quant-infra/src/shared/python')
10 |
11 | sys.path.append('/src/shared/python/')
12 | sys.path.append(('/home/ec2-user/environment/MvpPortfolioMonitoring-code-repo/aws-quant-infra'))
13 | sys.path.append(('/home/ec2-user/environment/MvpPortfolioMonitoring-code-repo/aws-quant-infra/src/shared/python'))
14 |
15 | import aws_quant_infra as aq_i
16 | print(f"import aws_quant_infra is done!")
17 | print(aq_i.__file__)
18 | import aws_quant_risk as aq_r
19 | print(f"import aws_quant_risk is done!")
20 | print(aq_r.__file__)
21 |
22 | import sys
23 | print (f'Number of arguments:{len(sys.argv)} arguments.')
24 | print (f'Argument List:{str(sys.argv)}')
25 | print (f'function call:{str(sys.argv[0])}')
26 | print (f'config environment:{str(sys.argv[1])}')
27 | print (f'portfolio id:{str(sys.argv[2])}')
28 | # TODO: think through unified logging, print and combing through logs is NOT the right thing to-do. since everything is one giant distributed dependency, Neptune DB seems the right thing to do???
29 |
30 | if False:
31 | #DEBUG
32 | import os
33 | os.environ["SSM_PREFIX"] = "Mvp"
34 | os.environ["AWS_REGION"] = "us-east-1"
35 | aq_r.PortfolioTracker.portfolio_tracker_main("dev",'7af1fa7c6b01edd3826c880082270775')
36 | else:
37 | aq_r.PortfolioTracker.portfolio_tracker_main(sys.argv[1],sys.argv[2])
38 |
39 |
40 |
--------------------------------------------------------------------------------
/aws-quant-infra/src/batch/python/subscribe_market_data.py:
--------------------------------------------------------------------------------
1 | import pyEX
2 | print("import pyEX:",pyEX.__file__)
3 | import sys
4 | print("sys.path:",'\n'.join(sys.path))
5 | from xbbg import blp
6 | print("import xbbg.blp:",blp.__file__)
7 |
8 | sys.path.append('/src/shared/python')
9 | sys.path.append('/Users/blitvin/IdeaProjects/aws-quant-infra/src/shared/python')
10 | sys.path.append('/var/task/shared/python')
11 | sys.path.append('/src/shared/python/')
12 | sys.path.append(('/home/ec2-user/environment/MvpPortfolioMonitoring-code-repo/aws-quant-infra'))
13 | sys.path.append(('/home/ec2-user/environment/MvpPortfolioMonitoring-code-repo/aws-quant-infra/src/shared/python'))
14 |
15 | import aws_quant_infra as aq_i
16 | print(f"import aws_quant_infra is done!")
17 | print(aq_i.__file__)
18 | import aws_quant_risk as aq_r
19 | print(f"import aws_quant_risk is done!")
20 | print(aq_r.__file__)
21 | import aws_quant_market_data as aq_md
22 | print(f"import aws_quant_market_data is done!")
23 | print(aq_md.__file__)
24 |
25 | print (f'Number of arguments:{len(sys.argv)} arguments.')
26 | print (f'Argument List:{str(sys.argv)}')
27 | print (f'function call:{str(sys.argv[0])}')
28 | print (f'config_env:{str(sys.argv[1])}')
29 | print (f'symbols:{str(sys.argv[2])}')
30 | if False:# for debugging
31 | import imp
32 | imp.reload(aq_md)
33 |
34 | #aq_md.IEX_data_provider.iex_subscribe_main(str(sys.argv[1]),str(sys.argv[2]))
35 | if False:
36 | #DEBUG
37 | import os
38 | os.environ["SSM_PREFIX"] = "Mvp"
39 | os.environ["AWS_REGION"] = "us-east-1"
40 | aq_md.MarketDataProvider.subscribe_main('dev','EURUSD,USDJPY')
41 | else:
42 | aq_md.MarketDataProvider.subscribe_main(str(sys.argv[1]),str(sys.argv[2]),str(sys.argv[3]))
43 | #aq_md.IEX_data_provider.iex_subscribe_main('dev','GS,AMZN')
44 |
--------------------------------------------------------------------------------
/aws-quant-infra/src/batch/python/test_docker.py:
--------------------------------------------------------------------------------
1 | import pyEX
2 | print("import pyEX:",pyEX.__file__)
3 | import binance
4 | print("import binance:",binance.__file__)
5 | import sys
6 | print("sys.path:",'\n'.join(sys.path))
7 | import boto3
8 | import os
9 | import json
10 | import random
11 | #TODO: check code committ stack; it wiped out existing repo when deployed [potentially rolled back] into non-greenfield account
12 | if (True):
13 | try:
14 | sys.path.append('/src/shared/python')
15 | import aws_quant_infra as aq_i
16 | print(f"import aws_quant_infra is done!")
17 | print(aq_i.__file__)
18 | import aws_quant_risk as aq_r
19 | print(f"import aws_quant_risk is done!")
20 | print(aq_r.__file__)
21 | except Exception as exc:
22 | print(f'error importing:{exc}')
23 | else:
24 | pass
25 |
26 |
27 | REGION = os.environ['AWS_DEFAULT_REGION']
28 | print(f'aws region: {REGION}')
29 |
30 | def boto3_test():
31 | print('Starting tests to check various boto3 calls')
32 | env, app = aqi_test()
33 | aqr_test(env, app)
34 |
35 | def aqi_test():
36 | print('Testing aws_quant_infra.get_app_config_from_paramstore')
37 | env_config = aq_i.get_app_config_from_paramstore(REGION)
38 | print(env_config)
39 |
40 | print('Testing appconfig call')
41 | env = 'dev'
42 | app_config_details = json.loads(env_config.get('Mvp-PortfolioMonitoring-AppConfigDetails'))
43 | app = app_config_details.get('Application')
44 | config = app_config_details.get('Configuration')
45 | app_config = (boto3.client('appconfig').\
46 | get_configuration(Application=app
47 | ,Environment=env
48 | ,Configuration=config
49 | , ClientId=f'{random.randint(-sys.maxsize,sys.maxsize)}').get('Content').read())
50 |
51 | app_config=json.loads(app_config.decode('utf-8'))
52 | print(app_config)
53 |
54 | print('Testing aws_quant_infra.get_secret')
55 | secret_name = app_config.get('secrets')[0]
56 | # secret = f'MvpPortfolioMonitoring-{secret_name}'
57 | secret = f'{secret_name}'
58 | secret_data = aq_i.get_secret(secret, REGION)
59 | print(secret_data)
60 | try:
61 | print('Testing format of response')
62 | print(secret_data[secret])
63 | except:
64 | print('get_secret response in wrong format!!!')
65 |
66 | return env_config, app_config
67 |
68 | def aqr_test(env_config, app_config):
69 | print('Testing Batch call')
70 | batch_client = boto3.client('batch')
71 | batch_jobs_resp = batch_client.list_jobs(jobQueue='MvpPortfolioMonitoring_q_ec2')
72 | print(batch_jobs_resp)
73 |
74 | print('Testing aws_quant_risk.PortfolioTrackerFactory')
75 | PTF = aq_r.PortfolioTrackerFactory(env_config, app_config, 'Mvp-')
76 | print(str(PTF))
77 |
78 | print('Testing aq_r.PTF.get_all_portfolios (Dynamodb call)')
79 | PTF.load_all_portfolios()
80 | print(PTF.all_portfolios)
81 |
82 | print('Testing Events calls')
83 | for rule_name in [ 'trading_EOD', 'trading_SOD']:
84 | print(f'Checking call for rule name: {rule_name}')
85 | print(boto3.client('events').list_targets_by_rule(Rule=rule_name))
86 |
87 | print('Testing timestream call')
88 | timestream_db = env_config.get('Mvp-PortfolioMonitoring-TimestreamDb')
89 | timestream_table = 'iex_realtime_data'
90 | print('Checking for query endpoints')
91 | query = f'SELECT * FROM {timestream_db}.{timestream_table} LIMIT 10'
92 | print(boto3.client('timestream-query').query(
93 | QueryString=query,
94 | ))
95 | print('Checking for write endpoints')
96 | print(boto3.client('timestream-write').describe_database(
97 | DatabaseName=timestream_db
98 | ))
99 |
100 | boto3_test()
--------------------------------------------------------------------------------
/aws-quant-infra/src/bpipe-testing/Dockerfile:
--------------------------------------------------------------------------------
1 | # NOTE: a FROM <base image> instruction is still needed at the top for this Dockerfile to build
2 | RUN curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -o Miniconda3-latest-Linux-x86_64.sh
3 | RUN chmod +x Miniconda3-latest-Linux-x86_64.sh
4 | RUN ./Miniconda3-latest-Linux-x86_64.sh -b
5 | ENV PATH=/root/miniconda3/bin:${PATH}
6 | # RUN ls -la
7 | # RUN ls -la bin
8 | # RUN conda init bash
9 | # RUN exec bash
10 | RUN conda create --name bpipe python=3.8 -y
11 | RUN conda init bash
12 | # ENV PATH=/root/miniconda3/envs/${PYTHON_ENVS}/bin:${PATH}
13 | RUN source activate bpipe
14 | RUN conda install -c conda-forge blpapi -y
15 | RUN pip install xbbg==0.7.7a3
16 | RUN mkdir /src
17 | COPY bpipe.py /src/
18 | COPY stream_data.py /src/
--------------------------------------------------------------------------------
/aws-quant-infra/src/bpipe-testing/Miniconda3-latest-Linux-x86_64.sh:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/quant-trading/f1dd9ace0c732cae3d0b103affae3d6e6b2da80f/aws-quant-infra/src/bpipe-testing/Miniconda3-latest-Linux-x86_64.sh
--------------------------------------------------------------------------------
/aws-quant-infra/src/bpipe-testing/bpipe.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/quant-trading/f1dd9ace0c732cae3d0b103affae3d6e6b2da80f/aws-quant-infra/src/bpipe-testing/bpipe.py
--------------------------------------------------------------------------------
/aws-quant-infra/src/bpipe-testing/bpipe_native_example.py:
--------------------------------------------------------------------------------
1 | import time
2 |
3 | from argparse import ArgumentParser, RawTextHelpFormatter
4 |
5 | from blpapi_import_helper import blpapi
6 |
7 | from util.SubscriptionOptions import \
8 | addSubscriptionOptions, \
9 | setSubscriptionSessionOptions, \
10 | createSubscriptionList
11 | from util.ConnectionAndAuthOptions import \
12 | addConnectionAndAuthOptions, \
13 | createSessionOptions
14 |
15 | DEFAULT_QUEUE_SIZE = 10000
16 |
17 |
18 | class SubscriptionEventHandler(object):
19 | def getTimeStamp(self):
20 | return time.strftime("%Y/%m/%d %X")
21 |
22 | def processSubscriptionStatus(self, event):
23 | timeStamp = self.getTimeStamp()
24 | for msg in event:
25 | topic = msg.correlationId().value()
26 | print(f"{timeStamp}: {topic}")
27 | print(msg)
28 | if msg.messageType() == blpapi.Names.SUBSCRIPTION_FAILURE:
29 | print(f"Subscription for {topic} failed")
30 | elif msg.messageType() == blpapi.Names.SUBSCRIPTION_TERMINATED:
31 | # Subscription can be terminated if the session identity
32 | # is revoked.
33 | print(f"Subscription for {topic} TERMINATED")
34 |
35 | def processSubscriptionDataEvent(self, event):
36 | timeStamp = self.getTimeStamp()
37 | for msg in event:
38 | topic = msg.correlationId().value()
39 | print(f"{timeStamp}: {topic}")
40 | print(msg)
41 |
42 | def processMiscEvents(self, event):
43 | for msg in event:
44 | if msg.messageType() == blpapi.Names.SLOW_CONSUMER_WARNING:
45 | print(f"{blpapi.Names.SLOW_CONSUMER_WARNING} - The event queue is " +
46 | "beginning to approach its maximum capacity and " +
47 | "the application is not processing the data fast " +
48 | "enough. This could lead to ticks being dropped" +
49 | " (DataLoss).\n")
50 | elif msg.messageType() == blpapi.Names.SLOW_CONSUMER_WARNING_CLEARED:
51 | print(f"{blpapi.Names.SLOW_CONSUMER_WARNING_CLEARED} - the event " +
52 | "queue has shrunk enough that there is no " +
53 | "longer any immediate danger of overflowing the " +
54 | "queue. If any precautionary actions were taken " +
55 | "when SlowConsumerWarning message was delivered, " +
56 | "it is now safe to continue as normal.\n")
57 | elif msg.messageType() == blpapi.Names.DATA_LOSS:
58 | print(msg)
59 | topic = msg.correlationId().value()
60 | print(f"{blpapi.Names.DATA_LOSS} - The application is too slow to " +
61 | "process events and the event queue is overflowing. " +
62 | f"Data is lost for topic {topic}.\n")
63 | elif event.eventType() == blpapi.Event.SESSION_STATUS:
64 | # SESSION_STATUS events can happen at any time and
65 | # should be handled as the session can be terminated,
66 | # e.g. session identity can be revoked at a later
67 | # time, which terminates the session.
68 | if msg.messageType() == blpapi.Names.SESSION_TERMINATED:
69 | print("Session terminated")
70 | return
71 |
72 | def processEvent(self, event, _session):
73 | try:
74 | if event.eventType() == blpapi.Event.SUBSCRIPTION_DATA:
75 | self.processSubscriptionDataEvent(event)
76 | elif event.eventType() == blpapi.Event.SUBSCRIPTION_STATUS:
77 | self.processSubscriptionStatus(event)
78 | else:
79 | self.processMiscEvents(event)
80 | except blpapi.Exception as exception:
81 | print(f"Failed to process event {event}: {exception}")
82 | return False
83 |
84 |
85 | def parseCmdLine():
86 | """Parse command line arguments"""
87 |
88 | parser = ArgumentParser(formatter_class=RawTextHelpFormatter,
89 | description="Asynchronous subscription with event handler")
90 | addConnectionAndAuthOptions(parser)
91 | addSubscriptionOptions(parser)
92 |
93 | parser.add_argument(
94 | "-q",
95 | "--event-queue-size",
96 | dest="eventQueueSize",
97 | help="The maximum number of events that is buffered by the session (default: %(default)d)",
98 | type=int,
99 | metavar="eventQueueSize",
100 | default=DEFAULT_QUEUE_SIZE)
101 |
102 | options = parser.parse_args()
103 |
104 | return options
105 |
106 |
107 | def main():
108 | options = parseCmdLine()
109 |
110 | sessionOptions = createSessionOptions(options)
111 | setSubscriptionSessionOptions(sessionOptions, options)
112 | sessionOptions.setMaxEventQueueSize(options.eventQueueSize)
113 | handler = SubscriptionEventHandler()
114 | session = blpapi.Session(sessionOptions, handler.processEvent)
115 |
116 | try:
117 | if not session.start():
118 | print("Failed to start session.")
119 | return
120 |
121 | if not session.openService(options.service):
122 | print("Failed to open service.")
123 | return
124 |
125 | subscriptions = createSubscriptionList(options)
126 | session.subscribe(subscriptions)
127 |
128 | print("Press ENTER to quit")
129 | input()
130 |
131 | finally:
132 | session.stop()
133 |
134 |
135 | if __name__ == "__main__":
136 | try:
137 | main()
138 | except Exception as e: # pylint: disable=broad-except
139 | print(e)
140 |
--------------------------------------------------------------------------------
/aws-quant-infra/src/bpipe-testing/bpipe_scratch.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import pandas as pd
3 | import json
4 | import datetime
5 | from xbbg import blp
6 | import awswrangler as wr
7 | 
8 | host="HOST_ADDR"
9 | 
10 | app='APP_NAME'
11 | 
12 | port=8194
13 | 
14 | blp.connect(auth_method='app', server_host=host, app_name=app)
15 | 
16 | bpipe_info = [
17 |     'OPEN'
18 |     ,'BID'
19 |     ,'ASK'
20 |     ,'PRICE_OPEN_RT'
21 |     ,'CLOSE'
22 |     ,'HIGH'
23 |     ,'LOW'
24 |     ,'LAST_PRICE'
25 |     ,'HIGH_LOCAL_SOURCE_RT'
26 |     ,'TICKER'
27 |     ,'BLOOMBERG_SEND_TIME_RT'
28 |     ,'LAST_TRADE_PRICE_TIME_TODAY_RT'
29 |     ,'CONTINUOUS_VOLUME_RT'
30 |     ,'VOLUME'
31 |     ,'EQY_PRIM_EXCH_SHRT'
32 |     ,'TIME'
33 |     ,'NAME'
34 | ]
35 | 
36 | measures=['TICKER','TIME','EQY_PRIM_EXCH_SHRT']
37 | 
38 | # data is json
39 | ret=[]
40 | count=0
41 | market_view = {}
42 | 
43 | 
44 | def write_marketview_to_ts():
45 |     # write one row per symbol from the in-memory market view to Timestream
46 |     rr = []
47 |     for one_symbol in market_view.keys():
48 |         mv = pd.DataFrame.from_dict([market_view.get(one_symbol)])
49 |         mv['symbol'] = one_symbol
50 |         rr.append(wr.timestream.write(
51 |             df=mv,
52 |             database='test',
53 |             table='bpipe_test',
54 |             time_col="tick_time",
55 |             measure_col="OPEN",
56 |             dimensions_cols=["symbol"]
57 |         ))
58 |     return rr
59 | 
60 | 
61 | async def sample_data():
62 |     ret_pd = pd.DataFrame.from_dict(ret)
63 |     print(ret_pd)
64 |     write_marketview_to_ts()
65 | 
66 | 
67 | def market_view_upsert(key):
68 |     if key not in market_view.keys():
69 |         market_view[key] = {}
70 | 
71 | 
72 | def update_market_view(raw_bpipe_data):
73 |     if "BLOOMBERG_SEND_TIME_RT" in raw_bpipe_data.keys():
74 |         tick_time = datetime.datetime.combine(datetime.datetime.now().date(), raw_bpipe_data['BLOOMBERG_SEND_TIME_RT'])
75 |     else:
76 |         tick_time = datetime.datetime.now()
77 |     symbol = raw_bpipe_data.get('TICKER')
78 |     field = raw_bpipe_data.get('FIELD')
79 |     market_view_upsert(symbol)
80 |     market_view.get(symbol)['tick_time'] = tick_time
81 |     market_view.get(symbol)[field] = raw_bpipe_data.get(field)
82 | 
83 | 
84 | async def bad():
85 |     raise Exception()
86 | 
87 | 
88 | async def not_so_bad():
89 |     try:
90 |         raise Warning("custom warning")
91 |     except Warning:
92 |         print("internal exc")
93 | 
94 | 
95 | async def stream():
96 |     options = [f"interval={10:.1f}"]
97 |     #live_stream=blp.live(["QQQ US Equity"], flds=bpipe_info, info=bpipe_info)
98 |     async for data in blp.live(["AMZN US Equity"], flds=bpipe_info, info=bpipe_info, options=options):
99 |         #async for data in blp.live(["AMZN US Equity"],info=['BID','ASK','LAST_PRICE']):
100 |         #async for data in blp.live(["AMZN US Equity","IBM US Equity","EURUSD BGN Curncy"],info=['BID','ASK','LAST_PRICE']):
101 |         print(data)
102 |         #ret.append(data)
103 |         #update_market_view(data)
104 |         #print(len(ret))
105 |         #if len(ret)>0 and len(ret)%100==0:
106 |         #    await sample_data()
107 |         #    #ret=[]
108 |         #    #break
109 | 
110 | 
111 | try:
112 |     asyncio.run(stream())
113 | except Exception as exc:
114 |     print(f'EXC!!!:{exc}')
115 | 
116 | 
117 | try:
118 |     asyncio.run(not_so_bad())
119 | except Warning as warn:
120 |     print(f'EXC!!!:{warn}')
--------------------------------------------------------------------------------
/aws-quant-infra/src/bpipe-testing/config_env.sh:
--------------------------------------------------------------------------------
1 | curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -o Miniconda3-latest-Linux-x86_64.sh
2 | chmod +x Miniconda3-latest-Linux-x86_64.sh
3 | ./Miniconda3-latest-Linux-x86_64.sh -b
4 | PATH=~/miniconda3/bin:${PATH}
5 | # RUN ls -la
6 | # RUN ls -la bin
7 | # RUN conda init bash
8 | # RUN exec bash
9 | conda create --name bpipe python=3.8 -y
10 | conda init bash
11 | # ENV PATH=/root/miniconda3/envs/${PYTHON_ENVS}/bin:${PATH}
12 | source activate bpipe
13 | conda install -c conda-forge blpapi -y
14 | pip install xbbg
15 | pip install awswrangler
16 | pip install sseclient
17 | pip install pyEX
18 |
19 | sudo yum install jq -y
20 |
21 | cd ~/environment/MvpPortfolioMonitoring-code-repo/aws-quant-infra/deployment/cdk
22 | #npm install
23 | cdk deploy "*"
24 | cd -
25 |
26 | source activate bpipe
27 | eval $(aws sts assume-role --role-arn ROLE_ARN_TO_ASSUME --role-session-name test | jq -r '.Credentials | "export AWS_ACCESS_KEY_ID=\(.AccessKeyId)\nexport AWS_SECRET_ACCESS_KEY=\(.SecretAccessKey)\nexport AWS_SESSION_TOKEN=\(.SessionToken)\n"')
28 |
29 | #! /bin/bash
30 | for state in SUBMITTED PENDING RUNNABLE STARTING RUNNING
31 | do
32 | for job in $(aws batch list-jobs --job-queue MvpPortfolioMonitoring_q_ec2 --job-status $state --output text --query jobSummaryList[*].[jobId])
33 | do
34 | echo -ne "Stopping job $job in state $state\t"
35 | aws batch terminate-job --reason "Terminating job." --job-id $job && echo "Done." || echo "Failed."
36 | done
37 | done
38 |
39 | cd ~/environment/MvpPortfolioMonitoring-code-repo/aws-quant-infra/src/utils/ &&
40 | python portfolio_generator.py --name test_ptf_50 --filename nyse-ticker-list.csv --ticker-amount 50 &&
41 | cd -
42 |
43 | git config --global user.name $USER
44 | git config --global user.email "email@company.com"
--------------------------------------------------------------------------------
/aws-quant-infra/src/bpipe-testing/stream_data.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from xbbg import blp
3 | import bpipe
4 |
5 | blp.connect(auth_method='app', server_host=bpipe.host, app_name=bpipe.app)
6 |
7 | bpipe_info = [
8 | 'LAST_PRICE'
9 | , 'open'
10 | , 'close'
11 | , 'high'
12 | , 'low'
13 | , 'PRICE_EARNINGS_RATIO_RT'
14 | , 'VOLUME'
15 | ]
16 |
17 | bpipe_points=[]
18 | count=0
19 | # data is json
20 | async def stream():
21 | async for data in blp.live(["AMZN US Equity"], flds=bpipe_info, info=bpipe_info):
22 | #async for data in blp.live(["AMZN US Equity"],info=['BID','ASK','LAST_PRICE']):
23 | #async for data in blp.live(["AMZN US Equity","IBM US Equity","EURUSD BGN Curncy"],info=['BID','ASK','LAST_PRICE']):
24 | bpipe_points.append(data)
25 | if count%10==0:
26 | print(bpipe_points)
27 | break
28 |
29 | asyncio.run(stream())
30 |
--------------------------------------------------------------------------------
/aws-quant-infra/src/bpipe-testing/test_market_data.py:
--------------------------------------------------------------------------------
1 | import json
2 | import boto3
3 | import datetime
4 | import time
5 | import random
6 | import sys
7 | import os
8 |
9 | import pandas as pd
10 | # IEX
11 | from sseclient import SSEClient
12 | # BPIPE
13 | # TODO add logic to dockerfiles
14 | import asyncio
15 | #from xbbg import blp
16 |
17 | sys.path.append('/var/task/shared/python')
18 | sys.path.append('/src/shared/python/')
19 | sys.path.append(('/home/ec2-user/environment/AWSQuant/aws-quant-infra'))
20 | sys.path.append(('/home/ec2-user/environment/AWSQuant/aws-quant-infra/src/shared/python'))
21 | import aws_quant_infra as aq_i
22 | import aws_quant_market_data as aq_md
23 | import aws_quant_risk as aq_r
24 | if True:# for debugging
25 | import aws_quant_infra as aq_i
26 | import imp
27 | imp.reload(aq_i)
28 | import aws_quant_market_data as aq_md
29 | imp.reload(aq_md)
30 |
31 | if True: # debugging
32 | os.environ["SSM_PREFIX"] = "Mvp"
33 | os.environ["AWS_REGION"] = "us-east-1"
34 | os.environ["AWS_DEFAULT_REGION"] = "us-east-1"
35 |
36 | def check_config():
37 | config_env="dev"
38 |
39 | app_prefix = f"{os.environ['SSM_PREFIX']}-"
40 | aws_region=os.environ.get('AWS_DEFAULT_REGION',"us-east-1")
41 | env_config = aq_i.replace_string_dict_keys(aq_i.get_app_config_from_paramstore(aws_region),app_prefix,'')
42 |
43 |
44 | hosted_config_details = json.loads(env_config.get('PortfolioMonitoring-AppConfigDetails'))
45 | app_config_client =boto3.client('appconfig',region_name=aws_region)
46 | app_config_app = hosted_config_details.get('Application')
47 | app_config_config=hosted_config_details.get('Configuration')
48 | app_config = (app_config_client.get_configuration(Application=app_config_app,Environment=config_env,Configuration=app_config_config, ClientId=f'{random.randint(-sys.maxsize,sys.maxsize)}').get('Content').read())
49 | app_config=json.loads(app_config.decode('utf-8'))
50 | app_config['AWS_REGION']=aws_region
51 | return(config_env,app_config)
52 |
53 | config_env,app_config = check_config()
54 |
55 | def get_data_provider_inits(config_env,symbols):
56 | if False: # debugging
57 | os.environ["SSM_PREFIX"] = "Mvp"
58 | os.environ["AWS_REGION"] = "us-east-1" #TODO: move into separate debug settings and use MODE (deploy/local...) in code
59 | app_prefix = f"{os.environ['SSM_PREFIX']}-"
60 | aws_region=os.environ.get('AWS_DEFAULT_REGION',"us-east-1")
61 | env_config = aq_i.replace_string_dict_keys(aq_i.get_app_config_from_paramstore(aws_region),app_prefix,'')
62 | hosted_config_details = json.loads(env_config.get('PortfolioMonitoring-AppConfigDetails'))
63 | app_config = (boto3.client('appconfig',region_name=aws_region).\
64 | get_configuration(Application=hosted_config_details.get('Application')
65 | ,Environment=config_env
66 | ,Configuration=hosted_config_details.get('Configuration')
67 | , ClientId=f'{random.randint(-sys.maxsize,sys.maxsize)}'
68 | #,ClientConfigurationVersion=hosted_config_details.get(config_env)
69 | ).get('Content').read())
70 | app_config=json.loads(app_config.decode('utf-8'))
71 | app_config['AWS_REGION']=aws_region
72 | if type(symbols) == str:
73 | symbol_list = symbols.split(',')
74 | else:
75 | symbol_list = symbols
76 | print(f"working with: {symbol_list}\n\n")
77 | print(f"app_prefix: {app_prefix}\n\n")
78 | print(f"aws_region: {aws_region}\n\n")
79 | print(f"hosted_config_details: {hosted_config_details}\n\n")
80 | print(f"APP CONFIG: {app_config}\n\n")
81 | print(f"ENV CONFIG: {env_config}\n\n")
82 |
83 | return (env_config, app_config, app_prefix, symbol_list)
84 |
85 |
86 | symbols = ['DELL']  # e.g. 'SPY', 'GS', 'AMZN' or 'EURUSD BGN Curncy'
87 | env_config, app_config, app_prefix, symbol_list= get_data_provider_inits(config_env,symbols)
88 | mv=aq_md.MarketDataProvider.subscribe_main(config_env,symbols,10)
--------------------------------------------------------------------------------
/aws-quant-infra/src/bpipe-testing/test_portfolio.py:
--------------------------------------------------------------------------------
1 | import json
2 | import boto3
3 | import datetime
4 | import time
5 | import random
6 | import sys
7 | import os
8 |
9 | import pandas as pd
10 | # IEX
11 | from sseclient import SSEClient
12 | # BPIPE
13 | # TODO add logic to dockerfiles
14 | import asyncio
15 | from xbbg import blp
16 |
17 | sys.path.append('/var/task/shared/python')
18 | sys.path.append('/src/shared/python/')
19 | sys.path.append(('/home/ec2-user/environment/MvpPortfolioMonitoring-code-repo/aws-quant-infra'))
20 | sys.path.append(('/home/ec2-user/environment/MvpPortfolioMonitoring-code-repo/aws-quant-infra/src/shared/python'))
21 | import aws_quant_infra as aq_i
22 | import aws_quant_risk as aq_r
23 | if True:# for debugging
24 | import aws_quant_infra as aq_i
25 | import imp
26 | imp.reload(aq_i)
27 | import aws_quant_risk as aq_r
28 | imp.reload(aq_r)
29 |
30 | if True: # debugging
31 | os.environ["SSM_PREFIX"] = "Mvp"
32 | os.environ["AWS_REGION"] = "us-east-1"
33 | os.environ["AWS_DEFAULT_REGION"] = "us-east-1"
34 |
35 | def check_config():
36 | config_env="dev"
37 |
38 | app_prefix = f"{os.environ['SSM_PREFIX']}-"
39 | aws_region=os.environ.get('AWS_DEFAULT_REGION',"us-east-1")
40 | env_config = aq_i.replace_string_dict_keys(aq_i.get_app_config_from_paramstore(aws_region),app_prefix,'')
41 |
42 |
43 | hosted_config_details = json.loads(env_config.get('PortfolioMonitoring-AppConfigDetails'))
44 | app_config_client =boto3.client('appconfig',region_name=aws_region)
45 | app_config_app = hosted_config_details.get('Application')
46 | app_config_config=hosted_config_details.get('Configuration')
47 | app_config = (app_config_client.get_configuration(Application=app_config_app,Environment=config_env,Configuration=app_config_config, ClientId=f'{random.randint(-sys.maxsize,sys.maxsize)}').get('Content').read())
48 | app_config=json.loads(app_config.decode('utf-8'))
49 | app_config['AWS_REGION']=aws_region
50 |
51 | return(config_env,app_config)
52 |
53 | config_env,app_config = check_config()
54 |
55 | def get_portfolio_inits(config_env,portf_id='JD2966'):
56 | # portf_id='7af1fa7c6b01edd3826c880082270775'
57 | if False: # debugging
58 | os.environ["SSM_PREFIX"] = "Mvp"
59 | max_symbols = 1
60 | config_json= '{"version":"sandbox",' \
61 | '"token_secret":"iex_api_token_pk_sandbox",' \
62 | '"token_secret_streaming":"iex_api_token_pk",' \
63 | '"url":"https://cloud-sse.iexapis.com",' \
64 | '"streaming_endpoint_equity":"stocksUSNoUTP"}'
65 | app_prefix = f"{os.environ['SSM_PREFIX']}-"
66 | aws_region=os.environ.get('AWS_DEFAULT_REGION',"us-east-2")
67 | env_config = aq_i.get_app_config_from_paramstore(aws_region)
68 | app_config_client= boto3.client('appconfig',region_name=aws_region)
69 | app_config_temp=app_config_client.get_configuration(Application='PortfolioMonitoring',
70 | Environment=config_env,
71 | Configuration='PortfolioMonitoringConfigProfile',
72 | ClientId=f'{random.randint(-sys.maxsize,sys.maxsize)}')
73 | app_config = app_config_temp.get('Content').read()
74 | app_config=json.loads(app_config.decode('utf-8'))
75 | return (env_config, app_config, app_prefix, portf_id)
76 |
77 | env_config, app_config, app_prefix, portf_id= get_portfolio_inits(config_env)
78 | one_port = aq_r.PortfolioTracker( app_config,env_config,app_prefix, portf_id)
79 | #aq_r.PortfolioTracker.portfolio_tracker_main("dev",'d16af770c75df7fc23b922773cf1a450')
80 | if True:
81 | one_port.portfolio['last_tracker_update']='2022-06-23 19:00:00.493000000'
82 | one_port.load_portf_market_data()
83 | one_port.calc_portf_priceline()
84 | one_port.calc_portf_pnl()
85 | one_port.save_portf_priceline()
86 | one_port.save_portf_pnl()
87 |
--------------------------------------------------------------------------------
/aws-quant-infra/src/config/portfolio_tracker_cfg.json:
--------------------------------------------------------------------------------
1 | {
2 | "env":"dev",
3 | "market_data": {
4 | "source":"IEX",
5 | "bpipe_connect": {
6 | "host": "",
7 | "app": ""
8 | },
9 | "version":"sandbox",
10 | "token_secret":"api_token_pk_sandbox",
11 | "token_secret_streaming": "api_token_pk",
12 | "url": "https://cloud-sse.iexapis.com",
13 | "streaming_endpoint_equity":"stocksUSNoUTP",
14 | "default_handler": {
15 | "deploy" : {
16 | "mode":"batch",
17 | "cmd": "/src/subscribe_market_data.py",
18 | "refresh_sec" : "25",
19 | "job_queue" : "MvpPortfolioMonitoring_q_ec2"
20 | },
21 | "symbol_stripe_size" : "1",
22 | "stale_threshold" : "10m"
23 | }
24 | },
25 | "portfolio_tracker": {
26 | "default_handler": {
27 | "deploy" : {
28 | "mode":"batch",
29 | "cmd": "/src/portfolio_tracker.py",
30 | "refresh_sec" : "25",
31 | "job_queue" : "MvpPortfolioMonitoring_q_ec2"
32 | }
33 | }
34 | },
35 | "secrets": [
36 | "api_token_pk_sandbox",
37 | "api_token_pk"
38 | ]
39 | }
--------------------------------------------------------------------------------
/aws-quant-infra/src/config/portfolio_tracker_cfg_BPIPE.json:
--------------------------------------------------------------------------------
1 | {
2 | "env":"dev",
3 | "market_data": {
4 | "source":"BPIPE",
5 | "bpipe_connect": {
6 | "host": "",
7 | "app": ""
8 | },
9 | "version":"sandbox",
10 | "token_secret":"api_token_pk_sandbox",
11 | "token_secret_streaming": "api_token_pk",
12 | "url": "https://cloud-sse.iexapis.com",
13 | "streaming_endpoint_equity":"stocksUSNoUTP",
14 | "default_handler": {
15 | "deploy" : {
16 | "mode":"batch",
17 | "cmd": "/src/subscribe_market_data.py",
18 | "refresh_sec" : "25",
19 | "job_queue" : "MvpPortfolioMonitoring_q_ec2"
20 | },
21 | "symbol_stripe_size" : "1",
22 | "stale_threshold" : "10m"
23 | }
24 | },
25 | "portfolio_tracker": {
26 | "default_handler": {
27 | "deploy" : {
28 | "mode":"batch",
29 | "cmd": "/src/portfolio_tracker.py",
30 | "refresh_sec" : "25",
31 | "job_queue" : "MvpPortfolioMonitoring_q_ec2"
32 | }
33 | }
34 | },
35 | "secrets": [
36 | "api_token_pk_sandbox",
37 | "api_token_pk"
38 | ]
39 | }
--------------------------------------------------------------------------------
/aws-quant-infra/src/config/portfolio_tracker_cfg_IEX.json:
--------------------------------------------------------------------------------
1 | {
2 | "env":"dev",
3 | "market_data": {
4 | "source":"IEX",
5 | "bpipe_connect": {
6 | "host": "",
7 | "app": ""
8 | },
9 | "version":"sandbox",
10 | "token_secret":"api_token_pk_sandbox",
11 | "token_secret_streaming": "api_token_pk",
12 | "url": "https://cloud-sse.iexapis.com",
13 | "streaming_endpoint_equity":"stocksUSNoUTP",
14 | "default_handler": {
15 | "deploy" : {
16 | "mode":"batch",
17 | "cmd": "/src/subscribe_market_data.py",
18 | "refresh_sec" : "25",
19 | "job_queue" : "MvpPortfolioMonitoring_q_ec2"
20 | },
21 | "symbol_stripe_size" : "1",
22 | "stale_threshold" : "10m"
23 | }
24 | },
25 | "portfolio_tracker": {
26 | "default_handler": {
27 | "deploy" : {
28 | "mode":"batch",
29 | "cmd": "/src/portfolio_tracker.py",
30 | "refresh_sec" : "25",
31 | "job_queue" : "MvpPortfolioMonitoring_q_ec2"
32 | }
33 | }
34 | },
35 | "secrets": [
36 | "api_token_pk_sandbox",
37 | "api_token_pk"
38 | ]
39 | }
--------------------------------------------------------------------------------
/aws-quant-infra/src/lambda/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM public.ecr.aws/lambda/python:3.7 AS builder
2 | COPY lambda/extension.zip extension.zip
3 | RUN yum install -y unzip \
4 | && unzip extension.zip -d /opt \
5 | && rm -f extension.zip
6 |
7 | FROM amazon/aws-lambda-python:3.7
8 | ARG SSM_PREFIX
9 | ENV SSM_PREFIX ${SSM_PREFIX}
10 | RUN echo SSM_PREFIX: $SSM_PREFIX
11 | ARG AWS_REGION
12 | ENV AWS_REGION ${AWS_REGION}
13 | RUN echo AWS_REGION: ${AWS_REGION}
14 | COPY --from=builder /opt /opt
15 | RUN pip install pandas
16 | RUN pip install xbbg
17 | RUN pip install pyEX
18 | RUN pip install boto3
19 | RUN pip install python-binance
20 | RUN pip install awswrangler
21 | #RUN conda install -c conda-forge ta-lib -y
22 |
23 | COPY lambda/python/ /var/task
24 | COPY shared /var/task/shared
25 | RUN ls -la /var/task/*
--------------------------------------------------------------------------------
/aws-quant-infra/src/lambda/python/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/quant-trading/f1dd9ace0c732cae3d0b103affae3d6e6b2da80f/aws-quant-infra/src/lambda/python/__init__.py
--------------------------------------------------------------------------------
/aws-quant-infra/src/lambda/python/handle_portfolio_update/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/quant-trading/f1dd9ace0c732cae3d0b103affae3d6e6b2da80f/aws-quant-infra/src/lambda/python/handle_portfolio_update/__init__.py
--------------------------------------------------------------------------------
/aws-quant-infra/src/lambda/python/handle_portfolio_update/lambda_function.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import sys
4 |
5 | sys.path.append('/var/task/shared/python') #TODO: not a fan of hardcoded PATHS
6 | sys.path.append('/home/ec2-user/environment/AWSQuant/aws-quant-infra/src/shared/python') #TODO: not a fan of hardcoded PATHS
7 | import aws_quant_risk as aq_r
8 | print(f"import aws_quant_risk is done!")
9 | print(aq_r.__file__)
10 |
11 | def lambda_handler(event, context):
12 | #event['AWS_REGION']=os.environ['AWS_REGION']
13 | all_symbols,delta_symbols,delta_symbols_stripes,jobs,portf_batch= \
14 | aq_r.PortfolioTrackerFactory.handle_portfolio_update(event)
15 | print(f'submitted jobs:{[response["jobName"] for response in jobs]} for {delta_symbols} '
16 | f'out of: {all_symbols} in batches:{delta_symbols_stripes}')
17 | print(f'submitted portfolios:{[response["jobName"] for response in portf_batch]}')
18 | return {
19 | 'statusCode': 200,
20 | 'body': json.dumps(f'submitted jobs:{[response["jobName"] for response in jobs]} for {delta_symbols} out of: {all_symbols} in batches:{delta_symbols_stripes}')
21 | }
22 |
--------------------------------------------------------------------------------
/aws-quant-infra/src/lambda/python/intraday_close/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/quant-trading/f1dd9ace0c732cae3d0b103affae3d6e6b2da80f/aws-quant-infra/src/lambda/python/intraday_close/__init__.py
--------------------------------------------------------------------------------
/aws-quant-infra/src/lambda/python/intraday_close/lambda_function.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import sys
4 | import boto3
5 |
6 |
7 | sys.path.append('/var/task/shared/python') #TODO: not a fan of hardcoded PATHS
8 | sys.path.append('/home/ec2-user/environment/AWSQuant/aws-quant-infra/src/shared/python') #TODO: not a fan of hardcoded PATHS
9 | import aws_quant_risk as aq_r
10 | print(f"import aws_quant_risk is done!")
11 | print(aq_r.__file__)
12 |
13 | REGION=os.environ.get('AWS_DEFAULT_REGION')
14 | dynamodb = boto3.resource('dynamodb', region_name=REGION)
15 | ssm_client = boto3.client("ssm", region_name=REGION)
16 |
17 | def lambda_handler(event, context):
18 | portf_table = dynamodb.Table('MvpPortfolioMonitoringPortfolioTable')
19 | get_portf_id_response = ssm_client.get_parameter(Name='/Mvp-PortfolioMonitoring-IntradayMomentumPortfID')
20 | portf_id = get_portf_id_response['Parameter']['Value']
21 |
22 | get_portf_create_ts_response = ssm_client.get_parameter(Name='/Mvp-PortfolioMonitoring-IntradayMomentumPortfCreateTS')
23 | portf_create_ts = int(get_portf_create_ts_response['Parameter']['Value'])
24 | print("TS: ", portf_create_ts)
25 |
26 | response = portf_table.delete_item(
27 | Key={
28 | 'portf_id': portf_id,
29 | 'portf_create_ts': portf_create_ts
30 | }
31 | )
32 |
33 | status_code = response['ResponseMetadata']['HTTPStatusCode']
34 | print(status_code)
35 |
--------------------------------------------------------------------------------
/aws-quant-infra/src/lambda/python/intraday_momentum/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/quant-trading/f1dd9ace0c732cae3d0b103affae3d6e6b2da80f/aws-quant-infra/src/lambda/python/intraday_momentum/__init__.py
--------------------------------------------------------------------------------
/aws-quant-infra/src/lambda/python/intraday_momentum/lambda_function.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import sys
4 | import boto3
5 | from decimal import *
6 | import requests
7 | import time
8 | import random
9 | import hashlib
10 |
11 | import pandas as pd
12 | sys.path.append('/var/task/shared/python') #TODO: not a fan of hardcoded PATHS
13 | sys.path.append('/home/ec2-user/environment/AWSQuant/aws-quant-infra/src/shared/python') #TODO: not a fan of hardcoded PATHS
14 | import aws_quant_infra as aq_i
15 | import aws_quant_risk as aq_r
16 | # print(f"import aws_quant_risk is done!")
17 | # print(aq_r.__file__)
18 | REGION=os.environ.get('AWS_DEFAULT_REGION')
19 | dynamodb = boto3.resource('dynamodb', region_name=REGION)
20 | ssm_client = boto3.client("ssm", region_name=REGION)
21 |
22 | if True: # debugging
23 | os.environ["SSM_PREFIX"] = "Mvp"
24 | os.environ["AWS_REGION"] = "us-east-1"
25 | os.environ["AWS_DEFAULT_REGION"] = "us-east-1"
26 |
27 | def check_config():
28 | config_env="dev"
29 |
30 | app_prefix = f"{os.environ['SSM_PREFIX']}-"
31 | aws_region=os.environ.get('AWS_DEFAULT_REGION',"us-east-1")
32 | env_config = aq_i.replace_string_dict_keys(aq_i.get_app_config_from_paramstore(aws_region),app_prefix,'')
33 |
34 |
35 | hosted_config_details = json.loads(env_config.get('PortfolioMonitoring-AppConfigDetails'))
36 | app_config_client = boto3.client('appconfig',region_name=aws_region)
37 | app_config_app = hosted_config_details.get('Application')
38 | app_config_config=hosted_config_details.get('Configuration')
39 | app_config = (app_config_client.get_configuration(Application=app_config_app,Environment=config_env,Configuration=app_config_config, ClientId=f'{random.randint(-sys.maxsize,sys.maxsize)}').get('Content').read())
40 | app_config=json.loads(app_config.decode('utf-8'))
41 | app_config['AWS_REGION']=aws_region
42 | return(config_env,app_config)
43 |
44 | config_env,app_config = check_config()
45 |
46 | def get_secret():
47 | secret_name = app_config.get('secrets')[0]
48 | secret = f'{secret_name}'
49 | secret_data = aq_i.get_secret(secret, REGION)
50 | print(secret_data)
51 | try:
52 | print('Testing format of response')
53 | print(secret_data[secret])
54 | except:
55 | print('get_secret response in wrong format!!!')
56 |
57 | return secret_data[secret]
58 |
59 | SECRET_DATA = get_secret()
60 |
61 |
62 | def get_sp_symbols():
63 | # Returns list of current S&P 500 companies
64 |
65 | table=pd.read_html('https://en.wikipedia.org/wiki/List_of_S%26P_500_companies')
66 | df = table[0]
67 | symbols = df["Symbol"]
68 |
69 | return symbols
70 |
71 |
72 | def get_latest_updates(symbols):
73 | attributes = ['iexOpen', 'latestPrice']
74 | table = []
75 | iexOpen, latestPrice = 0, 0
76 |
77 | for i in symbols:
78 | ticker = i
79 | api_url = f'https://cloud.iexapis.com/stable/stock/{ticker}/quote?token={SECRET_DATA}'
80 | df = requests.get(api_url).json()
81 | iexOpen = df[attributes[0]]
82 | latestPrice = df[attributes[1]]
83 |
84 | if iexOpen is None:
85 | iexOpen = 0
86 | if latestPrice is None:
87 | latestPrice = 0
88 |
89 | table.append([i, iexOpen, latestPrice])
90 |
91 | return table
92 |
93 |
94 | def calculate_diff(symbols):
95 | diff_table = [Decimal(str(i[2])) - Decimal(str(i[1])) for i in symbols]
96 | return diff_table
97 |
98 | def calculate_weights(diff_table):
99 | neg_count = len(list(filter(lambda x: (x < 0), diff_table)))
100 | pos_count = len(diff_table) - neg_count
101 | neg_weight = "{:.3f}".format(-1/neg_count) if neg_count != 0 else -1
102 | pos_weight = "{:.3f}".format(1/pos_count) if pos_count != 0 else 1
103 |
104 | weights_table = list(map(lambda x: Decimal(neg_weight) if x < 0 else Decimal(pos_weight), diff_table))
105 | return weights_table
106 |
107 |
108 |
109 | def get_final_payload(weights, symbols):
110 | table_len = len(weights)
111 | return [{symbols[i][0]: weights[i]} for i in range(table_len)]
112 |
113 | def add_parameter(value, name, purpose):
114 | param_name = '/Mvp-PortfolioMonitoring-IntradayMomentum' + name
115 | new_string_parameter = ssm_client.put_parameter(
116 | Name=param_name,
117 | Description='Portfolio ' + purpose + ' of intraday momentum table',
118 | Value=value,
119 | Type='String',
120 | Overwrite=True,
121 | Tier='Standard',
122 | DataType='text'
123 | )
124 |
125 | return new_string_parameter
126 |
127 |
128 |
129 | def lambda_handler(event, context):
130 | #event['AWS_REGION']=os.environ['AWS_REGION']
131 | # all_symbols,delta_symbols,delta_symbols_stripes,jobs,portf_batch= \
132 | # aq_r.PortfolioTrackerFactory.handle_portfolio_update(event)
133 | # print(f'submitted jobs:{[response["jobName"] for response in jobs]} for {delta_symbols} '
134 | # f'out of: {all_symbols} in batches:{delta_symbols_stripes}')
135 | # print(f'submitted portfolios:{[response["jobName"] for response in portf_batch]}')
136 |
137 | symbols = get_sp_symbols()
138 | updates = get_latest_updates(symbols)
139 | diffs = calculate_diff(updates)
140 | weights = calculate_weights(diffs)
141 | final_payload = get_final_payload(weights, updates)
142 | print("Event: ", event)
143 | print("Context: ", context)
144 | print("Final payload: ", final_payload)
145 |
146 |
147 | prtf_name='long_short'
148 |
149 | portf_table = dynamodb.Table('MvpPortfolioMonitoringPortfolioTable')
150 | create_ts = int(time.time() * 1000000)
151 | portf_id = hashlib.md5((f'{prtf_name}{create_ts}').encode()).hexdigest()
152 | add_parameter(portf_id, "PortfID", "ID")
153 | add_parameter(str(create_ts), "PortfCreateTS", "time stamp")
154 |     portf_item = {
155 |         'portf_id': portf_id,
156 |         'portf_name': prtf_name,
157 |         'portf_create_ts': create_ts,
158 |         'positions': final_payload,
159 |         'handler_info': {
160 |             'deploy': 'batch', 'refresh_sec': 60,
161 |             'app_config_dict': {'Application': 'PortfolioMonitoring',
162 |                                 'Environment': 'dev',  # config_env
163 |                                 'Configuration': 'PortfolioMonitoringConfigProfile'}
164 |         }
165 |     }
166 |     portf_table.put_item(
167 |         # TableName is implied by the Table resource
168 |         Item=portf_item
169 |     )
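A quick worked example of the weighting scheme above (illustrative numbers only): `calculate_diff` computes `latestPrice - iexOpen` per symbol, and `calculate_weights` then builds an equal-weighted long leg from the symbols trading at or above their IEX open and an equal-weighted short leg from those trading below it. If 3 of 5 symbols are up on the day and 2 are down, each gainer is assigned 1/3 ≈ 0.333 and each loser -1/2 = -0.500, so each leg sums to roughly ±1 (up to the 3-decimal rounding) before the payload is written to the `MvpPortfolioMonitoringPortfolioTable` DynamoDB table.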
--------------------------------------------------------------------------------
/aws-quant-infra/src/lambda/python/schedule_listener/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/quant-trading/f1dd9ace0c732cae3d0b103affae3d6e6b2da80f/aws-quant-infra/src/lambda/python/schedule_listener/__init__.py
--------------------------------------------------------------------------------
/aws-quant-infra/src/lambda/python/schedule_listener/lambda_function.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import sys
4 |
5 | if (True):
6 | try:
7 | sys.path.append('/var/task/shared/python')
8 | sys.path.append('/home/ec2-user/environment/AWSQuant/aws-quant-infra/src/shared/python')
9 | import aws_quant_risk as aq_r
10 | print(f"import aws_quant_risk is done!")
11 | print(aq_r.__file__)
12 | except Exception as exc:
13 | print(f'error importing:{exc}')
14 | else:
15 | pass
16 |
17 | def lambda_handler(event, context):
18 | res=aq_r.PortfolioTrackerFactory.handle_schedule_event(event)
19 | print(f'{event} result:{res}')
20 | return {
21 | 'statusCode': 200,
22 | 'body': json.dumps(f'{event} result:{res}')
23 | }
24 |
25 |
--------------------------------------------------------------------------------
/aws-quant-infra/src/lambda/python/system_event_listener/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/quant-trading/f1dd9ace0c732cae3d0b103affae3d6e6b2da80f/aws-quant-infra/src/lambda/python/system_event_listener/__init__.py
--------------------------------------------------------------------------------
/aws-quant-infra/src/lambda/python/system_event_listener/lambda_function.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import sys
4 |
5 | sys.path.append('/var/task/shared/python')
6 | sys.path.append('/home/ec2-user/environment/AWSQuant/aws-quant-infra/src/shared/python')
7 | import aws_quant_risk as aq_r
8 | print(f"import aws_quant_risk is done!")
9 | print(aq_r.__file__)
10 |
11 | def lambda_handler(event, context):
12 | res=aq_r.PortfolioTrackerFactory.handle_system_event(event)
13 | print(f'{event} result:{res}')
14 | return {
15 | 'statusCode': 200,
16 | 'body': json.dumps(f'{event} result:{res}')
17 | }
18 |
--------------------------------------------------------------------------------
/aws-quant-infra/src/lambda/python/test_custom_layer/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/quant-trading/f1dd9ace0c732cae3d0b103affae3d6e6b2da80f/aws-quant-infra/src/lambda/python/test_custom_layer/__init__.py
--------------------------------------------------------------------------------
/aws-quant-infra/src/lambda/python/test_custom_layer/lambda_function.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import sys
4 | import boto3
5 | import random
6 | import urllib.request
7 | # AWS lambda expects all the code to be in /var/task/ directory. for this project dir is below (shared/python has custom code)
8 | # sh-4.2# ls -la /var/task/
9 | # total 40
10 | # drwxr-xr-x 1 root root 4096 Aug 19 23:47 .
11 | # drwxr-xr-x 1 root root 4096 Aug 19 23:46 ..
12 | # drwxrwxr-x 2 root root 4096 Aug 19 23:12 handle_portfolio_update
13 | # -rw-r--r-- 1 root root 0 Aug 19 23:12 __init__.py
14 | # drwxrwxr-x 2 root root 4096 Aug 19 23:12 schedule_listener
15 | # drwxr-xr-x 1 root root 4096 Aug 19 23:47 shared
16 | # drwxrwxr-x 2 root root 4096 Aug 19 23:12 system_event_listener
17 | # drwxrwxr-x 2 root root 4096 Aug 19 23:12 test_custom_layer
18 | if (True):
19 | try:
20 | sys.path.append('/var/task/shared/python')
21 | import aws_quant_infra as aq_i
22 | print(f"import aws_quant_infra is done!")
23 | print(aq_i.__file__)
24 | import aws_quant_risk as aq_r
25 | print(f"import aws_quant_risk is done!")
26 | print(aq_r.__file__)
27 | except Exception as exc:
28 | print(f'error importing:{exc}')
29 | else:
30 | pass
31 |
32 | REGION = os.environ['AWS_REGION']
33 | print(f'aws region: {REGION}')
34 |
35 | def boto3_test():
36 | print('Starting tests to check various boto3 calls')
37 | env, app = aqi_test()
38 | aqr_test(env, app)
39 |
40 | def aqi_test():
41 | print('Testing aws_quant_infra.get_app_config_from_paramstore')
42 | env_config = aq_i.get_app_config_from_paramstore(REGION)
43 | print(env_config)
44 |
45 | print('Testing appconfig call')
46 | env = 'dev'
47 | app_config_details = json.loads(env_config.get('Mvp-PortfolioMonitoring-AppConfigDetails'))
48 | app = app_config_details.get('Application')
49 | config = app_config_details.get('Configuration')
50 | version = app_config_details.get(env)
51 | print(f'App: {app} Config: {config} Version: {version}')
52 | app_config = (boto3.client('appconfig'). \
53 | get_configuration(Application=app
54 | ,Environment=env
55 | ,Configuration=config
56 | , ClientId=f'{random.randint(-sys.maxsize,sys.maxsize)}'
57 | ,ClientConfigurationVersion=version
58 | ).get('Content').read())
59 | # app_config = urllib.request.urlopen(
60 | # f'http://localhost:2772/applications/{app}/environments/{env}/configurations/{config}'
61 | # ).read()
62 |
63 | app_config=json.loads(app_config.decode('utf-8'))
64 | print(app_config)
65 |
66 | print('Testing aws_quant_infra.get_secret')
67 | secret_name = app_config.get('secrets')[0]
68 | # secret = f'MvpPortfolioMonitoring-{secret_name}'
69 | secret = f'{secret_name}'
70 | secret_data = aq_i.get_secret(secret, REGION)
71 | print(secret_data)
72 | try:
73 | print('Testing format of response')
74 | print(secret_data[secret])
75 |     except Exception:
76 | print('get_secret response in wrong format!!!')
77 |
78 | return env_config, app_config
79 |
80 | def aqr_test(env_config, app_config):
81 | print('Testing Batch call')
82 | batch_client = boto3.client('batch')
83 | batch_jobs_resp = batch_client.list_jobs(jobQueue='MvpPortfolioMonitoring_q_ec2')
84 | print(batch_jobs_resp)
85 |
86 | print('Testing aws_quant_risk.PortfolioTrackerFactory')
87 | PTF = aq_r.PortfolioTrackerFactory(env_config, app_config, 'Mvp-')
88 | print(str(PTF))
89 |
90 | print('Testing aq_r.PTF.get_all_portfolios (Dynamodb call)')
91 | PTF.load_all_portfolios()
92 | print(PTF.all_portfolios)
93 |
94 | print('Testing Events calls')
95 | for rule_name in [ 'trading_EOD', 'trading_SOD']:
96 | print(f'Checking call for rule name: {rule_name}')
97 | print(boto3.client('events').list_targets_by_rule(Rule=rule_name))
98 |
99 | print('Testing timestream call')
100 | timestream_db = env_config.get('Mvp-PortfolioMonitoring-TimestreamDb')
101 | timestream_table = 'iex_realtime_data'
102 | print('Checking for query endpoints')
103 | query = f'SELECT * FROM {timestream_db}.{timestream_table} LIMIT 10'
104 | print(boto3.client('timestream-query').query(
105 | QueryString=query,
106 | ))
107 | print('Checking for write endpoints')
108 | print(boto3.client('timestream-write').describe_database(
109 | DatabaseName=timestream_db
110 | ))
111 |
112 |
113 | def lambda_handler(event, context):
114 | # TODO implement
115 | dirs = ['/var/task/', '/var/task/shared', '/var/task/shared/python']
116 | for one_dir in dirs:
117 | try:
118 | print(f"dir list {one_dir}:", os.listdir(one_dir),"\n")
119 |         except OSError:
120 | print(f"NO dir: {one_dir}","\n")
121 | print("PYTHONPATH:", os.environ.get('PYTHONPATH'),"\n")
122 | print("PATH:", os.environ.get('PATH'),"\n")
123 | print("LD_LIBRARY_PATH:", os.environ.get('LD_LIBRARY_PATH'),"\n")
124 | print("sys.path:", sys.path,"\n")
125 | print("incoming event:", event,"\n")
126 |
127 | boto3_test()
128 |
129 | return {
130 | 'statusCode': 200,
131 | 'body': json.dumps('Hello from Lambda!')
132 | }
133 |
--------------------------------------------------------------------------------
/aws-quant-infra/src/shared/python/run_quant.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # import unittest
3 | # from unittest.mock import MagicMock
4 | from aws_quant_risk import PortfolioTracker as aq_r
5 | # from aws_quant_risk import PortfolioTracker
6 |
7 | data = []
8 |
9 | def main():
10 | aq_r.test_harness()
11 |
12 | if __name__ == '__main__':
13 | main()
14 |
--------------------------------------------------------------------------------
/aws-quant-infra/src/shared/python/test_quant.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from unittest.mock import MagicMock
3 |
4 | from aws_quant_risk import PortfolioTracker
5 |
6 | data = []
7 |
8 | class TestPortfolioTracker(unittest.TestCase):
9 |
10 | def test_calc_portf_pnl(self):
11 | data_mock = MagicMock()
12 | # Pass data from the test and pre calculate the result to assert
13 | pf = PortfolioTracker(data_mock, data_mock, data_mock, data_mock)
14 |
15 | #self.assertEqual(pf.calc_portf_pnl(), 4)
16 | pass
17 | # pf.exec_monitor = {}
18 | # pf.config_dict_app = config_dict_app
19 | # pf.config_dict_env = aqi.replace_string_dict_keys(config_dict_env,app_prefix,'')
20 | # pf.app_prefix = app_prefix
21 | # pf.epoch = datetime.datetime.utcfromtimestamp(0)
22 | # pf.raw_hist_data = None
23 | # pf.raw_rt_data = None
24 | # pf.portfolio_priceline = None
25 | # pf.portfolio_update_ts = None
26 | # pf.portfolio_alert = {}
27 | # pf.dynamodb = boto3.resource('dynamodb')
28 | # pf.portfolio_table = pf.config_dict_env.get('PortfolioMonitoring-PortfoliosTable')
29 | # pf.dynamodb_table = pf.dynamodb.Table(pf.portfolio_table )
30 | # pf.marketdata_source = boto3.client('timestream-query')
31 | # pf.marketdata_source_paginator = pf.marketdata_source.get_paginator('query')
32 | # pf.marketdata_source_price_field = 'measure_value::double'
33 | # pf.price_metric = 'latestPrice'
34 | # pf.timestream_tables = aqi.try_get_catch_dict(pf.config_dict_env, 'PortfolioMonitoring-TimestreamTables')
35 | # pf.marketdata_source_db = aqi.try_get_catch_dict(pf.config_dict_env, 'PortfolioMonitoring-TimestreamDb') # TODO get from parameter store
36 | # pf.marketdata_source_table = aqi.try_get_catch_dict(pf.timestream_tables, 'market_data_table')
37 | # pf.portfolio_target_table = aqi.try_get_catch_dict(pf.timestream_tables, 'portfolio_table')
38 | # pf.marketdata_source_query = \
39 | # f"SELECT distinct time,symbol,latestUpdate,measure_value::double \
40 | # FROM {pf.marketdata_source_db}.{pf.marketdata_source_table}\
41 | # where measure_name='{pf.price_metric}'\
42 | # and latestUpdate >= '%s' and latestUpdate != 'None' and symbol in (%s)\
43 | # order by time desc" # TODO: get database name and table name from parameter store
44 |
45 | # pf.portfolio_latest_ts_query = \
46 | # f"select max(latestUpdate) latestUpdate from \
47 | # (SELECT max(latestUpdate) latestUpdate FROM {pf.marketdata_source_db}.{pf.portfolio_target_table} \
48 | # where portf_id = '{portfolio_id}' and latestUpdate != 'None' \
49 | # union \
50 | # SELECT ago(1h) latestUpdate ) t" # TODO: get database name and table name from parameter store
51 | # pf.marketdata_source_schema = None
52 | # pf.marketdata_target = boto3.client(
53 | # 'timestream-write') # ,config=Config(read_timeout=20, max_pool_connections=5000, retries={'max_attempts': 10}))
54 |
55 | # pf.portfolio = pf.__portfolio(portfolio_id)
56 | # pf.portfolio_pd = pf.__portfolio_pd()
57 | # pf.exception_threshold = 20
58 | # pf.current_exception_count = 0
59 |
60 | if __name__ == '__main__':
61 | unittest.main()
--------------------------------------------------------------------------------
/aws-quant-infra/src/utils/portfolio-test_ptf_50.json:
--------------------------------------------------------------------------------
1 | {"portf_id": "836f371e8f355fef40271ed159b362e8", "portf_create_ts": 1648667149615, "positions": [{"WYN US Equity": 0.02235615513305881}, {"GPI US Equity": 0.04536344304387179}, {"BIF US Equity": 0.004992252272622707}, {"CLV US Equity": 0.030965417825768827}, {"HT$C US Equity": 0.009080419283530443}, {"TWI US Equity": 0.0024633883922348877}, {"BBY US Equity": 0.008007715637182234}, {"HBI US Equity": 0.003658419109114956}, {"CRY US Equity": 0.00286677105803924}, {"RBS$L US Equity": 0.026925149793916126}, {"SYX US Equity": 0.010938923082783079}, {"MPO US Equity": 0.011976070108143267}, {"ACCO US Equity": 0.09789328248050205}, {"TNP$C US Equity": 0.001955477899450369}, {"RNE US Equity": 0.004723824211136291}, {"CMI US Equity": 0.006418669992817907}, {"PFG US Equity": 0.04475657489030634}, {"HHS US Equity": 0.01333040806436062}, {"GIMO US Equity": 8.098515393910154e-05}, {"IX US Equity": 0.04074203480878982}, {"GDO US Equity": 0.02704499627690528}, {"TKR US Equity": 0.011890423408337056}, {"DYN$A US Equity": 0.013148219928885334}, {"GDL$B US Equity": 0.0029026692456888934}, {"VLO US Equity": 0.011926532984156163}, {"NTP US Equity": 0.016428025323423863}, {"HPF US Equity": 0.004985510299594176}, {"GDL$B US Equity": 0.0015375478900127636}, {"EFM US Equity": 0.010614335277110573}, {"SLG US Equity": 0.06988917117001088}, {"VNO$J US Equity": 0.042728074918333345}, {"IQI US Equity": 0.004898631057392717}, {"DOC US Equity": 0.008364876087066554}, {"BKK US Equity": 0.011618802162312768}, {"RBS$H US Equity": 0.007084365017959009}, {"SNY US Equity": 0.05334397480200784}, {"TRMR US Equity": 0.008509918978111172}, {"DCUA US Equity": 0.02503251297432368}, {"BBL US Equity": 0.00949317854023446}, {"SWC US Equity": 0.023743786252093307}, {"NMT US Equity": 0.013307997798733285}, {"TE US Equity": 0.0061871567676523445}, {"DSX$B US Equity": 0.013095705774043266}, {"CMC US Equity": 0.03965599686193546}, {"BCS$D US Equity": 0.01408064008447235}, {"GPN US Equity": 0.011924049003860309}, {"NMA US Equity": 0.07175736959440869}, {"MRH$A US Equity": 0.020119649568072048}, {"BTO US Equity": 0.018887989812990464}, {"PFG$B US Equity": 0.036302509898303054}], "portf_name": "test_ptf_50", "handler_info": {"refresh_sec": "60", "deploy": "batch"}, "app_config_dict": {"Configuration": "PortfolioMonitoringConfigProfile", "Application": "PortfolioMonitoring", "Environment": "dev"}}
--------------------------------------------------------------------------------
/aws-quant-infra/src/utils/portfolio_generator.py:
--------------------------------------------------------------------------------
1 | import csv
2 | import argparse
3 | import json
4 | from hashlib import md5
5 | import random
6 | import numpy as np
7 | import json
8 | import boto3
9 | import datetime
10 | import time
11 | import random
12 | import sys
13 | import os
14 |
15 | from decimal import *
16 |
17 | import pandas as pd
18 | # IEX
19 | from sseclient import SSEClient
20 | # BPIPE
21 | # TODO add logic to dockerfiles
22 | import asyncio
23 | from xbbg import blp
24 |
25 | sys.path.append('/var/task/shared/python')
26 | sys.path.append('/src/shared/python/')
27 | sys.path.append(('/home/ec2-user/environment/MvpPortfolioMonitoring-code-repo/aws-quant-infra'))
28 | sys.path.append(('/home/ec2-user/environment/MvpPortfolioMonitoring-code-repo/aws-quant-infra/src/shared/python'))
29 | import aws_quant_infra as aq_i
30 | import aws_quant_market_data as aq_md
31 | import aws_quant_risk as aq_r
32 |
33 | parser = argparse.ArgumentParser(description="Portfolio generator")
34 | parser.add_argument('--name', required=True)
35 | parser.add_argument('--filename', required=True)
36 | parser.add_argument('--ticker-amount', default=50, required=False, type=int)
37 | parser.add_argument('--random-order', default=False, required=False, type=bool)
38 | args = parser.parse_args()
39 |
40 |
41 | def write_portfolio(name, data, weights, ticker_amount, random_order):
42 | portfolio = dict()
43 | portfolio['portf_id'] = md5(name.encode('utf-8')).hexdigest()
44 | portfolio['portf_create_ts'] = int(datetime.datetime.now().timestamp()*1000)
45 | portfolio['positions'] = []
46 | portfolio['portf_name'] = name
47 | portfolio['handler_info'] = {
48 | "refresh_sec": "60",
49 | "deploy": "batch",
50 | }
51 | portfolio["app_config_dict"] = {
52 | "Configuration": "PortfolioMonitoringConfigProfile",
53 | "Application": "PortfolioMonitoring",
54 | "Environment": "dev"
55 | }
56 |
57 | tickers = []
58 |
59 | if random_order is False and ticker_amount <= len(data):
60 | tickers = data[:ticker_amount]
61 | elif ticker_amount <= len(data):
62 | for i in range(ticker_amount):
63 | rnx = random.randrange(len(data) -1)
64 | tickers.append(data[rnx])
65 |
66 | for tick in tickers:
67 | mapper = dict()
68 | mapper[f'{tick} US Equity'] = (weights.pop())
69 | portfolio['positions'].append(mapper)
70 |
71 | #with open(f'portfolio-{name}.json', 'w') as f:
72 | # f.write(json.dumps(portfolio))
73 |
74 | return(portfolio)
75 |
76 | def main():
77 | list_tickers = []
78 |
79 | try:
80 | with open(args.filename, 'r', newline='') as csvfile:
81 | reader = csv.reader(csvfile)
82 | for row in reader:
83 | list_tickers.append(row[0])
84 | except Exception as e:
85 | print(e)
86 |
87 | weights = np.random.dirichlet(np.ones(args.ticker_amount), size=1)
88 |     weights = [Decimal(str(round(i, 4))) for i in list(weights.flatten())]  # Decimal (not float) so the weights can be stored in DynamoDB
89 |
90 | ptf=write_portfolio(args.name, list_tickers, weights, args.ticker_amount, True)
91 | aq_r.PortfolioTrackerTester.test_harness(ptf=ptf)
92 |
93 |
94 |
95 | if __name__ == '__main__':
96 | main()
97 |
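For reference, a typical invocation of this generator (illustrative only; it assumes the shared `aws_quant_*` modules and the optional market-data dependencies such as `xbbg` and `sseclient` are importable, and that AWS credentials for the deployed stack are available) could look like:

```bash
# Build a 50-name random-weight portfolio from the ticker list in this directory
# and hand it to the PortfolioTrackerTester test harness.
python portfolio_generator.py --name test_ptf_50 --filename nyse-ticker-list.csv --ticker-amount 50
```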
--------------------------------------------------------------------------------
/aws-quant-infra/src/utils/resize_root.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Specify the desired volume size in GiB as a command line argument. If not specified, default to 60 GiB.
4 | SIZE=${1:-60}
5 |
6 | # Get the ID of the environment host Amazon EC2 instance.
7 | INSTANCEID=$(curl http://169.254.169.254/latest/meta-data/instance-id)
8 | REGION=$(curl -s http://169.254.169.254/latest/meta-data/placement/availability-zone | sed 's/\(.*\)[a-z]/\1/')
9 |
10 | # Get the ID of the Amazon EBS volume associated with the instance.
11 | VOLUMEID=$(aws ec2 describe-instances \
12 | --instance-id $INSTANCEID \
13 | --query "Reservations[0].Instances[0].BlockDeviceMappings[0].Ebs.VolumeId" \
14 | --output text \
15 | --region $REGION)
16 |
17 | # Resize the EBS volume.
18 | aws ec2 modify-volume --volume-id $VOLUMEID --size $SIZE
19 |
20 | # Wait for the resize to finish.
21 | while [ \
22 | "$(aws ec2 describe-volumes-modifications \
23 | --volume-id $VOLUMEID \
24 | --filters Name=modification-state,Values="optimizing","completed" \
25 | --query "length(VolumesModifications)"\
26 | --output text)" != "1" ]; do
27 | sleep 1
28 | done
29 |
30 | # Check whether the root volume is exposed as /dev/xvda (Xen) or as an NVMe device
31 | if [[ -e "/dev/xvda" && $(readlink -f /dev/xvda) = "/dev/xvda" ]]
32 | then
33 | # Rewrite the partition table so that the partition takes up all the space that it can.
34 | sudo growpart /dev/xvda 1
35 |
36 | # Expand the size of the file system.
37 | # Check if we're on AL2
38 | STR=$(cat /etc/os-release)
39 | SUB="VERSION_ID=\"2\""
40 | if [[ "$STR" == *"$SUB"* ]]
41 | then
42 | sudo xfs_growfs -d /
43 | else
44 | sudo resize2fs /dev/xvda1
45 | fi
46 |
47 | else
48 | # Rewrite the partition table so that the partition takes up all the space that it can.
49 | sudo growpart /dev/nvme0n1 1
50 |
51 | # Expand the size of the file system.
52 | # Check if we're on AL2
53 | STR=$(cat /etc/os-release)
54 | SUB="VERSION_ID=\"2\""
55 | if [[ "$STR" == *"$SUB"* ]]
56 | then
57 | sudo xfs_growfs -d /
58 | else
59 | sudo resize2fs /dev/nvme0n1p1
60 | fi
61 | fi
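Usage note (a sketch; the script is intended to run on the Cloud9/EC2 instance whose root volume is being resized, and needs the AWS CLI plus permission to modify the volume): pass the desired size in GiB as the first argument, or omit it to use the 60 GiB default.

```bash
# Grow the root volume and filesystem of the current instance to 100 GiB.
bash resize_root.sh 100
```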
--------------------------------------------------------------------------------
/bandit.toml:
--------------------------------------------------------------------------------
1 | [tool.bandit]
2 | exclude_dirs = ["./aws-quant-infra/node_modules/", "./aws-quant-infra/deployment/cdk/node_modules/", "./aws-quant-infra/deployment/cdk/cdk.out"]
3 | tests = []
4 | skips = ["B311", "B303", "B101", "B608", "B105", "B113", "B607", "B404", "B603", "B310", "B108"]
5 |
6 | [tool.bandit.any_other_function_with_shell_equals_true]
7 | no_shell = [
8 | "os.execl",
9 | "os.execle",
10 | "os.execlp",
11 | "os.execlpe",
12 | "os.execv",
13 | "os.execve",
14 | "os.execvp",
15 | "os.execvpe",
16 | "os.spawnl",
17 | "os.spawnle",
18 | "os.spawnlp",
19 | "os.spawnlpe",
20 | "os.spawnv",
21 | "os.spawnve",
22 | "os.spawnvp",
23 | "os.spawnvpe",
24 | "os.startfile"
25 | ]
26 | shell = [
27 | "os.system",
28 | "os.popen",
29 | "os.popen2",
30 | "os.popen3",
31 | "os.popen4",
32 | "popen2.popen2",
33 | "popen2.popen3",
34 | "popen2.popen4",
35 | "popen2.Popen3",
36 | "popen2.Popen4",
37 | "commands.getoutput",
38 | "commands.getstatusoutput"
39 | ]
40 | subprocess = [
41 | "subprocess.Popen",
42 | "subprocess.call",
43 | "subprocess.check_call",
44 | "subprocess.check_output"
45 | ]
46 |
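For reference, this configuration can be applied with the Bandit CLI (a sketch, assuming Bandit is installed with TOML support, e.g. `pip install bandit[toml]`), run from the repository root:

```bash
# Recursively scan the repository using the skips/excludes defined above.
bandit -c bandit.toml -r .
```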
--------------------------------------------------------------------------------
/deployment.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 |
4 |
5 | current_region=`aws configure get region`
6 | declare -A arn_region
7 |
8 | arn_region["us-east-1"]="arn:aws:lambda:us-east-1:027255383542:layer:AWS-AppConfig-Extension:110"
9 | arn_region["us-east-2"]="arn:aws:lambda:us-east-2:728743619870:layer:AWS-AppConfig-Extension:79"
10 | arn_region["us-west-1"]="arn:aws:lambda:us-west-1:958113053741:layer:AWS-AppConfig-Extension:121"
11 | arn_region["us-west-2"]="arn:aws:lambda:us-west-2:359756378197:layer:AWS-AppConfig-Extension:143"
12 | arn_region["ca-central-1"]="arn:aws:lambda:ca-central-1:039592058896:layer:AWS-AppConfig-Extension:79"
13 | arn_region["eu-central-1"]="arn:aws:lambda:eu-central-1:066940009817:layer:AWS-AppConfig-Extension:91"
14 | arn_region["eu-central-2"]="arn:aws:lambda:eu-central-2:758369105281:layer:AWS-AppConfig-Extension:29"
15 | arn_region["eu-west-1"]="arn:aws:lambda:eu-west-1:434848589818:layer:AWS-AppConfig-Extension:108"
16 | arn_region["eu-west-2"]="arn:aws:lambda:eu-west-2:282860088358:layer:AWS-AppConfig-Extension:79"
17 | arn_region["eu-west-3"]="arn:aws:lambda:eu-west-3:493207061005:layer:AWS-AppConfig-Extension:80"
18 | arn_region["eu-north-1"]="arn:aws:lambda:eu-north-1:646970417810:layer:AWS-AppConfig-Extension:139"
19 | arn_region["eu-south-1"]="arn:aws:lambda:eu-south-1:203683718741:layer:AWS-AppConfig-Extension:71"
20 | arn_region["eu-south-2"]="arn:aws:lambda:eu-south-2:586093569114:layer:AWS-AppConfig-Extension:26"
21 | arn_region["cn-north-1"]="arn:aws-cn:lambda:cn-north-1:615057806174:layer:AWS-AppConfig-Extension:66"
22 | arn_region["cn-northwest-1"]="arn:aws-cn:lambda:cn-northwest-1:615084187847:layer:AWS-AppConfig-Extension:66"
23 | arn_region["ap-east-1"]="arn:aws:lambda:ap-east-1:630222743974:layer:AWS-AppConfig-Extension:71"
24 | arn_region["ap-northeast-1"]="arn:aws:lambda:ap-northeast-1:980059726660:layer:AWS-AppConfig-Extension:82"
25 | arn_region["ap-northeast-2"]="arn:aws:lambda:ap-northeast-2:826293736237:layer:AWS-AppConfig-Extension:91"
26 | arn_region["ap-northeast-3"]="arn:aws:lambda:ap-northeast-3:706869817123:layer:AWS-AppConfig-Extension:84"
27 | arn_region["ap-southeast-1"]="arn:aws:lambda:ap-southeast-1:421114256042:layer:AWS-AppConfig-Extension:89"
28 | arn_region["ap-southeast-2"]="arn:aws:lambda:ap-southeast-2:080788657173:layer:AWS-AppConfig-Extension:91"
29 | arn_region["ap-southeast-3"]="arn:aws:lambda:ap-southeast-3:418787028745:layer:AWS-AppConfig-Extension:60"
30 | arn_region["ap-southeast-4"]="arn:aws:lambda:ap-southeast-4:307021474294:layer:AWS-AppConfig-Extension:2"
31 | arn_region["ap-south-1"]="arn:aws:lambda:ap-south-1:554480029851:layer:AWS-AppConfig-Extension:92"
32 | arn_region["ap-south-2"]="arn:aws:lambda:ap-south-2:489524808438:layer:AWS-AppConfig-Extension:29"
33 | arn_region["sa-east-1"]="arn:aws:lambda:sa-east-1:000010852771:layer:AWS-AppConfig-Extension:110"
34 | arn_region["af-south-1"]="arn:aws:lambda:af-south-1:574348263942:layer:AWS-AppConfig-Extension:71"
35 | arn_region["me-central-1"]="arn:aws:lambda:me-central-1:662846165436:layer:AWS-AppConfig-Extension:31"
36 | arn_region["me-south-1"]="arn:aws:lambda:me-south-1:559955524753:layer:AWS-AppConfig-Extension:71"
37 |
38 |
39 |
40 | echo "******** Checking if JQ is installed ********"
41 | if ! command -v jq &> /dev/null
42 | then
43 | if [[ "$OSTYPE" == "linux-gnu"* ]]; then
44 | echo "******** Installing JQ on linux ********"
45 | sudo yum install jq -y
46 |
47 | else
48 |         echo "Please make sure the jq binary is installed and on your PATH, then re-run this script"
49 | exit 1
50 | fi
51 | fi
52 | echo "******** Done ********"
53 |
54 | echo "******** Downloading AppConfig for lambda ********"
55 | aws lambda get-layer-version-by-arn --arn ${arn_region[$current_region]} | jq -r '.Content.Location' | xargs curl -o aws-quant-infra/src/lambda/extension.zip
56 | echo "******** Done ********"
57 |
58 | echo "******** Installing NPM packages ********"
59 | cd aws-quant-infra/deployment/cdk && npm install
60 | echo "******** Done ********"
61 |
62 |
63 | echo "******** CDK Bootstrap ********"
64 | pwd
65 | cdk bootstrap
66 | echo "******** Done ********"
67 |
68 | echo "******** CDK Checking drift ********"
69 | pwd
70 | cdk diff --no-color &> changes.txt
71 | echo "******** Done ********"
72 |
73 | echo "******** CDK Deploy ********"
74 | cdk deploy "*" --outputs-file outputs.json --require-approval never
75 | echo "******** Done ********"
76 |
77 | echo "******** Checking if Terraform is installed ********"
78 | #From the CDK folder
79 | if ! command -v terraform &> /dev/null
80 | then
81 | if [[ "$OSTYPE" == "linux-gnu"* ]]; then
82 | echo "******** Installing Terraform on linux ********"
83 | yum install terraform -y
84 | else
85 |         echo "Please make sure the Terraform binary is installed and on your PATH, then re-run this script"
86 | exit 1
87 | fi
88 | fi
89 | echo "******** Done ********"
90 |
91 | echo "******** Initializing terraform ********"
92 | cd ../grafana
93 | terraform init
94 | terraform plan -var-file=grafana.tfvars -out=plan.tfx
95 | terraform apply plan.tfx
96 |
97 | echo "*****************************************************************"
98 | echo "Deployment completed"
99 | echo "*****************************************************************"
100 | echo " ~ Next Steps ~ "
101 | echo " Reset the password for user grafana_admin in the IAM Identity Center"
102 | echo " Add Timestream data source to Grafana"
103 |
--------------------------------------------------------------------------------
/grafana-infra/README.md:
--------------------------------------------------------------------------------
1 | # CDK Grafana
2 | ## Summary
3 | This project aims to deploy a resilient Grafana container on [AWS Fargate](https://aws.amazon.com/fargate/) using the [Grafana Docker Image](https://grafana.com/docs/grafana/latest/installation/docker/) on [DockerHub](https://hub.docker.com/r/grafana/grafana/) with no modifications/rebuilds of the container.
4 |
5 | * The [AWS CDK](https://aws.amazon.com/cdk/) is used for infrastructure-as-code and deployment.
6 | * Persistence is provided by [Amazon EFS and AWS Fargate support](https://aws.amazon.com/about-aws/whats-new/2020/04/amazon-ecs-aws-fargate-support-amazon-efs-filesystems-generally-available/).
7 | * High Availability is supported, but autoscaling of the container is not provided at this point, i.e. the ECS/Fargate service will ensure that only **one** Grafana container is online at a time, in either Availability Zone A or B.
8 | * Access for the Grafana container to CloudWatch and Timestream is configured with an IAM Role, removing the need to configure access/secret keys
9 | * The initial admin password is securely generated and configured using [Secrets Manager](https://console.aws.amazon.com/secretsmanager)
10 | * A TLS certificate is automatically created and deployed to the Application Load Balancer using [AWS Certificate Manager](https://aws.amazon.com/certificate-manager/), enabling secure HTTPS-only communication with the Grafana portal
11 | * *Optional* PrivateLink endpoints can be added for increased security
12 |
13 | ## System Diagram
14 | 
15 |
16 | ## Prerequisites
17 | ### Route 53
18 | This project requires that you have already registered a domain and configured a hosted zone in [Route 53](https://aws.amazon.com/route53/). Once this is completed you will need the domainName, hostedZoneId and zoneName to continue the deployment.
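If you are not sure of the hostedZoneId for an existing zone, one way to look it up is with the AWS CLI (a minimal sketch; `example.com` is a placeholder for your own domain):

```bash
# Prints the Id of the matching hosted zone, e.g. /hostedzone/Z0123456789ABCDEFGHIJ;
# the part after /hostedzone/ is the hostedZoneId.
aws route53 list-hosted-zones-by-name --dns-name example.com \
  --query "HostedZones[?Name=='example.com.'].Id" --output text
```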
19 |
20 | ### Node.js
21 | * Install [Node.js and NPM](https://nodejs.org/en/)
22 |
23 | ### AWS
24 | * An AWS account
25 | * [AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html)
26 | * AWS CLI [configured](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html#cli-configure-quickstart-config)
27 | * [AWS CDK](https://docs.aws.amazon.com/cdk/latest/guide/getting_started.html)
28 |
29 | ### Install node modules
30 | ```bash
31 | cd grafana-infra
32 | npm install
33 | ```
34 |
35 | ## Deploy
36 | 1. Deploy the project using the following command from the root of the grafana-infra folder (replacing domainName, hostedZoneId and zoneName with the correct values for your environment)
37 | ``` bash
38 | cdk deploy --context domainName="grafana.example.com" --context hostedZoneId="Z0123456789ABCDEFGHIJ" --context zoneName="example.com"
39 | ```
40 | 2. Wait for the new service to provision (This takes 5-10 mins)
41 | 3. Retrieve the admin password from [Secrets Manager](https://console.aws.amazon.com/secretsmanager) (see the example command after this list)
42 | 4. Log into Grafana (the URL is output by `cdk deploy`); the username is `admin` and the password is the one you retrieved from Secrets Manager
43 | 5. In Grafana add AWS CloudWatch as a Data Source: Configuration, Data Sources, CloudWatch, set the region to the region you wish to monitor e.g. us-east-1. Leave the other fields blank and click Save and Test.
44 | 6. On the Dashboards tab you can add one of the prebuilt Dashboards, e.g. AWS Lambda.
45 |
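If you prefer the command line for step 3, the generated admin password can also be read with the AWS CLI (a sketch only; the secret is created by the CDK stack, so substitute the name or ARN shown in the Secrets Manager console or by `aws secretsmanager list-secrets`):

```bash
# Replace <grafana-admin-secret> with the name or ARN of the secret created by the stack.
aws secretsmanager get-secret-value \
  --secret-id <grafana-admin-secret> \
  --query SecretString --output text
```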
46 | You can verify that the Grafana configuration persists across a restart by terminating the task in ECS, waiting a few seconds for a new task to launch, and then logging back into Grafana; all of your dashboard configuration will still be there.
47 |
48 | `Note`: changes to the admin password in Secrets Manager will not be reflected in Grafana, as the password is read only once, at initial deployment time.
49 |
50 | ## PrivateLink Endpoints (Optional)
51 | PrivateLink endpoints can be added using the following command. This will incur [additional cost](https://aws.amazon.com/privatelink/pricing/) for the endpoints, but will prevent CloudWatch, EFS (Elastic File System) and Secrets Manager traffic from traversing the public internet.
52 | ``` bash
53 | cdk deploy --context domainName="grafana.example.com" --context hostedZoneId="Z0123456789ABCDEFGHIJ" --context zoneName="example.com" --context enablePrivateLink="true"
54 | ```
55 |
56 |
57 |
58 |
--------------------------------------------------------------------------------
/grafana-infra/bin/cdk-grafana.ts:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 | import * as cdk from '@aws-cdk/core';
3 | import { CdkGrafanaStack } from '../lib/cdk-grafana-stack';
4 |
5 | const app = new cdk.App();
6 | new CdkGrafanaStack(app, 'CdkGrafanaStack');
7 |
--------------------------------------------------------------------------------
/grafana-infra/cdk.context.json:
--------------------------------------------------------------------------------
1 | {
2 | "@aws-cdk/core:enableStackNameDuplicates": "true",
3 | "aws-cdk:enableDiffNoFail": "true",
4 | "domainName": "",
5 | "hostedZoneId": "",
6 | "zoneName": ""
7 | }
8 |
--------------------------------------------------------------------------------
/grafana-infra/cdk.json:
--------------------------------------------------------------------------------
1 | {
2 | "app": "npx ts-node bin/cdk-grafana.ts",
3 | "context": {
4 | "@aws-cdk/core:enableStackNameDuplicates": "true",
5 | "aws-cdk:enableDiffNoFail": "true",
6 | "@aws-cdk/core:stackRelativeExports": "true"
7 | }
8 | }
9 |
--------------------------------------------------------------------------------
/grafana-infra/img/Network.drawio:
--------------------------------------------------------------------------------
1 | 7Vzbkps4EP0aV+0+rAsQF/Po6yRVk93Znezm8uKSQcZKMHIJ+ZavXwmEDRLOeGYg40mcmkqhBkui+5xuqdV2BwyXuxsKV4t3JERxxzLCXQeMOpbl2y7/Xwj2ucA1zVwQURzmopLgHn9DUmhI6RqHKK08yAiJGV5VhQFJEhSwigxSSrbVx+Ykro66ghHSBPcBjHXpBxyyhZS6jn288QbhaFEMbbp+fmcJi6flq6QLGJJtSQTGHTCkhLD8arkbolgor1BM/rnJibuHmVGUsHM+MMbw26ePd0v0b3/2JbjZjft/f/4D5L1sYLyWb/wPijBJ5JTZvlDEiuCEZcp0BvyPDzU0Og6/MxStruUoArXtVQWm3hJ9VAVq26sKTLV7UxnfVCdYEmitSveGMr5RmiD/AwOyZjFO0PAAO4MLIwpDzK0xJDGhXJaQhGtvsGDLmLdMfrldYIbuVzAQWt1yxnDZnCRMAt+0irZUvOiV42Ylrpe7SFCsC7ep3Y0oWa+yId9y6NfendLckrwHRslXVMyqYwHT9saDvhgLx7Ey2w2iDHMC9GMciY4ZEeNA2YrRnIke+SvgJLrNWiNgyGnXDRHCdIFC+S46ZiWMxahoVxJJDN8gskSM7vkjxd2CT9KjWD3Z3h756RayRYmari2FUPqE6ND3kTX8QhLnESSyNBL9dze8Muj1M2izCuroY9m9nmm3Sp9+f+ANem3Qx63SB1g19DFq6OO4bdHH1ujT30AcwxmOMRNT/yxUaxl9jVI1+j/t6w6aNE+aSQXOKZ/WgB0so2oH29DtYNk1drBbc2POmXYY/Ex2sHsXZwdXs8PdehbjQOh8PUsQq+XCNby8uvCSomBNObWmx4fvM9oUHZ8VeLh87E/A2G0u+hzGaTr6HJi0V0i0LW+uathmthZ9vDPYpnu8K9uubLt4tjnWxbGtp7ON4g1k6BrcfnW61SYlBN3ciTXpvYLMhBrc3Jenm38O3a7R7Uq3V0g3NbpdAN2KU4sSlVAYocLAhLIFiUgC4/FROuD2SsKDYo7P3BKh8AxJXxBjewkbuGakijO0w+yj+DjHcN76VLoz2smes8b+If2nZE0D9J13lMlOBmmE2MPPiff/rjUpiiHDm+p5S/OW0Q86+kKRliEOeziCsxMowZyrL7x8XzjkYyH6gEcURJmmuXXrvKJZ5xVHPdflWHlCajco5qT4v0OPlbyX9JvvM4pbTkMLECW369Ycjdh1uV3TaM0jmhrvbiicwwR2LDcWEWRG+VUkrjimGOTYohoHa0Gn7qKANRETOwVGJcyNDGdoehoE5MOVWFXY/RbOUHxHUsyyY7XRjDBGlucDo0SHh6AP01X+onO8E/MYZF4I0fEG5c7IPMWPoFDi1GwIVD2zCirf0UDl9XRMOa1BSs+PvkTUfXoENe0zQ2hBnuZiqPzonUDT0cTAtSomtny/2kU+U/mpo/n6lMJ96TEZKk+Po6zYnGrFAr/Iezxi4/COz4BLzfHSrb7lacHFOH1gDJyfz8XA1SrmcxFTnMYEhtMZjGESZBNowOHYltN1KjixPV9zOUWZTeWE3z+N/Oe5HD1J/Gf/PRfc8K30Fu6vaHo6mhLIppFUYzMn3MqJHvA8DT1OzSKovYClJz2v6LlU9Lj+w+ixa5Y7hax59JzM4d3i5GtHhFB1IT1OwiIUq7d++2slzAnj36+oezrq0EHBjWDOA8q6yDd1j1W3xG4Lc8U6rYS5t0Izeb746raeDSAstdms77JBdatm+fr+/8fiSN/936OAIuGZjHcwgVHTm31+b+LYnu2X7o0w5R3laEjETk/H2dAzgTmpSwjNs3+XCLZaYFGUbz/ztNiAN+sSZGluhOlSmqClSioNfHV5Aq818Fka+IYxWYcfIAsWzaMO2HbPehzqBkMTOO4vg7pAaH+bab+ZsKmWN7w04PQzhgmkUXbaqq7D7hHd4AA1D0OvNzbsx8HwRCr0Z4XhXNqkGadXfMFJYtBzLA2EVg0IrdZAqFfxjmOYsqygbYJjcfJ/v08ZWjaOPdfoA+A9DnuW55nmr+MCUW6KKdcCmqa5GZrZthrVFO6L+0I9ST8iwVex3DPerGca9jQllwxUPYEd1ZkTL7Pv8Wm2lPIRXkb8NWI8Ey+TBpCzYJLPp5tuoobyTlq1nWaCntMFXo0VWvtikp63fNt/d11x/3ji45BPQJTnwCScwiBAabH6XmaIawKAwPEvywfoec9/SNzwmqc+b3ACXa8+b0BJ3NDCRQWLZ/jdwmAlwIC6tYvZLXxb45gBeuZpKPQ/FwdO6Jo1eNH929EQReZgCppBo/a9kBd2XUBPXFVgePVgT65PqQGR1dReDGTlcGUc1Xy7FVjF8XIZSqCtNRjQ6w9+BH4Oj/9k+Cky5w1BxqpmzM2ajHmd42nttA/o23cNLSgJ++KnTYTu+X4yxUGnocokcG5lUkGrByuTzjxzf2YBU08JH4763dz8zbUCJr0j5fTX9pWOTlRCNVWsBM6pbWvN/IVVHy5Msy/K/LZXtZpGz3PNr3YE1LrYts2v79d187+G0sa2KxZtoNaIOWoAP9fmrvJDDN6ZNn9s8aM6jqP+XNOZ83pisSRvHn9qKn/8+INdYPw/
--------------------------------------------------------------------------------
/grafana-infra/img/diagram01.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aws-samples/quant-trading/f1dd9ace0c732cae3d0b103affae3d6e6b2da80f/grafana-infra/img/diagram01.png
--------------------------------------------------------------------------------
/grafana-infra/jest.config.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 |   roots: ['<rootDir>/test'],
3 | testMatch: ['**/*.test.ts'],
4 | transform: {
5 | '^.+\\.tsx?$': 'ts-jest'
6 | }
7 | };
8 |
--------------------------------------------------------------------------------
/grafana-infra/lib/cdk-grafana-stack.ts:
--------------------------------------------------------------------------------
1 | import * as cdk from '@aws-cdk/core';
2 | import * as ec2 from '@aws-cdk/aws-ec2';
3 | import * as ecs from '@aws-cdk/aws-ecs';
4 | import * as ecs_patterns from '@aws-cdk/aws-ecs-patterns';
5 | import * as efs from '@aws-cdk/aws-efs';
6 | import * as iam from '@aws-cdk/aws-iam';
7 | import * as logs from '@aws-cdk/aws-logs';
8 | import * as r53 from '@aws-cdk/aws-route53';
9 | import * as secretsmanager from '@aws-cdk/aws-secretsmanager';
10 | import {ApplicationProtocol} from "@aws-cdk/aws-elasticloadbalancingv2";
11 |
12 | export class CdkGrafanaStack extends cdk.Stack {
13 | constructor(scope: cdk.Construct, id: string, props?: cdk.StackProps) {
14 | super(scope, id, props);
15 |
16 | // Get Context Values
17 | // const domainName = this.node.tryGetContext('domainName');
18 | // const hostedZoneId = this.node.tryGetContext('hostedZoneId');
19 | // const zoneName = this.node.tryGetContext('zoneName');
20 |
21 | // if (!domainName || !hostedZoneId || !zoneName) {
22 | // throw new Error('Please provide required parameters domainName, hostedZoneId, zoneName via context variables');
23 | // }
24 |
25 | const enablePrivateLink = this.node.tryGetContext('enablePrivateLink');
26 | // const domainZone = r53.PublicHostedZone.fromHostedZoneAttributes( this, "MyHostedZone", {
27 | // hostedZoneId: hostedZoneId,
28 | // zoneName: zoneName
29 | // });
30 | // vpc
31 | const vpc = new ec2.Vpc(this, "MyVpc", {
32 | maxAzs: 2 // Default is all AZs in region
33 | });
34 |
35 | if (enablePrivateLink == 'true') {
36 | vpc.addInterfaceEndpoint('CWEndpoint', {service: ec2.InterfaceVpcEndpointAwsService.CLOUDWATCH});
37 | vpc.addInterfaceEndpoint('EFSEndpoint', {service: ec2.InterfaceVpcEndpointAwsService.ELASTIC_FILESYSTEM});
38 | vpc.addInterfaceEndpoint('SMEndpoint', {service: ec2.InterfaceVpcEndpointAwsService.SECRETS_MANAGER});
39 | }
40 |
41 | const cluster = new ecs.Cluster(this, "MyCluster", {
42 | vpc: vpc
43 | });
44 |
45 | // EFS
46 | const fileSystem = new efs.FileSystem(this, 'EfsFileSystem', {
47 | vpc: vpc,
48 | encrypted: true,
49 | lifecyclePolicy: efs.LifecyclePolicy.AFTER_14_DAYS,
50 | performanceMode: efs.PerformanceMode.GENERAL_PURPOSE,
51 | throughputMode: efs.ThroughputMode.BURSTING
52 | });
53 |
54 | const accessPoint = new efs.AccessPoint(this, 'EfsAccessPoint', {
55 | fileSystem: fileSystem,
56 | path: '/var/lib/grafana',
57 | posixUser: {
58 | gid: '1000',
59 | uid: '1000'
60 | },
61 | createAcl: {
62 | ownerGid: '1000',
63 | ownerUid: '1000',
64 | permissions: '755'
65 | }
66 | });
67 |
68 | // task log group
69 | const logGroup = new logs.LogGroup(this, 'taskLogGroup', {
70 | retention: logs.RetentionDays.ONE_MONTH
71 | });
72 |
73 | // container log driver
74 | const containerLogDriver = ecs.LogDrivers.awsLogs({
75 | streamPrefix: 'fargate-grafana', //cdk.Stack.stackName,
76 | logGroup: logGroup
77 | });
78 |
79 | // Use customer provided Grafana Role
80 | const grafanaRole = iam.Role.fromRoleArn(this, 'Role','ROLE_ARN')
81 | // // Grafana service task Role
82 | // const taskRole = new iam.Role(this, 'taskRole', {
83 | // assumedBy: new iam.ServicePrincipal('ecs-tasks.amazonaws.com'),
84 | // });
85 |
86 | // taskRole.addToPolicy(new iam.PolicyStatement({
87 | // effect: iam.Effect.ALLOW,
88 | // actions: [
89 | // 'cloudwatch:DescribeAlarmsForMetric',
90 | // 'cloudwatch:DescribeAlarmHistory',
91 | // 'cloudwatch:DescribeAlarms',
92 | // 'cloudwatch:ListMetrics',
93 | // 'cloudwatch:GetMetricStatistics',
94 | // 'cloudwatch:GetMetricData',
95 | // 'ec2:DescribeTags',
96 | // 'ec2:DescribeInstances',
97 | // 'ec2:DescribeRegions',
98 | // 'tag:GetResources',
99 | // "timestream:CancelQuery",
100 | // 'timestream:DescribeEndpoints',
101 | // "timestream:DescribeTable",
102 | // "timestream:ListTables",
103 | // "timestream:ListDatabases",
104 | // "timestream:ListMeasures",
105 | // 'timestream:SelectValues',
106 | // "timestream:Select"
107 | // ],
108 | // resources: ['*']
109 | // }));
110 |
111 | // // execution Role
112 | // const executionRole = new iam.Role(this, 'executionRole', {
113 | // assumedBy: new iam.ServicePrincipal('ecs-tasks.amazonaws.com'),
114 | // });
115 |
116 | // executionRole.addToPolicy(new iam.PolicyStatement({
117 | // effect: iam.Effect.ALLOW,
118 | // actions: [
119 | // 'logs:CreateLogStream',
120 | // 'logs:PutLogEvents',
121 | // ],
122 | // resources: [
123 | // logGroup.logGroupArn
124 | // ]
125 | // }));
126 |
127 | // Create Task Definition - # EFS integration currently uses escape hatches until native CDK support is added #
128 | const volumeName = 'efsGrafanaVolume';
129 |
130 | const volumeConfig: ecs.Volume = {
131 | name: volumeName,
132 | efsVolumeConfiguration: {
133 | fileSystemId: fileSystem.fileSystemId,
134 | transitEncryption: 'ENABLED',
135 | authorizationConfig: { accessPointId: accessPoint.accessPointId}
136 | },
137 | };
138 |
139 | // https://aws.amazon.com/blogs/aws/amazon-ecs-supports-efs/
140 | const task_definition = new ecs.FargateTaskDefinition(this, "TaskDef",{
141 | taskRole: grafanaRole,
142 | executionRole: grafanaRole,
143 | volumes: [volumeConfig]
144 | });
145 |
146 | // Grafana Admin Password
147 | const grafanaAdminPassword = new secretsmanager.Secret(this, 'grafanaAdminPassword');
148 | // Allow Task to access Grafana Admin Password
149 | grafanaAdminPassword.grantRead(grafanaRole);
150 |
151 | // Web Container
152 | const container_web = task_definition.addContainer("web", {
153 | image: ecs.ContainerImage.fromRegistry('grafana/grafana'),
154 | logging: containerLogDriver,
155 | secrets: {
156 | GF_SECURITY_ADMIN_PASSWORD: ecs.Secret.fromSecretsManager(grafanaAdminPassword)
157 | },
158 | // environment: {
159 | // 'GF_SERVER_ROOT_URL' : `https://${domainZone.zoneName}`,
160 | // }
161 |
162 | }
163 | );
164 | // set port mapping
165 | container_web.addPortMappings({
166 | containerPort: 3000
167 | });
168 | container_web.addMountPoints({
169 | sourceVolume: volumeConfig.name,
170 | containerPath: '/var/lib/grafana',
171 | readOnly: false
172 | });
173 |
174 | // Create a load-balanced Fargate service and make it public
175 | const fargateService = new ecs_patterns.ApplicationLoadBalancedFargateService(this, "MyFargateService", {
176 | // domainName: domainName,
177 | // domainZone: domainZone,
178 | cluster: cluster, // Required
179 | cpu: 1024, // https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-cpu-memory-error.html
180 | desiredCount: 1, // Should be set to 1 to prevent multiple tasks attempting to write to EFS volume concurrently
181 | taskDefinition: task_definition,
182 | memoryLimitMiB: 2048, // https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-cpu-memory-error.html
183 | // protocol: ApplicationProtocol.HTTPS,
184 | platformVersion: ecs.FargatePlatformVersion.VERSION1_4
185 | });
186 |
187 | fargateService.targetGroup.configureHealthCheck({
188 | path: '/api/health'
189 | });
190 |
191 | // Allow Task to access EFS
192 | fileSystem.connections.allowDefaultPortFrom(fargateService.service.connections);
193 |
194 | }
195 | }
196 |
--------------------------------------------------------------------------------
/grafana-infra/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "cdk-grafana",
3 | "version": "0.1.0",
4 | "bin": {
5 | "grafana": "bin/cdk-grafana.js"
6 | },
7 | "scripts": {
8 | "build": "tsc",
9 | "watch": "tsc -w",
10 | "test": "jest",
11 | "cdk": "cdk"
12 | },
13 | "devDependencies": {
14 | "@aws-cdk/assert": "1.69.0",
15 | "@types/jest": "^26.0.24",
16 | "@types/node": "10.17.27",
17 | "aws-cdk": "^1.127.0",
18 | "jest": "^27.2.5",
19 | "ts-jest": "^26.5.6",
20 | "ts-node": "^8.1.0",
21 | "typescript": "^3.9.10"
22 | },
23 | "dependencies": {
24 | "@aws-cdk/aws-ec2": "1.69.0",
25 | "@aws-cdk/aws-ecs": "1.69.0",
26 | "@aws-cdk/aws-ecs-patterns": "1.69.0",
27 | "@aws-cdk/aws-efs": "1.69.0",
28 | "@aws-cdk/aws-iam": "1.69.0",
29 | "@aws-cdk/aws-secretsmanager": "1.69.0",
30 | "@aws-cdk/core": "1.69.0",
31 | "source-map-support": "^0.5.20"
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/grafana-infra/test/grafana.test.ts:
--------------------------------------------------------------------------------
1 | import * as cdk from '@aws-cdk/core';
2 | import * as Grafana from '../lib/cdk-grafana-stack';
3 | import '@aws-cdk/assert/jest';
4 |
5 |
6 | test('Empty Stack', () => {
7 | const app = new cdk.App( { context: {
8 | 'domainName': 'example.com',
9 | 'hostedZoneId': 'IIDASEED',
10 | 'zoneName': 'example.com'
11 | }});
12 | // WHEN
13 | const stack = new Grafana.CdkGrafanaStack(app, 'MyTestStack');
14 |
15 | // THEN
16 | expect(stack).toHaveResourceLike('AWS::ECS::Service');
17 | });
18 |
--------------------------------------------------------------------------------
/grafana-infra/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "ES2018",
4 | "module": "commonjs",
5 | "lib": ["es2018"],
6 | "declaration": true,
7 | "strict": true,
8 | "noImplicitAny": true,
9 | "strictNullChecks": true,
10 | "noImplicitThis": true,
11 | "alwaysStrict": true,
12 | "noUnusedLocals": false,
13 | "noUnusedParameters": false,
14 | "noImplicitReturns": true,
15 | "noFallthroughCasesInSwitch": false,
16 | "inlineSourceMap": true,
17 | "inlineSources": true,
18 | "experimentalDecorators": true,
19 | "strictPropertyInitialization": false,
20 | "typeRoots": ["./node_modules/@types"]
21 | },
22 | "exclude": ["cdk.out"]
23 | }
24 |
--------------------------------------------------------------------------------
/portfolio-test_ptf_50.json:
--------------------------------------------------------------------------------
1 | {"portf_id": "836f371e8f355fef40271ed159b362e8", "portf_create_ts": 1648665078.860413, "positions": [], "portf_name": "test_ptf_50", "handler_info": {"refresh_sec": "60", "deploy": "batch"}, "app_config_dict": {"Configuration": "PortfolioMonitoringConfigProfile", "Application": "PortfolioMonitoring", "Environment": "dev"}}
--------------------------------------------------------------------------------