├── .gitignore ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── PersonalizeCheatSheet2.0.md ├── README.md ├── getting_started ├── README.md ├── notebooks │ ├── 1.Building_Your_First_Campaign.ipynb │ ├── 2.View_Campaign_And_Interactions.ipynb │ ├── 3.Best_Practices-Clientside.ipynb │ ├── 99.Cleanup.ipynb │ └── README.md ├── notebooks_managed_domains │ ├── Building_Your_First_Recommender_Ecommerce.ipynb │ ├── Building_Your_First_Recommender_Video_On_Demand.ipynb │ ├── Clean_Up_Resources.ipynb │ └── README.md ├── personalize_getting_started.yaml └── static │ └── imgs │ ├── image.png │ ├── img1.png │ ├── img10.png │ ├── img11.png │ ├── img12.png │ ├── img13.png │ ├── img14.png │ ├── img15.png │ ├── img16.png │ ├── img17.png │ ├── img18.png │ ├── img2.png │ ├── img3.png │ ├── img4.png │ ├── img5.png │ ├── img6.png │ ├── img7.png │ ├── img8.png │ ├── img9.png │ ├── personalize_overview.png │ └── personalize_process.png └── next_steps ├── README.md ├── core_use_cases ├── README.md ├── automatic_context │ ├── README.md │ ├── images │ │ ├── architecture.png │ │ └── cloudformation-launch-stack.png │ ├── notebooks │ │ ├── 1.Building_Personalize_Campaign.ipynb │ │ ├── 2.Passing_Context_Automatically.ipynb │ │ └── 3.Cleanup.ipynb │ └── templates │ │ └── personalize-auto-context-template.yml ├── batch_recommendations │ ├── README.md │ └── user_personalization_batch_recommendations_example.ipynb ├── filters │ └── promotions │ │ ├── Clean_Up_Resources.ipynb │ │ ├── Promotions.ipynb │ │ └── images │ │ └── promotions-overview.png ├── metadata │ ├── README.md │ └── item-text-unstructured-metadata.ipynb ├── objective_optimization │ └── objective-optimization.ipynb ├── personalized_ranking │ ├── README.md │ └── personalize_ranking_example.ipynb ├── related_items │ ├── README.md │ ├── personalize_aws_similar_items_example.ipynb │ └── personalize_sims_example.ipynb ├── trending_now │ ├── README.md │ └── trending_now_example.ipynb ├── updating_datasets │ ├── README.md │ ├── 
sagemaker_notebook_exec_role.json │ ├── update-datasets-user-personalization-example.ipynb │ └── update-item-dataset-schema-example.ipynb ├── user_personalization │ ├── README.md │ ├── user-personalization-with-contextual-recommendations.ipynb │ └── user-personalization-with-exploration.ipynb └── user_segmentation │ └── user_segmentation_example.ipynb ├── data_science ├── README.md ├── diagnose │ ├── README.md │ ├── diagnose.py │ ├── example_ml_100k.ipynb │ ├── example_ml_1m.ipynb │ ├── example_ml_20m.ipynb │ └── imgs │ │ ├── power-law.png │ │ ├── retrain-freq.png │ │ ├── temporal-drift.png │ │ └── time-delta.png └── offline_performance_evaluation │ ├── README.md │ ├── metrics.py │ └── personalize_temporal_holdout.ipynb ├── evaluation └── measuring_impact_of_recommendations │ ├── Clean_Up_Resources.ipynb │ ├── Measure_Impact_of_Recommendations.ipynb │ └── images │ ├── metrics-overview-scenarios.png │ └── metrics-overview.png ├── generative_ai ├── README.md ├── personalized_marketing_campaign │ ├── README.md │ ├── airline_ticket_user_segmentation_09212023_github.ipynb │ ├── df_interactions.csv │ ├── df_item_deduplicated.csv │ ├── df_users_deduplicated.csv │ ├── image_to_image1.png │ ├── personalized_marketing_campaign_10032023_1600_github.ipynb │ ├── test-metadata.json │ ├── ticketing-template.json │ ├── top50inHK.json │ └── universal_negative_prompts.json ├── personalized_recommender_agent │ ├── 01_Recommender-Agent_Configure-Personalize-Resources.ipynb │ ├── 02_Recommender-Agent_Build-Agent-ConverseAPI.ipynb │ ├── 03_Recommender-Agent_CleanUp.ipynb │ ├── README.md │ ├── personalizeCFRecommenderAgent.yaml │ └── static │ │ └── function-flowchart.png └── user_personalized_marketing_messaging_with_amazon_personalize_and_gen_ai │ ├── 01_Introduction_and_Data_Preparation.ipynb │ ├── 02_Train_Personalize_Model_01_Data.ipynb │ ├── 03_Train_Personalize_Model_02_Training.ipynb │ ├── 04_Personalized_Emails_with_Amazon_Personalize_and_Generative_AI.ipynb │ ├── 
05_Clean_Up.ipynb │ ├── README.md │ ├── images │ ├── IMDb_Logo_Rectangle.png │ ├── architecture.png │ └── personalize_metrics.png │ ├── params.json │ └── personalizeSimpleCFMarketingContentGen.yaml ├── operations ├── README.md ├── filter_rotator │ ├── LICENSE │ ├── README.md │ ├── conftest.py │ ├── src │ │ └── filter_rotator_function │ │ │ ├── __init__.py │ │ │ ├── filter_rotator.py │ │ │ ├── requirements.txt │ │ │ └── template_evaluation.py │ ├── template.yaml │ └── tests │ │ ├── requirements.txt │ │ └── unit │ │ ├── __init__.py │ │ └── test_handler.py ├── lambda_examples │ ├── README.md │ ├── Sending_Events_to_S3.ipynb │ └── event_processor.py ├── ml_ops │ ├── LICENSE │ ├── README.md │ ├── example │ │ ├── data │ │ │ ├── Interactions │ │ │ │ └── interactions.csv │ │ │ └── Items │ │ │ │ └── item-meta.csv │ │ └── params.json │ ├── images │ │ ├── architecture.png │ │ ├── icon.png │ │ └── stepfunctions.png │ └── personalize-step-functions │ │ ├── lambdas │ │ ├── create-campaign │ │ │ ├── __init__.py │ │ │ ├── campaign.py │ │ │ └── requirements.txt │ │ ├── create-dataset │ │ │ ├── __init__.py │ │ │ ├── dataset.py │ │ │ └── requirements.txt │ │ ├── create-datasetgroup │ │ │ ├── __init__.py │ │ │ ├── datasetgroup.py │ │ │ └── requirements.txt │ │ ├── create-filters │ │ │ ├── __init__.py │ │ │ ├── filters.py │ │ │ └── requirements.txt │ │ ├── create-solution │ │ │ ├── __init__.py │ │ │ ├── requirements.txt │ │ │ └── solution.py │ │ ├── delete-campaign │ │ │ ├── __init__.py │ │ │ ├── delete-campaign.py │ │ │ └── requirements.txt │ │ ├── delete-dataset │ │ │ ├── __init__.py │ │ │ ├── delete-dataset.py │ │ │ └── requirements.txt │ │ ├── delete-datasetgroup │ │ │ ├── __init__.py │ │ │ ├── delete-datasetgroup.py │ │ │ └── requirements.txt │ │ ├── delete-solution │ │ │ ├── __init__.py │ │ │ ├── delete-solution.py │ │ │ └── requirements.txt │ │ ├── delete-tracker │ │ │ ├── __init__.py │ │ │ ├── delete-tracker.py │ │ │ └── requirements.txt │ │ ├── event-tracker │ │ │ ├── 
__init__.py │ │ │ ├── eventTracker.py │ │ │ └── requirements.txt │ │ ├── import-data │ │ │ ├── __init__.py │ │ │ ├── datasetimport.py │ │ │ └── requirements.txt │ │ ├── list-campaigns │ │ │ ├── __init__.py │ │ │ ├── list-campaigns.py │ │ │ └── requirements.txt │ │ ├── list-datasets │ │ │ ├── __init__.py │ │ │ ├── list-datasets.py │ │ │ └── requirements.txt │ │ ├── list-solution-versions │ │ │ ├── __init__.py │ │ │ ├── list-solution-versions.py │ │ │ └── requirements.txt │ │ ├── list-solutions │ │ │ ├── __init__.py │ │ │ ├── list-solutions.py │ │ │ └── requirements.txt │ │ ├── list-trackers │ │ │ ├── __init__.py │ │ │ ├── list-trackers.py │ │ │ └── requirements.txt │ │ ├── notify-delete │ │ │ ├── __inti__.py │ │ │ ├── notify-delete.py │ │ │ └── requirements.txt │ │ ├── notify │ │ │ ├── __init__.py │ │ │ ├── notify.py │ │ │ └── requirements.txt │ │ ├── s3lambda │ │ │ ├── __init__.py │ │ │ ├── parse.py │ │ │ └── requirements.txt │ │ ├── wait-delete-campaign │ │ │ ├── __init__.py │ │ │ ├── requirements.txt │ │ │ └── wait-delete-campaign.py │ │ ├── wait-delete-dataset │ │ │ ├── __init__.py │ │ │ ├── requirements.txt │ │ │ └── wait-delete-dataset.py │ │ ├── wait-delete-datasetgroup │ │ │ ├── __init__.py │ │ │ ├── requirements.txt │ │ │ └── wait-delete-datasetgroup.py │ │ ├── wait-delete-solution │ │ │ ├── __init__.py │ │ │ ├── requirements.txt │ │ │ └── wait-delete-solution.py │ │ ├── wait-delete-tracker │ │ │ ├── __init__.py │ │ │ ├── requirements.txt │ │ │ └── wait-delete-tracker.py │ │ └── wait-solution-version │ │ │ ├── __init__.py │ │ │ ├── requirements.txt │ │ │ └── wait-solution-version.py │ │ ├── shared │ │ └── python │ │ │ ├── actions.py │ │ │ ├── loader.py │ │ │ └── parameters.py │ │ └── template.yaml ├── ml_ops_ds_sdk │ ├── CODE_OF_CONDUCT.md │ ├── CONTRIBUTING.md │ ├── LICENSE │ ├── Personalize-Stepfunction-Workflow.ipynb │ ├── README.md │ └── lambda │ │ ├── stepfunction-create-schema.py │ │ ├── stepfunction-createdatasetimportjob.py │ │ ├── 
stepfunction_create_personalize_role.py │ │ ├── stepfunction_create_solution_version.py │ │ ├── stepfunction_getRecommendations.py │ │ ├── stepfunction_getsolution_metric_create_campaign.py │ │ ├── stepfunction_select-recipe_create-solution.py │ │ ├── stepfunction_waitforCampaign.py │ │ ├── stepfunction_waitforDatasetGroup.py │ │ ├── stepfunction_waitforSolutionVersion.py │ │ ├── stepfunction_waitfordatasetimportjob.py │ │ ├── stepfunctioncreatedatagroup.py │ │ ├── stepfunctioncreatedataset.py │ │ └── stepfunctioncreatedatasetimportjob.py └── streaming_events │ ├── LICENSE │ ├── README.md │ ├── images │ └── architecture.png │ ├── lambdas │ ├── getRecommendations │ │ ├── __init__.py │ │ ├── getRecommendations.py │ │ └── requirements.txt │ └── putevents │ │ ├── package.json │ │ └── putevents.js │ └── template.yaml └── workshops ├── Immersion_Day ├── README.md ├── static │ └── imgs │ │ ├── img1.png │ │ ├── img2.png │ │ ├── img3.png │ │ ├── img4.png │ │ ├── img5.png │ │ ├── img6.png │ │ ├── img7.png │ │ ├── img8.png │ │ ├── img9.png │ │ └── personalize_metrics.png └── user_personalization_contextual_example.ipynb ├── POC_in_a_box ├── 01_Validating_and_Importing_User_Item_Interaction_Data.ipynb ├── 02_Validating_and_Importing_Item_Metadata.ipynb ├── 03_Creating_and_Evaluating_Solutions.ipynb ├── 04_Deploying_Campaigns_and_Filters.ipynb ├── 05_Interacting_with_Campaigns_and_Filters.ipynb ├── 06_Clean_Up_Resources.ipynb ├── PersonalizePOC.yaml ├── PersonalizePOCEE.yaml ├── README.md ├── completed │ ├── 01_Validating_and_Importing_User_Item_Interaction_Data.ipynb │ ├── 02_Validating_and_Importing_Item_Metadata.ipynb │ ├── 03_Creating_and_Evaluating_Solutions.ipynb │ ├── 04_Deploying_Campaigns_and_Filters.ipynb │ ├── 05_Interacting_with_Campaigns_and_Filters.ipynb │ ├── 06_Clean_Up_Resources.ipynb │ ├── PersonalizePOC.yaml │ ├── PersonalizePOCEE.yaml │ ├── README.md │ ├── poc_data │ │ ├── interactions.csv │ │ ├── item-meta.csv │ │ └── ml-latest-small │ │ │ ├── README.txt │ 
│ │ ├── links.csv │ │ │ ├── movies.csv │ │ │ ├── ratings.csv │ │ │ └── tags.csv │ └── static │ │ └── imgs │ │ ├── img1.png │ │ ├── img2.png │ │ ├── img3.png │ │ ├── img4.png │ │ ├── img5.png │ │ ├── img6.png │ │ ├── img7.png │ │ ├── img8.png │ │ └── img9.png └── static │ └── imgs │ ├── img1.png │ ├── img2.png │ ├── img3.png │ ├── img4.png │ ├── img5.png │ ├── img6.png │ ├── img7.png │ ├── img8.png │ ├── img9.png │ └── personalize_metrics.png ├── README.md ├── Reinvent_2019 ├── README.md ├── RI_PersonalizeWorkshop.yaml ├── ReInvent2019_Workshop.ipynb └── static │ └── imgs │ ├── image.png │ ├── img1.png │ ├── img10.png │ ├── img11.png │ ├── img12.png │ ├── img13.png │ ├── img14.png │ ├── img2.png │ ├── img3.png │ ├── img4.png │ ├── img5.png │ ├── img6.png │ ├── img7.png │ ├── img8.png │ ├── img9.png │ ├── personalize_overview.png │ └── personalize_process.png └── magic_movie_machine ├── README.md ├── notebooks ├── Building the Magic Movie Machine Recommender.ipynb ├── Clean_Up_Resources.ipynb └── README.md └── static └── imgs ├── MagicMovieMachine_banner.png ├── image.png ├── img1.png ├── img10.png ├── img11.png ├── img12.png ├── img13.png ├── img14.png ├── img2.png ├── img3.png ├── img4.png ├── img5.png ├── img6.png ├── img7.png ├── img8.png ├── img9.png ├── personalize_overview.png └── personalize_process.png /.gitignore: -------------------------------------------------------------------------------- 1 | *.zip 2 | ml-100k 3 | .ipynb_checkpoints 4 | ml-1m 5 | json_input.* 6 | .DS_Store 7 | .vscode 8 | .aws-sam 9 | *.toml 10 | __pycache__/ 11 | .pytest_cache 12 | .venv 13 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | ## Code of Conduct 2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 
3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 4 | opensource-codeofconduct@amazon.com with any additional questions or comments. 5 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing Guidelines 2 | 3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional 4 | documentation, we greatly value feedback and contributions from our community. 5 | 6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary 7 | information to effectively respond to your bug report or contribution. 8 | 9 | 10 | ## Reporting Bugs/Feature Requests 11 | 12 | We welcome you to use the GitHub issue tracker to report bugs or suggest features. 13 | 14 | When filing an issue, please check [existing open](https://github.com/aws-samples/amazon-personalize-samples/issues), or [recently closed](https://github.com/aws-samples/amazon-personalize-samples/issues?utf8=%E2%9C%93&q=is%3Aissue%20is%3Aclosed%20), issues to make sure somebody else hasn't already 15 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful: 16 | 17 | * A reproducible test case or series of steps 18 | * The version of our code being used 19 | * Any modifications you've made relevant to the bug 20 | * Anything unusual about your environment or deployment 21 | 22 | 23 | ## Contributing via Pull Requests 24 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that: 25 | 26 | 1. You are working against the latest source on the *master* branch. 27 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already. 28 | 3. 
You open an issue to discuss any significant work - we would hate for your time to be wasted. 29 | 30 | To send us a pull request, please: 31 | 32 | 1. Fork the repository. 33 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change. 34 | 3. Ensure local tests pass. 35 | 4. Commit to your fork using clear commit messages. 36 | 5. Send us a pull request, answering any default questions in the pull request interface. 37 | 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation. 38 | 39 | GitHub provides additional documentation on [forking a repository](https://help.github.com/articles/fork-a-repo/) and 40 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/). 41 | 42 | 43 | ## Finding contributions to work on 44 | Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any ['help wanted'](https://github.com/aws-samples/amazon-personalize-samples/labels/help%20wanted) issues is a great place to start. 45 | 46 | 47 | ## Code of Conduct 48 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 49 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 50 | opensource-codeofconduct@amazon.com with any additional questions or comments. 51 | 52 | 53 | ## Security issue notifications 54 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public GitHub issue. 
55 | 56 | 57 | ## Licensing 58 | 59 | See the [LICENSE](https://github.com/aws-samples/amazon-personalize-samples/blob/master/LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution. 60 | 61 | We may ask you to sign a [Contributor License Agreement (CLA)](http://en.wikipedia.org/wiki/Contributor_License_Agreement) for larger changes. 62 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this 4 | software and associated documentation files (the "Software"), to deal in the Software 5 | without restriction, including without limitation the rights to use, copy, modify, 6 | merge, publish, distribute, sublicense, and/or sell copies of the Software, and to 7 | permit persons to whom the Software is furnished to do so. 8 | 9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 10 | INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 11 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 12 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 13 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 14 | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 15 | -------------------------------------------------------------------------------- /getting_started/notebooks/99.Cleanup.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Cleanup\n", 8 | "\n", 9 | "After building your model you may want to delete your campaign, solutions, and datasets. 
The following cells will ensure that you have successfully cleaned up all of the resources you created in this lab.\n", 10 | "\n", 11 | "## Imports and Connections to AWS\n", 12 | "\n", 13 | "The following lines import all the necessary libraries and then connect you to Amazon Personalize.\n" 14 | ] 15 | }, 16 | { 17 | "cell_type": "code", 18 | "execution_count": null, 19 | "metadata": {}, 20 | "outputs": [], 21 | "source": [ 22 | "# Imports\n", 23 | "import boto3\n", 24 | "import json\n", 25 | "import numpy as np\n", 26 | "import pandas as pd\n", 27 | "import time" 28 | ] 29 | }, 30 | { 31 | "cell_type": "code", 32 | "execution_count": null, 33 | "metadata": {}, 34 | "outputs": [], 35 | "source": [ 36 | "# Configure the SDK to Personalize:\n", 37 | "personalize = boto3.client('personalize')\n", 38 | "personalize_runtime = boto3.client('personalize-runtime')" 39 | ] 40 | }, 41 | { 42 | "cell_type": "markdown", 43 | "metadata": {}, 44 | "source": [ 45 | "## Defining the Things to Cleanup\n", 46 | "\n", 47 | "Using the store command we will retrieve all the values needed to cleanup our work." 
48 | ] 49 | }, 50 | { 51 | "cell_type": "code", 52 | "execution_count": null, 53 | "metadata": {}, 54 | "outputs": [], 55 | "source": [ 56 | "%store -r" 57 | ] 58 | }, 59 | { 60 | "cell_type": "code", 61 | "execution_count": null, 62 | "metadata": {}, 63 | "outputs": [], 64 | "source": [ 65 | "# Delete the campaign:\n", 66 | "personalize.delete_campaign(campaignArn=campaign_arn)\n", 67 | "time.sleep(60)" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": null, 73 | "metadata": {}, 74 | "outputs": [], 75 | "source": [ 76 | "# Delete the solution\n", 77 | "personalize.delete_solution(solutionArn=solution_arn)\n", 78 | "time.sleep(60)" 79 | ] 80 | }, 81 | { 82 | "cell_type": "code", 83 | "execution_count": null, 84 | "metadata": {}, 85 | "outputs": [], 86 | "source": [ 87 | "# Delete the event tracker\n", 88 | "personalize.delete_event_tracker(eventTrackerArn=event_tracker_arn)\n", 89 | "time.sleep(60)" 90 | ] 91 | }, 92 | { 93 | "cell_type": "code", 94 | "execution_count": null, 95 | "metadata": {}, 96 | "outputs": [], 97 | "source": [ 98 | "# Delete the interaction dataset\n", 99 | "personalize.delete_dataset(datasetArn=dataset_arn)\n", 100 | "time.sleep(60)" 101 | ] 102 | }, 103 | { 104 | "cell_type": "code", 105 | "execution_count": null, 106 | "metadata": {}, 107 | "outputs": [], 108 | "source": [ 109 | "# Delete the event dataset\n", 110 | "event_interactions_dataset_arn = dataset_arn\n", 111 | "event_interactions_dataset_arn = event_interactions_dataset_arn.replace(\"INTERACTIONS\", \"EVENT_INTERACTIONS\")\n", 112 | "personalize.delete_dataset(datasetArn=event_interactions_dataset_arn)\n", 113 | "time.sleep(60)" 114 | ] 115 | }, 116 | { 117 | "cell_type": "code", 118 | "execution_count": null, 119 | "metadata": {}, 120 | "outputs": [], 121 | "source": [ 122 | "# Delete the schema\n", 123 | "personalize.delete_schema(schemaArn=schema_arn)" 124 | ] 125 | }, 126 | { 127 | "cell_type": "markdown", 128 | "metadata": {}, 129 | "source": [ 130 | 
"## Last Step\n", 131 | "\n", 132 | "After cleaning up all of the resources you can now close this window and go back to the github page you stareted on. At the bottom of the Readme file are steps to delete the CloudFormation stack you created earlier. Once that is done you are 100% done with the lab.\n", 133 | "\n", 134 | "Congratulations!" 135 | ] 136 | }, 137 | { 138 | "cell_type": "code", 139 | "execution_count": null, 140 | "metadata": {}, 141 | "outputs": [], 142 | "source": [] 143 | } 144 | ], 145 | "metadata": { 146 | "kernelspec": { 147 | "display_name": "conda_python3", 148 | "language": "python", 149 | "name": "conda_python3" 150 | }, 151 | "language_info": { 152 | "codemirror_mode": { 153 | "name": "ipython", 154 | "version": 3 155 | }, 156 | "file_extension": ".py", 157 | "mimetype": "text/x-python", 158 | "name": "python", 159 | "nbconvert_exporter": "python", 160 | "pygments_lexer": "ipython3", 161 | "version": "3.6.5" 162 | } 163 | }, 164 | "nbformat": 4, 165 | "nbformat_minor": 4 166 | } 167 | -------------------------------------------------------------------------------- /getting_started/notebooks/README.md: -------------------------------------------------------------------------------- 1 | # Getting Started 2 | 3 | This tutorial outlines the process of building your own recommendation model, improving it, and then cleaning up all of your resources to prevent any unwanted charges. To get started executing these follow the steps in the next section. 4 | 5 | 1. `1.Building_Your_First_Campaign.ipynb` - Guides you through building your first campaign and recommendation algorithm. 6 | 2. `2.View_Campaign_And_Interactions.ipynb` - Showcase how to generate a recommendation and how to modify it with real time intent. 7 | 3. `3.Best_Practices-Clientside.ipynb` - Guides you to implement best practices. 8 | 4. `Cleanup.ipynb` - Deletes anything that was created so you are not charged for additional resources. 
9 | 10 | You can download the Jupyter notebooks from the `/notebooks` folder or deploy the following CloudFormation template which clones this repo. 11 | 12 | If you have any issues with any of the content please open an issue here in the repository. -------------------------------------------------------------------------------- /getting_started/notebooks_managed_domains/README.md: -------------------------------------------------------------------------------- 1 | # Getting Started 2 | 3 | This tutorial outlines the process of building your own recommendation model using use-case optimized recommenders, testing them and then cleaning up all of your resources to prevent any unwanted charges. To get started executing these, follow the steps in the next section. 4 | 5 | 1. `Building_Your_First_Recommender_Video_On_Demand.ipynb` - Guides you through building your first video on demand recommenders and getting your first recommendations. 6 | 1. `Building_Your_First_Recommender_Ecommerce.ipynb` - Guides you through building your first e-commerce recommenders and getting your first recommendations. 7 | 2. `Clean_Up_Resources.ipynb` - Deletes anything that was created in the previous notebook so you are not charged for additional resources. Note: make sure you run this notebook after each of the notebooks above, as it will only delete the latest resources. 8 | 9 | You can download the Jupyter notebooks from the `/notebooks_managed_domains` folder or deploy the CloudFormation template which clones this repository. 10 | 11 | If you have any issues with any of the content in this repository please open an issue here in the repository. 
-------------------------------------------------------------------------------- /getting_started/static/imgs/image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/getting_started/static/imgs/image.png -------------------------------------------------------------------------------- /getting_started/static/imgs/img1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/getting_started/static/imgs/img1.png -------------------------------------------------------------------------------- /getting_started/static/imgs/img10.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/getting_started/static/imgs/img10.png -------------------------------------------------------------------------------- /getting_started/static/imgs/img11.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/getting_started/static/imgs/img11.png -------------------------------------------------------------------------------- /getting_started/static/imgs/img12.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/getting_started/static/imgs/img12.png -------------------------------------------------------------------------------- /getting_started/static/imgs/img13.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/getting_started/static/imgs/img13.png -------------------------------------------------------------------------------- /getting_started/static/imgs/img14.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/getting_started/static/imgs/img14.png -------------------------------------------------------------------------------- /getting_started/static/imgs/img15.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/getting_started/static/imgs/img15.png -------------------------------------------------------------------------------- /getting_started/static/imgs/img16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/getting_started/static/imgs/img16.png -------------------------------------------------------------------------------- /getting_started/static/imgs/img17.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/getting_started/static/imgs/img17.png -------------------------------------------------------------------------------- /getting_started/static/imgs/img18.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/getting_started/static/imgs/img18.png 
-------------------------------------------------------------------------------- /getting_started/static/imgs/img2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/getting_started/static/imgs/img2.png -------------------------------------------------------------------------------- /getting_started/static/imgs/img3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/getting_started/static/imgs/img3.png -------------------------------------------------------------------------------- /getting_started/static/imgs/img4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/getting_started/static/imgs/img4.png -------------------------------------------------------------------------------- /getting_started/static/imgs/img5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/getting_started/static/imgs/img5.png -------------------------------------------------------------------------------- /getting_started/static/imgs/img6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/getting_started/static/imgs/img6.png -------------------------------------------------------------------------------- /getting_started/static/imgs/img7.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/getting_started/static/imgs/img7.png -------------------------------------------------------------------------------- /getting_started/static/imgs/img8.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/getting_started/static/imgs/img8.png -------------------------------------------------------------------------------- /getting_started/static/imgs/img9.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/getting_started/static/imgs/img9.png -------------------------------------------------------------------------------- /getting_started/static/imgs/personalize_overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/getting_started/static/imgs/personalize_overview.png -------------------------------------------------------------------------------- /getting_started/static/imgs/personalize_process.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/getting_started/static/imgs/personalize_process.png -------------------------------------------------------------------------------- /next_steps/README.md: -------------------------------------------------------------------------------- 1 | # Amazon Personalize Next Steps 2 | 3 | Notebooks and examples on how to onboard and use various features of Amazon Personalize 4 | 5 | ## Amazon Personalize Use Cases examples 6 | 7 | The 
[core_use_cases/](core_use_cases/) folder contains detailed examples of the most typical use cases. 8 | 9 | ## Generative AI 10 | 11 | The [generative_ai/](generative_ai/) folder contains examples of combining foundation models with Amazon Personalize. 12 | 13 | ## Scalable Operations examples for your Amazon Personalize deployments 14 | 15 | The [operations/](operations/) folder contains examples on the following topics: 16 | 17 | * [Maintaining Personalized Experiences with Machine Learning](https://aws.amazon.com/solutions/implementations/maintaining-personalized-experiences-with-ml/) 18 | - This AWS Solution allows you to automate the end-to-end process of importing datasets, creating solutions and solution versions, creating and updating campaigns, creating filters, and running batch inference jobs. These processes can be run on-demand or triggered based on a schedule that you define. 19 | 20 | * MLOps (legacy) 21 | - This is a project to showcase how to quickly deploy a Personalize Campaign in a fully automated fashion using AWS Step Functions. To get started navigate to the [ml_ops](operations/ml_ops/) folder and follow the README instructions. This example has been replaced by the [Maintaining Personalized Experiences with Machine Learning](https://aws.amazon.com/solutions/implementations/maintaining-personalized-experiences-with-ml/) solution. 22 | 23 | * MLOps Data Science SDK 24 | - This is a project to showcase how to quickly deploy a Personalize Campaign in a fully automated fashion using AWS Data Science SDK. To get started navigate to the [ml_ops_ds_sdk](operations/ml_ops_ds_sdk/) folder and follow the README instructions. 25 | 26 | * [Personalization APIs](https://github.com/aws-samples/personalization-apis) 27 | - Real-time low latency API framework that sits between your applications and recommender systems such as Amazon Personalize. 
Provides best practice implementations of response caching, API gateway configurations, A/B testing with [Amazon CloudWatch Evidently](https://docs.aws.amazon.com/cloudwatchevidently/latest/APIReference/Welcome.html), inference-time item metadata, automatic contextual recommendations, and more. 28 | 29 | * Streaming Events 30 | - This is a project to showcase how to quickly deploy an API Layer in front of your Amazon Personalize Campaign and your Event Tracker endpoint. To get started navigate to the [streaming_events](operations/streaming_events/) folder and follow the README instructions. 31 | 32 | * Lambda Examples 33 | - This folder starts with a basic example of integrating `put_events` into your Personalize Campaigns by using Lambda functions processing new data from S3. To get started navigate to the [lambda_examples](operations/lambda_examples/) folder and follow the README instructions. 34 | 35 | * [Personalize Monitor](https://github.com/aws-samples/amazon-personalize-monitor) 36 | - This project adds monitoring, alerting, a dashboard, and optimization tools for running Amazon Personalize across your AWS environments. 37 | 38 | ## Reference Architectures 39 | 40 | The following reference architectures provide examples of how to apply Amazon Personalize across industries: 41 | 42 | * Retail - the [Retail Demo Store](https://github.com/aws-samples/retail-demo-store) is a full stack web application that implements personalization using Personalize in a web application, messaging, and conversation AI interfaces. There are hands-on workshops 43 | * Media and Entertainment 44 | * Travel and Hospitality 45 | 46 | ## Workshops 47 | 48 | The [workshops/](workshops/) folder contains a list of our most current workshops: 49 | 50 | * POC in a Box 51 | * Re:invent 2019 52 | 53 | ## Data Science Tools 54 | 55 | The [data_science/](data_science/) folder contains an example on how to approach visualization of the key properties of your input datasets. 
56 | 57 | The key components we look out for include: 58 | - Missing data, duplicated events, and repeated item consumptions 59 | - Power-law distribution of categorical fields 60 | - Temporal drift analysis for cold-start applicability 61 | - Analysis on user-session distribution 62 | 63 | ## License Summary 64 | 65 | This sample code is made available under a modified MIT license. See the LICENSE file. 66 | -------------------------------------------------------------------------------- /next_steps/core_use_cases/README.md: -------------------------------------------------------------------------------- 1 | Amazon Personalize Core Use Cases 2 | --- 3 | 4 | Amazon Personalize is a machine learning service that makes it easy for developers to produce individualized recommendations for customers who use their applications. It reflects the vast experience that Amazon has in building personalization systems. You can use Amazon Personalize in a variety of scenarios, such as giving users recommendations based on their preferences and behavior, personalized re-ranking of results, and personalizing content for emails and notifications. 5 | 6 | As the developer, you only need to do the following: 7 | 8 | - Format input data and upload the data into an Amazon S3 bucket, or send real-time event, user, and item data using the Personalize SDK. 9 | - Select a training recipe (algorithm) to use on the data. 10 | - Train a solution version using the recipe. 11 | - Deploy the solution version. 12 | 13 | ## Mapping use cases to recipes 14 | 15 | | Use Case | Recipe | Description 16 | |-------- | -------- |:------------ 17 | | User Personalization | aws-user-personalization | This recipe is optimized for all user recommendation scenarios. It predicts the items that a user will interact with based on Interactions, Items, and Users datasets. 
It uses an HRNN algorithm to generate recommendations based on relevance (exploitation) and automatic item exploration to recommend new/cold items. You control the weighting of exploitation vs exploration. 18 | | Related Items | aws-sims | Computes items similar to a given item based on co-occurrence of item in same user history in the Interactions dataset. 19 | | Personalized Ranking | aws-personalized-ranking | Reranks a list of items for a user. Trains on Interactions, Items, and Users datasets. 20 | 21 | *The above table lists the core and most recommended mappings of use-cases to recipes. Personalize does support other recipes such as aws-popularity-count and the legacy aws-hrnn, aws-hrnn-coldstart, and aws-hrnn-metadata recipes. However, the algorithms in the aws-hrnn-\* recipes were subsumed and extended by the aws-user-personalization recipe so are no longer recommended for user personalization use-cases.* 22 | 23 | ## Content 24 | 25 | In this directory we have examples of various use cases 26 | 27 | 1. [User Personalization](user_personalization/) 28 | - Predicts items a user will interact with. A hierarchical recurrent neural network which can model the temporal order of user-item interactions combined with automatic exploration of new/cold items. 29 | 2. [Related Items](related_items/) 30 | - Computes items similar to a given item based on co-occurrence of item in same user history in user-item interaction dataset. 31 | 3. [Personalized Ranking](personalized_ranking/) 32 | - Reranks a list of items for a user based on relevance. 33 | 4. [Batch Recommendations](batch_recommendations/) 34 | - Create recommendations for multiple users or items in a single batch job. 35 | 5. [Metadata](metadata/) 36 | - Samples for how to prepare and include metadata in your datasets. 37 | 6. 
[Objective Optimization](objective_optimization/objective-optimization.ipynb) 38 | - Sample for how to balance business objectives with relevant recommendations using objective optimization. 39 | 7. [Updating Datasets](updating_datasets/) 40 | - Samples for how to update your Amazon Personalize Datasets. 41 | 42 | ## License Summary 43 | 44 | This sample code is made available under a modified MIT license. See the LICENSE file. 45 | -------------------------------------------------------------------------------- /next_steps/core_use_cases/automatic_context/README.md: -------------------------------------------------------------------------------- 1 | ## Amazon Personalize - Automatic Context 2 | 3 | ### Overview: 4 | 5 | This is a companion code repository to an AWS blog post on setting automatic user context of device type and dynamic filters on user's country in Amazon Personalize. AWS blog link is [here](https://aws.amazon.com/blogs/machine-learning/recommend-and-dynamically-filter-items-based-on-user-context-in-amazon-personalize/). In the blog, we discuss the benefits of setting user context like device type, time of day and location automatically with illustrations on how easily it can be setup. In this repository, we provide artifacts that demonstrate the ability of Amazon Personalize APIs to provide customized recommendations based on user's device type derived automatically. 6 | 7 | ### Architecture 8 | 9 | Here is a diagram showing the solution architecture: 10 | 11 | 12 | 13 | ### Prerequisites 14 | 15 | Prior to running the steps under Instructions, you will need access to an AWS Account where you have full Admin privileges. The CloudFormation template will deploy multiple AWS Lambda functions, IAM Roles, and a new SageMaker Studio domain. In addition, having basic knowledge of the following services will be valuable: AWS Lambda, Amazon CloudFront, and Amazon IAM Roles. 16 | 17 | ### Instructions 18 | 19 | 1. 
Click 'Launch Stack' button below to deploy resources into your AWS Account. If you want to deploy into a different region other than default, change the region drop down in the AWS console before creating the stack. 20 | 21 | [](https://console.aws.amazon.com/cloudformation/home?region=us-east-1#/stacks/new?stackName=personalize-auto-context-stack&templateURL=https://personalize-solution-staging-us-east-1.s3.amazonaws.com/personalize-auto-context/personalize-auto-context-template.yml) 22 | 23 | 24 | 2. Click 'Next' for 'Specify template', 'Specify stack details', and 'Configure stack options'. On the 'Review' step, check the box that says 'I acknowledge that AWS CloudFormation might create IAM resources with custom names.' and then click 'Create Stack'. You can view the CloudFormation template directly by looking [here](./templates/personalize-auto-context-template.yml). The stack will take a few minutes to launch. When it completes, you can view the items created by clicking on the Resources tab. 25 | 3. Once the stack is complete, browse to Amazon SageMaker in the AWS console and click on the 'Domains' tab on the left. 26 | 4. Click on the pre-created SageMaker domain and launch SageMaker Studio. 27 | 5. Inside SageMaker Studio's top menu, choose “Git” and choose “Clone a Repository” from the sub-menu. Provide the URL of this repository for the cloning operation. 28 | 29 | ### Running the Notebooks 30 | 31 | There are a series of notebooks which should be run in order. Follow the step-by-step guide in each notebook: 32 | 33 | * [notebooks/1.Building_Personalize_Campaign.ipynb](./notebooks/1.Building_Personalize_Campaign.ipynb) - generate synthetic dataset and host a Personalize campaign based off of it. 34 | * [notebooks/2.Passing_Context_Automatically.ipynb](./notebooks/2.Passing_Context_Automatically.ipynb) - Test context setting happening automatically using resources pre-created by CloudFormation template. 
35 | * [notebooks/3.Cleanup.ipynb](./notebooks/3.Cleanup.ipynb) - Cleanup resources. 36 | 37 | ### Things to be aware of - IMPORTANT 38 | 39 | - In SageMaker Studio notebooks, the "Run All Cells" option is not recommended as there are important manual intervening steps that are essential for successful completion of the workshop. 40 | - Recommend setting Image: Data Science; Kernel: Python3 and Instance type: ml.t3.medium (2 vCPU + 4 GiB). Prefer instance sizes with larger memory if there are any out of memory situations from Pandas library calls. 41 | 42 | 43 | ### Clean up - IMPORTANT 44 | To destroy the AWS resources created as part of this example, complete the following two steps: 45 | 1. Run all cells in [notebooks/3.Cleanup.ipynb](./notebooks/3.Cleanup.ipynb) 46 | 2. Go to CloudFormation in the AWS console, select `personalize-auto-context-stack` and click 'Delete'. 47 | 3. Explicitly delete the SageMaker created EFS volume, its security groups and Elastic Network Interfaces. 48 | 49 | ## Security 50 | 51 | See [CONTRIBUTING](CONTRIBUTING.md#security-issue-notifications) for more information. 52 | 53 | ## License 54 | 55 | This library is licensed under the MIT-0 License. See the [LICENSE](./LICENSE) file. 
-------------------------------------------------------------------------------- /next_steps/core_use_cases/automatic_context/images/architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/core_use_cases/automatic_context/images/architecture.png -------------------------------------------------------------------------------- /next_steps/core_use_cases/automatic_context/images/cloudformation-launch-stack.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/core_use_cases/automatic_context/images/cloudformation-launch-stack.png -------------------------------------------------------------------------------- /next_steps/core_use_cases/batch_recommendations/README.md: -------------------------------------------------------------------------------- 1 | Amazon Personalize Batch Recommendations 2 | --- 3 | 4 | Use an asynchronous batch workflow to get recommendations from large datasets that do not require real-time updates. For instance, you might create a batch inference job to get product recommendations for all users on an email list, or to get item-to-item similarities (SIMS) across an item catalog. To get batch recommendations, you can create a batch inference job by calling the CreateBatchInferenceJob API. 5 | 6 | ## Sample 7 | 8 | The [user_personalization_batch_recommendations_example.ipynb](user_personalization_batch_recommendations_example.ipynb) 9 | 10 | ## License Summary 11 | 12 | This sample code is made available under a modified MIT license. See the LICENSE file. 
13 | -------------------------------------------------------------------------------- /next_steps/core_use_cases/filters/promotions/images/promotions-overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/core_use_cases/filters/promotions/images/promotions-overview.png -------------------------------------------------------------------------------- /next_steps/core_use_cases/metadata/README.md: -------------------------------------------------------------------------------- 1 | # Amazon Personalize Metadata 2 | 3 | Adding structured and unstructured metadata to your interactions, items, and users datasets can improve the relevance of recommendations provided by Amazon Personalize. Selecting the right metadata and properly preparing it for Personalize is an important task. 4 | 5 | The notebooks below demonstrate features specifically designed for working with metadata fields. 6 | 7 | ## Sample 8 | 9 | The [item-text-unstructured-metadata.ipynb](item-text-unstructured-metadata.ipynb) provides an example of how to include unstructured text as item metadata and the impact it can have on the relevance of recommendations. 10 | 11 | ## License Summary 12 | 13 | This sample code is made available under a modified MIT license. See the LICENSE file. 14 | -------------------------------------------------------------------------------- /next_steps/core_use_cases/personalized_ranking/README.md: -------------------------------------------------------------------------------- 1 | Amazon Personalize Ranking 2 | --- 3 | 4 | The 'ranking' recipe generates personalized rankings. A personalized ranking is a list of recommended items that are re-ranked for a specific user. 
5 | 6 | ## Sample 7 | 8 | The [personalize_ranking_example.ipynb](personalize_ranking_example.ipynb) 9 | 10 | ## License Summary 11 | 12 | This sample code is made available under a modified MIT license. See the LICENSE file. 13 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /next_steps/core_use_cases/related_items/README.md: -------------------------------------------------------------------------------- 1 | Amazon Personalize Related Items 2 | --- 3 | 4 | The 'SIMS' recipe allows your system to recommend what items are most similar to a specific item using an item-to-item collaborative filtering algorithm. It is faster to train and easier to interpret. This recipe is commonly used on item detail pages where you want to display similar items to the current item based on the behavior of users who also interacted with the item. 5 | 6 | ## Sample 7 | 8 | The [personalize_sims_example.ipynb](personalize_sims_example.ipynb) uploads the 'past' data from temporal splitting and evaluates the recommendation against the held-out 'future' ground truth. The results compare favorably with a popularity-based recommendation baseline. We also include examples showing that different "cause" items would lead to different 'sims' results. 9 | 10 | ## License Summary 11 | 12 | This sample code is made available under a modified MIT license. See the LICENSE file. 
13 | 14 | 15 | 16 | 17 | -------------------------------------------------------------------------------- /next_steps/core_use_cases/trending_now/README.md: -------------------------------------------------------------------------------- 1 | Amazon Personalize Trending-Now Recipe 2 | --- 3 | 4 | The sample notebook (trending_now_example.ipynb) will walk you through an example of using Trending Now recipe in [Amazon Personalize](https://aws.amazon.com/personalize) 5 | 6 | User interests can change based on a variety of factors, such as external events or the interests of other users. It is critical for websites and apps to tailor their recommendations to these changing interests to improve user engagement. With [Trending-Now](https://docs.aws.amazon.com/personalize/latest/dg/native-recipe-trending-now.html), you can surface items from your catalogue that are rising in popularity faster with higher velocity than other items, such as trending news, popular social content or newly released movies. Amazon Personalize looks for items that are rising in popularity at a faster rate than other catalogue items to help users discover items that are engaging their peers. 7 | 8 | Amazon Personalize also allows customers to define the time periods over which trends are calculated depending on their unique business context, with options for every 30 mins, 1 hour, 3 hours or 1 day, based on the most recent interactions data from users. This notebook will demonstrate how the new recipe aws-trending-now (or aws-vod-trending-now for recommenders) can help recommend the top trending items from the interactions dataset. 9 | 10 | ## Sample 11 | 12 | The [trending_now_example.ipynb](trending_now_example.ipynb) 13 | 14 | ## License Summary 15 | 16 | This sample code is made available under a modified MIT license. See the LICENSE file. 
17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /next_steps/core_use_cases/updating_datasets/README.md: -------------------------------------------------------------------------------- 1 | Amazon Personalize Updating Datasets 2 | --- 3 | 4 | This directory has notebooks containing samples and explanations of common workflow patterns for updating datasets in Amazon Personalize. 5 | 6 | ## Samples 7 | Both samples notebooks contain an end-to-end workflow pattern including required setup and cleanup. These notebooks were developed and tested on an [Amazon SageMaker Notebook Instance](https://docs.aws.amazon.com/sagemaker/latest/dg/nbi.html). 8 | 9 | ## Deployment Instructions: 10 | - Launch a SageMaker Notebook Instance 11 | - Ensure the Execution Role of your Notebook instance has the required Amazon IAM permissions. A sample policy document that grants appropriately-scoped permissions is defined in [sagemaker_notebook_exec_role.json](sagemaker_notebook_exec_role.json). You can attach this custom policy as a *customer-managed* policy of your Execution Role. 12 | - Upload the notebook to the SageMaker Notebook Instance 13 | - Run the Notebook from the Jupyter environment 14 | 15 | ### Updating Item Schemas 16 | 17 | Notebook [update-item-dataset-schema-example.ipynb](update-item-dataset-schema-example.ipynb) goes over the process for updating schemas of your datasets in Amazon Personalize; specifically schemas for items. Commentary in this notebook is centered around the an e-commerce use case that uses the item-attribute-affinity recipe. 18 | 19 | ### Importing New Items Data 20 | 21 | Notebook [update-datasets-user-personalization-example.ipynb](update-datasets-user-personalization-example.ipynb) goes over the process for updating your datasets when using Amazon Personalize; specifically adding *new* items and interactions *for those new items*. 
Commentary in this notebook is centered around the an e-commerce use case that uses the user-personalization recipe. Thus, the auto-update feature for user-personalization solution versions is also discussed. 22 | 23 | 24 | ## License Summary 25 | 26 | This sample code is made available under a modified MIT license. See the LICENSE file. 27 | -------------------------------------------------------------------------------- /next_steps/core_use_cases/updating_datasets/sagemaker_notebook_exec_role.json: -------------------------------------------------------------------------------- 1 | { 2 | "Version": "2012-10-17", 3 | "Statement": [ 4 | { 5 | "Sid": "S3ReadPermissionsForSageMaker", 6 | "Effect": "Allow", 7 | "Action": [ 8 | "s3:GetObject", 9 | "s3:ListBucket", 10 | "s3:GetBucketPolicy", 11 | "s3:GetObjectTagging", 12 | "s3:GetObjectAttributes" 13 | ], 14 | "Resource": [ 15 | "arn:aws:s3:::*personalize*", 16 | "arn:aws:s3:::*personalize*/*", 17 | "arn:aws:s3:::*personalize*", 18 | "arn:aws:s3:::*Personalize*/*" 19 | ] 20 | }, 21 | { 22 | "Sid": "S3WritePermissionsForSageMaker", 23 | "Effect": "Allow", 24 | "Action": [ 25 | "s3:PutBucketPolicy", 26 | "s3:CreateBucket", 27 | "s3:DeleteObject", 28 | "s3:DeleteBucketPolicy", 29 | "s3:DeleteBucket", 30 | "s3:PutObject", 31 | "s3:PutObjectTagging" 32 | ], 33 | "Resource": [ 34 | "arn:aws:s3:::*personalize*", 35 | "arn:aws:s3:::*personalize*/*", 36 | "arn:aws:s3:::*personalize*", 37 | "arn:aws:s3:::*Personalize*/*" 38 | ] 39 | }, 40 | { 41 | "Sid": "IAMPermissionsForSageMaker", 42 | "Effect": "Allow", 43 | "Action": [ 44 | "iam:CreateRole", 45 | "iam:GetPolicyVersion", 46 | "iam:GetRole", 47 | "iam:GetRolePolicy", 48 | "iam:DeleteRole", 49 | "iam:CreatePolicy", 50 | "iam:AttachRolePolicy", 51 | "iam:PassRole", 52 | "iam:DetachRolePolicy", 53 | "iam:DeletePolicy" 54 | ], 55 | "Resource": [ 56 | "arn:aws:iam::*:role/PersonalizeRole*", 57 | "arn:aws:iam::*:policy/PersonalizePolicy*" 58 | ] 59 | }, 60 | { 61 | "Sid": 
"PersonalizePermissionsForSageMaker", 62 | "Effect": "Allow", 63 | "Action": [ 64 | "personalize:*" 65 | ], 66 | "Resource": [ 67 | "*" 68 | ] 69 | }, 70 | { 71 | "Sid": "ListIAMRolesAndPolicies", 72 | "Effect": "Allow", 73 | "Action": [ 74 | "iam:ListRoles", 75 | "iam:ListPolicies" 76 | ], 77 | "Resource": [ 78 | "*" 79 | ] 80 | } 81 | ] 82 | } -------------------------------------------------------------------------------- /next_steps/core_use_cases/user_personalization/README.md: -------------------------------------------------------------------------------- 1 | Amazon Personalize User Personalization 2 | --- 3 | 4 | Combining an HRNN-based algorithm for relevance with automatic exploration of new/cold item recommendations, the aws-user-personalization recipe provides the most flexibility when building user personalization use-case. Although the Interactions dataset is the only required dataset, this recipe will take advantage of all three dataset types (Interactions, Items, Users) if provided. In addition, it can optionally model on impression data if provided in your Interactions dataset and when streaming real-time events using an Event Tracker. 5 | 6 | Although we provide sample notebooks for the HRNN-\* recipes for posterity, it is recommended that you start with the user-personalization recipe. 7 | ## Samples 8 | 9 | ### User-Personalization 10 | 11 | The [user-personalization-with-exploration.ipynb](user-personalization-with-exploration.ipynb) demonstrates how to use an Interactions and Items dataset to create solution and campaign that balances making recommendations based on relevance (exploitation) and exploring recommending new/cold items. A Users dataset could have been used as well but is not included in this sample. This sample also demonstrates how to include impression data in the Interactions dataset and in PutEvents API calls. 
12 | 13 | ### Contextual Recommendations + Event Tracker 14 | 15 | In this example we are going over how to leverage Metadata and Context to provide best airline recommendations for users based on historical ratings of such across multiple cabin types with user's location as user metadata 16 | 17 | This [user-personalization-with-contextual-recommendations.ipynb](user-personalization-with-contextual-recommendations.ipynb) shows how these useful information can be uploaded to our system to aid recommendation. A caveat is that the improvements of meta-data recipes depend on how much information can be extracted from the provided meta-data. 18 | 19 | 20 | *Note that the item cold start capabilities of the User-Personalization recipe are preferred over the legacy HRNN-Coldstart recipe. Therefore, it is recommended that you start with the User-Personalization recipe for cold item scenarios.* 21 | 22 | ## License Summary 23 | 24 | This sample code is made available under a modified MIT license. See the LICENSE file. 25 | -------------------------------------------------------------------------------- /next_steps/data_science/README.md: -------------------------------------------------------------------------------- 1 | # Amazon Personalize Data Science 2 | 3 | ### Diagnostics 4 | 5 | Open the [diagnose/](diagnose/) folder to find an example on how to approach visualization of the key properties of your input datasets. 6 | 7 | The key components we look out for include: 8 | - Missing data, duplicated events, and repeated item consumptions 9 | - Power-law distribution of categorical fields 10 | - Temporal drift analysis for cold-start applicability 11 | - Analysis on user-session distribution 12 | 13 | ### Offline Performance Evaluation 14 | 15 | Open the [offline_performance_evaluation/](offline_performance_evaluation/) folder to find an example on how to approach the offline evaluation of your Amazon Personalize Campaign recommendations. 
16 | 17 | ## License Summary 18 | 19 | This sample code is made available under a modified MIT license. See the LICENSE file. 20 | -------------------------------------------------------------------------------- /next_steps/data_science/diagnose/imgs/power-law.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/data_science/diagnose/imgs/power-law.png -------------------------------------------------------------------------------- /next_steps/data_science/diagnose/imgs/retrain-freq.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/data_science/diagnose/imgs/retrain-freq.png -------------------------------------------------------------------------------- /next_steps/data_science/diagnose/imgs/temporal-drift.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/data_science/diagnose/imgs/temporal-drift.png -------------------------------------------------------------------------------- /next_steps/data_science/diagnose/imgs/time-delta.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/data_science/diagnose/imgs/time-delta.png -------------------------------------------------------------------------------- /next_steps/data_science/offline_performance_evaluation/README.md: -------------------------------------------------------------------------------- 1 | Offline Performance Evaluation 2 | === 3 | 4 | You have some historical data and you want 
to know how personalize performs on your data. Here is what we suggest: 5 | 6 | 1. Temporally split your data into a 'past' training set and a 'future' testing set. 7 | 2. Upload the 'past' data to Amazon Personalize, train a solution, and deploy a campaign. 8 | 3. Use your campaign to get recommendations for all of your users, and compare with the 'future' testing set. 9 | 10 | This is an example, [personalize_temporal_holdout.ipynb](personalize_temporal_holdout.ipynb/) to complete the steps above. We include a basic popularity-based recommendation, which should be easy to beat. This is for sanity checking purposes. A common next-step is to keep the same training and testing splits, but train different models for more serious offline comparisons. 11 | 12 | ## License Summary 13 | 14 | This sample code is made available under a modified MIT license. See the LICENSE file. 15 | -------------------------------------------------------------------------------- /next_steps/evaluation/measuring_impact_of_recommendations/images/metrics-overview-scenarios.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/evaluation/measuring_impact_of_recommendations/images/metrics-overview-scenarios.png -------------------------------------------------------------------------------- /next_steps/evaluation/measuring_impact_of_recommendations/images/metrics-overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/evaluation/measuring_impact_of_recommendations/images/metrics-overview.png -------------------------------------------------------------------------------- /next_steps/generative_ai/README.md: 
-------------------------------------------------------------------------------- 1 | # Generative AI with Amazon Personalize 2 | 3 | There are many ways to use foundation models, the basis of generative AI, with Amazon Personalize. Samples will be added to this folder as they are developed. 4 | 5 | * Marketing use cases 6 | - [Personalized marketing campaigns](personalized_marketing_campaign/) 7 | - [User personalized marketing messaging with Amazon Personalize and Generative AI](user_personalized_marketing_messaging_with_amazon_personalize_and_gen_ai/). Use this sample to create personalized marketing content (for instance emails) for each user using [Amazon Personalize](https://aws.amazon.com/personalize/) and [Amazon Bedrock](https://aws.amazon.com/bedrock/). In this sample you will train an [Amazon Personalize](https://aws.amazon.com/personalize/) 'Top picks for you' Recommender to get personalized recommendations for each user. You will then generate a prompt that includes the user's preferences, recommendations, and demographics. Finally you will use [Amazon Bedrock](https://aws.amazon.com/bedrock/) to generate a personalized email for each user. 8 | -------------------------------------------------------------------------------- /next_steps/generative_ai/personalized_marketing_campaign/README.md: -------------------------------------------------------------------------------- 1 | # Amazon Personalize + Generative AI: Personalized Marketing Campaign 2 | 3 | The notebooks and data in this folder are designed to give you hands-on experience building models in Amazon Personalize to identify users with an affinity for specific items and then use Amazon Bedrock to generate personalized marketing content tailored to those users. 4 | 5 | ## Scenario 6 | 7 | Assume you are a marketing manager. Your task is to more effectively promote the flights for an airline through email campaigns using AI. 
To improve end user engagement (increase click-through-rate) and reduce churn (decrease email unsubscriptions), you will use AI to identify customers with an affinity for specific flights and then use generative AI to generate marketing content for those users. There are two AI engines: 8 | 9 | 1. Recommendation engine - Amazon Personalize service 10 | 11 | 2. Content generation engine - Amazon Bedrock service 12 | 13 | ## Pipeline 14 | 15 | Marketing request-->Amazon Personalize-->retrieve metadata-->combine with PromptTemplate--> LangChain-->Amazon Bedrock & LLM--> Generate content -->save in JSON 16 | 17 | ## Workshop steps 18 | 19 | - Step1: Run the [airline_ticket_user_segmentation](airline_ticket_user_segmentation_09212023_github.ipynb) notebook 20 | 21 | - Step2: Run the [personalized_marketing_campaign](personalized_marketing_campaign_10032023_1600_github.ipynb) notebook 22 | 23 | If you're running the notebooks in Amazon SageMaker Studio, the IAM assume role requirements are, 24 | 1. AmazonPersonalizeFullAccess 25 | 2. AmazonS3FullAccess 26 | 3. bedrock_full_access_policy 27 | 4. IAMFullAccess 28 | 5. AmazonSageMakerFullAccess 29 | 30 | The datasets used in this sample were synthetically generated and do not come from a real customer. 
31 | -------------------------------------------------------------------------------- /next_steps/generative_ai/personalized_marketing_campaign/image_to_image1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/generative_ai/personalized_marketing_campaign/image_to_image1.png -------------------------------------------------------------------------------- /next_steps/generative_ai/personalized_marketing_campaign/test-metadata.json: -------------------------------------------------------------------------------- 1 | {"ITEM_ID": "-4293041568465629878", "DSTCity": "Hong Kong", "SRCCity": "Kuala Lumpur", "Airline": "TigerPounce Express", "DurationDays": "10", "Season": "October", "DynamicPrice": 9171, "DiscountForMember": 0.5} -------------------------------------------------------------------------------- /next_steps/generative_ai/personalized_marketing_campaign/ticketing-template.json: -------------------------------------------------------------------------------- 1 | { 2 | "Question": "Human: I will promote flight ticket of Airline {Airline}, from {SRCCity} to {DSTCity}, during {Season} 2023 for membership. The ticket original price is {DynamicPrice}, discount is {DiscountForMember} for member only, for example, a discount of 0.5 means 50%, promotion code is {ITEM_ID}, only show the last 5 digits of {ITEM_ID}. Please display the discount in the email I want to generate an attractive e-mail template, including email title and body to promote the flight ticket, booking website is https://demobooking.demo.co, pls help to write a body of words with landscape itinerary details for {DurationDays} days, with an attractive title to help me to promote the flight ticket to the customers. The output format is formal JSON, including Email title and Email body. 
Assistant:" 3 | } 4 | -------------------------------------------------------------------------------- /next_steps/generative_ai/personalized_marketing_campaign/top50inHK.json: -------------------------------------------------------------------------------- 1 | { 2 | "Restaurants and Food": [ 3 | "Tim Ho Wan", 4 | "Din Tai Fung", 5 | "Lung King Heen", 6 | "Yung Kee", 7 | "Jumbo Kingdom", 8 | "Mak's Noodle", 9 | "Ho Lee Fook", 10 | "Lin Heung Tea House", 11 | "Mammy Pancake", 12 | "Joy Hing Roasted Meat", 13 | "Little Bao", 14 | "DimDimSum Dim Sum Specialty Store", 15 | "Kau Kee Restaurant", 16 | "Tung Po Kitchen", 17 | "Luk Yu Tea House" 18 | ], 19 | "Street Food and Markets": [ 20 | "Temple Street Night Market", 21 | "Mong Kok Street Food", 22 | "Wong Tai Sin Temple" 23 | ], 24 | "Cafes and Desserts": [ 25 | "Australia Dairy Company", 26 | "Teakha", 27 | "The Cupping Room", 28 | "Holly Brown" 29 | ], 30 | "Bars and Nightlife": [ 31 | "Ozone", 32 | "Lan Kwai Fong", 33 | "Dragon-i" 34 | ], 35 | "Outdoor and Scenic Spots": [ 36 | "Victoria Peak", 37 | "Dragon's Back", 38 | "Lantau Island", 39 | "Hong Kong Disneyland" 40 | ], 41 | "Culture and Entertainment": [ 42 | "A Symphony of Lights", 43 | "Hong Kong Museum of History", 44 | "Avenue of Stars", 45 | "Hong Kong Heritage Museum" 46 | ], 47 | "Shopping": [ 48 | "Causeway Bay", 49 | "Stanley Market", 50 | "Harbour City", 51 | "Kowloon Street Markets" 52 | ], 53 | "Parks and Green Spaces": [ 54 | "Nan Lian Garden", 55 | "Hong Kong Park" 56 | ], 57 | "Museums and Art Galleries": [ 58 | "Hong Kong Museum of Art", 59 | "PMQ", 60 | "Tai Kwun" 61 | ], 62 | "Cultural Neighborhoods": [ 63 | "Sheung Wan", 64 | "Tai Hang", 65 | "Central and SoHo" 66 | ], 67 | "Island Getaways": [ 68 | "Cheung Chau Island", 69 | "Lamma Island", 70 | "Peng Chau" 71 | ], 72 | "Waterfront Dining": [ 73 | "Repulse Bay", 74 | "Sai Kung Seafood Street" 75 | ] 76 | } 77 | -------------------------------------------------------------------------------- 
/next_steps/generative_ai/personalized_marketing_campaign/universal_negative_prompts.json: -------------------------------------------------------------------------------- 1 | { 2 | "negative_prompts": [ 3 | "Absent", 4 | "Parts", 5 | "Added", 6 | "Components", 7 | "Asymmetrical", 8 | "Design", 9 | "Broken", 10 | "Cartoonish", 11 | "Cloned", 12 | "Collapsed", 13 | "Complex", 14 | "Background", 15 | "Distorted", 16 | "Distorted", 17 | "Perspective", 18 | "Extra", 19 | "Pieces", 20 | "Faded", 21 | "Color", 22 | "Flawed", 23 | "Shape", 24 | "Flipped", 25 | "Folded", 26 | "Improper", 27 | "Proportion", 28 | "Incomplete", 29 | "Incorrect", 30 | "Geometry", 31 | "Inverted", 32 | "Kitsch", 33 | "Low", 34 | "Quality", 35 | "Low", 36 | "Resolution", 37 | "Macabre", 38 | "Misaligned", 39 | "Parts", 40 | "Misshapen", 41 | "Missing", 42 | "Parts", 43 | "Mutated", 44 | "Off-center", 45 | "Out", 46 | "Of", 47 | "Focus", 48 | "Over-saturated", 49 | "Color", 50 | "Overexposed", 51 | "Oversized", 52 | "Poorly", 53 | "Rendered", 54 | "Replica", 55 | "Surreal", 56 | "Tilted", 57 | "Underexposed", 58 | "Unrealistic", 59 | "Upside", 60 | "Down", 61 | "human", 62 | "finger" 63 | ] 64 | } 65 | -------------------------------------------------------------------------------- /next_steps/generative_ai/personalized_recommender_agent/static/function-flowchart.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/generative_ai/personalized_recommender_agent/static/function-flowchart.png -------------------------------------------------------------------------------- /next_steps/generative_ai/user_personalized_marketing_messaging_with_amazon_personalize_and_gen_ai/README.md: -------------------------------------------------------------------------------- 1 | # Personalized marketing email with Movie Recommendations using Amazon Bedrock and 
Amazon Personalize 2 | 3 | # When to use this sample 4 | 5 | **Industry: Media & Entertainment (M&E)** 6 | 7 | # Key Technologies 8 | 9 | - Amazon Bedrock 10 | - Claude model 11 | - Amazon Personalize 12 | - Real-time item recommendations 13 | 14 | # Getting Started 15 | 16 | This demo will walk you through how to create personalized marketing content (for instance emails) for each user using Amazon Personalize and Amazon Bedrock. 17 | 18 | 1. Building a work environment (follow the steps below) 19 | 2. Format your data to use with [Amazon Personalize](https://aws.amazon.com/personalize/). We used the following data for model training: 20 | * Interactions data: we use the ml-latest-small dataset from the [Movielens](https://grouplens.org/datasets/movielens/) project as a proxy for user-item interactions. 21 | * Item data: in order to provide additional metadata, and also to provide a consistent experience for our users we leverage a subset of the IMDb Essential Metadata for Movies/TV/OTT dataset. IMDb is the world's most popular and authoritative source for information on movies, TV shows, and celebrities and powers entertainment experiences around the world. IMDb has [multiple datasets available in the Amazon Data Exchange](https://aws.amazon.com/marketplace/seller-profile?id=0af153a3-339f-48c2-8b42-3b9fa26d3367).
IMDb logo
22 | 23 | 3. Train an Amazon Personalize 'Top picks for you' Recommender to get personalized recommendations for each user. 24 | 4. Generate a prompt that includes the user's preferences, recommendations, and demographics. 25 | 5. Generate a personalized email for each user with [Amazon Bedrock](https://aws.amazon.com/bedrock/). 26 | 27 | ## Environment Prerequisites 28 | 29 | This only applies if you are deploying with the CloudFormation template, otherwise consult the IAM permissions needed for your specific task and add them to the role(s) you will be using when running this example. 30 | 31 | For this example you require: 32 | 1. An AWS Account 33 | 2. A user with administrator access to the AWS Account 34 | 35 | ## Building Your Environment 36 | 37 | The first step is to deploy a CloudFormation template that will perform much of the initial setup for you. In another browser window login to your AWS account. Once you have done that open the link below in a new tab to start the process of deploying the items you need via CloudFormation. After clicking one of the Launch Stack buttons below, follow the procedures to launch the template. Be sure to enter a CloudFront stack name in lowercase letters (numbers and hyphens are okay too). 38 | 39 | With this deployment option, the CloudFormation template will import this GitHub repository into an Amazon SageMaker Notebook it creates in your account. This notebook can be found in the AWS Console under Notebooks/Notebook Instances. This CloudFormation template will also create the roles with required permissions to do this demo. The CloudFormation template used can be found at [personalizeSimpleCFMarketingContentGen.yml](./personalizeSimpleCFMarketingContentGen.yml). 40 | 41 | | Region | Region Code | Launch stack | 42 | |--------|--------|--------------| 43 | | US East (N. 
Virginia) | us-east-1 | [![Launch Stack](https://s3.amazonaws.com/cloudformation-examples/cloudformation-launch-stack.png)](https://console.aws.amazon.com/cloudformation/home?region=us-east-1#/stacks/new?stackName=PersonalizeExample&templateURL=https://personalize-solution-staging-us-east-1.s3.amazonaws.com/personalize-samples-genai-marketing-content/personalizeSimpleCFMarketingContentGen.yml) | 44 | | Europe (Ireland) | eu-west-1 | [![Launch Stack](https://s3.amazonaws.com/cloudformation-examples/cloudformation-launch-stack.png)](https://console.aws.amazon.com/cloudformation/home?region=eu-west-1#/stacks/new?stackName=PersonalizeExample&templateURL=https://personalize-solution-staging-eu-west-1.s3.eu-west-1.amazonaws.com/personalize-samples-genai-marketing-content/personalizeSimpleCFMarketingContentGen.yml) | 45 | | Asia Pacific (Sydney) | ap-southeast-2 |[![Launch Stack](https://s3.amazonaws.com/cloudformation-examples/cloudformation-launch-stack.png)](https://console.aws.amazon.com/cloudformation/home?region=ap-southeast-2#/stacks/new?stackName=PersonalizeExample&templateURL=https://personalize-solution-staging-ap-southeast-2.s3.ap-southeast-2.amazonaws.com/personalize-samples-genai-marketing-content/personalizeSimpleCFMarketingContentGen.yml) | 46 | 47 | ## Cleanup Resources 48 | 49 | In order to cleanup the resources, you must do 2 steps: 50 | 1. Cleanup resources created during the demo. To do this, run [the cleanup notebook](./02_Clean_Up.ipynb). 51 | 2. Delete the stack you created with CloudFormation. To do this, in the AWS Console again click the `Services` link at the top, and this time enter in `CloudFormation` and click the link for it. Then Click the `Delete` button on the stack you created. 52 | 53 | Once you see `Delete Completed` you know that all resources created have been deleted. 
54 | 55 | -------------------------------------------------------------------------------- /next_steps/generative_ai/user_personalized_marketing_messaging_with_amazon_personalize_and_gen_ai/images/IMDb_Logo_Rectangle.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/generative_ai/user_personalized_marketing_messaging_with_amazon_personalize_and_gen_ai/images/IMDb_Logo_Rectangle.png -------------------------------------------------------------------------------- /next_steps/generative_ai/user_personalized_marketing_messaging_with_amazon_personalize_and_gen_ai/images/architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/generative_ai/user_personalized_marketing_messaging_with_amazon_personalize_and_gen_ai/images/architecture.png -------------------------------------------------------------------------------- /next_steps/generative_ai/user_personalized_marketing_messaging_with_amazon_personalize_and_gen_ai/images/personalize_metrics.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/generative_ai/user_personalized_marketing_messaging_with_amazon_personalize_and_gen_ai/images/personalize_metrics.png -------------------------------------------------------------------------------- /next_steps/generative_ai/user_personalized_marketing_messaging_with_amazon_personalize_and_gen_ai/params.json: -------------------------------------------------------------------------------- 1 | { 2 | "datasetGroup": { 3 | "serviceConfig": { 4 | "name": "personalize-demo", 5 | "domain": "VIDEO_ON_DEMAND" 6 | } 
7 | }, 8 | "datasets": { 9 | "interactions": { 10 | "dataset": { 11 | "serviceConfig": { 12 | "name": "workshop_interactions" 13 | } 14 | }, 15 | "schema": { 16 | "serviceConfig": { 17 | "name": "workshop_interactions_schema", 18 | "domain": "VIDEO_ON_DEMAND", 19 | "schema": { 20 | "type": "record", 21 | "name": "interactions", 22 | "namespace": "com.amazonaws.personalize.schema", 23 | "fields": [ 24 | { 25 | "name": "USER_ID", 26 | "type": "string" 27 | }, 28 | { 29 | "name": "ITEM_ID", 30 | "type": "string" 31 | }, 32 | { 33 | "name": "EVENT_TYPE", 34 | "type": "string" 35 | }, 36 | { 37 | "name": "TIMESTAMP", 38 | "type": "long" 39 | } 40 | ] 41 | } 42 | } 43 | } 44 | }, 45 | "items": { 46 | "dataset": { 47 | "serviceConfig": { 48 | "name": "workshop_items" 49 | } 50 | }, 51 | "schema": { 52 | "serviceConfig": { 53 | "name": "workshop_items_schema", 54 | "domain": "VIDEO_ON_DEMAND", 55 | "schema": { 56 | "type": "record", 57 | "name": "items", 58 | "namespace": "com.amazonaws.personalize.schema", 59 | "fields": [ 60 | { 61 | "name": "ITEM_ID", 62 | "type": "string" 63 | }, 64 | { 65 | "name": "TITLE", 66 | "type": "string" 67 | }, 68 | { 69 | "name": "YEAR", 70 | "type": "int" 71 | }, 72 | { 73 | "name": "IMDB_RATING", 74 | "type": "int" 75 | }, 76 | { 77 | "name": "IMDB_NUMBEROFVOTES", 78 | "type": "int" 79 | }, 80 | { 81 | "name": "PLOT", 82 | "type": "string", 83 | "textual": true 84 | }, 85 | { 86 | "name": "US_MATURITY_RATING_STRING", 87 | "type": "string" 88 | }, 89 | { 90 | "name": "US_MATURITY_RATING", 91 | "type": "int" 92 | }, 93 | { 94 | "name": "GENRES", 95 | "type": "string", 96 | "categorical": true 97 | }, 98 | { 99 | "name": "CREATION_TIMESTAMP", 100 | "type": "long" 101 | }, 102 | { 103 | "name": "PROMOTION", 104 | "type": "string" 105 | } 106 | ] 107 | } 108 | } 109 | } 110 | } 111 | }, 112 | "recommenders": [ 113 | { 114 | "serviceConfig": { 115 | "name": "workshop_top_picks_for_you", 116 | "recipeArn": 
"arn:aws:personalize:::recipe/aws-vod-top-picks" 117 | } 118 | } 119 | ] 120 | } 121 | -------------------------------------------------------------------------------- /next_steps/operations/README.md: -------------------------------------------------------------------------------- 1 | # Amazon Personalize Operations 2 | 3 | This topic contains examples on the following topics: 4 | 5 | * [Maintaining Personalized Experiences with Machine Learning](https://aws.amazon.com/solutions/implementations/maintaining-personalized-experiences-with-ml/) 6 | - This AWS Solution allows you to automate the end-to-end process of importing datasets, creating solutions and solution versions, creating and updating campaigns, creating filters, and running batch inference jobs. These processes can be run on-demand or triggered based on a schedule that you define. 7 | 8 | * MLOps (legacy) 9 | - This is a project to showcase how to quickly deploy a Personalize Campaign in a fully automated fashion using AWS Step Functions. To get started navigate to the [ml_ops](ml_ops) folder and follow the README instructions. This project has been replaced by the [Maintaining Personalized Experiences with Machine Learning](https://aws.amazon.com/solutions/implementations/maintaining-personalized-experiences-with-ml/) solution. 10 | 11 | * Data Science SDK 12 | - This is a project to showcase how to quickly deploy a Personalize Campaign in a fully automated fashion using AWS Data Science SDK. To get started navigate to the [ml_ops_ds_sdk](ml_ops_ds_sdk) folder and follow the README instructions. 13 | 14 | * [Personalization APIs](https://github.com/aws-samples/personalization-apis) 15 | - Real-time low latency API framework that sits between your applications and recommender systems such as Amazon Personalize. 
Provides best practice implementations of response caching, API gateway configurations, A/B testing with [Amazon CloudWatch Evidently](https://docs.aws.amazon.com/cloudwatchevidently/latest/APIReference/Welcome.html), inference-time item metadata, automatic contextual recommendations, and more. 16 | 17 | * Lambda Examples 18 | - This folder starts with a basic example of integrating `put_events` into your Personalize Campaigns by using Lambda functions processing new data from S3. To get started navigate to the [lambda_examples](lambda_examples/) folder and follow the README instructions. 19 | 20 | * Streaming Events 21 | - This is a project to showcase how to quickly deploy an API Layer infront of your Amazon Personalize Campaign and your Event Tracker endpoint. To get started navigate to the [streaming_events](streaming_events/) folder and follow the README instructions. 22 | 23 | * [Personalize Monitor](https://github.com/aws-samples/amazon-personalize-monitor) 24 | - This project adds monitoring, alerting, a dashboard, and optimization tools for running Amazon Personalize across your AWS environments. 25 | 26 | ## License Summary 27 | 28 | This sample code is made available under a modified MIT license. See the LICENSE file. 29 | -------------------------------------------------------------------------------- /next_steps/operations/filter_rotator/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of 4 | this software and associated documentation files (the "Software"), to deal in 5 | the Software without restriction, including without limitation the rights to 6 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 7 | the Software, and to permit persons to whom the Software is furnished to do so. 
8 | 9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 10 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 11 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 12 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 13 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 14 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 15 | 16 | -------------------------------------------------------------------------------- /next_steps/operations/filter_rotator/conftest.py: -------------------------------------------------------------------------------- 1 | import sys, os 2 | 3 | here = os.path.abspath("src") 4 | sys.path.insert(0, here) -------------------------------------------------------------------------------- /next_steps/operations/filter_rotator/src/filter_rotator_function/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/filter_rotator/src/filter_rotator_function/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/filter_rotator/src/filter_rotator_function/requirements.txt: -------------------------------------------------------------------------------- 1 | # Note: AWS Lambda Power Tools is also required but is satisfied by a Lambda layer at runtime (see template.yaml) 2 | simpleeval==0.9.11 -------------------------------------------------------------------------------- /next_steps/operations/filter_rotator/src/filter_rotator_function/template_evaluation.py: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
2 | # SPDX-License-Identifier: MIT-0 3 | 4 | from simpleeval import simple_eval, DEFAULT_FUNCTIONS, DEFAULT_NAMES 5 | import datetime 6 | from dateutil.parser import parse 7 | import re 8 | from typing import Any, Dict 9 | 10 | """ 11 | This script includes the utilities and functions for evaluating expressions/templates. 12 | """ 13 | def _unixtime(s: Any) -> float: 14 | if isinstance(s, datetime.time) or isinstance(s, datetime.date): 15 | return s.timestamp() 16 | return parse(str(s)).timestamp() 17 | 18 | def _datetime_format(d, pattern: str) -> str: 19 | return d.strftime(pattern) 20 | 21 | def _starts_with(s: str, prefix: str) -> bool: 22 | return s.startswith(prefix) 23 | 24 | def _ends_with(s: str, suffix: str) -> bool: 25 | return s.endswith(suffix) 26 | 27 | def _timedelta_days(v: int) -> datetime.timedelta: 28 | return datetime.timedelta(days=v) 29 | 30 | def _timedelta_hours(v: int) -> datetime.timedelta: 31 | return datetime.timedelta(hours=v) 32 | 33 | def _timedelta_minutes(v: int) -> datetime.timedelta: 34 | return datetime.timedelta(minutes=v) 35 | 36 | def _timedelta_seconds(v: int) -> datetime.timedelta: 37 | return datetime.timedelta(seconds=v) 38 | 39 | def _end(s: str, num: int) -> str: 40 | return s[-abs(num):] 41 | 42 | def _start(s: str, num: int) -> str: 43 | return s[0:num] 44 | 45 | def eval_expression(s: str, names: Dict = None) -> str: 46 | """ 47 | Customizes the functions and names available in 48 | expression and evaluates an expression. 
49 | """ 50 | functions = DEFAULT_FUNCTIONS.copy() 51 | functions.update( 52 | unixtime=_unixtime, 53 | datetime_format=_datetime_format, 54 | starts_with=_starts_with, 55 | ends_with=_ends_with, 56 | start=_start, 57 | end=_end, 58 | timedelta_days=_timedelta_days, 59 | timedelta_hours=_timedelta_hours, 60 | timedelta_minutes=_timedelta_minutes, 61 | timedelta_seconds=_timedelta_seconds 62 | ) 63 | 64 | names_combined = DEFAULT_NAMES.copy() 65 | names_combined.update( 66 | now=datetime.datetime.now() 67 | ) 68 | 69 | if names: 70 | names_combined.update(names) 71 | 72 | return simple_eval(s, functions=functions, names=names_combined) 73 | 74 | def eval_template(s: str, names: Dict = None) -> str: 75 | """ 76 | Processes a template that includes zero or more expressions wrapped in handlebars ("{{ }}") 77 | where each embedded expression is replaced by the resolved expression. 78 | """ 79 | return re.sub(r'\{\{([^\}]*)\}\}', lambda m: str(eval_expression(m.group(1), names)), s) 80 | -------------------------------------------------------------------------------- /next_steps/operations/filter_rotator/template.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Transform: AWS::Serverless-2016-10-31 3 | Description: > 4 | Amazon Personalize filter rotatation utility that automatically creates and deletes filters using templates you provide 5 | 6 | Parameters: 7 | PersonalizeDatasetGroupArn: 8 | Type: String 9 | Description: Dataset Group Arn to rotate filters within. 10 | 11 | PersonalizeCurrentFilterNameTemplate: 12 | Type: String 13 | Description: > 14 | Filter name template for the filter that should currently exist. This template is used to either match an existing filter that 15 | is considered current or trigger the creation of a filter if it does not exist. 
16 | 17 | PersonalizeCurrentFilterExpressionTemplate: 18 | Type: String 19 | Description: > 20 | Filter expression template for the filter that should currently exist. When the current filter does not exist, a filter is created 21 | using this value as the template for the new filter's expression. 22 | 23 | PersonalizeDeleteFilterMatchTemplate: 24 | Type: String 25 | Description: > 26 | Filter match template for filter(s) that should be automatically deleted. Any existing filter(s) matching this template will be deleted. 27 | 28 | RotationSchedule: 29 | Type: String 30 | Description: How often you want to rotation script to run. Can be either a cron or rate expression. 31 | Default: "rate(1 day)" 32 | 33 | Timezone: 34 | Type: String 35 | Description: Set the timezone of the rotator function's Lambda environment to match your local timezone. 36 | Default: UTC 37 | 38 | PublishFilterEvents: 39 | Type: String 40 | Description: Whether to publish events to the default Amazon EventBridge bus when created filters become active and when existing filters are deleted. 
41 | AllowedValues: 42 | - 'Yes' 43 | - 'No' 44 | Default: 'Yes' 45 | 46 | Globals: 47 | Function: 48 | Runtime: python3.9 49 | Environment: 50 | Variables: 51 | LOG_LEVEL: INFO 52 | POWERTOOLS_LOGGER_LOG_EVENT: true 53 | POWERTOOLS_LOGGER_SAMPLE_RATE: 0 54 | 55 | Resources: 56 | PersonalizeFilterRotatorFunction: 57 | Type: AWS::Serverless::Function 58 | Properties: 59 | Description: Function that rotates Personalize filters based on user-defined criteria 60 | Timeout: 900 # In case we have to wait a while for filter to become active 61 | CodeUri: src/filter_rotator_function 62 | Handler: filter_rotator.lambda_handler 63 | Layers: 64 | - !Sub arn:aws:lambda:${AWS::Region}:017000801446:layer:AWSLambdaPowertoolsPython:3 65 | Policies: 66 | - Statement: 67 | - Sid: PersonalizePolicy 68 | Effect: Allow 69 | Action: 70 | - personalize:CreateFilter 71 | - personalize:DeleteFilter 72 | - personalize:DescribeFilter 73 | - personalize:ListFilters 74 | Resource: !Sub 'arn:aws:personalize:${AWS::Region}:${AWS::AccountId}:*' 75 | - Sid: EventBridgePolicy 76 | Effect: Allow 77 | Action: 78 | - events:PutEvents 79 | Resource: !Sub 'arn:aws:events:${AWS::Region}:${AWS::AccountId}:event-bus/default' 80 | Environment: 81 | Variables: 82 | PUBLISH_FILTER_EVENTS: !Ref PublishFilterEvents 83 | TZ: !Ref Timezone 84 | POWERTOOLS_SERVICE_NAME: personalize_filter_rotator 85 | Events: 86 | ScheduledEvent: 87 | Type: Schedule 88 | Properties: 89 | Description: Triggers Personalize filter rotation 90 | Input: !Sub '{"datasetGroupArn": "${PersonalizeDatasetGroupArn}","currentFilterNameTemplate": "${PersonalizeCurrentFilterNameTemplate}","currentFilterExpressionTemplate": "${PersonalizeCurrentFilterExpressionTemplate}","deleteFilterMatchTemplate": "${PersonalizeDeleteFilterMatchTemplate}"}' 91 | Schedule: !Ref RotationSchedule 92 | Enabled: True 93 | 94 | Outputs: 95 | PersonalizeFilterRotatorFunction: 96 | Description: "Personalize filter rotator Function ARN" 97 | Value: !GetAtt 
PersonalizeFilterRotatorFunction.Arn 98 | -------------------------------------------------------------------------------- /next_steps/operations/filter_rotator/tests/requirements.txt: -------------------------------------------------------------------------------- 1 | requests 2 | six 3 | regex 4 | pytest 5 | pytest-mock 6 | simpleeval==0.9.11 -------------------------------------------------------------------------------- /next_steps/operations/filter_rotator/tests/unit/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/filter_rotator/tests/unit/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/filter_rotator/tests/unit/test_handler.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | from filter_rotator_function import template_evaluation 4 | 5 | def test_filter_expressions(): 6 | secs_now = int(datetime.datetime.now().timestamp()) 7 | res = template_evaluation.eval_template('INCLUDE ItemID WHERE Items.CREATION_TIMESTAMP > {{int(unixtime(now - timedelta_days(14)))}}') 8 | assert '{{' not in res and '}}' not in res 9 | secs = res.split(' ')[-1] 10 | assert secs.isdigit() 11 | assert (int(secs) - secs_now) < 2 12 | 13 | def test_filter_name(): 14 | today = datetime.datetime.now().strftime('%Y%m%d') 15 | res = template_evaluation.eval_template("include-recent-items-{{datetime_format(now,'%Y%m%d')}}") 16 | assert res.startswith('include-recent-items-') 17 | assert res.endswith(today) 18 | 19 | def test_expression_basics(): 20 | assert template_evaluation.eval_expression('1 == 1') 21 | assert not template_evaluation.eval_expression('1 > 1') 22 | assert template_evaluation.eval_expression('True == True') 23 | assert template_evaluation.eval_expression('False == False') 
24 | assert not template_evaluation.eval_expression('True == False') 25 | assert template_evaluation.eval_expression('"personal" in "personalize"') 26 | 27 | assert template_evaluation.eval_expression("datetime_format(now,'%Y%m%d') == datetime_format(now,'%Y%m%d')") 28 | assert template_evaluation.eval_expression("datetime_format(now-timedelta_days(1),'%Y%m%d') < datetime_format(now,'%Y%m%d')") 29 | assert not template_evaluation.eval_expression("datetime_format(now-timedelta_days(1),'%Y%m%d') > datetime_format(now,'%Y%m%d')") 30 | 31 | def test_filter_match(): 32 | day_old = "starts_with(filter.name,'include-recent-items-') and int(end(filter.name,8)) < int(datetime_format(now - timedelta_days(1),'%Y%m%d'))" 33 | # Three days old 34 | filter = { 35 | 'name': template_evaluation.eval_template("include-recent-items-{{datetime_format(now-timedelta_days(3),'%Y%m%d')}}") 36 | } 37 | match = template_evaluation.eval_expression(day_old, {'filter': filter}) 38 | assert match 39 | 40 | # Two days old 41 | filter['name'] = template_evaluation.eval_template("include-recent-items-{{datetime_format(now-timedelta_days(2),'%Y%m%d')}}") 42 | match = template_evaluation.eval_expression(day_old, {'filter': filter}) 43 | assert match 44 | 45 | # One day old 46 | filter['name'] = template_evaluation.eval_template("include-recent-items-{{datetime_format(now-timedelta_days(1),'%Y%m%d')}}") 47 | match = template_evaluation.eval_expression(day_old, {'filter': filter}) 48 | assert not match 49 | 50 | # Same day 51 | filter['name'] = template_evaluation.eval_template("include-recent-items-{{datetime_format(now,'%Y%m%d')}}") 52 | match = template_evaluation.eval_expression(day_old, {'filter': filter}) 53 | assert not match 54 | -------------------------------------------------------------------------------- /next_steps/operations/lambda_examples/README.md: -------------------------------------------------------------------------------- 1 | # Lambda Examples 2 | 3 | This folder starts with a 
basic example of integrating `put_events` into your Personalize Campaigns by using Lambda functions processing new data from S3. 4 | 5 | To get started here, first complete the `getting_started` notebook collection, including the second notebook that creates your initial Event Tracker. 6 | 7 | 8 | ## Sending Events to S3 9 | 10 | Inside this folder you'll see a notebook `Sending_Events_to_S3.ipynb`; it contains the boilerplate code to send a series of messages to your S3 bucket. 11 | 12 | This will be key for using your Lambda function which will then send them to Personalize. 13 | 14 | ## Lambda Function 15 | 16 | The notebook will now reliably write files to your S3 bucket; the next task is to build a Lambda function to invoke on the S3 trigger. The code for the Lambda is provided inside `event_processor.py` 17 | 18 | 19 | First visit the Lambda console then click `Create Function`, give it any name you like and select Python 3.6 for the runtime. 20 | 21 | You will need a new IAM role for this Lambda function, allow a default one first. Later it will be updated to work with Personalize and S3. Next hit `Create function` 22 | 23 | 24 | Now click the `+ Add trigger`, search for S3, select your bucket, select `All object create events` for demo purposes, then for Suffix add `.json`. Lastly on this page click `Add` 25 | 26 | Next click the icon for your Lambda function, when the editor appears below, copy the contents of `event_processor.py` into the editor and save it. Replace all the existing content. 27 | 28 | Scroll down below the editor and for `Environment Variables` enter `trackingId` for a key, and for the value provide your tracking ID from the second notebook. 29 | 30 | This is almost ready to go, the last configuration bit is to handle IAM, scroll below until you see `Execution role`, at the bottom you'll see a link `View the ....` right click and open that in a new tab. 
31 | 32 | Click `Attach policies`, add both `AmazonS3FullAccess` and `AmazonPersonalizeFullAccess` then click `Attach policy`. These configurations are not ideal for security but will illustrate the point. For a production workload create custom policies tailored explicitly to the resources you are working with. 33 | 34 | Once attached, close the tab and revisit the Lambda Console page you left. Click `Save` in the top right corner. 35 | 36 | Scroll back to the top, and select `Monitoring`, then go back to your notebook that simulates the events and execute that cell again to write new files and execute the Lambda function. 37 | 38 | After a few seconds you can refresh the page and see the invocations were successful. 39 | -------------------------------------------------------------------------------- /next_steps/operations/lambda_examples/event_processor.py: -------------------------------------------------------------------------------- 1 | import boto3 2 | import json 3 | import numpy as np 4 | import pandas as pd 5 | import time 6 | import uuid 7 | import os 8 | import urllib.parse 9 | 10 | print('Loading function') 11 | 12 | personalize = boto3.client('personalize') 13 | 14 | s3 = boto3.client('s3') 15 | 16 | def push_event_to_Personalize(event): 17 | """ 18 | Here an event is a file object 19 | """ 20 | trackingId = os.environ['trackingId'] 21 | print(event[0]['userId']) 22 | 23 | personalize.put_events( 24 | trackingId = trackingId, 25 | userId = event[0]['userId'], 26 | sessionId = event[0]['sessionId'], 27 | eventList = [event[1]] 28 | ) 29 | 30 | def lambda_handler(event, context): 31 | #print("Received event: " + json.dumps(event, indent=2)) 32 | 33 | # Get the object from the event and show its content type 34 | bucket = event['Records'][0]['s3']['bucket']['name'] 35 | key = urllib.parse.unquote_plus(event['Records'][0]['s3']['object']['key'], encoding='utf-8') 36 | try: 37 | response = s3.get_object(Bucket=bucket, Key=key) 38 | body = response['Body'] 
39 | push_event_to_Personalize(event=body.read()) 40 | return response['ContentType'] 41 | except Exception as e: 42 | print(e) 43 | print('Error getting object {} from bucket {}. Make sure they exist and your bucket is in the same region as this function.'.format(key, bucket)) 44 | raise e -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Amazon Inc. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
-------------------------------------------------------------------------------- /next_steps/operations/ml_ops/example/params.json: -------------------------------------------------------------------------------- 1 | { 2 | "datasetGroup":{ 3 | "name":"MovieLensDemo" 4 | }, 5 | "datasets":{ 6 | "Interactions":{ 7 | "name":"InteractionsDataset", 8 | "schema":{ 9 | "type":"record", 10 | "name":"Interactions", 11 | "namespace":"com.amazonaws.personalize.schema", 12 | "fields":[ 13 | { 14 | "name":"USER_ID", 15 | "type":"string" 16 | }, 17 | { 18 | "name":"ITEM_ID", 19 | "type":"string" 20 | }, 21 | { 22 | "name":"EVENT_TYPE", 23 | "type":"string" 24 | }, 25 | { 26 | "name":"TIMESTAMP", 27 | "type":"long" 28 | } 29 | ], 30 | "version":"1.0" 31 | } 32 | }, 33 | "Items":{ 34 | "name":"ItemsDataset", 35 | "schema":{ 36 | "type":"record", 37 | "name":"Items", 38 | "namespace":"com.amazonaws.personalize.schema", 39 | "fields":[ 40 | { 41 | "name":"ITEM_ID", 42 | "type":"string" 43 | }, 44 | { 45 | "name":"GENRE", 46 | "type":"string", 47 | "categorical":true 48 | }, 49 | { 50 | "name":"YEAR", 51 | "type":"int" 52 | } 53 | ], 54 | "version":"1.0" 55 | } 56 | } 57 | }, 58 | "solutions":{ 59 | "userPersonalization":{ 60 | "name":"userPersonalizationSolution", 61 | "recipeArn":"arn:aws:personalize:::recipe/aws-user-personalization" 62 | }, 63 | "sims":{ 64 | "name":"simsSolution", 65 | "recipeArn":"arn:aws:personalize:::recipe/aws-sims" 66 | }, 67 | "personalizedRanking":{ 68 | "name":"personalizedRankingSolution", 69 | "recipeArn":"arn:aws:personalize:::recipe/aws-personalized-ranking" 70 | } 71 | }, 72 | "eventTracker":{ 73 | "name":"MovieLensDemoEventTracker" 74 | }, 75 | "campaigns":{ 76 | "userPersonalizationCampaign":{ 77 | "name":"userPersonalizationCampaign", 78 | "minProvisionedTPS":1 79 | }, 80 | "simsCampaign":{ 81 | "name":"simsCampaign", 82 | "minProvisionedTPS":1 83 | }, 84 | "personalizedRankingCampaign":{ 85 | "name":"personalizedRankingCampaign", 86 | 
"minProvisionedTPS":1 87 | } 88 | }, 89 | "filters": 90 | [ 91 | { 92 | "name":"media-filter-1", 93 | "filterExpression":"INCLUDE ItemID WHERE Items.YEAR >= 1970 AND Items.YEAR < 1980" 94 | } 95 | ] 96 | } -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/images/architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/images/architecture.png -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/images/icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/images/icon.png -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/images/stepfunctions.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/images/stepfunctions.png -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-campaign/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-campaign/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-campaign/campaign.py: 
-------------------------------------------------------------------------------- 1 | from os import environ 2 | import actions 3 | from loader import Loader 4 | 5 | ARN = 'arn:aws:personalize:{region}:{account}:campaign/{name}' 6 | LOADER = Loader() 7 | 8 | 9 | def lambda_handler(event, context): 10 | campaignArn = ARN.format( 11 | region=environ['AWS_REGION'], 12 | account=LOADER.account_id, 13 | name=event['campaign']['name'] 14 | ) 15 | try: 16 | status = LOADER.personalize_cli.describe_campaign( 17 | campaignArn=campaignArn 18 | )['campaign'] 19 | # Point to new campaign if the new solution version is not the one listed in the campaign 20 | if(status['solutionVersionArn'] != event['solutionVersionArn']): 21 | try: 22 | newStatus = LOADER.personalize_cli.update_campaign( 23 | campaignArn=campaignArn, 24 | solutionVersionArn=event['solutionVersionArn'], 25 | minProvisionedTPS=event['campaign']['minProvisionedTPS']) 26 | status = LOADER.personalize_cli.describe_campaign( 27 | campaignArn=campaignArn 28 | )['campaign'] 29 | actions.take_action(status['latestCampaignUpdate']['status']) 30 | return campaignArn 31 | except LOADER.personalize_cli.exceptions.ResourceInUseException: 32 | actions.take_action(status['latestCampaignUpdate']['status']) 33 | return campaignArn 34 | 35 | except LOADER.personalize_cli.exceptions.ResourceNotFoundException: 36 | LOADER.logger.info( 37 | 'Campaign not found! Will follow to create a new campaign.' 
38 | ) 39 | LOADER.personalize_cli.create_campaign( 40 | name=event['campaign']['name'], 41 | solutionVersionArn=event['solutionVersionArn'], 42 | minProvisionedTPS=event['campaign']['minProvisionedTPS'] 43 | ) 44 | status = LOADER.personalize_cli.describe_campaign( 45 | campaignArn=campaignArn 46 | )['campaign'] 47 | 48 | actions.take_action(status['status']) 49 | return campaignArn 50 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-campaign/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-campaign/requirements.txt -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-dataset/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-dataset/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-dataset/dataset.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | import actions 3 | from loader import Loader 4 | from random import randint 5 | from json import dumps 6 | 7 | DATASET_ARN = 'arn:aws:personalize:{region}:{account}:dataset/{datasetGroupName}/{type}' 8 | LOADER = Loader() 9 | 10 | 11 | def create_schema(name, schema): 12 | schemaArn = 'arn:aws:personalize:{region}:{account}:schema/{name}' 13 | 14 | try: 15 | schemaResponse = 
LOADER.personalize_cli.describe_schema( 16 | schemaArn=schemaArn.format( 17 | name=name, 18 | account=LOADER.account_id, 19 | region=environ['AWS_REGION'] 20 | ) 21 | ) 22 | 23 | if schemaResponse['schema']['schema'] != schema: 24 | LOADER.logger.info( 25 | '''{name} schema already exists with different schema! 26 | Will follow using different schema name.'''. 27 | format(name=name) 28 | ) 29 | return create_schema( 30 | name='{name}-{rand}'.format(name=name, rand=randint(0, 100000)), 31 | schema=schema 32 | ) 33 | return schemaResponse['schema']['schemaArn'] 34 | 35 | except LOADER.personalize_cli.exceptions.ResourceNotFoundException: 36 | LOADER.logger.info('Schema not found! Will follow to create schema.') 37 | return LOADER.personalize_cli.create_schema( 38 | name=name, schema=dumps(schema) 39 | )['schemaArn'] 40 | 41 | 42 | def lambda_handler(event, context): 43 | # return event 44 | dataset = event['datasets'][event['datasetType']] 45 | datasetArn = DATASET_ARN.format( 46 | region=environ['AWS_REGION'], 47 | account=LOADER.account_id, 48 | datasetGroupName=event['datasetGroupName'], 49 | type=str.upper(event['datasetType']) 50 | ) 51 | try: 52 | status = LOADER.personalize_cli.describe_dataset(datasetArn=datasetArn 53 | )['dataset'] 54 | 55 | except LOADER.personalize_cli.exceptions.ResourceNotFoundException: 56 | LOADER.logger.info( 57 | 'Dataset not found! Will follow to create schema and dataset.' 
58 | ) 59 | LOADER.personalize_cli.create_dataset( 60 | name=dataset['name'], 61 | schemaArn=create_schema( 62 | dataset['schema']['name'], dataset['schema'] 63 | ), 64 | datasetGroupArn=event['datasetGroupArn'], 65 | datasetType=event['datasetType'] 66 | ) 67 | status = LOADER.personalize_cli.describe_dataset(datasetArn=datasetArn 68 | )['dataset'] 69 | 70 | actions.take_action(status['status']) 71 | return datasetArn 72 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-dataset/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-dataset/requirements.txt -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-datasetgroup/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-datasetgroup/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-datasetgroup/datasetgroup.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | import actions 3 | from loader import Loader 4 | 5 | ARN = 'arn:aws:personalize:{region}:{account}:dataset-group/{name}' 6 | LOADER = Loader() 7 | 8 | 9 | def lambda_handler(event, context): 10 | datasetGroupArn = ARN.format( 11 | account=LOADER.account_id, 12 | name=event['datasetGroup']['name'], 13 | region=environ['AWS_REGION'] 14 
| ) 15 | try: 16 | status = LOADER.personalize_cli.describe_dataset_group( 17 | datasetGroupArn=datasetGroupArn 18 | )['datasetGroup'] 19 | 20 | except LOADER.personalize_cli.exceptions.ResourceNotFoundException: 21 | LOADER.logger.info( 22 | 'Dataset Group not found! Will follow to create Dataset Group.' 23 | ) 24 | LOADER.personalize_cli.create_dataset_group(**event['datasetGroup']) 25 | status = LOADER.personalize_cli.describe_dataset_group( 26 | datasetGroupArn=datasetGroupArn 27 | )['datasetGroup'] 28 | 29 | actions.take_action(status['status']) 30 | return datasetGroupArn 31 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-datasetgroup/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-datasetgroup/requirements.txt -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-filters/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-filters/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-filters/filters.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | import actions 3 | from loader import Loader 4 | 5 | LOADER = Loader() 6 | ARN = 'arn:aws:personalize:{region}:{account}:filter/{filter_name}' 7 | 8 | def 
create_filter(dataset_group_arn, filter_expression, filter_name): 9 | 10 | filterARN = ARN.format( 11 | region=environ['AWS_REGION'], 12 | account=LOADER.account_id, 13 | filter_name=filter_name 14 | ) 15 | 16 | try: 17 | status = LOADER.personalize_cli.describe_filter( 18 | filterArn=filterARN 19 | )['filter']['status'] 20 | 21 | except LOADER.personalize_cli.exceptions.ResourceNotFoundException: 22 | LOADER.logger.info( 23 | 'Filter not found! Will follow to create a new filter.' 24 | ) 25 | LOADER.personalize_cli.create_filter( 26 | datasetGroupArn = dataset_group_arn, 27 | filterExpression = filter_expression, 28 | name = filter_name 29 | ) 30 | status = LOADER.personalize_cli.describe_filter( 31 | filterArn=filterARN 32 | )['filter']['status'] 33 | 34 | while status in {'CREATE PENDING', 'CREATE IN_PROGRESS'}: 35 | status = LOADER.personalize_cli.describe_filter( 36 | filterArn=filterARN 37 | )['filter']['status'] 38 | 39 | if status != 'ACTIVE': 40 | raise actions.ResourceFailed 41 | 42 | return filterARN 43 | 44 | def lambda_handler(event, context): 45 | filter_arns = [] 46 | 47 | for filter in event['filters']: 48 | filter_arn = create_filter( 49 | event['datasetGroupArn'], 50 | filter['filterExpression'], 51 | filter['name'] 52 | ) 53 | filter_arns.append(filter_arn) 54 | 55 | return filter_arns 56 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-filters/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-filters/requirements.txt -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-solution/__init__.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-solution/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-solution/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-solution/requirements.txt -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/create-solution/solution.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | import actions 3 | from loader import Loader 4 | 5 | ARN = 'arn:aws:personalize:{region}:{account}:solution/{name}' 6 | LOADER = Loader() 7 | 8 | 9 | def create_solution(solutionArn, params): 10 | try: 11 | status = LOADER.personalize_cli.describe_solution( 12 | solutionArn=solutionArn 13 | )['solution']['status'] 14 | 15 | except LOADER.personalize_cli.exceptions.ResourceNotFoundException: 16 | LOADER.logger.info( 17 | 'Solution not found! Will follow to create a new solution.' 
18 | ) 19 | LOADER.personalize_cli.create_solution(**params) 20 | status = LOADER.personalize_cli.describe_solution( 21 | solutionArn=solutionArn 22 | )['solution']['status'] 23 | 24 | while status in {'CREATE PENDING', 'CREATE IN_PROGRESS'}: 25 | status = LOADER.personalize_cli.describe_solution( 26 | solutionVersion=solutionArn 27 | )['solution']['status'] 28 | 29 | if status != 'ACTIVE': 30 | raise actions.ResourceFailed 31 | 32 | 33 | def lambda_handler(event, context): 34 | 35 | solutionArn = ARN.format( 36 | region=environ['AWS_REGION'], 37 | account=LOADER.account_id, 38 | name=event['solution']['{}'.format(event['solutionType'])]['name'] 39 | ) 40 | 41 | event['solution']['{}'.format(event['solutionType'])]['datasetGroupArn'] = event['datasetGroupArn'] 42 | create_solution(solutionArn, event['solution']['{}'.format(event['solutionType'])]) 43 | 44 | solutionVersionArn = LOADER.personalize_cli.create_solution_version( 45 | solutionArn=solutionArn, 46 | trainingMode='FULL' # Assumed given we are creating a new model. 
47 | )['solutionVersionArn'] 48 | 49 | return solutionVersionArn 50 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-campaign/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-campaign/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-campaign/delete-campaign.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | from loader import Loader 3 | import actions 4 | 5 | LOADER = Loader() 6 | 7 | 8 | def lambda_handler(event, context): 9 | try: 10 | response = LOADER.personalize_cli.delete_campaign( 11 | campaignArn=event['campaignArn'] 12 | ) 13 | except Exception as e: 14 | LOADER.logger.error(f'Error deleting campaign: {e}') 15 | raise e 16 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-campaign/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-campaign/requirements.txt -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-dataset/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-dataset/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-dataset/delete-dataset.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | from loader import Loader 3 | import actions 4 | 5 | LOADER = Loader() 6 | 7 | 8 | def lambda_handler(event, context): 9 | try: 10 | response = LOADER.personalize_cli.delete_dataset( 11 | datasetArn=event['datasetArn'] 12 | ) 13 | except Exception as e: 14 | LOADER.logger.error(f'Error deleting dataset: {e}') 15 | raise e 16 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-dataset/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-dataset/requirements.txt -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-datasetgroup/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-datasetgroup/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-datasetgroup/delete-datasetgroup.py: 
-------------------------------------------------------------------------------- 1 | from os import environ 2 | from loader import Loader 3 | import actions 4 | 5 | LOADER = Loader() 6 | 7 | 8 | def lambda_handler(event, context): 9 | try: 10 | response = LOADER.personalize_cli.delete_dataset_group( 11 | datasetGroupArn=event['datasetGroupArn'] 12 | ) 13 | except Exception as e: 14 | LOADER.logger.error(f'Error deleting dataset group: {e}') 15 | raise e 16 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-datasetgroup/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-datasetgroup/requirements.txt -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-solution/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-solution/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-solution/delete-solution.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | from loader import Loader 3 | import actions 4 | 5 | LOADER = Loader() 6 | 7 | 8 | def lambda_handler(event, context): 9 | try: 10 | response = LOADER.personalize_cli.delete_solution( 11 | solutionArn=event['solutionArn'] 12 | ) 13 | except Exception as e: 14 | LOADER.logger.error(f'Error deleting 
solution: {e}') 15 | raise e 16 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-solution/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-solution/requirements.txt -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-tracker/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-tracker/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-tracker/delete-tracker.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | from loader import Loader 3 | import actions 4 | 5 | LOADER = Loader() 6 | 7 | 8 | def lambda_handler(event, context): 9 | status = LOADER.personalize_cli.delete_event_tracker( 10 | eventTrackerArn=event['eventTrackerArn'] 11 | ) -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-tracker/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/delete-tracker/requirements.txt 
-------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/event-tracker/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/event-tracker/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/event-tracker/eventTracker.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | import actions 3 | from loader import Loader 4 | 5 | LOADER = Loader() 6 | 7 | def lambda_handler(event, context): 8 | listETResponse = LOADER.personalize_cli.list_event_trackers( 9 | datasetGroupArn=event['datasetGroupArn']) 10 | if(len(listETResponse['eventTrackers']) > 0): 11 | eventTrackerArn = listETResponse['eventTrackers'][0]['eventTrackerArn'] 12 | status = LOADER.personalize_cli.describe_event_tracker( 13 | eventTrackerArn=eventTrackerArn 14 | )['eventTracker'] 15 | status = LOADER.personalize_cli.describe_event_tracker( 16 | eventTrackerArn=eventTrackerArn 17 | )['eventTracker'] 18 | else: 19 | LOADER.logger.info( 20 | 'Event tracker not found!' 
21 | ) 22 | event['eventTracker']['datasetGroupArn'] = event['datasetGroupArn'] 23 | createStatus = LOADER.personalize_cli.create_event_tracker(**event['eventTracker']) 24 | eventTrackerArn = createStatus['eventTrackerArn'] 25 | status = LOADER.personalize_cli.describe_event_tracker( 26 | eventTrackerArn=eventTrackerArn 27 | )['eventTracker'] 28 | 29 | actions.take_action(status['status']) 30 | return eventTrackerArn -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/event-tracker/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/event-tracker/requirements.txt -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/import-data/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/import-data/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/import-data/datasetimport.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | import actions 3 | from loader import Loader 4 | from datetime import datetime 5 | 6 | ARN = 'arn:aws:personalize:{region}:{account}:dataset-import-job/{type}_{date}' 7 | LOADER = Loader() 8 | 9 | 10 | def lambda_handler(event, context): 11 | # return event 12 | datasetImportJobArn = ARN.format( 13 | region=environ['AWS_REGION'], 14 | account=LOADER.account_id, 15 | 
date=event['date'], 16 | type=event['datasetType'] 17 | ) 18 | 19 | try: 20 | status = LOADER.personalize_cli.describe_dataset_import_job( 21 | datasetImportJobArn=datasetImportJobArn 22 | )['datasetImportJob'] 23 | 24 | except LOADER.personalize_cli.exceptions.ResourceNotFoundException: 25 | LOADER.logger.info( 26 | 'Dataset import job not found! Will follow to create new job.' 27 | ) 28 | LOADER.personalize_cli.create_dataset_import_job( 29 | jobName='{datasetType}_{date}'.format(**event), 30 | datasetArn=event['datasetArn'], 31 | dataSource={ 32 | 'dataLocation': 's3://{bucket}/{datasetType}/'.format(**event) 33 | }, 34 | roleArn=environ['PERSONALIZE_ROLE'] 35 | ) 36 | status = LOADER.personalize_cli.describe_dataset_import_job( 37 | datasetImportJobArn=datasetImportJobArn 38 | )['datasetImportJob'] 39 | 40 | actions.take_action(status['status']) 41 | return datasetImportJobArn 42 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/import-data/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/import-data/requirements.txt -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-campaigns/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-campaigns/__init__.py -------------------------------------------------------------------------------- 
/next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-campaigns/list-campaigns.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | from loader import Loader 3 | import actions 4 | import json 5 | LOADER = Loader() 6 | 7 | 8 | def lambda_handler(event, context): 9 | try: 10 | response = LOADER.personalize_cli.list_campaigns( 11 | solutionArn=event['solutionArn'], 12 | maxResults=100 13 | ) 14 | 15 | return json.loads(json.dumps(response['campaigns'], default=str)) 16 | except Exception as e: 17 | LOADER.logger.error(f'Error listing campaigns {e}') 18 | raise e 19 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-campaigns/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-campaigns/requirements.txt -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-datasets/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-datasets/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-datasets/list-datasets.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | from loader import Loader 3 | import actions 4 | import json 5 | 6 | LOADER = Loader() 7 | 8 | 9 | def lambda_handler(event, 
context): 10 | try: 11 | response = LOADER.personalize_cli.list_datasets( 12 | datasetGroupArn=event['datasetGroupArn'], 13 | maxResults = 100 14 | ) 15 | 16 | return json.loads(json.dumps(response['datasets'], default=str)) 17 | except Exception as e: 18 | LOADER.logger.error(f'Error listing datasets {e}') 19 | raise e 20 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-datasets/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-datasets/requirements.txt -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-solution-versions/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-solution-versions/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-solution-versions/list-solution-versions.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | from loader import Loader 3 | import actions 4 | import json 5 | 6 | LOADER = Loader() 7 | 8 | 9 | def lambda_handler(event, context): 10 | try: 11 | response = LOADER.personalize_cli.list_solution_versions( 12 | solutionArn=event['solutionArn'], 13 | maxResults = 100 14 | ) 15 | 16 | return json.loads(json.dumps(response['solutionVersions'], default=str)) 17 | except Exception as e: 18 | LOADER.logger.error(f'Error 
listing solution versions {e}') 19 | raise e 20 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-solution-versions/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-solution-versions/requirements.txt -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-solutions/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-solutions/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-solutions/list-solutions.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | from loader import Loader 3 | import actions 4 | import json 5 | 6 | LOADER = Loader() 7 | 8 | 9 | def lambda_handler(event, context): 10 | try: 11 | response = LOADER.personalize_cli.list_solutions( 12 | datasetGroupArn=event['datasetGroupArn'], 13 | maxResults=100 14 | ) 15 | 16 | return json.loads(json.dumps(response['solutions'], default=str)) 17 | except Exception as e: 18 | LOADER.logger.error(f'Error listing solutions {e}') 19 | raise e 20 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-solutions/requirements.txt: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-solutions/requirements.txt -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-trackers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-trackers/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-trackers/list-trackers.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | from loader import Loader 3 | import actions 4 | import json 5 | 6 | LOADER = Loader() 7 | 8 | 9 | def lambda_handler(event, context): 10 | try: 11 | response = LOADER.personalize_cli.list_event_trackers( 12 | datasetGroupArn=event['datasetGroupArn'], 13 | maxResults = 100 14 | ) 15 | 16 | return json.loads(json.dumps(response['eventTrackers'], default=str)) 17 | except Exception as e: 18 | LOADER.logger.error(f'Error listing event trackers {e}') 19 | raise e 20 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-trackers/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/list-trackers/requirements.txt 
-------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/notify-delete/__inti__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/notify-delete/__inti__.py -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/notify-delete/notify-delete.py: -------------------------------------------------------------------------------- 1 | import os 2 | from boto3 import client 3 | SNS = client('sns') 4 | 5 | def get_message(event): 6 | message = 'Resource Delete: ' 7 | if 'statesError' in event.keys(): 8 | message += f"Internal error: {event['statesError']}" 9 | if 'serviceError' in event.keys(): 10 | message += f"Service error: {event['statesError']}" 11 | if 'datasetGroupArn' in event.keys(): 12 | message += f"DatasetGroup deleted: {event['datasetGroupArn']}" 13 | if 'Error' in event.keys(): 14 | message += f"State machine failed: {event['Error']} : {event['Cause']}" 15 | return message 16 | 17 | 18 | def lambda_handler(event, context): 19 | return SNS.publish( 20 | TopicArn=os.environ['SNS_TOPIC_ARN'], Message=get_message(event) 21 | ) 22 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/notify-delete/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/notify-delete/requirements.txt -------------------------------------------------------------------------------- 
/next_steps/operations/ml_ops/personalize-step-functions/lambdas/notify/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/notify/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/notify/notify.py: -------------------------------------------------------------------------------- 1 | import os 2 | from boto3 import client 3 | SNS = client('sns') 4 | 5 | def get_message(event): 6 | if 'statesError' in event.keys(): 7 | return 'Internal error: {}'.format(event['statesError']) 8 | if 'serviceError' in event.keys(): 9 | return 'Service error: {}'.format(event['statesError']) 10 | return 'Your Personalize Endpoint is ready!' 11 | 12 | def lambda_handler(event, context): 13 | return SNS.publish( 14 | TopicArn=os.environ['SNS_TOPIC_ARN'], Message=get_message(event) 15 | ) -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/notify/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/notify/requirements.txt -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/s3lambda/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/s3lambda/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/s3lambda/parse.py: -------------------------------------------------------------------------------- 1 | import os 2 | from json import loads, dumps 3 | from datetime import datetime 4 | from boto3 import client 5 | 6 | STEP_FUNCTIONS_CLI = client('stepfunctions') 7 | 8 | 9 | def get_params(bucket_name, key_name): 10 | params = loads( 11 | client('s3').get_object(Bucket=bucket_name, 12 | Key=key_name)['Body'].read().decode('utf-8') 13 | ) 14 | return params 15 | 16 | 17 | def lambda_handler(event, context): 18 | bucket_name = event['Records'][0]['s3']['bucket']['name'] 19 | return dumps( 20 | STEP_FUNCTIONS_CLI.start_execution( 21 | stateMachineArn=os.environ['STEP_FUNCTIONS_ARN'], 22 | name=datetime.now().strftime("%Y_%m_%d_%H_%M_%S"), 23 | input=dumps( 24 | { 25 | 'bucket': bucket_name, 26 | 'currentDate': datetime.now().strftime("%Y_%m_%d_%H_%M_%S"), 27 | 'params': 28 | get_params(bucket_name, os.environ['PARAMS_FILE']) 29 | } 30 | ) 31 | ), 32 | default=str 33 | ) 34 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/s3lambda/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/s3lambda/requirements.txt -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-campaign/__init__.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-campaign/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-campaign/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-campaign/requirements.txt -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-campaign/wait-delete-campaign.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | from loader import Loader 3 | import actions 4 | 5 | LOADER = Loader() 6 | 7 | 8 | def lambda_handler(event, context): 9 | # return event 10 | status = LOADER.personalize_cli.describe_campaign( 11 | campaignArn=event['campaignArn'] 12 | )['campaign'] 13 | 14 | actions.take_action_delete(status['status']) 15 | return status['status'] 16 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-dataset/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-dataset/__init__.py -------------------------------------------------------------------------------- 
/next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-dataset/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-dataset/requirements.txt -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-dataset/wait-delete-dataset.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | from loader import Loader 3 | import actions 4 | 5 | LOADER = Loader() 6 | 7 | 8 | def lambda_handler(event, context): 9 | # return event 10 | status = LOADER.personalize_cli.describe_dataset( 11 | datasetArn=event['datasetArn'] 12 | )['dataset'] 13 | 14 | actions.take_action_delete(status['status']) 15 | return status['status'] 16 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-datasetgroup/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-datasetgroup/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-datasetgroup/requirements.txt: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-datasetgroup/requirements.txt -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-datasetgroup/wait-delete-datasetgroup.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | from loader import Loader 3 | import actions 4 | 5 | LOADER = Loader() 6 | 7 | 8 | def lambda_handler(event, context): 9 | # return event 10 | status = LOADER.personalize_cli.describe_dataset_group( 11 | datasetGroupArn=event['datasetGroupArn'] 12 | )['datasetGroup'] 13 | 14 | actions.take_action_delete(status['status']) 15 | return status['status'] 16 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-solution/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-solution/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-solution/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-solution/requirements.txt -------------------------------------------------------------------------------- 
/next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-solution/wait-delete-solution.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | from loader import Loader 3 | import actions 4 | 5 | LOADER = Loader() 6 | 7 | 8 | def lambda_handler(event, context): 9 | # return event 10 | status = LOADER.personalize_cli.describe_solution( 11 | solutionArn=event['solutionArn'] 12 | )['solution'] 13 | 14 | actions.take_action_delete(status['status']) 15 | return status['status'] 16 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-tracker/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-tracker/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-tracker/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-tracker/requirements.txt -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-delete-tracker/wait-delete-tracker.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | from loader import Loader 3 | import actions 4 | 5 | LOADER = Loader() 6 | 7 | 8 | def lambda_handler(event, context): 9 | # return event 10 | status = 
LOADER.personalize_cli.describe_event_tracker( 11 | eventTrackerArn=event['eventTrackerArn'] 12 | )['eventTracker'] 13 | 14 | actions.take_action_delete(status['status']) 15 | return status['status'] -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-solution-version/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-solution-version/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-solution-version/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-solution-version/requirements.txt -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/lambdas/wait-solution-version/wait-solution-version.py: -------------------------------------------------------------------------------- 1 | from os import environ 2 | from loader import Loader 3 | import actions 4 | 5 | LOADER = Loader() 6 | 7 | 8 | def lambda_handler(event, context): 9 | # return event 10 | status = LOADER.personalize_cli.describe_solution_version( 11 | solutionVersionArn=event['solutionVersionArn'] 12 | )['solutionVersion'] 13 | 14 | actions.take_action(status['status']) 15 | return status['status'] 16 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/shared/python/actions.py: 
-------------------------------------------------------------------------------- 1 | class ResourcePending(Exception): 2 | pass 3 | 4 | 5 | class ResourceFailed(Exception): 6 | pass 7 | 8 | 9 | def take_action(status): 10 | if status in {'CREATE PENDING', 'CREATE IN_PROGRESS'}: 11 | raise ResourcePending 12 | if status != 'ACTIVE': 13 | raise ResourceFailed 14 | return True 15 | 16 | 17 | def take_action_delete(status): 18 | if status in {'DELETE PENDING', 'DELETE IN_PROGRESS'}: 19 | raise ResourcePending 20 | raise ResourceFailed 21 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/shared/python/loader.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from boto3 import client 3 | 4 | 5 | class Loader: 6 | def __init__(self): 7 | self.personalize_cli = client('personalize') 8 | self.logger = logging.getLogger() 9 | self.logger.setLevel(logging.INFO) 10 | self.account_id = client('sts').get_caller_identity()['Account'] 11 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops/personalize-step-functions/shared/python/parameters.py: -------------------------------------------------------------------------------- 1 | from json import loads 2 | from boto3 import client 3 | 4 | 5 | def get_params(bucket_name, key_name): 6 | S3 = client('s3') 7 | return loads( 8 | client('s3').get_object(Bucket=bucket_name, 9 | Key=key_name)['Body'].read().decode('utf-8') 10 | ) 11 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops_ds_sdk/CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | ## Code of Conduct 2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 
3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 4 | opensource-codeofconduct@amazon.com with any additional questions or comments. 5 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops_ds_sdk/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing Guidelines 2 | 3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional 4 | documentation, we greatly value feedback and contributions from our community. 5 | 6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary 7 | information to effectively respond to your bug report or contribution. 8 | 9 | 10 | ## Reporting Bugs/Feature Requests 11 | 12 | We welcome you to use the GitHub issue tracker to report bugs or suggest features. 13 | 14 | When filing an issue, please check existing open, or recently closed, issues to make sure somebody else hasn't already 15 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful: 16 | 17 | * A reproducible test case or series of steps 18 | * The version of our code being used 19 | * Any modifications you've made relevant to the bug 20 | * Anything unusual about your environment or deployment 21 | 22 | 23 | ## Contributing via Pull Requests 24 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that: 25 | 26 | 1. You are working against the latest source on the *master* branch. 27 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already. 28 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted. 29 | 30 | To send us a pull request, please: 31 | 32 | 1. 
Fork the repository. 33 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change. 34 | 3. Ensure local tests pass. 35 | 4. Commit to your fork using clear commit messages. 36 | 5. Send us a pull request, answering any default questions in the pull request interface. 37 | 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation. 38 | 39 | GitHub provides additional document on [forking a repository](https://help.github.com/articles/fork-a-repo/) and 40 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/). 41 | 42 | 43 | ## Finding contributions to work on 44 | Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any 'help wanted' issues is a great place to start. 45 | 46 | 47 | ## Code of Conduct 48 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 49 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 50 | opensource-codeofconduct@amazon.com with any additional questions or comments. 51 | 52 | 53 | ## Security issue notifications 54 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue. 55 | 56 | 57 | ## Licensing 58 | 59 | See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution. 60 | 61 | We may ask you to sign a [Contributor License Agreement (CLA)](http://en.wikipedia.org/wiki/Contributor_License_Agreement) for larger changes. 
62 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops_ds_sdk/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of 4 | this software and associated documentation files (the "Software"), to deal in 5 | the Software without restriction, including without limitation the rights to 6 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 7 | the Software, and to permit persons to whom the Software is furnished to do so. 8 | 9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 10 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 11 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 12 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 13 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 14 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 15 | 16 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops_ds_sdk/README.md: -------------------------------------------------------------------------------- 1 | ## Automating your Personalize workflow using AWS Step Functions Data Science SDK 2 | 3 | As machine learning (ML) becomes a larger part of companies’ core business, there is a greater emphasis on reducing the time from model creation to deployment. In November of 2019, AWS released the AWS Step Functions Data Science SDK for Amazon SageMaker, an open-source SDK that allows developers to create Step Functions-based machine learning workflows in Python. You can now use the SDK to create reusable model deployment workflows with the same tools you use to develop models. 
You can find the complete notebook for this solution in the “automate_personalize_workflow” folder of our GitHub repo. 4 | 5 | This repo demonstrates the capabilities of the Data Science SDK with a common use case: how to automate Personalize. In this post, you create a serverless workflow to train a movie recommendation engine. Finally, this post shows how to trigger a workflow based on a periodic schedule. 6 | 7 | ### This post uses the following AWS services: 8 | • AWS Step Functions allows you to coordinate several AWS services into a serverless workflow. You can design and run workflows in which the output of one step acts as the input to the next step, and embed error handling into the workflow.\ 9 | • AWS Lambda is a compute service that lets you run code without provisioning or managing servers. Lambda executes your code only when triggered and scales automatically, from a few requests per day to thousands per second.\ 10 | • Amazon Personalize is a machine learning service which enables you to personalize your website, app, ads, emails, and more, with custom machine learning models which can be created in Amazon Personalize, with no prior machine learning experience. 11 | 12 | ## Overview of the SDK 13 | The SDK provides a new way to use AWS Step Functions. A Step Function is a state machine that consists of a series of discrete steps. Each step can perform work, make choices, initiate parallel execution, or manage timeouts. You can develop individual steps and use Step Functions to handle the triggering, coordination, and state of the overall workflow. Before the Data Science SDK, you had to define Step Functions using the JSON-based Amazon States Language. With the SDK, you can now easily create, execute, and visualize Step Functions using Python code.
14 | 15 | This repo provides an overview of the SDK, including how to create Step Function steps, work with parameters, integrate service-specific capabilities, and link these steps together to create and visualize a workflow. You can find several code examples throughout the post; however, we created a detailed Amazon SageMaker notebook of the entire process. 16 | 17 | ## Overview of Amazon Personalize 18 | Amazon Personalize is a machine learning service that makes it easy for developers to create individualized recommendations for customers using their applications. 19 | 20 | Machine learning is being increasingly used to improve customer engagement by powering personalized product and content recommendations, tailored search results, and targeted marketing promotions. However, developing the machine-learning capabilities necessary to produce these sophisticated recommendation systems has been beyond the reach of most organizations today due to the complexity. Amazon Personalize allows developers with no prior machine learning experience to easily build sophisticated personalization capabilities into their applications, using machine learning technology perfected from years of use on Amazon.com. 21 | 22 | With Amazon Personalize, you provide an activity stream from your application – clicks, page views, signups, purchases, and so forth – as well as an inventory of the items you want to recommend, such as articles, products, videos, or music. You can also choose to provide Amazon Personalize with additional demographic information from your users such as age, or geographic location. Amazon Personalize will process and examine the data, identify what is meaningful, select the right algorithms, and train and optimize a personalization model that is customized for your data. All data analyzed by Amazon Personalize is kept private and secure, and only used for your customized recommendations. You can start serving personalized recommendations via a simple API call. 
You pay only for what you use, and there are no minimum fees and no upfront commitments. 23 | 24 | Amazon Personalize is like having your own Amazon.com machine learning personalization team at your disposal, 24 hours a day. 25 | 26 | 27 | 28 | ## Instructions 29 | Upload the notebook and follow the instructions 30 | 31 | ## License 32 | 33 | This library is licensed under the MIT-0 License. See the LICENSE file. 34 | 35 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops_ds_sdk/lambda/stepfunction-create-schema.py: -------------------------------------------------------------------------------- 1 | import json 2 | import base64 3 | import boto3 4 | 5 | personalize = boto3.client('personalize') 6 | personalize_runtime = boto3.client('personalize-runtime') 7 | 8 | def lambda_handler(event, context): 9 | 10 | schema = { 11 | "type": "record", 12 | "name": "Interactions", 13 | "namespace": "com.amazonaws.personalize.schema", 14 | "fields": [ 15 | { 16 | "name": "USER_ID", 17 | "type": "string" 18 | }, 19 | { 20 | "name": "ITEM_ID", 21 | "type": "string" 22 | }, 23 | { 24 | "name": "TIMESTAMP", 25 | "type": "long" 26 | } 27 | ], 28 | "version": "1.0" 29 | } 30 | 31 | create_schema_response = personalize.create_schema( 32 | name = event['input'], 33 | schema = json.dumps(schema) 34 | ) 35 | 36 | schema_arn = create_schema_response['schemaArn'] 37 | print(json.dumps(create_schema_response, indent=2)) 38 | 39 | return { 40 | 'statusCode': 200, 41 | 'schemaArn':schema_arn, 42 | 'output': schema_arn 43 | } -------------------------------------------------------------------------------- /next_steps/operations/ml_ops_ds_sdk/lambda/stepfunction-createdatasetimportjob.py: -------------------------------------------------------------------------------- 1 | import json 2 | import boto3 3 | import base64 4 | 5 | 6 | personalize = boto3.client('personalize') 7 | personalize_runtime = boto3.client('personalize-runtime') 8 
| 9 | def lambda_handler(event, context): 10 | 11 | datasetArn = event['dataset_arn'] 12 | bucket = event['bucket_name'] 13 | filename = event['file_name'] 14 | roleArn = event['role_arn'] 15 | 16 | create_dataset_import_job_response = personalize.create_dataset_import_job( 17 | jobName = "stepfunction-dataset-import-job", 18 | datasetArn = datasetArn, 19 | dataSource = { 20 | "dataLocation": "s3://{}/{}".format(bucket, filename) 21 | }, 22 | roleArn = roleArn 23 | ) 24 | 25 | dataset_import_job_arn = create_dataset_import_job_response['datasetImportJobArn'] 26 | print(json.dumps(create_dataset_import_job_response, indent=2)) 27 | 28 | 29 | 30 | 31 | # TODO implement 32 | return { 33 | 'statusCode': 200, 34 | 'dataset_import_job_arn': dataset_import_job_arn, 35 | 'datasetGroupArn': event['datasetGroupArn'] 36 | #'body': json.dumps('Hello from Lambda!') 37 | } 38 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops_ds_sdk/lambda/stepfunction_create_personalize_role.py: -------------------------------------------------------------------------------- 1 | import json 2 | import boto3 3 | import base64 4 | 5 | def lambda_handler(event, context): 6 | # TODO implement 7 | 8 | 9 | #### Attach Policy to S3 Bucket 10 | 11 | s3 = boto3.client("s3") 12 | 13 | policy = { 14 | "Version": "2012-10-17", 15 | "Id": "PersonalizeS3BucketAccessPolicy", 16 | "Statement": [ 17 | { 18 | "Sid": "PersonalizeS3BucketAccessPolicy", 19 | "Effect": "Allow", 20 | "Principal": { 21 | "Service": "personalize.amazonaws.com" 22 | }, 23 | "Action": [ 24 | "s3:GetObject", 25 | "s3:ListBucket" 26 | ], 27 | "Resource": [ 28 | "arn:aws:s3:::{}".format(event['bucket']), 29 | "arn:aws:s3:::{}/*".format(event['bucket']) 30 | ] 31 | } 32 | ] 33 | } 34 | 35 | s3.put_bucket_policy(Bucket=event['bucket'], Policy=json.dumps(policy)) 36 | 37 | #### Create Personalize Role 38 | 39 | 40 | iam = boto3.client("iam") 41 | 42 | role_name = "PersonalizeRole" 
43 | assume_role_policy_document = { 44 | "Version": "2012-10-17", 45 | "Statement": [ 46 | { 47 | "Effect": "Allow", 48 | "Principal": { 49 | "Service": "personalize.amazonaws.com" 50 | }, 51 | "Action": "sts:AssumeRole" 52 | } 53 | ] 54 | } 55 | 56 | create_role_response = iam.create_role( 57 | RoleName = role_name, 58 | AssumeRolePolicyDocument = json.dumps(assume_role_policy_document) 59 | ) 60 | 61 | # AmazonPersonalizeFullAccess provides access to any S3 bucket with a name that includes "personalize" or "Personalize" 62 | # if you would like to use a bucket with a different name, please consider creating and attaching a new policy 63 | # that provides read access to your bucket or attaching the AmazonS3ReadOnlyAccess policy to the role 64 | 65 | policy_arn = "arn:aws:iam::aws:policy/service-role/AmazonPersonalizeFullAccess" 66 | iam.attach_role_policy( 67 | RoleName = role_name, 68 | PolicyArn = policy_arn 69 | ) 70 | 71 | time.sleep(60) # wait for a minute to allow IAM role policy attachment to propagate 72 | 73 | role_arn = create_role_response["Role"]["Arn"] 74 | print(role_arn) 75 | 76 | 77 | 78 | 79 | return { 80 | 'statusCode': 200, 81 | 'role_arn':role_arn 82 | #'body': json.dumps('Hello from Lambda!') 83 | } 84 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops_ds_sdk/lambda/stepfunction_create_solution_version.py: -------------------------------------------------------------------------------- 1 | import json 2 | import boto3 3 | import base64 4 | 5 | personalize = boto3.client('personalize') 6 | personalize_runtime = boto3.client('personalize-runtime') 7 | 8 | 9 | def lambda_handler(event, context): 10 | 11 | create_solution_version_response = personalize.create_solution_version( 12 | solutionArn = event['solution_arn'] 13 | ) 14 | 15 | solution_version_arn = create_solution_version_response['solutionVersionArn'] 16 | #print(json.dumps(create_solution_version_response, indent=2)) 17 | 18 
| # TODO implement 19 | return { 20 | 'statusCode': 200, 21 | 'solution_version_arn': solution_version_arn 22 | #'body': json.dumps('Hello from Lambda!') 23 | } 24 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops_ds_sdk/lambda/stepfunction_getRecommendations.py: -------------------------------------------------------------------------------- 1 | import json 2 | import boto3 3 | import base64 4 | 5 | personalize = boto3.client('personalize') 6 | personalize_runtime = boto3.client('personalize-runtime') 7 | 8 | def lambda_handler(event, context): 9 | 10 | userId = str(event['user_id']) 11 | itemId = str(event['item_id']) 12 | campaignArn = event['campaign_arn'] 13 | 14 | 15 | 16 | print("userId, itemId",userId, itemId ) 17 | 18 | 19 | get_recommendations_response = personalize_runtime.get_recommendations( 20 | campaignArn=campaignArn, 21 | userId=userId, 22 | itemId=itemId 23 | 24 | ) 25 | 26 | item_list = get_recommendations_response['itemList'] 27 | 28 | return { 29 | 'item_list': item_list 30 | #'body': json.dumps('Hello from Lambda!') 31 | } 32 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops_ds_sdk/lambda/stepfunction_getsolution_metric_create_campaign.py: -------------------------------------------------------------------------------- 1 | import json 2 | import base64 3 | import boto3 4 | 5 | personalize = boto3.client('personalize') 6 | personalize_runtime = boto3.client('personalize-runtime') 7 | 8 | def lambda_handler(event, context): 9 | # TODO implement 10 | get_solution_metrics_response = personalize.get_solution_metrics( 11 | solutionVersionArn = event['solution_version_arn'] 12 | ) 13 | 14 | create_campaign_response = personalize.create_campaign( 15 | name = "stepfunction-campaign", 16 | solutionVersionArn = event['solution_version_arn'], 17 | minProvisionedTPS = 1 18 | ) 19 | 20 | campaign_arn = 
create_campaign_response['campaignArn'] 21 | print(json.dumps(create_campaign_response, indent=2)) 22 | 23 | 24 | return { 25 | 'campaign_arn': campaign_arn, 26 | 'solution_version_arn': event['solution_version_arn'] 27 | #'o': status, 28 | #'datasetGroupArn': datasetGroupArnVal 29 | 30 | #'body': json.dumps('Hello from Lambda!') 31 | } 32 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops_ds_sdk/lambda/stepfunction_select-recipe_create-solution.py: -------------------------------------------------------------------------------- 1 | import json 2 | import boto3 3 | import base64 4 | 5 | personalize = boto3.client('personalize') 6 | personalize_runtime = boto3.client('personalize-runtime') 7 | 8 | def lambda_handler(event, context): 9 | 10 | list_recipes_response = personalize.list_recipes() 11 | recipe_arn = "arn:aws:personalize:::recipe/aws-hrnn" # aws-hrnn selected for demo purposes 12 | #list_recipes_response 13 | 14 | create_solution_response = personalize.create_solution( 15 | name = "stepfunction-solution", 16 | datasetGroupArn = event['dataset_group_arn'], 17 | recipeArn = recipe_arn 18 | ) 19 | 20 | solution_arn = create_solution_response['solutionArn'] 21 | print(json.dumps(create_solution_response, indent=2)) 22 | 23 | 24 | 25 | 26 | # TODO implement 27 | return { 28 | 'statusCode': 200, 29 | 'solution_arn': solution_arn 30 | #'body': json.dumps('Hello from Lambda!') 31 | } 32 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops_ds_sdk/lambda/stepfunction_waitforCampaign.py: -------------------------------------------------------------------------------- 1 | import json 2 | import base64 3 | import boto3 4 | 5 | personalize = boto3.client('personalize') 6 | personalize_runtime = boto3.client('personalize-runtime') 7 | 8 | def lambda_handler(event, context): 9 | describe_campaign_response = personalize.describe_campaign( 10 | 
campaignArn = event['campaign_arn'] 11 | ) 12 | status = describe_campaign_response["campaign"]["status"] 13 | print("Campaign: {}".format(status)) 14 | 15 | return { 16 | 'status': status, 17 | 'campaign_arn': event['campaign_arn'] 18 | #'o': status, 19 | #'datasetGroupArn': datasetGroupArnVal 20 | 21 | #'body': json.dumps('Hello from Lambda!') 22 | } 23 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops_ds_sdk/lambda/stepfunction_waitforDatasetGroup.py: -------------------------------------------------------------------------------- 1 | import json 2 | import base64 3 | import boto3 4 | 5 | personalize = boto3.client('personalize') 6 | personalize_runtime = boto3.client('personalize-runtime') 7 | 8 | def lambda_handler(event, context): 9 | # TODO implement 10 | datasetGroupArnVal = event['input'] 11 | describe_dataset_group_response = personalize.describe_dataset_group( 12 | datasetGroupArn = datasetGroupArnVal 13 | #datasetGroupArn = event['Payload']['datasetGroupArn'] 14 | 15 | ) 16 | #personalize.describe_dataset_group 17 | #print("DatasetGroup: {}".format(datasetGroupArn)) 18 | return_status = False 19 | status = describe_dataset_group_response["datasetGroup"]["status"] 20 | print("DatasetGroup: {}".format(status)) 21 | 22 | return { 23 | 'status': status, 24 | 'DatasetGroup': status, 25 | 'datasetGroupArn': datasetGroupArnVal, 26 | 'schemaArn': event['schemaArn'] 27 | 28 | #'body': json.dumps('Hello from Lambda!') 29 | } 30 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops_ds_sdk/lambda/stepfunction_waitforSolutionVersion.py: -------------------------------------------------------------------------------- 1 | import json 2 | import base64 3 | import boto3 4 | 5 | personalize = boto3.client('personalize') 6 | personalize_runtime = boto3.client('personalize-runtime') 7 | 8 | def lambda_handler(event, context): 9 | 
describe_solution_version_response = personalize.describe_solution_version( 10 | solutionVersionArn = event['solution_version_arn'] 11 | ) 12 | status = describe_solution_version_response["solutionVersion"]["status"] 13 | #print("SolutionVersion: {}".format(status)) 14 | 15 | return { 16 | 'status': status, 17 | 'solution_version_arn': event['solution_version_arn'] 18 | #'DatasetGroup': status, 19 | #'datasetGroupArn': datasetGroupArnVal 20 | 21 | #'body': json.dumps('Hello from Lambda!') 22 | } 23 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops_ds_sdk/lambda/stepfunction_waitfordatasetimportjob.py: -------------------------------------------------------------------------------- 1 | import json 2 | import base64 3 | import boto3 4 | 5 | personalize = boto3.client('personalize') 6 | personalize_runtime = boto3.client('personalize-runtime') 7 | 8 | def lambda_handler(event, context): 9 | # TODO implement 10 | describe_dataset_import_job_response = personalize.describe_dataset_import_job( 11 | datasetImportJobArn = event['dataset_import_job_arn'] 12 | ) 13 | status = describe_dataset_import_job_response["datasetImportJob"]['status'] 14 | print("DatasetImportJob: {}".format(status)) 15 | 16 | return { 17 | 'status': status, 18 | 'dataset_import_job_arn': event['dataset_import_job_arn'], 19 | 'datasetGroupArn': event['datasetGroupArn'] 20 | 21 | #'o': status, 22 | #'datasetGroupArn': datasetGroupArnVal 23 | 24 | #'body': json.dumps('Hello from Lambda!') 25 | } 26 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops_ds_sdk/lambda/stepfunctioncreatedatagroup.py: -------------------------------------------------------------------------------- 1 | import json 2 | import base64 3 | import boto3 4 | 5 | personalize = boto3.client('personalize') 6 | personalize_runtime = boto3.client('personalize-runtime') 7 | 8 | def lambda_handler(event, context): 9 | 
10 | create_dataset_group_response = personalize.create_dataset_group( 11 | name = event['input'] 12 | ) 13 | 14 | dataset_group_arn = create_dataset_group_response['datasetGroupArn'] 15 | print(json.dumps(create_dataset_group_response, indent=2)) 16 | 17 | return { 18 | 'statusCode': 200, 19 | 'datasetGroupArn':dataset_group_arn, 20 | 'schemaArn': event['schemaArn'] 21 | #'output': dataset_group_arn 22 | 23 | } 24 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops_ds_sdk/lambda/stepfunctioncreatedataset.py: -------------------------------------------------------------------------------- 1 | import json 2 | import base64 3 | import boto3 4 | 5 | 6 | personalize = boto3.client('personalize') 7 | personalize_runtime = boto3.client('personalize-runtime') 8 | 9 | def lambda_handler(event, context): 10 | # TODO implement 11 | 12 | dataset_type = "INTERACTIONS" 13 | datasetGroupArn = event['datasetGroupArn'] 14 | create_dataset_response = personalize.create_dataset( 15 | name = "personalize-stepfunction-dataset", 16 | datasetType = dataset_type, 17 | datasetGroupArn = event['datasetGroupArn'], 18 | schemaArn = event['schemaArn'] 19 | ) 20 | 21 | dataset_arn = create_dataset_response['datasetArn'] 22 | print(json.dumps(create_dataset_response, indent=2)) 23 | 24 | 25 | 26 | 27 | return { 28 | 'statusCode': 200, 29 | 'dataset_arn': dataset_arn, 30 | 'datasetGroupArn': datasetGroupArn 31 | #'body': json.dumps('Hello from Lambda!') 32 | } 33 | -------------------------------------------------------------------------------- /next_steps/operations/ml_ops_ds_sdk/lambda/stepfunctioncreatedatasetimportjob.py: -------------------------------------------------------------------------------- 1 | import json 2 | import base64 3 | import boto3 4 | 5 | personalize = boto3.client('personalize') 6 | personalize_runtime = boto3.client('personalize-runtime') 7 | 8 | def lambda_handler(event, context): 9 | 10 | 
# fix: read the S3 location and role from the event first — the original referenced
# undefined names 'bucket'/'filename'/'role_arn' and passed invalid kwargs
# ('bucket', 'filename', 'role_arn') to create_dataset_import_job, which raises a
# ParamValidationError 10 | bucket = event['bucket_name'] 11 | filename = event['file_name'] 12 | role_arn = event['role_arn'] 13 | create_dataset_import_job_response = personalize.create_dataset_import_job( 14 | jobName = event['datasetimportjob'], 15 | datasetArn = event['dataset_arn'], 16 | 17 | dataSource = { 18 | "dataLocation": "s3://{}/{}".format(bucket, filename) 19 | }, 20 | roleArn = role_arn 21 | ) 22 | 23 | dataset_import_job_arn = create_dataset_import_job_response['datasetImportJobArn'] 24 | print(json.dumps(create_dataset_import_job_response, indent=2)) 25 | return { 26 | 'statusCode': 200, 27 | 'dataset_import_job_arn':dataset_import_job_arn, 28 | 'output': dataset_import_job_arn 29 | } 30 | 31 | -------------------------------------------------------------------------------- /next_steps/operations/streaming_events/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Luis Lopez 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /next_steps/operations/streaming_events/README.md: -------------------------------------------------------------------------------- 1 | # Getting Started 2 | 3 | This example showcases a key piece you can use to construct your API Layer to consume Amazon Personalize recommendations and produce real time events 4 | 5 | As we can see below this is the architecture that you will be deploying from this project. 6 | 7 | ![Architecture Diagram](images/architecture.png) 8 | 9 | **Note:** The Amazon Personalize Campaigns and Event trackers need to be deployed independently beforehand for you to complete this tutorial. You can deploy your Amazon Personalize Campaign by using the following automation example under the MLOps folder, or by leveraging the getting started folder. 10 | 11 | ## Prerequisites 12 | 13 | ### Installing AWS SAM 14 | 15 | The AWS Serverless Application Model (SAM) is an open-source framework for building serverless applications. It provides shorthand syntax to express functions, APIs, databases, and event source mappings. With just a few lines per resource, you can define the application you want and model it using YAML. During deployment, SAM transforms and expands the SAM syntax into AWS CloudFormation syntax, enabling you to build serverless applications faster. 16 | 17 | **Install** the [AWS SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html). 18 | This will install the necessary tools to build, deploy, and locally test your project. In this particular example we will be using AWS SAM to build and deploy only. 
For additional information please visit our [documentation](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/what-is-sam.html). 19 | 20 | ### Create your Personalize Components 21 | 22 | **Create** an Amazon Personalize Campaign and attach an event tracker to it, after following our getting started [instructions](https://github.com/aws-samples/amazon-personalize-samples/tree/master/getting_started). 23 | 24 | You could also automate this part by leveraging this MLOps [example](https://github.com/aws-samples/amazon-personalize-samples/tree/master/next_steps/operations/ml_ops) 25 | 26 | ## Build and Deploy 27 | 28 | In order to deploy the project you will need to run the following commands: 29 | 30 | 1. Clone the Amazon Personalize Samples repo 31 | - `git clone https://github.com/aws-samples/amazon-personalize-samples.git` 32 | 2. Navigate into the *next_steps/operations/streaming_events* directory 33 | - `cd amazon-personalize-samples/next_steps/operations/streaming_events` 34 | 3. Build your SAM project. [Installation instructions](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html) 35 | - `sam build` 36 | 4. Deploy your project. SAM offers a guided deployment option, note that you will need to provide your email address as a parameter to receive a notification. 37 | - `sam deploy --guided` 38 | 5. Enter the S3 bucket where you will like to store your events data, the Personalize Campaign ARN and EventTracker ID. 39 | 40 | ## Testing the endpoints 41 | 42 | - Navigate to the Amazon CloudFormation [console](https://console.aws.amazon.com/cloudformation/home?region=us-east-1) 43 | - Select the stack deployed by SAM 44 | - Navigate to the outputs sections where you will find 2 endpoints an API Key: 45 | 1. POST getRecommendations Endpoint 46 | 2. POST Events Endopoint 47 | 3. 
Redirect to the API Gateway console where you can click on the Show Key section to display the API Key 48 | 49 | If you are using PostMan or something similar you will need to provide a header with: 50 | `x-api-key: ` 51 | 52 | **POST getRecommendations example:** 53 | 54 | *Body Parameter:* 55 | ``` 56 | { 57 | "userId":"12345" 58 | 59 | } 60 | ``` 61 | 62 | *Endpoint:* `https://XXXXXX.execute-api.us-east-1.amazonaws.com/dev2/recommendations` 63 | 64 | 65 | **POST event example** 66 | 67 | For the POST endpoint you need to send an event similar to the following in the *body* of the request: 68 | 69 | *Endpoint:* `https://XXXXXX.execute-api.us-east-1.amazonaws.com/dev2/history` 70 | 71 | *Body:* 72 | ``` 73 | { 74 | "Event":{ 75 | "itemId": "ITEMID", 76 | "eventValue": EVENT-VALUE, 77 | "CONTEXT": "VALUE" //optional 78 | }, 79 | "SessionId": "SESSION-ID-IDENTIFIER", 80 | "EventType": "YOUR-EVENT-TYPE", 81 | "UserId": "USERID" 82 | } 83 | ``` 84 | 85 | ## Summary 86 | 87 | Now that you have this architecture in your account, you can consume Amazon Personalize recommendations over the API Gateway POST recommendations endpoint and stream real time interactions data to the POST event endpoint. 88 | 89 | There are two additional features to this architecture: 90 | 91 | - A S3 bucket containing your events persisted from your Kinesis Stream. You can run analysis on this bucket by using other AWS services such as Glue and Athena. For example you can follow this [blog](https://aws.amazon.com/blogs/big-data/build-and-automate-a-serverless-data-lake-using-an-aws-glue-trigger-for-the-data-catalog-and-etl-jobs/) on how to automate an ETL pipeline. 92 | 93 | 94 | 95 | ## Next Steps 96 | 97 | Congratulations! You have successfully deployed and tested the API layer around your Amazon Personalize deployment.
98 | 99 | For additional information on Getting Recommendations please visit our [documentation](https://docs.aws.amazon.com/personalize/latest/dg/getting-recommendations.html) 100 | -------------------------------------------------------------------------------- /next_steps/operations/streaming_events/images/architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/streaming_events/images/architecture.png -------------------------------------------------------------------------------- /next_steps/operations/streaming_events/lambdas/getRecommendations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/streaming_events/lambdas/getRecommendations/__init__.py -------------------------------------------------------------------------------- /next_steps/operations/streaming_events/lambdas/getRecommendations/getRecommendations.py: -------------------------------------------------------------------------------- 1 | from boto3 import client 2 | personalize_cli = client('personalize-runtime') 3 | import json 4 | import os 5 | 6 | def handler(event, context): 7 | print(f"Event = {event}") 8 | payload = json.loads(event['body']) 9 | try: 10 | response = personalize_cli.get_recommendations( 11 | campaignArn=os.environ['CAMPAIGN_ARN'], 12 | userId=payload['userId'], 13 | # numResults=123, 14 | # filterArn = 'string', 15 | # context=payload['context'] 16 | ) 17 | print(f"RawRecommendations = {response['itemList']}") 18 | return {'statusCode': '200', 'body': json.dumps(response)} 19 | except personalize_cli.exceptions.ResourceNotFoundException as e: 20 | print(f"Personalize Error: {e}") 21 | return {'statusCode': '500', 'body': 
json.dumps("Campaign Not Found")} 22 | except personalize_cli.exceptions.InvalidInputException as e: 23 | print(f"Invalid Input Error: {e}") 24 | return {'statusCode': '400', 'body': json.dumps("Invalid Input")} 25 | except KeyError as e: 26 | print(f"Key Error: {e}") 27 | return {'statusCode': '400', 'body': json.dumps("Key Error")} 28 | -------------------------------------------------------------------------------- /next_steps/operations/streaming_events/lambdas/getRecommendations/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/operations/streaming_events/lambdas/getRecommendations/requirements.txt -------------------------------------------------------------------------------- /next_steps/operations/streaming_events/lambdas/putevents/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "putevents", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "putevents.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC" 11 | } 12 | -------------------------------------------------------------------------------- /next_steps/operations/streaming_events/lambdas/putevents/putevents.js: -------------------------------------------------------------------------------- 1 | const AWS = require('aws-sdk') 2 | var personalizeevents = new AWS.PersonalizeEvents(); 3 | var dynamoClient = new AWS.DynamoDB.DocumentClient(); 4 | 5 | console.log('Loading function'); 6 | 7 | exports.handler = (event, context, callback) => { 8 | console.log(JSON.stringify(event, null, 2)); 9 | 10 | event.Records.forEach(function(record) { 11 | // Kinesis data is base64 encoded so decode here 12 | var payload = Buffer.from(record.kinesis.data, 'base64').toString('ascii'); 13 | 
console.log('Decoded payload:', payload); 14 | payload = JSON.parse(payload); 15 | var eventDate = new Date(); 16 | var putEventsParams= { 17 | 'sessionId': payload.SessionId, /* required */ 18 | 'trackingId': process.env.TRACKING_ID, /* required */ 19 | 'userId': payload.UserId, 20 | eventList: [ 21 | { 22 | 'eventType': payload.EventType, /* required */ 23 | 'properties': payload.Event, /* required */ 24 | 'sentAt': eventDate 25 | }, 26 | ] 27 | } 28 | console.log("THIS IS THE OBJECT = " + JSON.stringify(putEventsParams,null,3)) 29 | personalizeevents.putEvents(putEventsParams, function (err, data) { 30 | if (err) { 31 | console.log(err, err.stack); // an error occurred 32 | } 33 | else{ 34 | console.log(data); // successful response 35 | putEventsParams['eventList'][0]['sentAt']=putEventsParams['eventList'][0]['sentAt'].toTimeString(); 36 | const putEventsErrResponse = { 37 | statusCode: 500, 38 | body: JSON.stringify(err), 39 | }; 40 | callback(null, putEventsErrResponse); 41 | const response = { 42 | statusCode: 200, 43 | body: JSON.stringify(putEventsParams), 44 | }; 45 | callback(null, response); 46 | } 47 | }); 48 | }); 49 | }; -------------------------------------------------------------------------------- /next_steps/workshops/Immersion_Day/README.md: -------------------------------------------------------------------------------- 1 | ## Amazon Personalize Immersion Day 2 | 3 | This goal of this Immersion Day is to provide a common starting point for learning how to use the various features of [Amazon Personalize](https://aws.amazon.com/personalize/). 4 | 5 | For detailed specifics of any concept mentioned look at the [Personalize developer guide](https://docs.aws.amazon.com/personalize/latest/dg/what-is-personalize.html) 6 | 7 | In the Notebooks you will learn to: 8 | 9 | 1. Prepare a dataset for use with Amazon Personalize. 10 | 1. Build models based on that dataset. 11 | 1. Evaluate a model's performance based on real observations. 
12 | 13 | ## Agenda 14 | 15 | The steps below outline the process of building your own recommendation models, evaluating them, and then cleaning up all of your resources to prevent any unwanted charges. To get started execute the following steps. 16 | 17 | 1. Deploy the CloudFormation Template below or build a local Jupyter environment with the AWS CLI installed and configured for your IAM account. 18 | 1. This [personalize_hrnn_metadata_contextual_example.ipynb](personalize_hrnn_metadata_contextual_example.ipynb) shows how this useful information can be uploaded to our system to aid recommendation. A caveat is that the improvements of meta-data recipes depend on how much information can be extracted from the provided meta-data. 19 | 20 | 21 | ## Prerequisites 22 | 23 | 1. An AWS Account 24 | 1. A user in the account with administrative privileges 25 | 26 | 27 | ## Outline 28 | 29 | 1. First you will deploy a CloudFormation template that will create an S3 bucket for data storage, a SageMaker Notebook Instance where the exercises are executed, IAM policies for the Notebook Instance, and it will clone this repository into the Notebook Instance so you are ready to get started. 30 | 1. Next you will open the `personalize_hrnn_metadata_contextual_example.ipynb` to get started. 31 | 1. This notebook will guide you through the process of the other notebooks until you have a working and evaluated Amazon Personalize deployment. 32 | 33 | 34 | ## Building Your Environment: 35 | 36 | As mentioned above, the first step is to deploy a CloudFormation template that will perform much of the initial setup work for you. In another browser window or tab, login to your AWS account. Once you have done that, open the link below in a new tab to start the process of deploying the items you need via CloudFormation. 
37 | 38 | [![Launch Stack](https://s3.amazonaws.com/cloudformation-examples/cloudformation-launch-stack.png)](https://console.aws.amazon.com/cloudformation/home#/stacks/new?stackName=PersonalizePOC&templateURL=https://luis-guides.s3.amazonaws.com/personalize-id/PersonalizeImmersionDay.yaml) 39 | 40 | Follow along with the screenshots below if you have any questions about deploying the stack. 41 | 42 | ### Cloud Formation Wizard 43 | 44 | Start by clicking `Next` at the bottom like this: 45 | 46 | ![StackWizard](static/imgs/img1.png) 47 | 48 | On this page you have a few tasks: 49 | 50 | 1. Change the Stack name to something relevant like `PersonalizeImmersionDay` 51 | 1. Change the Notebook Name (Optional) 52 | 1. Alter the VolumeSize for the SageMaker EBS volume, default is 10GB, if your dataset is expected to be larger, please increase this accordingly. 53 | 54 | 55 | When you are done click `Next` at the bottom. 56 | 57 | ![StackWizard2](static/imgs/img2.png) 58 | 59 | This page is a bit longer, so scroll to the bottom to click `Next`. All of the defaults should be sufficient to complete the POC, if you have custom requirements, alter as necessary. 60 | 61 | ![StackWizard3](static/imgs/img3.png) 62 | 63 | 64 | Again scroll to the bottom, check the box to enable the template to create new IAM resources and then click `Create Stack`. 65 | 66 | ![StackWizard4](static/imgs/img4.png) 67 | 68 | For a few minutes CloudFormation will be creating the resources described above on your behalf it will look like this while it is provisioning: 69 | 70 | ![StackWizard5](static/imgs/img5.png) 71 | 72 | Once it has completed you'll see green text like below indicating that the work has been completed: 73 | 74 | ![StackWizard5](static/imgs/img6.png) 75 | 76 | Now that your environment has been created go to the service page for SageMaker by clicking `Services` in the top of the console and then searching for `SageMaker` and clicking the service. 
77 | 78 | 79 | ![StackWizard5](static/imgs/img7.png) 80 | 81 | From the SageMaker console, scroll until you see the green box indicating now many notebooks you have in service and click that. 82 | 83 | ![StackWizard5](static/imgs/img8.png) 84 | 85 | On this page you will see a list of any SageMaker notebooks you have running, simply click the `Open JupyterLab` link on the Personalize POC notebook you have created 86 | 87 | ![StackWizard5](static/imgs/img9.png) 88 | 89 | This will open the Jupyter environment for your POC; think of it as a web based data science IDE if you are not familiar with it. 90 | 91 | On your left hand side please navigate to the following directory `amazon-personalize-samples/workshops/Immersion_Day/` and double click the `personalize_hrnn_metadata_contextual_example.ipynb` notebook. -------------------------------------------------------------------------------- /next_steps/workshops/Immersion_Day/static/imgs/img1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Immersion_Day/static/imgs/img1.png -------------------------------------------------------------------------------- /next_steps/workshops/Immersion_Day/static/imgs/img2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Immersion_Day/static/imgs/img2.png -------------------------------------------------------------------------------- /next_steps/workshops/Immersion_Day/static/imgs/img3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Immersion_Day/static/imgs/img3.png 
-------------------------------------------------------------------------------- /next_steps/workshops/Immersion_Day/static/imgs/img4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Immersion_Day/static/imgs/img4.png -------------------------------------------------------------------------------- /next_steps/workshops/Immersion_Day/static/imgs/img5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Immersion_Day/static/imgs/img5.png -------------------------------------------------------------------------------- /next_steps/workshops/Immersion_Day/static/imgs/img6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Immersion_Day/static/imgs/img6.png -------------------------------------------------------------------------------- /next_steps/workshops/Immersion_Day/static/imgs/img7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Immersion_Day/static/imgs/img7.png -------------------------------------------------------------------------------- /next_steps/workshops/Immersion_Day/static/imgs/img8.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Immersion_Day/static/imgs/img8.png 
-------------------------------------------------------------------------------- /next_steps/workshops/Immersion_Day/static/imgs/img9.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Immersion_Day/static/imgs/img9.png -------------------------------------------------------------------------------- /next_steps/workshops/Immersion_Day/static/imgs/personalize_metrics.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Immersion_Day/static/imgs/personalize_metrics.png -------------------------------------------------------------------------------- /next_steps/workshops/POC_in_a_box/PersonalizePOC.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | AWSTemplateFormatVersion: '2010-09-09' 3 | 4 | Description: IAM Policies, and SageMaker Notebook to work with Personalize. 5 | 6 | Parameters: 7 | 8 | NotebookName: 9 | Type: String 10 | Default: PersonalizePOC 11 | Description: Enter the name of the SageMaker notebook instance. Deafault is PersonalizePOC. 12 | 13 | VolumeSize: 14 | Type: Number 15 | Default: 10 16 | MinValue: 5 17 | MaxValue: 16384 18 | ConstraintDescription: Must be an integer between 5 (GB) and 16384 (16 TB). 19 | Description: Enter the size of the EBS volume in GB. 
20 | 21 | Resources: 22 | # SageMaker Execution Role 23 | SageMakerIamRole: 24 | Type: "AWS::IAM::Role" 25 | Properties: 26 | AssumeRolePolicyDocument: 27 | Version: "2012-10-17" 28 | Statement: 29 | - 30 | Effect: Allow 31 | Principal: 32 | Service: sagemaker.amazonaws.com 33 | Action: sts:AssumeRole 34 | Path: "/" 35 | ManagedPolicyArns: 36 | - "arn:aws:iam::aws:policy/AmazonSageMakerFullAccess" 37 | - "arn:aws:iam::aws:policy/AmazonS3FullAccess" 38 | - "arn:aws:iam::aws:policy/service-role/AmazonPersonalizeFullAccess" 39 | - "arn:aws:iam::aws:policy/IAMFullAccess" 40 | 41 | # SageMaker notebook 42 | NotebookInstance: 43 | Type: "AWS::SageMaker::NotebookInstance" 44 | Properties: 45 | InstanceType: "ml.t2.medium" 46 | NotebookInstanceName: !Ref NotebookName 47 | RoleArn: !GetAtt SageMakerIamRole.Arn 48 | VolumeSizeInGB: !Ref VolumeSize 49 | DefaultCodeRepository: https://github.com/aws-samples/amazon-personalize-samples.git 50 | -------------------------------------------------------------------------------- /next_steps/workshops/POC_in_a_box/PersonalizePOCEE.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | AWSTemplateFormatVersion: '2010-09-09' 3 | 4 | Description: IAM Policies, and SageMaker Notebook to work with Amazon Forecast, it will also clone the POC codebase into the Notebook before you get started. 5 | 6 | Parameters: 7 | 8 | NotebookName: 9 | Type: String 10 | Default: PersonalizePOCNotebook 11 | Description: Enter the name of the SageMaker notebook instance. Deafault is PersonalizePOCNotebook. 
12 | 13 | DefaultCodeRepo: 14 | Type: String 15 | Default: https://github.com/aws-samples/amazon-personalize-samples.git 16 | Description: Enter the url of a git code repository for this lab 17 | 18 | InstanceType: 19 | Type: String 20 | Default: ml.t2.medium 21 | AllowedValues: 22 | - ml.t2.medium 23 | - ml.m4.xlarge 24 | - ml.c5.xlarge 25 | - ml.p2.xlarge 26 | - ml.p3.2xlarge 27 | Description: Enter instance type. Default is ml.t2.medium. 28 | 29 | VolumeSize: 30 | Type: Number 31 | Default: 10 32 | MinValue: 5 33 | MaxValue: 16384 34 | ConstraintDescription: Must be an integer between 5 (GB) and 16384 (16 TB). 35 | Description: Enter the size of the EBS volume in GB. Default is 10 GB. 36 | 37 | Resources: 38 | # SageMaker Execution Role 39 | SageMakerIamRole: 40 | Type: "AWS::IAM::Role" 41 | Properties: 42 | AssumeRolePolicyDocument: 43 | Version: "2012-10-17" 44 | Statement: 45 | - 46 | Effect: Allow 47 | Principal: 48 | Service: sagemaker.amazonaws.com 49 | Action: sts:AssumeRole 50 | Path: "/" 51 | ManagedPolicyArns: 52 | - "arn:aws:iam::aws:policy/AmazonSageMakerFullAccess" 53 | - "arn:aws:iam::aws:policy/AmazonS3FullAccess" 54 | - "arn:aws:iam::aws:policy/service-role/AmazonPersonalizeFullAccess" 55 | - "arn:aws:iam::aws:policy/IAMFullAccess" 56 | 57 | 58 | # SageMaker notebook 59 | NotebookInstance: 60 | Type: "AWS::SageMaker::NotebookInstance" 61 | Properties: 62 | InstanceType: "ml.t2.medium" 63 | NotebookInstanceName: !Ref NotebookName 64 | RoleArn: !GetAtt SageMakerIamRole.Arn 65 | VolumeSizeInGB: !Ref VolumeSize 66 | DefaultCodeRepository: !Ref DefaultCodeRepo 67 | 68 | -------------------------------------------------------------------------------- /next_steps/workshops/POC_in_a_box/completed/PersonalizePOC.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | AWSTemplateFormatVersion: '2010-09-09' 3 | 4 | Description: IAM Policies, and SageMaker Notebook to work with Personalize. 
5 | 6 | Parameters: 7 | 8 | NotebookName: 9 | Type: String 10 | Default: PersonalizePOC 11 | Description: Enter the name of the SageMaker notebook instance. Deafault is PersonalizePOC. 12 | 13 | VolumeSize: 14 | Type: Number 15 | Default: 10 16 | MinValue: 5 17 | MaxValue: 16384 18 | ConstraintDescription: Must be an integer between 5 (GB) and 16384 (16 TB). 19 | Description: Enter the size of the EBS volume in GB. 20 | 21 | Resources: 22 | # SageMaker Execution Role 23 | SageMakerIamRole: 24 | Type: "AWS::IAM::Role" 25 | Properties: 26 | AssumeRolePolicyDocument: 27 | Version: "2012-10-17" 28 | Statement: 29 | - 30 | Effect: Allow 31 | Principal: 32 | Service: sagemaker.amazonaws.com 33 | Action: sts:AssumeRole 34 | Path: "/" 35 | ManagedPolicyArns: 36 | - "arn:aws:iam::aws:policy/AmazonSageMakerFullAccess" 37 | - "arn:aws:iam::aws:policy/AmazonS3FullAccess" 38 | - "arn:aws:iam::aws:policy/service-role/AmazonPersonalizeFullAccess" 39 | - "arn:aws:iam::aws:policy/IAMFullAccess" 40 | 41 | # SageMaker notebook 42 | NotebookInstance: 43 | Type: "AWS::SageMaker::NotebookInstance" 44 | Properties: 45 | InstanceType: "ml.t2.medium" 46 | NotebookInstanceName: !Ref NotebookName 47 | RoleArn: !GetAtt SageMakerIamRole.Arn 48 | VolumeSizeInGB: !Ref VolumeSize 49 | DefaultCodeRepository: https://github.com/chrisking/PersonalizePOC.git -------------------------------------------------------------------------------- /next_steps/workshops/POC_in_a_box/completed/PersonalizePOCEE.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | AWSTemplateFormatVersion: '2010-09-09' 3 | 4 | Description: IAM Policies, and SageMaker Notebook to work with Amazon Forecast, it will also clone the POC codebase into the Notebook before you get started. 5 | 6 | Parameters: 7 | 8 | NotebookName: 9 | Type: String 10 | Default: PersonalizePOCNotebook 11 | Description: Enter the name of the SageMaker notebook instance. Deafault is PersonalizePOCNotebook. 
12 | 13 | DefaultCodeRepo: 14 | Type: String 15 | Default: https://github.com/chrisking/PersonalizePOC.git 16 | Description: Enter the url of a git code repository for this lab 17 | 18 | InstanceType: 19 | Type: String 20 | Default: ml.t2.medium 21 | AllowedValues: 22 | - ml.t2.medium 23 | - ml.m4.xlarge 24 | - ml.c5.xlarge 25 | - ml.p2.xlarge 26 | - ml.p3.2xlarge 27 | Description: Enter instance type. Default is ml.t2.medium. 28 | 29 | VolumeSize: 30 | Type: Number 31 | Default: 10 32 | MinValue: 5 33 | MaxValue: 16384 34 | ConstraintDescription: Must be an integer between 5 (GB) and 16384 (16 TB). 35 | Description: Enter the size of the EBS volume in GB. Default is 10 GB. 36 | 37 | Resources: 38 | # SageMaker Execution Role 39 | SageMakerIamRole: 40 | Type: "AWS::IAM::Role" 41 | Properties: 42 | AssumeRolePolicyDocument: 43 | Version: "2012-10-17" 44 | Statement: 45 | - 46 | Effect: Allow 47 | Principal: 48 | Service: sagemaker.amazonaws.com 49 | Action: sts:AssumeRole 50 | Path: "/" 51 | ManagedPolicyArns: 52 | - "arn:aws:iam::aws:policy/AmazonSageMakerFullAccess" 53 | - "arn:aws:iam::aws:policy/AmazonS3FullAccess" 54 | - "arn:aws:iam::aws:policy/service-role/AmazonPersonalizeFullAccess" 55 | - "arn:aws:iam::aws:policy/IAMFullAccess" 56 | 57 | 58 | # SageMaker notebook 59 | NotebookInstance: 60 | Type: "AWS::SageMaker::NotebookInstance" 61 | Properties: 62 | InstanceType: "ml.t2.medium" 63 | NotebookInstanceName: !Ref NotebookName 64 | RoleArn: !GetAtt SageMakerIamRole.Arn 65 | VolumeSizeInGB: !Ref VolumeSize 66 | DefaultCodeRepository: !Ref DefaultCodeRepo 67 | 68 | -------------------------------------------------------------------------------- /next_steps/workshops/POC_in_a_box/completed/static/imgs/img1.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/POC_in_a_box/completed/static/imgs/img1.png -------------------------------------------------------------------------------- /next_steps/workshops/POC_in_a_box/completed/static/imgs/img2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/POC_in_a_box/completed/static/imgs/img2.png -------------------------------------------------------------------------------- /next_steps/workshops/POC_in_a_box/completed/static/imgs/img3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/POC_in_a_box/completed/static/imgs/img3.png -------------------------------------------------------------------------------- /next_steps/workshops/POC_in_a_box/completed/static/imgs/img4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/POC_in_a_box/completed/static/imgs/img4.png -------------------------------------------------------------------------------- /next_steps/workshops/POC_in_a_box/completed/static/imgs/img5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/POC_in_a_box/completed/static/imgs/img5.png -------------------------------------------------------------------------------- /next_steps/workshops/POC_in_a_box/completed/static/imgs/img6.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/POC_in_a_box/completed/static/imgs/img6.png -------------------------------------------------------------------------------- /next_steps/workshops/POC_in_a_box/completed/static/imgs/img7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/POC_in_a_box/completed/static/imgs/img7.png -------------------------------------------------------------------------------- /next_steps/workshops/POC_in_a_box/completed/static/imgs/img8.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/POC_in_a_box/completed/static/imgs/img8.png -------------------------------------------------------------------------------- /next_steps/workshops/POC_in_a_box/completed/static/imgs/img9.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/POC_in_a_box/completed/static/imgs/img9.png -------------------------------------------------------------------------------- /next_steps/workshops/POC_in_a_box/static/imgs/img1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/POC_in_a_box/static/imgs/img1.png -------------------------------------------------------------------------------- 
/next_steps/workshops/POC_in_a_box/static/imgs/img2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/POC_in_a_box/static/imgs/img2.png -------------------------------------------------------------------------------- /next_steps/workshops/POC_in_a_box/static/imgs/img3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/POC_in_a_box/static/imgs/img3.png -------------------------------------------------------------------------------- /next_steps/workshops/POC_in_a_box/static/imgs/img4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/POC_in_a_box/static/imgs/img4.png -------------------------------------------------------------------------------- /next_steps/workshops/POC_in_a_box/static/imgs/img5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/POC_in_a_box/static/imgs/img5.png -------------------------------------------------------------------------------- /next_steps/workshops/POC_in_a_box/static/imgs/img6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/POC_in_a_box/static/imgs/img6.png -------------------------------------------------------------------------------- /next_steps/workshops/POC_in_a_box/static/imgs/img7.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/POC_in_a_box/static/imgs/img7.png -------------------------------------------------------------------------------- /next_steps/workshops/POC_in_a_box/static/imgs/img8.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/POC_in_a_box/static/imgs/img8.png -------------------------------------------------------------------------------- /next_steps/workshops/POC_in_a_box/static/imgs/img9.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/POC_in_a_box/static/imgs/img9.png -------------------------------------------------------------------------------- /next_steps/workshops/POC_in_a_box/static/imgs/personalize_metrics.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/POC_in_a_box/static/imgs/personalize_metrics.png -------------------------------------------------------------------------------- /next_steps/workshops/README.md: -------------------------------------------------------------------------------- 1 | # Amazon Personalize Workshops 2 | 3 | This folder contains examples on the following topics: 4 | 5 | * [Immersion Day](./Immersion_Day) 6 | * [POC in a box](./POC_in_a_box) 7 | * [re:Invent 2019 Workshop](./Reinvent_2019) 8 | 9 | 10 | ## License Summary 11 | 12 | This sample code is made available under a modified MIT license. See the LICENSE file. 
13 | -------------------------------------------------------------------------------- /next_steps/workshops/Reinvent_2019/README.md: -------------------------------------------------------------------------------- 1 | # Getting Started 2 | 3 | The tutorial below will walk you through building an environment to create a custom dataset, model, and recommendation campaign with Amazon Personalize. If you have any issues with any of the content below please open an issue here in the repository. 4 | 5 | ## Prerequisites 6 | 7 | Only applies if you are deploying with the CloudFormation template, otherwise consult the IAM permissions needed for your specific task. 8 | 9 | 1. AWS Account 10 | 2. User with administrator access to the AWS Account 11 | 12 | ## Re:Invent 2019 13 | 14 | If you are building this workshop for Re:Invent 2019 simply click the Launch Stack button below. Get your Jupyter Notebook Instance running, and open the "getting_started" folder and `ReInvent2019_Workshop.ipynb`! 15 | 16 | [![Launch Stack](https://s3.amazonaws.com/cloudformation-examples/cloudformation-launch-stack.png)](https://console.aws.amazon.com/cloudformation/home#/stacks/new?stackName=PersonalizeDemo&templateURL=https://chriskingpartnershare.s3.amazonaws.com/RI_PersonalizeWorkshop.yaml) 17 | 18 | Follow along with the screenshots if you have any questions about these steps. 19 | 20 | ### Cloud Formation Wizard 21 | 22 | Start by clicking `Next` at the bottom like shown: 23 | 24 | ![StackWizard](static/imgs/img1.png) 25 | 26 | In the next page you need to provide a unique S3 bucket name for your file storage, it is recommended to simply add your first name and last name to the end of the default option as shown below, after that update click `Next` again. 27 | 28 | ![StackWizard2](static/imgs/img3.png) 29 | 30 | This page is a bit longer so scroll to the bottom to click `Next`. 
31 | 32 | ![StackWizard3](static/imgs/img4.png) 33 | 34 | Again scroll to the bottom, check the box to enable the template to create new IAM resources and then click `Create Stack`. 35 | 36 | ![StackWizard4](static/imgs/img5.png) 37 | 38 | For a few minutes CloudFormation will be creating the resources described above on your behalf it will look like this while it is provisioning: 39 | 40 | ![StackWizard5](static/imgs/img6.png) 41 | 42 | Once it has completed you'll see green text like below indicating that the work has been completed: 43 | 44 | ## Agenda 45 | 46 | The steps below outline the process of building your own recommendation model, improving it, and then cleaning up all of your resources to prevent any unwanted charges. To get started executing these follow the steps in the next section. 47 | 48 | 1. `ReInvent2019_Workshop.ipynb` - Guides you through building your first campaign and recommendation algorithm. 49 | 50 | ## Using the Notebooks 51 | 52 | Start by navigating to the SageMaker serivce page by clicking the `Services` link in the top navigation bar of the AWS console. 53 | 54 | ![StackWizard5](static/imgs/img9.png) 55 | 56 | In the search field enter `SageMaker` and then click for the service when it appears, from the service page click the `Notebook Instances` link on the far left menu bar. 57 | 58 | ![StackWizard5](static/imgs/img10.png) 59 | 60 | To get to the Jupyter interface, simply click `Open JupyterLab` on the far right next to your notebook instance. 61 | 62 | ![StackWizard5](static/imgs/img11.png) 63 | 64 | Clicking the open link will take a few seconds to redirect you to the Jupyter system but once there you should see a collection of files on your left. Get started by clicking on `1.Building_Your_First_Campaign.ipynb`. 65 | 66 | ![StackWizard5](static/imgs/img12.png) 67 | 68 | The rest of the lab will take place via the Jupyter notebooks, simply read each block before executing it and moving onto the next. 
If you have any questions about how to use the notebooks please ask your instructor or if you are working independently this is a pretty good video to get started: 69 | 70 | https://www.youtube.com/watch?v=Gzun8PpyBCo 71 | 72 | ## After the Notebook 73 | 74 | Once you have completed all of the work in the Notebooks and have completed the cleanup steps there as well, the last thing to do is to delete the stack you created with CloudFormation. To do that, inside the AWS Console again click the `Services` link at the top, and this time enter in `CloudFormation` and click the link for it. 75 | 76 | ![StackWizard5](static/imgs/img9.png) 77 | 78 | Click the `Delete` button on the demo stack you created: 79 | 80 | ![StackWizard5](static/imgs/img13.png) 81 | 82 | Lastly click the `Delete Stack` button that shows up on the popup: 83 | 84 | ![StackWizard5](static/imgs/img14.png) 85 | 86 | You'll now notice that the stack is in progress of being deleted. Once you see `Delete Completed` you know that everything has been deleted and you are 100% done with this lab. 87 | 88 | -------------------------------------------------------------------------------- /next_steps/workshops/Reinvent_2019/RI_PersonalizeWorkshop.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | AWSTemplateFormatVersion: '2010-09-09' 3 | 4 | Description: Creates an S3 Bucket, IAM Policies, and SageMaker Notebook to work with Personalize. 5 | 6 | Parameters: 7 | 8 | NotebookName: 9 | Type: String 10 | Default: PersonalizeDemoLab 11 | Description: Enter the name of the SageMaker notebook instance. Deafault is PersonalizeDemoLab. 12 | 13 | VolumeSize: 14 | Type: Number 15 | Default: 10 16 | MinValue: 5 17 | MaxValue: 16384 18 | ConstraintDescription: Must be an integer between 5 (GB) and 16384 (16 TB). 19 | Description: Enter the size of the EBS volume in GB. 
20 | 21 | Resources: 22 | # SageMaker Execution Role 23 | SageMakerIamRole: 24 | Type: "AWS::IAM::Role" 25 | Properties: 26 | AssumeRolePolicyDocument: 27 | Version: "2012-10-17" 28 | Statement: 29 | - 30 | Effect: Allow 31 | Principal: 32 | Service: sagemaker.amazonaws.com 33 | Action: sts:AssumeRole 34 | Path: "/" 35 | ManagedPolicyArns: 36 | - "arn:aws:iam::aws:policy/AmazonSageMakerFullAccess" 37 | - "arn:aws:iam::aws:policy/AmazonS3FullAccess" 38 | - "arn:aws:iam::aws:policy/service-role/AmazonPersonalizeFullAccess" 39 | - "arn:aws:iam::aws:policy/IAMFullAccess" 40 | 41 | # SageMaker notebook 42 | NotebookInstance: 43 | Type: "AWS::SageMaker::NotebookInstance" 44 | Properties: 45 | InstanceType: "ml.t2.medium" 46 | NotebookInstanceName: !Ref NotebookName 47 | RoleArn: !GetAtt SageMakerIamRole.Arn 48 | VolumeSizeInGB: !Ref VolumeSize 49 | DefaultCodeRepository: https://github.com/aws-samples/amazon-personalize-samples.git 50 | -------------------------------------------------------------------------------- /next_steps/workshops/Reinvent_2019/static/imgs/image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Reinvent_2019/static/imgs/image.png -------------------------------------------------------------------------------- /next_steps/workshops/Reinvent_2019/static/imgs/img1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Reinvent_2019/static/imgs/img1.png -------------------------------------------------------------------------------- /next_steps/workshops/Reinvent_2019/static/imgs/img10.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Reinvent_2019/static/imgs/img10.png -------------------------------------------------------------------------------- /next_steps/workshops/Reinvent_2019/static/imgs/img11.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Reinvent_2019/static/imgs/img11.png -------------------------------------------------------------------------------- /next_steps/workshops/Reinvent_2019/static/imgs/img12.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Reinvent_2019/static/imgs/img12.png -------------------------------------------------------------------------------- /next_steps/workshops/Reinvent_2019/static/imgs/img13.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Reinvent_2019/static/imgs/img13.png -------------------------------------------------------------------------------- /next_steps/workshops/Reinvent_2019/static/imgs/img14.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Reinvent_2019/static/imgs/img14.png -------------------------------------------------------------------------------- /next_steps/workshops/Reinvent_2019/static/imgs/img2.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Reinvent_2019/static/imgs/img2.png -------------------------------------------------------------------------------- /next_steps/workshops/Reinvent_2019/static/imgs/img3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Reinvent_2019/static/imgs/img3.png -------------------------------------------------------------------------------- /next_steps/workshops/Reinvent_2019/static/imgs/img4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Reinvent_2019/static/imgs/img4.png -------------------------------------------------------------------------------- /next_steps/workshops/Reinvent_2019/static/imgs/img5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Reinvent_2019/static/imgs/img5.png -------------------------------------------------------------------------------- /next_steps/workshops/Reinvent_2019/static/imgs/img6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Reinvent_2019/static/imgs/img6.png -------------------------------------------------------------------------------- /next_steps/workshops/Reinvent_2019/static/imgs/img7.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Reinvent_2019/static/imgs/img7.png -------------------------------------------------------------------------------- /next_steps/workshops/Reinvent_2019/static/imgs/img8.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Reinvent_2019/static/imgs/img8.png -------------------------------------------------------------------------------- /next_steps/workshops/Reinvent_2019/static/imgs/img9.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Reinvent_2019/static/imgs/img9.png -------------------------------------------------------------------------------- /next_steps/workshops/Reinvent_2019/static/imgs/personalize_overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Reinvent_2019/static/imgs/personalize_overview.png -------------------------------------------------------------------------------- /next_steps/workshops/Reinvent_2019/static/imgs/personalize_process.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/Reinvent_2019/static/imgs/personalize_process.png -------------------------------------------------------------------------------- /next_steps/workshops/magic_movie_machine/notebooks/README.md: 
-------------------------------------------------------------------------------- 1 | # Getting Started 2 | 3 | This tutorial outlines the process of building your own movie recommender with Video on Demand use-case optimized recommenders, testing them and then cleaning up all of your resources to prevent any unwanted charges. To get started executing these, follow the steps in the next section. 4 | 5 | 1. `Building_the_Magic_Movie_Machine_Recommender.ipynb` - Guides you through building your first movie recommenders and getting your first recommendations. In this notebook, you'll create resources that are similar to those that power the Magic Movie Machine. 6 | *Note:* Make sure you use the `conda_amazonei_mxnet_p36` kernel. 7 | 2. `Clean_Up_Resources.ipynb` - Deletes anything that was created in the previous notebook so you are not charged for additional resources. Note: make sure you run this notebook every time you build the recommenders, as it will only delete the latest resources. -------------------------------------------------------------------------------- /next_steps/workshops/magic_movie_machine/static/imgs/MagicMovieMachine_banner.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/magic_movie_machine/static/imgs/MagicMovieMachine_banner.png -------------------------------------------------------------------------------- /next_steps/workshops/magic_movie_machine/static/imgs/image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/magic_movie_machine/static/imgs/image.png -------------------------------------------------------------------------------- /next_steps/workshops/magic_movie_machine/static/imgs/img1.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/magic_movie_machine/static/imgs/img1.png -------------------------------------------------------------------------------- /next_steps/workshops/magic_movie_machine/static/imgs/img10.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/magic_movie_machine/static/imgs/img10.png -------------------------------------------------------------------------------- /next_steps/workshops/magic_movie_machine/static/imgs/img11.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/magic_movie_machine/static/imgs/img11.png -------------------------------------------------------------------------------- /next_steps/workshops/magic_movie_machine/static/imgs/img12.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/magic_movie_machine/static/imgs/img12.png -------------------------------------------------------------------------------- /next_steps/workshops/magic_movie_machine/static/imgs/img13.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/magic_movie_machine/static/imgs/img13.png -------------------------------------------------------------------------------- 
/next_steps/workshops/magic_movie_machine/static/imgs/img14.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/magic_movie_machine/static/imgs/img14.png -------------------------------------------------------------------------------- /next_steps/workshops/magic_movie_machine/static/imgs/img2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/magic_movie_machine/static/imgs/img2.png -------------------------------------------------------------------------------- /next_steps/workshops/magic_movie_machine/static/imgs/img3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/magic_movie_machine/static/imgs/img3.png -------------------------------------------------------------------------------- /next_steps/workshops/magic_movie_machine/static/imgs/img4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/magic_movie_machine/static/imgs/img4.png -------------------------------------------------------------------------------- /next_steps/workshops/magic_movie_machine/static/imgs/img5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/magic_movie_machine/static/imgs/img5.png 
-------------------------------------------------------------------------------- /next_steps/workshops/magic_movie_machine/static/imgs/img6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/magic_movie_machine/static/imgs/img6.png -------------------------------------------------------------------------------- /next_steps/workshops/magic_movie_machine/static/imgs/img7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/magic_movie_machine/static/imgs/img7.png -------------------------------------------------------------------------------- /next_steps/workshops/magic_movie_machine/static/imgs/img8.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/magic_movie_machine/static/imgs/img8.png -------------------------------------------------------------------------------- /next_steps/workshops/magic_movie_machine/static/imgs/img9.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/magic_movie_machine/static/imgs/img9.png -------------------------------------------------------------------------------- /next_steps/workshops/magic_movie_machine/static/imgs/personalize_overview.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/magic_movie_machine/static/imgs/personalize_overview.png -------------------------------------------------------------------------------- /next_steps/workshops/magic_movie_machine/static/imgs/personalize_process.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/amazon-personalize-samples/a62e726b6b4935939da1ed68a153a1a2a9bd3584/next_steps/workshops/magic_movie_machine/static/imgs/personalize_process.png --------------------------------------------------------------------------------