├── .claude └── settings.local.json ├── .github ├── CODEOWNERS ├── SECURITY.md ├── copilot-instructions.md ├── dependabot.yml ├── markdown-link-check-config.json └── workflows │ ├── check-readme-links.yml │ └── secret-scanning.yml ├── .gitignore ├── .gitignore-patterns ├── .gitignore-patterns.txt ├── .pre-commit-config.yaml ├── .secrets.baseline ├── .vscode └── mcp.json ├── CLAUDE.md ├── Datasets ├── .DS_Store ├── Essentials │ ├── Employee Data - firstname_lastname_age_gender_department_salary.csv │ ├── Marketing - recordid_name_gender_age_location_email_phone_product_category_amount.csv │ ├── Medical Patient - patient_id_patient_name_gender_date_of_birth_visit_date_symptoms_diagnosis_medication_dosage.csv │ ├── Student Scores - studentid_name_gender_class_math_english_science.csv │ ├── Survey Data - recordid_name_age_gender_edu_emp_income_martialstatus_city_satisfaction_recommendation.csv │ └── Transactions - id_customerid_date_time_product_category_quantity.csv ├── Financial │ ├── Financial Statement for Horizontal Analysis.csv │ ├── Financial Statment for Vertical Analysis.csv │ ├── Leverage Ratio - debt to ebitda.csv │ ├── Wall Street Market Data - Fictional.csv │ └── World Bank - Fictional.csv ├── Human Resources │ ├── Demographic Data.csv │ ├── Employee Absence.csv │ ├── Employee Engagement Survey Results.csv │ ├── HRIS.csv │ ├── Learning Management.csv │ ├── Radford Global Compensation Database.csv │ └── Talent Development.csv ├── Insurance │ ├── Auto Claims.csv │ ├── Customer Acquisition.csv │ ├── Customer Retention.csv │ ├── Insurance Fraud Prevention.csv │ ├── Life Insurance.csv │ └── Pricing and Risk.csv ├── Marketing │ ├── A-B Testing.csv │ ├── Consumer Data.csv │ ├── Firmographic.csv │ ├── Online Content.csv │ ├── Product Preferences eshop.csv │ ├── Qualitative data.csv │ └── Social Media Profiles.csv └── Pandas Essentials │ ├── time_series_data.csv │ └── time_series_data.json ├── LICENSE ├── README.md ├── SECURITY.md ├── chat_app ├── app │ ├── 
__init__.py │ ├── static │ │ └── css │ │ │ └── style.css │ └── templates │ │ └── index.html ├── prompt.txt ├── requirements.txt ├── run.py └── stub.txt ├── chatgpt-cover-slide.png ├── chatgptclass.code-workspace ├── course-plan.md ├── debug.log ├── diagnose-performance.ps1 ├── exercises ├── README.md ├── chatgpt │ ├── 00-prompting-strategies.md │ ├── dev │ │ └── 01-code-review.md │ ├── information-worker │ │ └── 01-document-analysis.md │ └── itops │ │ └── 01-incident-response.md └── github-copilot │ ├── 01-test-generation.md │ └── security │ └── 01-secure-api.md ├── infra └── create-repo-scaffold.ps1 ├── knowledge ├── choose-an-agile-approach.zip ├── choose-an-agile-approach │ ├── 1-introduction.yml │ ├── 2-what-is-agile.yml │ ├── 3-what-is-azure-boards.yml │ ├── 4-plan-work-azure-boards.yml │ ├── 5-summary.yml │ ├── includes │ │ ├── 1-introduction.md │ │ ├── 2-what-is-agile.md │ │ ├── 3-what-is-azure-boards.md │ │ ├── 4-plan-work-azure-boards.md │ │ └── 5-summary.md │ ├── index.yml │ └── media │ │ ├── 3-assign-owner.png │ │ ├── 3-assign-sprint.png │ │ ├── 3-blank-board.png │ │ ├── 3-whiteboard.png │ │ └── 4-boards-sprints-menu.png ├── combine-ppt.ps1 ├── combine_ppt.py └── sc-100-combined_text.txt ├── mcp-demos ├── MCP_DEMO_GUIDE.md └── weather-server │ ├── README.md │ ├── package.json │ └── server.js ├── mcp-teaching.guide.md ├── projects-custom-gpt ├── From_Zero_to_ChatGPT_-_Tutorial.pdf └── about-chatgpt-projects.txt ├── src ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── Dockerfile ├── alerts_to_sarif.py ├── app.py ├── bad-python.py ├── build-chat-app.md ├── call_openai_api.py ├── chaggpt-prompt-examples.md ├── chatgptclass.code-workspace ├── codex-prompt-examples.md ├── combine_ppt.py ├── completion.py ├── context-scope.py ├── convert-ps-to-cli.ps1 ├── copilot-dev-cover.png ├── count_tokens.py ├── create_unit_tests.py ├── data_analysis.py ├── dependabot-alerts.json ├── dependabot-github-cli.sh ├── dependabot-report.py ├── dependabot_alerts.csv ├── deploy.azcli 
├── fine-tuning-dataset.jsonl ├── fine-tuning.py ├── fine-tuning2.py ├── gh-cli-code-scanning.ps1 ├── gpt-metrics-davinci.py ├── gpt-metrics-gpt35.py ├── inline_suggestions.py ├── interact-with-docker-image.sh ├── main.py ├── march_2024.py ├── microsoft-LLMLingua.url ├── monitor.py ├── powershell.ps1 ├── project_specific_variable.py ├── prompt.txt ├── py.py ├── python.py ├── refactor_code.py ├── req-azureopenai.py ├── requirements.txt ├── run.py ├── sample_openai_chat.py ├── self-hosted-runner.ps1 ├── test.cs ├── test.ps1 ├── test.py ├── testchat-azure-openai.py └── testchat-openai.py ├── startup-optimization.ps1 ├── tame-defender.ps1 ├── test-secrets.js ├── test-secrets2.js ├── tim-dev ├── ChatGPT and GitHub Copilot in 4 Hours.pdf ├── convert_pdf.py ├── course-reg-page-copy.txt └── secret-scanning.yml.disabled └── warner-chatgpt-github-copilot.pptx /.claude/settings.local.json: -------------------------------------------------------------------------------- 1 | { 2 | "permissions": { 3 | "allow": [ 4 | "Bash(git add:*)", 5 | "Bash(git push:*)", 6 | "Bash(grep:*)", 7 | "Bash(rg:*)", 8 | "Bash(/usr/lib/node_modules/@anthropic-ai/claude-code/vendor/ripgrep/x64-linux/rg -n \"test|lint\" CLAUDE.md)", 9 | "Bash(mkdir:*)" 10 | ], 11 | "deny": [] 12 | } 13 | } -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/timothywarner/chatgptclass/93786f110785ece14306ec13656f108d24929645/.github/CODEOWNERS -------------------------------------------------------------------------------- /.github/SECURITY.md: -------------------------------------------------------------------------------- 1 | ## Security Policy\n\nPlease report any security vulnerabilities through the project's issues. 
-------------------------------------------------------------------------------- /.github/copilot-instructions.md: -------------------------------------------------------------------------------- 1 | # GitHub Copilot Custom Instructions 2 | 3 | > A teaching example for effective AI pair programming 4 | 5 | ## 🎯 Core Principles 6 | 7 | 1. **Explicit > Implicit**: Always prefer clear, unambiguous instructions over clever shortcuts 8 | 2. **Context First**: Provide relevant context before asking for code 9 | 3. **Iterative Refinement**: Start broad, then narrow down with specific requirements 10 | 4. **Safety First**: Prioritize secure, maintainable code over clever solutions 11 | 12 | ## 📝 Markdown Formatting 13 | 14 | ### Auto-Linting Rules 15 | 16 | ```markdown 17 | # Required Formatting 18 | 19 | 1. One blank line after each heading (## and ###) 20 | 2. One blank line before and after lists 21 | 3. One blank line before and after code blocks 22 | 4. Language specifier for all code blocks (e.g., ```markdown) 23 | 5. Consistent triple backtick fencing 24 | 6. No trailing whitespace 25 | 7. UTF-8 encoding 26 | 8. Line endings: LF (Unix-style) 27 | ``` 28 | 29 | ### VS Code Settings 30 | 31 | ```json 32 | { 33 | "markdownlint.config": { 34 | "MD013": false, // Line length 35 | "MD024": false, // Multiple headers with same content 36 | "MD033": false, // Allow inline HTML 37 | "MD041": false, // First line in file should be a top level header 38 | "MD022": true, // Headings should be surrounded by blank lines 39 | "MD032": true, // Lists should be surrounded by blank lines 40 | "MD031": true, // Fenced code blocks should be surrounded by blank lines 41 | "MD040": true, // Fenced code blocks should have a language specified 42 | "MD047": true // Files should end with a single newline 43 | } 44 | } 45 | ``` 46 | 47 | ## 📝 Instruction Templates 48 | 49 | ### Basic Function Generation 50 | 51 | ```markdown 52 | Create a function that [action] using [technology/framework]. 
53 | Requirements: 54 | - Input: [describe inputs] 55 | - Output: [describe expected output] 56 | - Error handling: [specific error cases] 57 | - Performance: [any specific performance requirements] 58 | ``` 59 | 60 | ### API Integration 61 | 62 | ```markdown 63 | Implement an API client for [service] that: 64 | - Handles authentication using [method] 65 | - Implements rate limiting 66 | - Includes retry logic with exponential backoff 67 | - Provides TypeScript types for all responses 68 | ``` 69 | 70 | ### Security-Focused 71 | 72 | ```markdown 73 | Create a secure implementation of [feature] that: 74 | - Validates all inputs using [validation library] 75 | - Implements proper authentication/authorization 76 | - Follows OWASP guidelines for [specific vulnerability] 77 | - Includes security headers and CSP configuration 78 | ``` 79 | 80 | ## 🎓 Teaching Examples 81 | 82 | ### Example 1: Basic Function 83 | 84 | ```markdown 85 | Create a function to validate email addresses that: 86 | - Uses regex for basic format validation 87 | - Checks for common typos in popular domains 88 | - Returns a detailed validation result object 89 | - Includes JSDoc documentation 90 | ``` 91 | 92 | ### Example 2: API Integration 93 | 94 | ```markdown 95 | Build a GitHub API client that: 96 | - Uses OAuth2 for authentication 97 | - Implements proper caching 98 | - Handles rate limiting gracefully 99 | - Provides TypeScript types for all responses 100 | ``` 101 | 102 | ### Example 3: Security Implementation 103 | 104 | ```markdown 105 | Implement a secure file upload system that: 106 | - Validates file types and sizes 107 | - Implements virus scanning 108 | - Uses secure file naming 109 | - Stores files in a secure location 110 | ``` 111 | 112 | ## 💡 Best Practices 113 | 114 | ### DO: 115 | 116 | - Provide clear context and requirements 117 | - Specify error handling expectations 118 | - Include performance requirements 119 | - Request documentation and tests 120 | - Ask for security 
considerations 121 | 122 | ### DON'T: 123 | 124 | - Use vague or ambiguous language 125 | - Skip error handling requirements 126 | - Forget to specify input/output formats 127 | - Ignore security implications 128 | - Use overly complex language 129 | 130 | ## 🔍 Debugging Tips 131 | 132 | When Copilot generates unexpected results: 133 | 134 | 1. Break down the request into smaller parts 135 | 2. Add more specific requirements 136 | 3. Include example inputs/outputs 137 | 4. Specify edge cases to handle 138 | 5. Request step-by-step implementation 139 | 140 | ## 📚 Learning Resources 141 | 142 | - [GitHub Copilot Documentation](https://docs.github.com/en/copilot) 143 | - [OWASP Security Guidelines](https://owasp.org/www-project-top-ten/) 144 | - [TypeScript Documentation](https://www.typescriptlang.org/docs/) 145 | - [Node.js Best Practices](https://github.com/goldbergyoni/nodebestpractices) 146 | 147 | ## 🎯 Practice Exercises 148 | 149 | 1. Try implementing each example above 150 | 2. Modify the requirements and observe how Copilot adapts 151 | 3. Experiment with different levels of specificity 152 | 4. Practice debugging unexpected results 153 | 5. Create your own instruction templates 154 | 155 | Remember: The quality of Copilot's output directly correlates with the quality of your instructions. Be specific, be clear, and iterate! 
156 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "pip" # Choose your package ecosystem 4 | directory: "/" # Location of package manifests 5 | schedule: 6 | interval: "weekly" # How often to check for updates 7 | -------------------------------------------------------------------------------- /.github/markdown-link-check-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "ignorePatterns": [ 3 | { 4 | "pattern": "^https://openai.com" 5 | } 6 | ], 7 | "timeout": "10s" 8 | } -------------------------------------------------------------------------------- /.github/workflows/check-readme-links.yml: -------------------------------------------------------------------------------- 1 | name: Link Checker 2 | 3 | on: 4 | workflow_dispatch: 5 | # push: 6 | # branches: 7 | # - main 8 | 9 | jobs: 10 | link-check: 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: Checkout code 15 | uses: actions/checkout@v4 16 | 17 | - name: Run markdown-link-check on README.md 18 | uses: gaurav-nelson/github-action-markdown-link-check@v1 19 | with: 20 | config-file: .github/markdown-link-check-config.json 21 | file-path: ./README.md 22 | 23 | - name: Check for broken links 24 | run: | 25 | readme1_output=$(cat ./README.md) 26 | 27 | readme1_broken=$(echo "$readme1_output" | grep -E '"fail":\s*[1-9]+') 28 | 29 | if [ -n "$readme1_broken" ]; then 30 | echo "❌ Broken links found in README.md" 31 | exit 1 32 | else 33 | echo "✅ No broken links found in README.md" 34 | fi 35 | 36 | # - name: Notify user of broken links 37 | # if: failure() 38 | # uses: dawidd6/action-send-mail@v4 39 | # with: 40 | # server_address: smtp.gmail.com 41 | # server_port: 587 42 | # username: ${{ secrets.GMAIL_USERNAME }} 43 | # password: ${{ secrets.GMAIL_PASSWORD }} 
44 | # subject: ❌ Broken links detected in README.md 45 | # body: '❌ Broken links were detected in README.md. Please check the workflow logs for details.' 46 | # to: timothywarner316@gmail.com 47 | # from: noreply@github.com -------------------------------------------------------------------------------- /.github/workflows/secret-scanning.yml: -------------------------------------------------------------------------------- 1 | name: Secret Scanning Disabled 2 | on: 3 | # Intentionally disabled 4 | # push: 5 | # branches: [ main, master ] 6 | # pull_request: 7 | # branches: [ main, master ] 8 | workflow_dispatch: 9 | 10 | jobs: 11 | notify: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - name: Log message 15 | run: echo "Secret scanning is currently disabled" -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | ~$warner-copilot.pptx 3 | ~$warner-chatgpt-github-copilot.pptx 4 | -------------------------------------------------------------------------------- /.gitignore-patterns: -------------------------------------------------------------------------------- 1 | # Common secret pattern identifiers 2 | # API Keys and Tokens 3 | api_key 4 | apikey 5 | api_secret 6 | api_token 7 | access_token 8 | auth_token 9 | client_secret 10 | client_id 11 | password 12 | secret 13 | token 14 | private_key 15 | secret_key 16 | 17 | # Common AWS patterns 18 | AKIA[0-9A-Z]{16} 19 | aws_access_key_id 20 | aws_secret_access_key 21 | 22 | # Common Azure patterns 23 | ConnectionStrings 24 | AccountKey= 25 | 26 | # Database connection strings 27 | mongodb:// 28 | postgresql:// 29 | mysql:// 30 | redis:// 31 | jdbc: 32 | 33 | # Private keys and certificates 34 | BEGIN RSA PRIVATE KEY 35 | BEGIN PRIVATE KEY 36 | BEGIN PGP PRIVATE KEY 37 | BEGIN DSA PRIVATE KEY 38 | BEGIN EC PRIVATE KEY 39 | BEGIN CERTIFICATE 40 | 41 | # Other common secrets 42 | authorization: 43 | 
Authorization: 44 | stripe_api_key 45 | STRIPE_SECRET_KEY -------------------------------------------------------------------------------- /.gitignore-patterns.txt: -------------------------------------------------------------------------------- 1 | # Common secret pattern identifiers 2 | # API Keys and Tokens 3 | api_key 4 | apikey 5 | api_secret 6 | api_token 7 | access_token 8 | auth_token 9 | client_secret 10 | client_id 11 | password 12 | secret 13 | token 14 | private_key 15 | secret_key 16 | 17 | # Common AWS patterns 18 | AKIA[0-9A-Z]{16} 19 | aws_access_key_id 20 | aws_secret_access_key 21 | 22 | # Common Azure patterns 23 | ConnectionStrings 24 | AccountKey= 25 | 26 | # Database connection strings 27 | mongodb:// 28 | postgresql:// 29 | mysql:// 30 | redis:// 31 | jdbc: 32 | 33 | # Private keys and certificates 34 | BEGIN RSA PRIVATE KEY 35 | BEGIN PRIVATE KEY 36 | BEGIN PGP PRIVATE KEY 37 | BEGIN DSA PRIVATE KEY 38 | BEGIN EC PRIVATE KEY 39 | BEGIN CERTIFICATE 40 | 41 | # Other common secrets 42 | authorization: 43 | Authorization: 44 | stripe_api_key 45 | STRIPE_SECRET_KEY -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/Yelp/detect-secrets 3 | rev: v1.4.0 4 | hooks: 5 | - id: detect-secrets 6 | args: ['--baseline', '.secrets.baseline'] -------------------------------------------------------------------------------- /.secrets.baseline: -------------------------------------------------------------------------------- 1 | { 2 | "version": "1.5.0", 3 | "plugins_used": [ 4 | { 5 | "name": "ArtifactoryDetector" 6 | }, 7 | { 8 | "name": "AWSKeyDetector" 9 | }, 10 | { 11 | "name": "AzureStorageKeyDetector" 12 | }, 13 | { 14 | "name": "Base64HighEntropyString", 15 | "limit": 4.5 16 | }, 17 | { 18 | "name": "BasicAuthDetector" 19 | }, 20 | { 21 | "name": "CloudantDetector" 22 | }, 23 | 
{ 24 | "name": "DiscordBotTokenDetector" 25 | }, 26 | { 27 | "name": "GitHubTokenDetector" 28 | }, 29 | { 30 | "name": "GitLabTokenDetector" 31 | }, 32 | { 33 | "name": "HexHighEntropyString", 34 | "limit": 3.0 35 | }, 36 | { 37 | "name": "IbmCloudIamDetector" 38 | }, 39 | { 40 | "name": "IbmCosHmacDetector" 41 | }, 42 | { 43 | "name": "IPPublicDetector" 44 | }, 45 | { 46 | "name": "JwtTokenDetector" 47 | }, 48 | { 49 | "name": "KeywordDetector", 50 | "keyword_exclude": "" 51 | }, 52 | { 53 | "name": "MailchimpDetector" 54 | }, 55 | { 56 | "name": "NpmDetector" 57 | }, 58 | { 59 | "name": "OpenAIDetector" 60 | }, 61 | { 62 | "name": "PrivateKeyDetector" 63 | }, 64 | { 65 | "name": "PypiTokenDetector" 66 | }, 67 | { 68 | "name": "SendGridDetector" 69 | }, 70 | { 71 | "name": "SlackDetector" 72 | }, 73 | { 74 | "name": "SoftlayerDetector" 75 | }, 76 | { 77 | "name": "SquareOAuthDetector" 78 | }, 79 | { 80 | "name": "StripeDetector" 81 | }, 82 | { 83 | "name": "TelegramBotTokenDetector" 84 | }, 85 | { 86 | "name": "TwilioKeyDetector" 87 | } 88 | ], 89 | "filters_used": [ 90 | { 91 | "path": "detect_secrets.filters.allowlist.is_line_allowlisted" 92 | }, 93 | { 94 | "path": "detect_secrets.filters.common.is_baseline_file", 95 | "filename": ".secrets.baseline" 96 | }, 97 | { 98 | "path": "detect_secrets.filters.common.is_ignored_due_to_verification_policies", 99 | "min_level": 2 100 | }, 101 | { 102 | "path": "detect_secrets.filters.heuristic.is_indirect_reference" 103 | }, 104 | { 105 | "path": "detect_secrets.filters.heuristic.is_likely_id_string" 106 | }, 107 | { 108 | "path": "detect_secrets.filters.heuristic.is_lock_file" 109 | }, 110 | { 111 | "path": "detect_secrets.filters.heuristic.is_not_alphanumeric_string" 112 | }, 113 | { 114 | "path": "detect_secrets.filters.heuristic.is_potential_uuid" 115 | }, 116 | { 117 | "path": "detect_secrets.filters.heuristic.is_prefixed_with_dollar_sign" 118 | }, 119 | { 120 | "path": 
"detect_secrets.filters.heuristic.is_sequential_string" 121 | }, 122 | { 123 | "path": "detect_secrets.filters.heuristic.is_swagger_file" 124 | }, 125 | { 126 | "path": "detect_secrets.filters.heuristic.is_templated_secret" 127 | } 128 | ], 129 | "results": { 130 | "test-secrets.js": [ 131 | { 132 | "type": "AWS Access Key", 133 | "filename": "test-secrets.js", 134 | "hashed_secret": "25910f981e85ca04baf359199dd0bd4a3ae738b6", 135 | "is_verified": false, 136 | "line_number": 5 137 | }, 138 | { 139 | "type": "Secret Keyword", 140 | "filename": "test-secrets.js", 141 | "hashed_secret": "25910f981e85ca04baf359199dd0bd4a3ae738b6", 142 | "is_verified": false, 143 | "line_number": 5 144 | } 145 | ] 146 | }, 147 | "generated_at": "2025-03-30T21:04:31Z" 148 | } 149 | -------------------------------------------------------------------------------- /.vscode/mcp.json: -------------------------------------------------------------------------------- 1 | { 2 | "mcpServers": { 3 | "weather": { 4 | "command": "node", 5 | "args": ["mcp-demos/weather-server/server.js"], 6 | "disabled": false 7 | }, 8 | "filesystem": { 9 | "command": "npx", 10 | "args": ["@modelcontextprotocol/server-filesystem", "/mnt/c/Users/tim/chatgptclass"], 11 | "disabled": true 12 | } 13 | } 14 | } -------------------------------------------------------------------------------- /CLAUDE.md: -------------------------------------------------------------------------------- 1 | # CLAUDE.md 2 | 3 | This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. 4 | 5 | ## Repository Overview 6 | 7 | This is a ChatGPT and GitHub Copilot training course repository designed for a 4-hour hands-on workshop. The course teaches professionals how to use AI tools (ChatGPT and GitHub Copilot) across various roles including developers, IT ops, data scientists, and information workers. 
8 | 9 | ## Common Development Tasks 10 | 11 | ### Python Development 12 | - Main Python code is in `/src/` directory 13 | - Dependencies: `pip install -r src/requirements.txt` 14 | - Note: The requirements.txt contains intentionally outdated packages with security vulnerabilities for educational purposes 15 | - Flask app entry point: `python src/app.py` or `python chat_app/run.py` 16 | 17 | ### Running Examples 18 | - OpenAI API examples: `python src/call_openai_api.py`, `python src/sample_openai_chat.py` 19 | - Data analysis: `python src/data_analysis.py` 20 | - Fine-tuning demos: `python src/fine-tuning.py` 21 | 22 | ### Docker 23 | - Build: `docker build -f src/Dockerfile -t chatgpt-demo .` 24 | - Run: `docker run -p 5000:5000 chatgpt-demo` 25 | 26 | ## Code Architecture 27 | 28 | ### Directory Structure 29 | - `/src/` - Python examples, API integrations, and demos (intentionally vulnerable for security education) 30 | - `/chat_app/` - Flask-based chat application template 31 | - `/exercises/` - Structured course exercises by tool (ChatGPT/Copilot) and role 32 | - `/Datasets/` - CSV datasets for hands-on data analysis exercises 33 | - `/knowledge/` - YAML-based learning modules 34 | - `/tim-dev/` - Instructor materials and utilities 35 | 36 | ### Key Components 37 | 1. **API Integration Scripts** - Multiple OpenAI API examples demonstrating various features 38 | 2. **Security Demos** - Intentionally vulnerable code for teaching security best practices 39 | 3. **Data Processing** - Scripts using pandas for data analysis exercises 40 | 4. **Web Applications** - Flask-based chat app demonstrating API integration 41 | 5. 
**Automation Scripts** - PowerShell scripts for Windows/Surface optimization and GitHub automation 42 | 43 | ### Important Notes 44 | - This is primarily an educational repository with example code 45 | - Many dependencies are intentionally outdated for security demonstration purposes 46 | - No formal test suite or linting configuration exists 47 | - Code examples are designed for learning, not production use 48 | - The repository contains both working examples and incomplete templates for exercises 49 | 50 | ## API Keys and Configuration 51 | When working with OpenAI API examples: 52 | - Set environment variable `OPENAI_API_KEY` for OpenAI examples 53 | - Azure OpenAI examples may require additional configuration 54 | - Never commit API keys or sensitive information to the repository -------------------------------------------------------------------------------- /Datasets/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/timothywarner/chatgptclass/93786f110785ece14306ec13656f108d24929645/Datasets/.DS_Store -------------------------------------------------------------------------------- /Datasets/Essentials/Employee Data - firstname_lastname_age_gender_department_salary.csv: -------------------------------------------------------------------------------- 1 | employee_id,first_name,last_name,age,gender,department,salary 2 | 1,John,Doe,35,Male,Marketing,50000 3 | 2,Jane,Smith,28,Female,Sales,60000 4 | 3,Michael,Johnson,42,Male,Finance,70000 5 | 4,Emily,Williams,31,Female,HR,55000 6 | 5,James,Brown,37,Male,Operations,65000 7 | 6,Emma,Taylor,29,Female,IT,80000 8 | 7,William,Clark,33,Male,Marketing,52000 9 | 8,Olivia,Miller,26,Female,Sales,62000 10 | 9,Liam,Anderson,39,Male,Finance,72000 11 | 10,Sophia,Lewis,32,Female,HR,58000 12 | 11,Benjamin,Thomas,34,Male,Operations,68000 13 | 12,Ava,Hall,27,Female,IT,85000 14 | 13,Alexander,White,36,Male,Marketing,53000 15 | 
14,Charlotte,Jackson,30,Female,Sales,63000 16 | 15,Ethan,Harris,31,Male,Finance,73000 17 | 16,Mia,Martin,29,Female,HR,60000 18 | 17,David,Anderson,35,Male,Operations,70000 19 | 18,Sofia,Moore,28,Female,IT,90000 20 | 19,Joseph,King,37,Male,Marketing,54000 21 | 20,Amelia,Cooper,29,Female,Sales,65000 22 | 21,Lucas,Hill,33,Male,Finance,75000 23 | 22,Grace,Walker,26,Female,HR,62000 24 | 23,Aiden,Green,38,Male,Operations,72000 25 | 24,Harper,Young,31,Female,IT,95000 26 | 25,Christopher,Allen,34,Male,Marketing,55000 27 | 26,Lily,Wright,27,Female,Sales,67000 28 | 27,Daniel,Robinson,36,Male,Finance,77000 29 | 28,Sofia,Hall,30,Female,HR,64000 30 | 29,Logan,Turner,32,Male,Operations,74000 31 | 30,Ella,Lee,29,Female,IT,98000 32 | 31,William,Parker,35,Male,Marketing,56000 33 | 32,Avery,Carter,28,Female,Sales,68000 34 | 33,Lincoln,Gonzalez,34,Male,Finance,80000 35 | 34,Chloe,Harris,27,Female,HR,66000 36 | 35,Michael,Walker,36,Male,Operations,76000 37 | 36,Emily,Moore,31,Female,IT,102000 38 | 37,James,Phillips,33,Male,Marketing,58000 39 | 38,Olivia,Hall,26,Female,Sales,70000 40 | 39,Benjamin,Clark,38,Male,Finance,82000 41 | 40,Ava,Lopez,30,Female,HR,68000 42 | 41,Alexander,Turner,35,Male,Operations,78000 43 | 42,Sophia,Campbell,29,Female,IT,105000 44 | 43,Joseph,Roberts,32,Male,Marketing,60000 45 | 44,Mia,Hill,27,Female,Sales,72000 46 | 45,David,Ross,36,Male,Finance,85000 47 | 46,Lucy,Perez,31,Female,HR,70000 48 | 47,Logan,Cooper,33,Male,Operations,80000 49 | 48,Emma,Richardson,28,Female,IT,108000 50 | 49,Ethan,Phillips,34,Male,Marketing,62000 51 | 50,Charlotte,Smith,26,Female,Sales,75000 52 | 51,Noah,Kelly,37,Male,Finance,87000 53 | 52,Sophia,Reed,30,Female,HR,72000 54 | 53,Lucas,Cox,35,Male,Operations,82000 55 | 54,Avery,Collins,29,Female,IT,110000 56 | 55,William,Rivera,32,Male,Marketing,64000 57 | 56,Olivia,Hall,27,Female,Sales,77000 58 | 57,Mason,Morris,36,Male,Finance,90000 59 | 58,Ava,Gonzalez,31,Female,HR,75000 60 | 59,Aiden,Cook,33,Male,Operations,85000 61 | 
60,Emily,Allen,28,Female,IT,112000 62 | 61,James,Hill,34,Male,Marketing,66000 63 | 62,Isabella,Turner,26,Female,Sales,79000 64 | 63,Ethan,Walker,37,Male,Finance,92000 65 | 64,Emily,Parker,31,Female,HR,78000 66 | 65,Liam,Smith,33,Male,Operations,88000 67 | 66,Abigail,Adams,29,Female,IT,115000 68 | 67,Lucas,Carter,35,Male,Marketing,68000 69 | 68,Charlotte,Reed,28,Female,Sales,81000 70 | 69,Benjamin,Cook,34,Male,Finance,95000 71 | 70,Ava,Baker,30,Female,HR,80000 72 | 71,William,Morris,37,Male,Operations,90000 73 | 72,Sophia,Young,31,Female,IT,118000 74 | 73,Mason,Turner,33,Male,Marketing,70000 75 | 74,Ava,Harris,26,Female,Sales,83000 76 | 75,Ethan,Collins,36,Male,Finance,98000 77 | 76,Olivia,Rivera,31,Female,HR,82000 78 | 77,Noah,Martinez,33,Male,Operations,92000 79 | 78,Sophia,Peterson,28,Female,IT,122000 80 | 79,Logan,Howard,34,Male,Marketing,72000 81 | 80,Ava,Perez,26,Female,Sales,86000 82 | 81,William,Adams,37,Male,Finance,105000 83 | 82,Emily,Turner,31,Female,HR,88000 84 | 83,Liam,Carter,33,Male,Operations,98000 85 | 84,Sophia,Harris,29,Female,IT,125000 86 | 85,Aiden,Parker,35,Male,Marketing,74000 87 | 86,Olivia,Ross,28,Female,Sales,89000 88 | 87,James,Evans,36,Male,Finance,108000 89 | 88,Sophia,Stewart,31,Female,HR,92000 90 | 89,Ethan,Phillips,33,Male,Operations,102000 91 | 90,Emily,Allen,29,Female,IT,128000 92 | 91,Liam,Kelly,37,Male,Marketing,76000 93 | 92,Abigail,Ward,28,Female,Sales,92000 94 | 93,Lucas,Rivera,36,Male,Finance,112000 95 | 94,Charlotte,Wilson,31,Female,HR,96000 96 | 95,Benjamin,Roberts,33,Male,Operations,108000 97 | 96,Ava,Cook,29,Female,IT,132000 98 | 97,William,Hill,35,Male,Marketing,78000 99 | 98,Olivia,Martinez,26,Female,Sales,95000 100 | 99,Mason,Reed,37,Male,Finance,115000 101 | 100,Ella,Brown,31,Female,HR,99000 -------------------------------------------------------------------------------- /Datasets/Essentials/Student Scores - studentid_name_gender_class_math_english_science.csv: 
-------------------------------------------------------------------------------- 1 | student_id,name,age,gender,math_score,english_score,science_score 2 | 1,John Smith,18,Male,75,82,89 3 | 2,Jane Johnson,17,Female,92,88,93 4 | 3,Michael Brown,16,Male,68,74,70 5 | 4,Emily Davis,18,Female,85,90,87 6 | 5,James Wilson,17,Male,78,76,80 7 | 6,Emma Taylor,16,Female,92,89,95 8 | 7,William Clark,17,Male,65,72,68 9 | 8,Olivia Martinez,16,Female,88,93,90 10 | 9,Liam Anderson,18,Male,80,84,82 11 | 10,Sophia Rodriguez,17,Female,95,91,96 12 | 11,Benjamin Thomas,16,Male,72,70,75 13 | 12,Ava Lewis,18,Female,90,87,92 14 | 13,Alexander Lee,17,Male,75,78,73 15 | 14,Charlotte Walker,16,Female,92,95,91 16 | 15,Ethan Hall,17,Male,70,68,72 17 | 16,Mia White,16,Female,85,88,84 18 | 17,David Harris,18,Male,78,82,79 19 | 18,Sofia Young,17,Female,90,92,88 20 | 19,Joseph Hill,16,Male,68,70,65 21 | 20,Amelia Adams,18,Female,92,94,91 22 | 21,Lucas Green,17,Male,76,74,78 23 | 22,Grace Turner,16,Female,93,90,95 24 | 23,Aiden Moore,17,Male,72,75,71 25 | 24,Harper Scott,16,Female,89,87,91 26 | 25,Christopher Reed,18,Male,80,82,81 27 | 26,Lily Bailey,17,Female,95,92,97 28 | 27,Daniel Evans,16,Male,70,75,68 29 | 28,Sofia Harris,18,Female,88,92,90 30 | 29,Logan Thompson,17,Male,73,72,74 31 | 30,Ella Martinez,16,Female,93,95,92 32 | 31,William Allen,17,Male,75,78,76 33 | 32,Avery Parker,16,Female,92,94,90 34 | 33,Lincoln Cook,17,Male,68,72,69 35 | 34,Chloe Turner,16,Female,90,88,92 36 | 35,Michael Wright,18,Male,77,81,79 37 | 36,Emily Scott,17,Female,85,87,83 38 | 37,James King,16,Male,70,68,71 39 | 38,Olivia Brooks,18,Female,92,93,91 40 | 39,Benjamin Rivera,17,Male,75,76,74 41 | 40,Ava Price,16,Female,93,91,94 42 | 41,Alexander Long,17,Male,72,70,76 43 | 42,Sophia Phillips,16,Female,90,92,88 44 | 43,Joseph Cox,17,Male,68,69,65 45 | 44,Mia Morris,16,Female,88,90,87 46 | 45,David Turner,17,Male,78,79,80 47 | 46,Lucy Bennett,16,Female,92,89,94 48 | 47,Logan Davis,18,Male,70,68,73 49 | 48,Emma 
Foster,17,Female,85,86,82 50 | 49,Ethan Howard,16,Male,77,78,75 51 | 50,Charlotte Powell,16,Female,91,89,93 52 | 51,Noah Cooper,17,Male,70,72,70 53 | 52,Sophia Ward,16,Female,89,90,87 54 | 53,Lucas Rodriguez,17,Male,75,76,73 55 | 54,Avery Adams,16,Female,92,94,91 56 | 55,William Ross,17,Male,68,71,67 57 | 56,Olivia Edwards,16,Female,90,92,89 58 | 57,Mason Foster,17,Male,78,80,77 59 | 58,Ava Peterson,16,Female,86,85,88 60 | 59,Aiden Powell,17,Male,73,75,72 61 | 60,Emily Collins,16,Female,89,91,88 62 | 61,James Morris,17,Male,70,72,69 63 | 62,Isabella Reed,16,Female,88,90,87 64 | 63,Ethan Nelson,17,Male,76,78,75 65 | 64,Emily Taylor,16,Female,92,93,91 66 | 65,Liam Moore,17,Male,71,70,75 67 | 66,Abigail Baker,16,Female,90,88,92 68 | 67,Lucas Turner,17,Male,75,77,73 69 | 68,Charlotte Adams,16,Female,91,92,89 70 | 69,Benjamin Cox,17,Male,70,73,69 71 | 70,Ava Wright,16,Female,88,89,86 72 | 71,William Mitchell,17,Male,78,76,80 73 | 72,Sophia Stewart,16,Female,85,88,83 74 | 73,Mason Peterson,17,Male,70,72,69 75 | 74,Avery Turner,16,Female,89,91,87 76 | 75,Ethan Cooper,17,Male,76,78,75 77 | 76,Olivia Collins,16,Female,91,92,89 78 | 77,Noah Martinez,17,Male,72,74,70 79 | 78,Sophia Phillips,16,Female,90,88,92 80 | 79,Logan Adams,17,Male,75,77,73 81 | 80,Ava Wilson,16,Female,92,94,91 82 | 81,William Green,17,Male,68,71,67 83 | 82,Emily Allen,16,Female,90,92,89 84 | 83,Liam Rodriguez,17,Male,78,80,77 85 | 84,Sophia Foster,16,Female,86,85,88 86 | 85,Aiden Powell,17,Male,73,75,72 87 | 86,Olivia Collins,16,Female,89,91,88 88 | 87,James Morris,17,Male,70,72,69 89 | 88,Isabella Reed,16,Female,88,90,87 90 | 89,Ethan Nelson,17,Male,76,78,75 91 | 90,Emily Taylor,16,Female,92,93,91 92 | 91,Liam Moore,17,Male,71,70,75 93 | 92,Abigail Baker,16,Female,90,88,92 94 | 93,Lucas Turner,17,Male,75,77,73 95 | 94,Charlotte Adams,16,Female,91,92,89 96 | 95,Benjamin Cox,17,Male,70,73,69 97 | 96,Ava Wright,16,Female,88,89,86 98 | 97,William Mitchell,17,Male, 
-------------------------------------------------------------------------------- /Datasets/Essentials/Transactions - id_customerid_date_time_product_category_quantity.csv: -------------------------------------------------------------------------------- 1 | transaction_id,customer_id,date,time,product_name,category,quantity,price 2 | 1,C1001,2023-01-01,09:12:34,Shirt,Apparel,2,29.99 3 | 2,C1002,2023-01-01,10:25:17,Headphones,Electronics,1,59.99 4 | 3,C1003,2023-01-02,13:45:52,Book,Books,3,12.99 5 | 4,C1004,2023-01-03,16:38:21,Shoes,Apparel,1,49.99 6 | 5,C1005,2023-01-03,17:55:43,TV,Electronics,1,399.99 7 | 6,C1006,2023-01-04,11:28:09,Phone,Electronics,2,699.99 8 | 7,C1007,2023-01-05,14:50:36,Backpack,Accessories,1,39.99 9 | 8,C1008,2023-01-06,09:15:22,Pen,Stationery,5,2.99 10 | 9,C1009,2023-01-06,12:37:18,Headphones,Electronics,1,59.99 11 | 10,C1010,2023-01-07,16:09:43,Shirt,Apparel,3,29.99 12 | 11,C1001,2023-01-08,10:22:19,Phone,Electronics,1,699.99 13 | 12,C1002,2023-01-08,13:45:11,Book,Books,2,12.99 14 | 13,C1003,2023-01-09,15:56:37,Shoes,Apparel,1,49.99 15 | 14,C1004,2023-01-09,18:20:49,Headphones,Electronics,1,59.99 16 | 15,C1005,2023-01-10,11:33:25,Backpack,Accessories,1,39.99 17 | 16,C1006,2023-01-10,14:45:52,Shirt,Apparel,2,29.99 18 | 17,C1007,2023-01-11,17:18:09,Phone,Electronics,1,699.99 19 | 18,C1008,2023-01-12,09:52:36,Book,Books,1,12.99 20 | 19,C1009,2023-01-12,12:10:42,Shoes,Apparel,1,49.99 21 | 20,C1010,2023-01-13,15:27:54,Headphones,Electronics,1,59.99 22 | 21,C1001,2023-01-14,10:18:17,Shirt,Apparel,2,29.99 23 | 22,C1002,2023-01-14,13:40:29,Phone,Electronics,1,699.99 24 | 23,C1003,2023-01-15,16:55:11,Book,Books,2,12.99 25 | 24,C1004,2023-01-15,19:25:43,Backpack,Accessories,1,39.99 26 | 25,C1005,2023-01-16,11:48:56,Shoes,Apparel,1,49.99 27 | 26,C1006,2023-01-16,15:10:22,Headphones,Electronics,1,59.99 28 | 27,C1007,2023-01-17,18:35:08,Shirt,Apparel,2,29.99 29 | 28,C1008,2023-01-18,09:20:54,Phone,Electronics,1,699.99 30 | 
29,C1009,2023-01-18,12:41:42,Book,Books,1,12.99 31 | 30,C1010,2023-01-19,15:52:18,Shoes,Apparel,1,49.99 32 | 31,C1001,2023-01-20,11:23:06,Headphones,Electronics,1,59.99 33 | 32,C1002,2023-01-20,14:44:29,Backpack,Accessories,1,39.99 34 | 33,C1003,2023-01-21,17:56:15,Shirt,Apparel,2,29.99 35 | 34,C1004,2023-01-21,20:18:43,Phone,Electronics,1,699.99 36 | 35,C1005,2023-01-22,12:35:29,Book,Books,1,12.99 37 | 36,C1006,2023-01-23,15:50:17,Shoes,Apparel,1,49.99 38 | 37,C1007,2023-01-23,19:15:06,Headphones,Electronics,1,59.99 39 | 38,C1008,2023-01-24,10:32:54,Backpack,Accessories,1,39.99 40 | 39,C1009,2023-01-25,13:46:37,Pen,Stationery,5,2.99 41 | 40,C1010,2023-01-25,16:58:22,Shirt,Apparel,2,29.99 42 | 41,C1001,2023-01-26,11:21:09,Phone,Electronics,1,699.99 43 | 42,C1002,2023-01-26,14:45:33,Book,Books,2,12.99 44 | 43,C1003,2023-01-27,17:57:11,Shoes,Apparel,1,49.99 45 | 44,C1004,2023-01-27,20:20:54,Headphones,Electronics,1,59.99 46 | 45,C1005,2023-01-28,12:35:29,Backpack,Accessories,1,39.99 47 | 46,C1006,2023-01-29,15:55:17,Shirt,Apparel,2,29.99 48 | 47,C1007,2023-01-29,19:18:43,Phone,Electronics,1,699.99 49 | 48,C1008,2023-01-30,10:42:18,Book,Books,1,12.99 50 | 49,C1009,2023-01-30,14:01:52,Shoes,Apparel,1,49.99 51 | 50,C1010,2023-01-31,17:18:29,Headphones,Electronics,1,59.99 52 | 51,C1001,2023-02-01,11:24:15,Shirt,Apparel,2,29.99 53 | 52,C1002,2023-02-01,14:50:08,Phone,Electronics,1,699.99 54 | 53,C1003,2023-02-02,18:05:02,Book,Books,2,12.99 55 | 54,C1004,2023-02-02,20:30:54,Backpack,Accessories,1,39.99 56 | 55,C1005,2023-02-03,12:48:40,Shoes,Apparel,1,49.99 57 | 56,C1006,2023-02-03,16:12:18,Headphones,Electronics,1,59.99 58 | 57,C1007,2023-02-04,19:35:06,Shirt,Apparel,2,29.99 59 | 58,C1008,2023-02-05,10:58:52,Phone,Electronics,1,699.99 60 | 59,C1009,2023-02-05,14:21:42,Book,Books,1,12.99 61 | 60,C1010,2023-02-06,17:44:29,Shoes,Apparel,1,49.99 62 | 61,C1001,2023-02-07,12:08:17,Headphones,Electronics,1,59.99 63 | 62,C1002,2023-02-07,15:32:04,Backpack,Accessories,1,39.99 64 | 
63,C1003,2023-02-08,18:55:52,Shirt,Apparel,2,29.99 65 | 64,C1004,2023-02-08,21:20:39,Phone,Electronics,1,699.99 66 | 65,C1005,2023-02-09,13:35:27,Book,Books,2,12.99 67 | 66,C1006,2023-02-10,16:59:14,Shoes,Apparel,1,49.99 68 | 67,C1007,2023-02-10,20:23:02,Headphones,Electronics,1,59.99 69 | 68,C1008,2023-02-11,11:47:50,Backpack,Accessories,1,39.99 70 | 69,C1009,2023-02-12,15:11:38,Pen,Stationery,5,2.99 71 | 70,C1010,2023-02-12,18:35:26,Shirt,Apparel,2,29.99 72 | 71,C1001,2023-02-13,12:59:14,Phone,Electronics,1,699.99 73 | 72,C1002,2023-02-13,16:23:02,Book,Books,2,12.99 74 | 73,C1003,2023-02-14,19:46:50,Shoes,Apparel,1,49.99 75 | 74,C1004,2023-02-14,22:10:38,Headphones,Electronics,1,59.99 76 | 75,C1005,2023-02-15,14:35:26,Backpack,Accessories,1,39.99 77 | 76,C1006,2023-02-16,17:59:14,Shirt,Apparel,2,29.99 78 | 77,C1007,2023-02-16,21:23:02,Phone,Electronics,1,699.99 79 | 78,C1008,2023-02-17,12:47:50,Book,Books,1,12.99 80 | 79,C1009,2023-02-17,16:11:38,Shoes,Apparel,1,49.99 81 | 80,C1010,2023-02-18,19:35:26,Headphones,Electronics,1,59.99 82 | 81,C1001,2023-02-19,13:59:14,Shirt,Apparel,2,29.99 83 | 82,C1002,2023-02-19,17:23:02,Phone,Electronics,1,699.99 84 | 83,C1003,2023-02-20,20:46:50,Book,Books,2,12.99 85 | 84,C1004,2023-02-20,23:10:38,Backpack,Accessories,1,39.99 86 | 85,C1005,2023-02-21,15:35:26,Shoes,Apparel,1,49.99 87 | 86,C1006,2023-02-22,18:59:14,Headphones,Electronics,1,59.99 88 | 87,C1007,2023-02-22,22:23:02,Shirt,Apparel,2,29.99 89 | 88,C1008,2023-02-23,13:47:50,Phone,Electronics,1,699.99 90 | 89,C1009,2023-02-23,17:11:38,Book,Books,1,12.99 91 | 90,C1010,2023-02-24,20:35:26,Shoes,Apparel,1,49.99 92 | 91,C1001,2023-02-25,14:59:14,Headphones,Electronics,1,59.99 93 | 92,C1002,2023-02-25,18:23:02,Backpack,Accessories,1,39.99 94 | 93,C1003,2023-02-26,21:46:50,Shirt,Apparel,2,29.99 95 | 94,C1004,2023-02-26,00:10:38,Phone,Electronics,1,699.99 96 | 95,C1005,2023-02-27,16:35:26,Book,Books,2,12.99 97 | 96,C1006,2023-02-28,19:59:14,Shoes,Apparel,1,49.99 98 | 
97,C1007,2023-02-28,23:23:02,Headphones,Electronics,1,59.99 99 | 98,C1008,2023-03-01,14:47:50,Backpack,Accessories,1,39.99 100 | 99,C1009,2023-03-02,18:11:38,Pen,Stationery,5,2.99 101 | 100,C1010,2023-03-02,21:35:26,Shirt,Apparel,2,29.99 -------------------------------------------------------------------------------- /Datasets/Financial/Financial Statement for Horizontal Analysis.csv: -------------------------------------------------------------------------------- 1 | Company,Year1,Year2,Year3,Year4,Year5,Year6,Year7,Year8 2 | ABC Corp,500000,520000,550000,590000,620000,650000,680000,710000 3 | XYZ Inc,400000,420000,440000,470000,500000,530000,560000,590000 4 | DEF Ltd,300000,320000,340000,370000,400000,430000,460000,490000 5 | GHI Co,250000,270000,290000,320000,350000,380000,410000,440000 6 | JKL Corp,450000,470000,490000,520000,550000,580000,610000,640000 7 | MNO Inc,350000,370000,390000,420000,450000,480000,510000,540000 8 | PQR Co,275000,290000,305000,330000,355000,380000,405000,430000 9 | STU Inc,425000,445000,465000,495000,525000,555000,585000,615000 10 | VWX Ltd,325000,340000,355000,380000,405000,430000,455000,480000 11 | YZA Corp,400000,420000,440000,470000,500000,530000,560000,590000 12 | ABC Corp2,550000,570000,600000,640000,670000,700000,730000,760000 13 | XYZ Inc2,450000,470000,490000,520000,550000,580000,610000,640000 14 | DEF Ltd2,350000,370000,390000,420000,450000,480000,510000,540000 15 | GHI Co2,300000,320000,340000,370000,400000,430000,460000,490000 16 | JKL Corp2,500000,520000,550000,590000,620000,650000,680000,710000 17 | MNO Inc2,400000,420000,440000,470000,500000,530000,560000,590000 18 | PQR Co2,325000,340000,355000,380000,405000,430000,455000,480000 19 | STU Inc2,475000,495000,515000,545000,575000,605000,635000,665000 20 | VWX Ltd2,375000,390000,405000,430000,455000,480000,505000,530000 21 | YZA Corp2,450000,470000,490000,520000,550000,580000,610000,640000 -------------------------------------------------------------------------------- 
/Datasets/Financial/Financial Statment for Vertical Analysis.csv: -------------------------------------------------------------------------------- 1 | Company,Year,Revenue,COGS,OperatingExpenses,Depreciation,InterestExpense,TaxExpense,NetIncome 2 | ABC Corp,2019,500000,250000,150000,50000,20000,30000,55000 3 | ABC Corp,2020,550000,275000,165000,55000,22000,33000,60000 4 | ABC Corp,2021,600000,300000,180000,60000,24000,36000,65000 5 | XYZ Inc,2019,400000,200000,120000,40000,16000,24000,44000 6 | XYZ Inc,2020,450000,225000,135000,45000,18000,27000,49000 7 | XYZ Inc,2021,480000,240000,144000,48000,19200,28800,52000 8 | DEF Ltd,2019,300000,150000,90000,30000,12000,18000,22000 9 | DEF Ltd,2020,350000,175000,105000,35000,14000,21000,26000 10 | DEF Ltd,2021,400000,200000,120000,40000,16000,24000,30000 11 | GHI Co,2019,250000,125000,75000,25000,10000,15000,18000 12 | GHI Co,2020,300000,150000,90000,30000,12000,18000,22000 13 | GHI Co,2021,350000,175000,105000,35000,14000,21000,26000 14 | JKL Corp,2019,450000,225000,135000,45000,18000,27000,52000 15 | JKL Corp,2020,500000,250000,150000,50000,20000,30000,57000 16 | JKL Corp,2021,550000,275000,165000,55000,22000,33000,62000 17 | MNO Inc,2019,350000,175000,105000,35000,14000,21000,25000 18 | MNO Inc,2020,400000,200000,120000,40000,16000,24000,29000 19 | MNO Inc,2021,450000,225000,135000,45000,18000,27000,33000 20 | PQR Co,2019,275000,137500,82500,27500,11000,16500,20000 21 | PQR Co,2020,325000,162500,97500,32500,13000,19500,24000 22 | PQR Co,2021,375000,187500,112500,37500,15000,22500,28000 23 | STU Inc,2019,425000,212500,127500,42500,17000,25500,36000 24 | STU Inc,2020,475000,237500,142500,47500,19000,28500,40000 25 | STU Inc,2021,525000,262500,157500,52500,21000,31500,44000 26 | VWX Ltd,2019,325000,162500,97500,32500,13000,19500,26000 27 | VWX Ltd,2020,375000,187500,112500,37500,15000,22500,30000 28 | VWX Ltd,2021,425000,212500,127500,42500,17000,25500,34000 29 | YZA Corp,2019,400000,200000,120000,40000,16000,24000,38000 30 
| YZA Corp,2020,450000,225000,135000,45000,18000,27000,43000 31 | YZA Corp,2021,500000,250000,150000,50000,20000,30000,48000 32 | BCD Inc,2019,275000,137500,82500,27500,11000,16500,22000 33 | BCD Inc,2020,325000,162500,97500,32500,13000,19500,26000 34 | BCD Inc,2021,375000,187500,112500,37500,15000,22500,30000 35 | EFG Ltd,2019,225000,112500,67500,22500,9000,13500,16000 36 | EFG Ltd,2020,275000,137500,82500,27500,11000,16500,20000 37 | EFG Ltd,2021,325000,162500,97500,32500,13000,19500,24000 38 | HIJ Co,2019,350000,175000,105000,35000,14000,21000,28000 39 | HIJ Co,2020,400000,200000,120000,40000,16000,24000,32000 40 | HIJ Co,2021,450000,225000,135000,45000,18000,27000,36000 41 | LMN Inc,2019,275000,137500,82500,27500,11000,16500,22000 42 | LMN Inc,2020,325000,162500,97500,32500,13000,19500,26000 43 | LMN Inc,2021,375000,187500,112500,37500,15000,22500,30000 -------------------------------------------------------------------------------- /Datasets/Financial/Leverage Ratio - debt to ebitda.csv: -------------------------------------------------------------------------------- 1 | Company,Year,Debt,EBITDA 2 | ABC Corp,2019,500000,150000 3 | ABC Corp,2020,550000,180000 4 | ABC Corp,2021,600000,200000 5 | XYZ Inc,2019,400000,120000 6 | XYZ Inc,2020,450000,135000 7 | XYZ Inc,2021,480000,144000 8 | DEF Ltd,2019,300000,90000 9 | DEF Ltd,2020,350000,105000 10 | DEF Ltd,2021,400000,120000 11 | GHI Co,2019,250000,75000 12 | GHI Co,2020,300000,90000 13 | GHI Co,2021,350000,105000 14 | JKL Corp,2019,450000,135000 15 | JKL Corp,2020,500000,150000 16 | JKL Corp,2021,550000,165000 17 | MNO Inc,2019,350000,105000 18 | MNO Inc,2020,400000,120000 19 | MNO Inc,2021,450000,135000 20 | PQR Co,2019,275000,82500 21 | PQR Co,2020,325000,97500 22 | PQR Co,2021,375000,112500 23 | STU Inc,2019,425000,127500 24 | STU Inc,2020,475000,142500 25 | STU Inc,2021,525000,157500 26 | VWX Ltd,2019,325000,97500 27 | VWX Ltd,2020,375000,112500 28 | VWX Ltd,2021,425000,127500 29 | YZA 
Corp,2019,400000,120000 30 | YZA Corp,2020,450000,135000 31 | YZA Corp,2021,500000,150000 32 | BCD Inc,2019,275000,82500 33 | BCD Inc,2020,325000,97500 34 | BCD Inc,2021,375000,112500 35 | EFG Ltd,2019,225000,67500 36 | EFG Ltd,2020,275000,82500 37 | EFG Ltd,2021,325000,97500 38 | HIJ Co,2019,350000,105000 39 | HIJ Co,2020,400000,120000 40 | HIJ Co,2021,450000,135000 41 | LMN Inc,2019,275000,82500 42 | LMN Inc,2020,325000,97500 43 | LMN Inc,2021,375000,112500 -------------------------------------------------------------------------------- /Datasets/Financial/Wall Street Market Data - Fictional.csv: -------------------------------------------------------------------------------- 1 | Symbol,Date,Open,High,Low,Close,Volume 2 | AAPL,2022-01-03,182.25,186.42,181.30,185.35,67183000 3 | AAPL,2022-01-04,186.35,188.99,185.68,188.75,72395000 4 | AAPL,2022-01-05,189.25,191.78,188.28,191.50,75806000 5 | AAPL,2022-01-06,191.20,192.37,188.95,190.80,66693000 6 | AAPL,2022-01-07,190.50,193.40,189.50,192.20,67829000 7 | AAPL,2022-01-10,192.80,193.84,191.40,193.05,62818000 8 | AAPL,2022-01-11,193.15,193.75,191.81,192.45,60109000 9 | AAPL,2022-01-12,192.70,193.35,191.24,192.89,50569000 10 | AAPL,2022-01-13,192.62,194.75,191.99,194.20,48905000 11 | AAPL,2022-01-14,194.70,195.60,193.33,195.40,51831000 12 | GOOGL,2022-01-03,2855.00,2885.05,2838.79,2882.50,2018700 13 | GOOGL,2022-01-04,2883.75,2901.40,2857.47,2889.50,1629800 14 | GOOGL,2022-01-05,2898.50,2920.37,2873.07,2905.25,1576100 15 | GOOGL,2022-01-06,2902.00,2912.34,2873.27,2900.75,1137800 16 | GOOGL,2022-01-07,2900.00,2925.55,2895.26,2918.00,1077200 17 | GOOGL,2022-01-10,2917.50,2927.84,2898.01,2923.75,888700 18 | GOOGL,2022-01-11,2925.00,2935.18,2906.82,2924.25,823600 19 | GOOGL,2022-01-12,2921.50,2932.75,2902.89,2916.00,777800 20 | GOOGL,2022-01-13,2921.00,2936.43,2908.75,2933.50,779300 21 | GOOGL,2022-01-14,2935.00,2955.63,2922.78,2951.75,874100 22 | AMZN,2022-01-03,3178.00,3198.21,3141.11,3190.95,3101500 23 | 
AMZN,2022-01-04,3200.00,3238.50,3186.49,3234.50,2730600 24 | AMZN,2022-01-05,3242.50,3289.29,3230.00,3274.00,3229600 25 | AMZN,2022-01-06,3289.00,3305.00,3250.00,3272.25,3008700 26 | AMZN,2022-01-07,3281.00,3294.00,3250.00,3293.50,2558700 27 | AMZN,2022-01-10,3293.00,3309.98,3262.50,3287.00,2549700 28 | AMZN,2022-01-11,3289.00,3305.00,3258.10,3294.50,2232100 29 | AMZN,2022-01-12,3295.00,3325.00,3284.01,3314.00,2457100 30 | AMZN,2022-01-13,3321.00,3328.00,3290.00,3320.50,1994600 31 | AMZN,2022-01-14,3322.00,3342.74,3301.45,3340.00,2225400 32 | -------------------------------------------------------------------------------- /Datasets/Financial/World Bank - Fictional.csv: -------------------------------------------------------------------------------- 1 | Country,Year,Population,GDP,GDP_Growth,Inflation,Life_Expectancy,Literacy_Rate,Unemployment_Rate 2 | United States,2010,309346863,14992000,2.5,1.5,78.7,99,5.1 3 | United States,2011,311591917,15542000,1.8,2.0,79.1,99.2,4.8 4 | United States,2012,313874218,16197000,2.2,1.7,79.4,99.3,4.6 5 | United States,2013,316057727,16896000,2.5,1.5,79.6,99.4,4.4 6 | United States,2014,318386421,17669000,2.9,1.8,79.8,99.5,4.1 7 | United States,2015,320742673,18455000,2.4,2.2,80.1,99.6,3.8 8 | United States,2016,323071342,19296000,1.6,2.1,80.3,99.7,3.6 9 | United States,2017,325147121,20093000,2.2,2.5,80.5,99.8,3.4 10 | United States,2018,327167434,20913000,2.9,2.3,80.7,99.9,3.1 11 | United States,2019,329064917,21775000,2.3,1.8,81.0,99.9,2.9 12 | China,2010,1339724852,6098400,9.4,4.3,75.2,96.4,4.1 13 | China,2011,1359821467,6527200,9.6,5.2,75.5,96.5,4.0 14 | China,2012,1380087156,6962400,7.8,3.2,75.7,96.7,3.9 15 | China,2013,1400338841,7413000,7.7,2.6,75.9,96.8,3.8 16 | China,2014,1420691377,7872000,7.3,2.0,76.1,96.9,3.7 17 | China,2015,1444216107,8336600,6.9,1.5,76.3,97.0,3.6 18 | China,2016,1467810577,8805200,6.7,2.0,76.5,97.1,3.5 19 | China,2017,1489400000,9285300,6.8,1.6,76.7,97.2,3.4 20 | 
China,2018,1508700000,9785400,6.6,2.3,76.9,97.3,3.3 21 | China,2019,1528800000,10256000,6.1,2.9,77.1,97.4,3.2 22 | India,2010,1230984504,1663200,8.5,6.0,67.3,74.0,6.5 23 | India,2011,1247446011,1766200,8.9,7.5,67.7,74.5,6.4 24 | India,2012,1263580485,1875600,6.5,6.2,68.0,75.0,6.3 25 | India,2013,1279498874,1992200,6.8,5.8,68.3,75.5,6.2 26 | India,2014,1295291543,2116300,7.5,5.5,68.6,76.0,6.1 27 | India,2015,1311050527,2247800,7.3,5.0,68.9,76.5,6.0 28 | India,2016,1326770142,2387100,7.9,4.8,69.2,77.0,5.9 29 | India,2017,1342538000,2534300,7.7,4.5,69.5,77.5,5.8 30 | India,2018,1358532000,2690400,7.2,4.2,69.8,78.0,5.7 31 | India,2019,1374271000,2855400,7.5,3.8,70.1,78.5,5.6 32 | -------------------------------------------------------------------------------- /Datasets/Human Resources/Demographic Data.csv: -------------------------------------------------------------------------------- 1 | employee_id,employee_name,gender,date_of_birth,marital_status,nationality,city,state,country 2 | 101,John Smith,Male,1985-02-15,Married,United States,New York,NY,USA 3 | 102,Sarah Johnson,Female,1990-07-10,Single,United States,Los Angeles,CA,USA 4 | 103,Michael Davis,Male,1982-11-25,Married,Canada,Toronto,Ontario,Canada 5 | 104,Emily Wilson,Female,1987-04-03,Single,United Kingdom,London,England,UK 6 | 105,Robert Anderson,Male,1979-09-12,Divorced,Australia,Sydney,New South Wales,Australia 7 | 106,Olivia Thompson,Female,1984-06-18,Married,United States,Chicago,IL,USA 8 | 107,David Martinez,Male,1992-03-05,Single,Spain,Madrid,Madrid,Spain 9 | 108,Sophia Lee,Female,1989-08-20,Single,United States,San Francisco,CA,USA 10 | 109,James Rodriguez,Male,1986-01-10,Married,Mexico,Mexico City,Distrito Federal,Mexico 11 | 110,Emma Taylor,Female,1993-10-28,Single,United States,Miami,FL,USA 12 | 111,William Hernandez,Male,1988-05-07,Married,United States,Houston,TX,USA 13 | 112,Isabella Moore,Female,1991-12-22,Single,Canada,Vancouver,British Columbia,Canada 14 | 113,Aiden 
Brown,Male,1983-07-31,Married,United Kingdom,Manchester,England,UK 15 | 114,Mia Clark,Female,1987-02-13,Single,Australia,Melbourne,Victoria,Australia 16 | 115,Liam Lewis,Male,1975-11-05,Divorced,United States,Seattle,WA,USA 17 | 116,Sofia Green,Female,1988-04-08,Married,Germany,Berlin,Berlin,Germany 18 | 117,Noah Baker,Male,1995-09-19,Single,United States,Boston,MA,USA 19 | 118,Ava Gonzalez,Female,1992-06-16,Single,United States,Atlanta,GA,USA 20 | 119,Lucas Martinez,Male,1989-01-27,Married,Spain,Barcelona,Catalonia,Spain 21 | 120,Charlotte Hill,Female,1994-08-14,Single,United Kingdom,Birmingham,England,UK 22 | 121,Harper Collins,Female,1991-03-30,Single,United States,Philadelphia,PA,USA 23 | 122,Emma Adams,Female,1988-10-23,Married,Canada,Montreal,Quebec,Canada 24 | 123,Liam Turner,Male,1985-05-16,Married,United States,Dallas,TX,USA 25 | 124,Olivia Harris,Female,1990-12-09,Single,Australia,Brisbane,Queensland,Australia 26 | 125,Noah Butler,Male,1978-09-02,Divorced,United States,Denver,CO,USA 27 | 126,Sophia Bell,Female,1983-04-25,Married,United States,Phoenix,AZ,USA 28 | 127,Lucas Carter,Male,1991-11-18,Single,United States,Detroit,MI,USA 29 | 128,Ava Anderson,Female,1988-06-11,Single,Canada,Ottawa,Ontario,Canada 30 | 129,James Walker,Male,1985-01-14,Married,United States,Austin,TX,USA 31 | 130,Olivia Richardson,Female,1992-10-17,Single,United States,Pittsburgh,PA,USA 32 | 131,Benjamin Wright,Male,1987-05-01,Married,United States,Minneapolis,MN,USA 33 | 132,Sophia Rodriguez,Female,1994-12-24,Single,Canada,Calgary,Alberta,Canada 34 | 133,Lucas Turner,Male,1989-07-06,Married,United States,Portland,OR,USA 35 | 134,Emma Davis,Female,1992-02-29,Single,Australia,Perth,Western Australia,Australia 36 | 135,Noah Hernandez,Male,1983-09-10,Married,Mexico,Guanajuato,Guanajuato,Mexico 37 | 136,Ava Wilson,Female,1988-04-02,Single,United States,Columbus,OH,USA 38 | 137,Liam Adams,Male,1995-11-15,Single,United States,Indianapolis,IN,USA 39 | 138,Emily 
Moore,Female,1990-06-29,Married,Canada,Edmonton,Alberta,Canada 40 | 139,William Phillips,Male,1987-01-09,Married,United States,Raleigh,NC,USA 41 | 140,Sophia Anderson,Female,1994-10-26,Single,United States,Tampa,FL,USA 42 | 141,Charlotte Thompson,Female,1989-03-11,Single,United Kingdom,Leeds,England,UK 43 | 142,Aiden Harris,Male,1984-08-24,Married,United States,Orlando,FL,USA 44 | 143,Liam Turner,Male,1981-05-02,Married,United States,San Diego,CA,USA 45 | 144,Olivia Adams,Female,1986-10-05,Single,Australia,Adelaide,South Australia,Australia 46 | 145,Noah Richardson,Male,1979-07-18,Divorced,United States,Nashville,TN,USA 47 | 146,Sophia Walker,Female,1988-12-01,Married,Germany,Hamburg,Hamburg,Germany 48 | 147,Lucas Clark,Male,1996-09-14,Single,United States,Louisville,KY,USA 49 | 148,Emily Hill,Female,1991-06-27,Single,United States,Richmond,VA,USA 50 | 149,William Baker,Male,1988-01-30,Married,United States,Oklahoma City,OK,USA 51 | 150,Ava Gonzalez,Female,1985-10-13,Single,United States,San Antonio,TX,USA 52 | -------------------------------------------------------------------------------- /Datasets/Human Resources/Employee Absence.csv: -------------------------------------------------------------------------------- 1 | employee_number,employee_name,gender,city,job_title,department,store_location,business_unit,division,age,length_of_service,hours_absent 2 | 101,John Smith,Male,New York,Accountant,Finance,Store A,Finance Division,Accounting,32,5,8 3 | 102,Sarah Johnson,Female,Los Angeles,Marketing Specialist,Marketing,Store B,Marketing Division,Marketing,28,3,16 4 | 103,Michael Davis,Male,Chicago,Sales Representative,Sales,Store C,Sales Division,Sales,35,7,4 5 | 104,Emily Wilson,Female,San Francisco,Software Engineer,Technology,Store D,Technology Division,Engineering,29,4,0 6 | 105,Robert Anderson,Male,Houston,Human Resources Manager,Human Resources,Store E,HR Division,Human Resources,37,8,24 7 | 106,Olivia Thompson,Female,Seattle,Operations 
Supervisor,Operations,Store F,Operations Division,Operations,31,6,8 8 | 107,David Martinez,Male,Miami,Customer Service Representative,Customer Service,Store G,Customer Service Division,Customer Service,26,2,0 9 | 108,Sophia Lee,Female,Boston,Product Manager,Product Management,Store H,Product Division,Product Management,33,5,0 10 | 109,James Rodriguez,Male,Dallas,IT Specialist,IT,Store I,IT Division,IT,30,4,16 11 | 110,Emma Taylor,Female,Atlanta,Administrative Assistant,Administration,Store J,Admin Division,Administration,27,3,0 12 | 111,William Hernandez,Male,New York,Accountant,Finance,Store A,Finance Division,Accounting,34,6,8 13 | 112,Isabella Moore,Female,Los Angeles,Marketing Specialist,Marketing,Store B,Marketing Division,Marketing,29,4,16 14 | 113,Aiden Brown,Male,Chicago,Sales Representative,Sales,Store C,Sales Division,Sales,36,7,0 15 | 114,Mia Clark,Female,San Francisco,Software Engineer,Technology,Store D,Technology Division,Engineering,30,5,0 16 | 115,Liam Lewis,Male,Houston,Human Resources Manager,Human Resources,Store E,HR Division,Human Resources,38,9,32 17 | 116,Sofia Green,Female,Seattle,Operations Supervisor,Operations,Store F,Operations Division,Operations,32,7,16 18 | 117,Noah Baker,Male,Miami,Customer Service Representative,Customer Service,Store G,Customer Service Division,Customer Service,27,3,0 19 | 118,Ava Gonzalez,Female,Boston,Product Manager,Product Management,Store H,Product Division,Product Management,34,6,0 20 | 119,Lucas Martinez,Male,Dallas,IT Specialist,IT,Store I,IT Division,IT,31,5,24 21 | 120,Charlotte Hill,Female,Atlanta,Administrative Assistant,Administration,Store J,Admin Division,Administration,28,4,0 22 | 121,Harper Collins,Female,New York,Accountant,Finance,Store A,Finance Division,Accounting,33,6,8 23 | 122,Emma Adams,Female,Los Angeles,Marketing Specialist,Marketing,Store B,Marketing Division,Marketing,30,4,16 24 | 123,Liam Turner,Male,Chicago,Sales Representative,Sales,Store C,Sales Division,Sales,37,8,0 25 | 124,Olivia 
Harris,Female,San Francisco,Software Engineer,Technology,Store D,Technology Division,Engineering,31,5,0 26 | 125,Noah Butler,Male,Houston,Human Resources Manager,Human Resources,Store E,HR Division,Human Resources,39,9,40 27 | 126,Sophia Bell,Female,Seattle,Operations Supervisor,Operations,Store F,Operations Division,Operations,33,7,16 28 | 127,Lucas Carter,Male,Miami,Customer Service Representative,Customer Service,Store G,Customer Service Division,Customer Service,28,3,0 29 | 128,Ava Anderson,Female,Boston,Product Manager,Product Management,Store H,Product Division,Product Management,35,6,0 30 | 129,James Walker,Male,Dallas,IT Specialist,IT,Store I,IT Division,IT,32,5,16 31 | 130,Olivia Richardson,Female,Atlanta,Administrative Assistant,Administration,Store J,Admin Division,Administration,29,4,0 32 | 131,Benjamin Wright,Male,New York,Accountant,Finance,Store A,Finance Division,Accounting,34,6,8 33 | 132,Sophia Rodriguez,Female,Los Angeles,Marketing Specialist,Marketing,Store B,Marketing Division,Marketing,31,5,16 34 | 133,Lucas Turner,Male,Chicago,Sales Representative,Sales,Store C,Sales Division,Sales,38,8,0 35 | 134,Emma Davis,Female,San Francisco,Software Engineer,Technology,Store D,Technology Division,Engineering,32,6,0 36 | 135,Noah Hernandez,Male,Houston,Human Resources Manager,Human Resources,Store E,HR Division,Human Resources,40,9,48 37 | 136,Ava Wilson,Female,Seattle,Operations Supervisor,Operations,Store F,Operations Division,Operations,34,7,16 38 | 137,Liam Adams,Male,Miami,Customer Service Representative,Customer Service,Store G,Customer Service Division,Customer Service,29,4,0 39 | 138,Emily Moore,Female,Boston,Product Manager,Product Management,Store H,Product Division,Product Management,36,6,0 40 | 139,William Phillips,Male,Dallas,IT Specialist,IT,Store I,IT Division,IT,33,5,24 41 | 140,Sophia Anderson,Female,Atlanta,Administrative Assistant,Administration,Store J,Admin Division,Administration,30,4,0 42 | 141,Charlotte Thompson,Female,New 
York,Accountant,Finance,Store A,Finance Division,Accounting,35,6,8 43 | 142,Aiden Harris,Male,Los Angeles,Marketing Specialist,Marketing,Store B,Marketing Division,Marketing,32,5,16 44 | 143,Liam Turner,Male,Chicago,Sales Representative,Sales,Store C,Sales Division,Sales,39,8,0 45 | 144,Olivia Adams,Female,San Francisco,Software Engineer,Technology,Store D,Technology Division,Engineering,33,6,0 46 | 145,Noah Richardson,Male,Houston,Human Resources Manager,Human Resources,Store E,HR Division,Human Resources,41,9,56 47 | 146,Sophia Walker,Female,Seattle,Operations Supervisor,Operations,Store F,Operations Division,Operations,35,7,16 48 | 147,Lucas Clark,Male,Miami,Customer Service Representative,Customer Service,Store G,Customer Service Division,Customer Service,30,4,0 49 | 148,Emily Hill,Female,Boston,Product Manager,Product Management,Store H,Product Division,Product Management,37,6,0 50 | 149,William Baker,Male,Dallas,IT Specialist,IT,Store I,IT Division,IT,34,5,24 51 | 150,Ava Gonzalez,Female,Atlanta,Administrative Assistant,Administration,Store J,Admin Division,Administration,31,4,0 52 | -------------------------------------------------------------------------------- /Datasets/Human Resources/Employee Engagement Survey Results.csv: -------------------------------------------------------------------------------- 1 | employee_id,employee_name,gender,age,department,job_title,satisfaction_score,work_life_balance_score,career_growth_score,communication_score,teamwork_score 2 | 101,John Smith,Male,32,Finance,Accountant,4,3,3,4,4 3 | 102,Sarah Johnson,Female,28,Marketing,Marketing Specialist,5,4,3,5,4 4 | 103,Michael Davis,Male,35,Sales,Sales Representative,4,2,3,4,3 5 | 104,Emily Wilson,Female,29,Technology,Software Engineer,5,3,4,5,5 6 | 105,Robert Anderson,Male,37,Human Resources,HR Manager,3,2,2,3,3 7 | 106,Olivia Thompson,Female,31,Operations,Operations Supervisor,4,4,3,4,4 8 | 107,David Martinez,Male,26,Customer Service,Customer Service Representative,5,4,4,5,5 9 
| 108,Sophia Lee,Female,33,Product Management,Product Manager,3,3,4,4,3 10 | 109,James Rodriguez,Male,30,IT,IT Specialist,4,3,4,4,4 11 | 110,Emma Taylor,Female,27,Administration,Administrative Assistant,5,4,3,5,5 12 | 111,William Hernandez,Male,34,Finance,Accountant,4,3,3,4,4 13 | 112,Isabella Moore,Female,29,Marketing,Marketing Specialist,5,4,3,5,4 14 | 113,Aiden Brown,Male,36,Sales,Sales Representative,4,2,3,4,3 15 | 114,Mia Clark,Female,30,Technology,Software Engineer,5,3,4,5,5 16 | 115,Liam Lewis,Male,38,Human Resources,HR Manager,3,2,2,3,3 17 | 116,Sofia Green,Female,32,Operations,Operations Supervisor,4,4,3,4,4 18 | 117,Noah Baker,Male,27,Customer Service,Customer Service Representative,5,4,4,5,5 19 | 118,Ava Gonzalez,Female,34,Product Management,Product Manager,3,3,4,4,3 20 | 119,Lucas Martinez,Male,31,IT,IT Specialist,4,3,4,4,4 21 | 120,Charlotte Hill,Female,28,Administration,Administrative Assistant,5,4,3,5,5 22 | 121,Harper Collins,Female,33,Finance,Accountant,4,3,3,4,4 23 | 122,Emma Adams,Female,30,Marketing,Marketing Specialist,5,4,3,5,4 24 | 123,Liam Turner,Male,37,Sales,Sales Representative,4,2,3,4,3 25 | 124,Olivia Harris,Female,31,Technology,Software Engineer,5,3,4,5,5 26 | 125,Noah Butler,Male,39,Human Resources,HR Manager,3,2,2,3,3 27 | 126,Sophia Bell,Female,33,Operations,Operations Supervisor,4,4,3,4,4 28 | 127,Lucas Carter,Male,28,Customer Service,Customer Service Representative,5,4,4,5,5 29 | 128,Ava Anderson,Female,35,Product Management,Product Manager,3,3,4,4,3 30 | 129,James Walker,Male,32,IT,IT Specialist,4,3,4,4,4 31 | 130,Olivia Richardson,Female,29,Administration,Administrative Assistant,5,4,3,5,5 32 | 131,Benjamin Wright,Male,34,Finance,Accountant,4,3,3,4,4 33 | 132,Sophia Rodriguez,Female,31,Marketing,Marketing Specialist,5,4,3,5,4 34 | 133,Lucas Turner,Male,38,Sales,Sales Representative,4,2,3,4,3 35 | 134,Emma Davis,Female,32,Technology,Software Engineer,5,3,4,5,5 36 | 135,Noah Hernandez,Male,40,Human Resources,HR Manager,3,2,2,3,3 
37 | 136,Ava Wilson,Female,34,Operations,Operations Supervisor,4,4,3,4,4 38 | 137,Liam Adams,Male,29,Customer Service,Customer Service Representative,5,4,4,5,5 39 | 138,Emily Moore,Female,36,Product Management,Product Manager,3,3,4,4,3 40 | 139,William Phillips,Male,33,IT,IT Specialist,4,3,4,4,4 41 | 140,Sophia Anderson,Female,30,Administration,Administrative Assistant,5,4,3,5,5 42 | 141,Charlotte Thompson,Female,35,Finance,Accountant,4,3,3,4,4 43 | 142,Aiden Harris,Male,32,Marketing,Marketing Specialist,5,4,3,5,4 44 | 143,Liam Turner,Male,39,Sales,Sales Representative,4,2,3,4,3 45 | 144,Olivia Adams,Female,33,Technology,Software Engineer,5,3,4,5,5 46 | 145,Noah Richardson,Male,41,Human Resources,HR Manager,3,2,2,3,3 47 | 146,Sophia Walker,Female,35,Operations,Operations Supervisor,4,4,3,4,4 48 | 147,Lucas Clark,Male,30,Customer Service,Customer Service Representative,5,4,4,5,5 49 | 148,Emily Hill,Female,37,Product Management,Product Manager,3,3,4,4,3 50 | 149,William Baker,Male,34,IT,IT Specialist,4,3,4,4,4 51 | 150,Ava Gonzalez,Female,31,Administration,Administrative Assistant,5,4,3,5,5 52 | -------------------------------------------------------------------------------- /Datasets/Insurance/Auto Claims.csv: -------------------------------------------------------------------------------- 1 | Claim ID,Date of Accident,Date of Claim,Policy Holder ID,Vehicle Make,Vehicle Model,Vehicle Year,Claim Amount,Claim Status 2 | 1,2022-05-10,2022-05-15,1001,Toyota,Camry,2018,5000,Approved 3 | 2,2022-06-05,2022-06-10,1002,Honda,Accord,2019,3000,Approved 4 | 3,2022-07-15,2022-07-20,1003,Hyundai,Sonata,2017,8000,Rejected 5 | 4,2022-08-20,2022-08-25,1004,Ford,Escape,2020,4000,Approved 6 | 5,2022-09-12,2022-09-15,1005,Toyota,Rav4,2016,2000,Approved 7 | 6,2022-10-08,2022-10-10,1006,Honda,Civic,2019,3500,Rejected 8 | 7,2022-11-18,2022-11-20,1007,Chevrolet,Malibu,2018,6000,Approved 9 | 8,2022-12-25,2022-12-30,1008,Subaru,Forester,2017,4500,Approved 10 | 
9,2023-01-05,2023-01-10,1009,Toyota,Corolla,2020,5500,Approved 11 | 10,2023-02-15,2023-02-20,1010,Honda,CR-V,2019,7000,Rejected 12 | 11,2023-03-22,2023-03-25,1011,Hyundai,Elantra,2015,1500,Approved 13 | 12,2023-04-10,2023-04-15,1012,Toyota,Highlander,2021,9000,Approved 14 | 13,2023-05-05,2023-05-10,1013,Honda,Pilot,2018,6500,Approved 15 | 14,2023-06-15,2023-06-20,1014,Subaru,Outback,2016,4000,Rejected 16 | 15,2023-07-20,2023-07-25,1015,Ford,Fusion,2019,5500,Approved 17 | 16,2023-08-10,2023-08-15,1016,Toyota,Camry,2017,3500,Approved 18 | 17,2023-09-12,2023-09-15,1017,Honda,Accord,2020,3000,Rejected 19 | 18,2023-10-25,2023-10-30,1018,Hyundai,Sonata,2019,5000,Approved 20 | 19,2023-11-18,2023-11-20,1019,Chevrolet,Impala,2018,4000,Approved 21 | 20,2023-12-05,2023-12-10,1020,Toyota,Camry,2016,2500,Rejected 22 | 21,2024-01-08,2024-01-10,1021,Honda,Civic,2019,3500,Approved 23 | 22,2024-02-15,2024-02-20,1022,Ford,Escape,2021,6000,Approved 24 | 23,2024-03-20,2024-03-25,1023,Subaru,Forester,2018,4500,Rejected 25 | 24,2024-04-10,2024-04-15,1024,Toyota,Rav4,2020,5500,Approved 26 | 25,2024-05-12,2024-05-15,1025,Honda,CR-V,2017,7000,Approved 27 | 26,2024-06-18,2024-06-20,1026,Hyundai,Elantra,2016,1500,Approved 28 | 27,2024-07-25,2024-07-30,1027,Toyota,Highlander,2019,8000,Rejected 29 | 28,2024-08-10,2024-08-15,1028,Honda,Pilot,2020,6500,Approved 30 | 29,2024-09-10,2024-09-15,1029,Chevrolet,Malibu,2017,4000,Approved 31 | 30,2024-10-18,2024-10-20,1030,Subaru,Outback,2018,5500,Rejected 32 | 31,2024-11-22,2024-11-25,1031,Ford,Fusion,2019,3500,Approved 33 | 32,2024-12-10,2024-12-15,1032,Toyota,Camry,2016,3000,Rejected 34 | 33,2025-01-05,2025-01-10,1033,Honda,Accord,2020,5000,Approved 35 | 34,2025-02-15,2025-02-20,1034,Hyundai,Sonata,2018,4000,Approved 36 | 35,2025-03-25,2025-03-30,1035,Chevrolet,Impala,2017,2500,Rejected 37 | 36,2025-04-10,2025-04-15,1036,Toyota,Camry,2019,3500,Approved 38 | 37,2025-05-15,2025-05-20,1037,Honda,Civic,2016,6000,Approved 39 | 
38,2025-06-22,2025-06-25,1038,Ford,Escape,2020,4500,Rejected 40 | 39,2025-07-10,2025-07-15,1039,Subaru,Forester,2018,5500,Approved 41 | 40,2025-08-12,2025-08-15,1040,Toyota,Rav4,2021,7000,Approved 42 | 41,2025-09-18,2025-09-20,1041,Honda,CR-V,2019,1500,Approved 43 | 42,2025-10-25,2025-10-30,1042,Hyundai,Elantra,2017,9000,Rejected 44 | 43,2025-11-10,2025-11-15,1043,Toyota,Highlander,2016,6500,Approved 45 | 44,2025-12-05,2025-12-10,1044,Honda,Pilot,2019,4000,Approved 46 | 45,2026-01-08,2026-01-10,1045,Chevrolet,Malibu,2020,5500,Rejected 47 | 46,2026-02-15,2026-02-20,1046,Subaru,Outback,2017,3500,Approved 48 | 47,2026-03-22,2026-03-25,1047,Ford,Fusion,2018,3000,Rejected 49 | 48,2026-04-10,2026-04-15,1048,Toyota,Camry,2020,5000,Approved 50 | 49,2026-05-05,2026-05-10,1049,Honda,Accord,2017,4000,Approved 51 | 50,2026-06-15,2026-06-20,1050,Hyundai,Sonata,2019,2500,Rejected 52 | 51,2026-07-20,2026-07-25,1051,Chevrolet,Impala,2018,3500,Approved 53 | 52,2026-08-10,2026-08-15,1052,Toyota,Camry,2016,6000,Approved 54 | 53,2026-09-12,2026-09-15,1053,Honda,Civic,2020,4500,Rejected 55 | 54,2026-10-25,2026-10-30,1054,Ford,Escape,2018,5500,Approved 56 | 55,2026-11-18,2026-11-20,1055,Subaru,Forester,2017,7000,Approved 57 | 56,2026-12-05,2026-12-10,1056,Toyota,Rav4,2019,1500,Approved 58 | 57,2027-01-08,2027-01-10,1057,Honda,CR-V,2016,9000,Rejected 59 | 58,2027-02-15,2027-02-20,1058,Hyundai,Elantra,2020,6500,Approved 60 | 59,2027-03-20,2027-03-25,1059,Toyota,Highlander,2018,4000,Approved 61 | 60,2027-04-10,2027-04-15,1060,Honda,Pilot,2017,5500,Rejected 62 | 61,2027-05-12,2027-05-15,1061,Chevrolet,Malibu,2019,3500,Approved 63 | 62,2027-06-18,2027-06-20,1062,Subaru,Outback,2018,3000,Rejected 64 | 63,2027-07-25,2027-07-30,1063,Ford,Fusion,2020,5000,Approved 65 | 64,2027-08-10,2027-08-15,1064,Toyota,Camry,2017,4000,Approved 66 | 65,2027-09-12,2027-09-15,1065,Honda,Accord,2019,2500,Rejected 67 | 66,2027-10-25,2027-10-30,1066,Hyundai,Sonata,2018,3500,Approved 68 | 
67,2027-11-18,2027-11-20,1067,Chevrolet,Impala,2016,6000,Approved 69 | 68,2027-12-05,2027-12-10,1068,Toyota,Camry,2019,4500,Rejected 70 | 69,2028-01-08,2028-01-10,1069,Honda,Civic,2017,5500,Approved 71 | 70,2028-02-15,2028-02-20,1070,Ford,Escape,2020,7000,Approved 72 | 71,2028-03-20,2028-03-25,1071,Subaru,Forester,2019,1500,Approved 73 | 72,2028-04-10,2028-04-15,1072,Toyota,Rav4,2017,9000,Rejected 74 | 73,2028-05-05,2028-05-10,1073,Honda,CR-V,2016,6500,Approved 75 | 74,2028-06-15,2028-06-20,1074,Hyundai,Elantra,2019,4000,Approved 76 | 75,2028-07-20,2028-07-25,1075,Toyota,Highlander,2017,5500,Rejected 77 | 76,2028-08-10,2028-08-15,1076,Honda,Pilot,2020,3500,Approved 78 | 77,2028-09-12,2028-09-15,1077,Chevrolet,Malibu,2018,3000,Rejected 79 | 78,2028-10-25,2028-10-30,1078,Subaru,Outback,2019,5000,Approved 80 | 79,2028-11-18,2028-11-20,1079,Ford,Fusion,2017,4000,Approved 81 | 80,2028-12-05,2028-12-10,1080,Toyota,Camry,2016,2500,Rejected 82 | 81,2029-01-08,2029-01-10,1081,Honda,Accord,2020,3500,Approved 83 | 82,2029-02-15,2029-02-20,1082,Hyundai,Sonata,2019,6000,Approved 84 | 83,2029-03-20,2029-03-25,1083,Chevrolet,Impala,2017,4500,Rejected 85 | 84,2029-04-10,2029-04-15,1084,Toyota,Camry,2018,5500,Approved 86 | 85,2029-05-12,2029-05-15,1085,Honda,Civic,2020,7000,Approved 87 | 86,2029-06-18,2029-06-20,1086,Ford,Escape,2017,1500,Approved 88 | 87,2029-07-25,2029-07-30,1087,Subaru,Forester,2019,9000,Rejected 89 | 88,2029-08-10,2029-08-15,1088,Toyota,Rav4,2018,6500,Approved 90 | 89,2029-09-10,2029-09-15,1089,Honda,CR-V,2016,4000,Approved 91 | 90,2029-10-18,2029-10-20,1090,Hyundai,Elantra,2020,5500,Rejected 92 | 91,2029-11-22,2029-11-25,1091,Toyota,Highlander,2017,3500,Approved 93 | 92,2029-12-10,2029-12-15,1092,Honda,Pilot,2018,3000,Rejected 94 | 93,2030-01-05,2030-01-10,1093,Chevrolet,Malibu,2020,5000,Approved 95 | 94,2030-02-15,2030-02-20,1094,Subaru,Outback,2019,4000,Approved 96 | 95,2030-03-25,2030-03-30,1095,Ford,Fusion,2017,2500,Rejected 97 | 
96,2030-04-10,2030-04-15,1096,Toyota,Camry,2018,3500,Approved 98 | 97,2030-05-15,2030-05-20,1097,Honda,Accord,2016,6000,Approved 99 | 98,2030-06-22,2030-06-25,1098,Hyundai,Sonata,2019,4500,Rejected 100 | 99,2030-07-10,2030-07-15,1099,Chevrolet,Impala,2018,5500,Approved 101 | 100,2030-08-12,2030-08-15,1100,Toyota,Camry,2017,7000,Approved 102 | -------------------------------------------------------------------------------- /Datasets/Insurance/Insurance Fraud Prevention.csv: -------------------------------------------------------------------------------- 1 | claim_id,insured_name,claim_amount,claim_date,claim_type,investigation_status 2 | 1,John Smith,2500,2022-01-05,Auto Insurance,Under Investigation 3 | 2,Sarah Johnson,5000,2022-02-10,Health Insurance,Approved 4 | 3,Michael Davis,10000,2022-03-15,Home Insurance,Rejected 5 | 4,Emily Wilson,15000,2022-04-20,Auto Insurance,Approved 6 | 5,Robert Anderson,8000,2022-05-25,Life Insurance,Under Investigation 7 | 6,Olivia Thompson,3000,2022-06-30,Health Insurance,Rejected 8 | 7,David Martinez,12000,2022-07-05,Auto Insurance,Approved 9 | 8,Sophia Lee,2000,2022-08-10,Home Insurance,Rejected 10 | 9,James Rodriguez,10000,2022-09-15,Auto Insurance,Under Investigation 11 | 10,Emma Taylor,5000,2022-10-20,Life Insurance,Approved 12 | 11,William Hernandez,4000,2022-11-25,Auto Insurance,Approved 13 | 12,Isabella Moore,8000,2022-12-30,Health Insurance,Rejected 14 | 13,Aiden Brown,15000,2023-01-05,Auto Insurance,Under Investigation 15 | 14,Mia Clark,1000,2023-02-10,Home Insurance,Rejected 16 | 15,Liam Lewis,6000,2023-03-15,Auto Insurance,Approved 17 | 16,Sofia Green,2000,2023-04-20,Life Insurance,Approved 18 | 17,Noah Baker,9000,2023-05-25,Auto Insurance,Under Investigation 19 | 18,Ava Gonzalez,4000,2023-06-30,Health Insurance,Rejected 20 | 19,Lucas Martinez,7000,2023-07-05,Auto Insurance,Approved 21 | 20,Charlotte Hill,25000,2023-08-10,Home Insurance,Rejected 22 | 21,Harper Collins,12000,2023-09-15,Auto Insurance,Under Investigation 
23 | 22,Emma Adams,3000,2023-10-20,Life Insurance,Approved 24 | 23,Liam Turner,5000,2023-11-25,Auto Insurance,Approved 25 | 24,Olivia Harris,10000,2023-12-30,Health Insurance,Rejected 26 | 25,Noah Butler,2000,2024-01-05,Auto Insurance,Under Investigation 27 | 26,Ava Carter,4000,2024-02-10,Home Insurance,Rejected 28 | 27,Lucas Moore,8000,2024-03-15,Auto Insurance,Approved 29 | 28,Emily Rivera,1500,2024-04-20,Life Insurance,Approved 30 | 29,Aiden Scott,6000,2024-05-25,Auto Insurance,Under Investigation 31 | 30,Sophia Adams,2000,2024-06-30,Health Insurance,Rejected 32 | 31,Liam Phillips,10000,2024-07-05,Auto Insurance,Approved 33 | 32,Olivia Wilson,2500,2024-08-10,Home Insurance,Rejected 34 | 33,Noah Turner,12000,2024-09-15,Auto Insurance,Under Investigation 35 | 34,Ava Hernandez,4000,2024-10-20,Life Insurance,Approved 36 | 35,Lucas Lewis,7000,2024-11-25,Auto Insurance,Approved 37 | 36,Emily Baker,25000,2024-12-30,Health Insurance,Rejected 38 | 37,William Thompson,9000,2025-01-05,Auto Insurance,Under Investigation 39 | 38,Sophia Martinez,1000,2025-02-10,Home Insurance,Rejected 40 | 39,Liam Clark,6000,2025-03-15,Auto Insurance,Approved 41 | 40,Olivia Green,2000,2025-04-20,Life Insurance,Approved 42 | 41,Noah Brown,8000,2025-05-25,Auto Insurance,Under Investigation 43 | 42,Ava Collins,3000,2025-06-30,Health Insurance,Rejected 44 | 43,Lucas Wilson,5000,2025-07-05,Auto Insurance,Approved 45 | 44,Emily Hill,10000,2025-08-10,Home Insurance,Rejected 46 | 45,William Thompson,2000,2025-09-15,Auto Insurance,Under Investigation 47 | 46,Sophia Hernandez,4000,2025-10-20,Life Insurance,Approved 48 | 47,Liam Adams,12000,2025-11-25,Auto Insurance,Approved 49 | 48,Olivia Lewis,1500,2025-12-30,Health Insurance,Rejected 50 | 49,Noah Baker,6000,2026-01-05,Auto Insurance,Under Investigation 51 | 50,Ava Gonzalez,1000,2026-02-10,Home Insurance,Rejected 52 | 51,Lucas Martinez,8000,2026-03-15,Auto Insurance,Approved 53 | 52,Emily Moore,2000,2026-04-20,Life Insurance,Approved 54 | 53,William 
Garcia,9000,2026-05-25,Auto Insurance,Under Investigation 55 | 54,Sophia Clark,4000,2026-06-30,Health Insurance,Rejected 56 | 55,Liam Wilson,7000,2026-07-05,Auto Insurance,Approved 57 | 56,Olivia Rivera,25000,2026-08-10,Home Insurance,Rejected 58 | 57,Noah Butler,12000,2026-09-15,Auto Insurance,Under Investigation 59 | 58,Ava Carter,3000,2026-10-20,Life Insurance,Approved 60 | 59,Lucas Turner,5000,2026-11-25,Auto Insurance,Approved 61 | 60,Emily Thompson,10000,2026-12-30,Health Insurance,Rejected 62 | 61,William Hernandez,2000,2027-01-05,Auto Insurance,Under Investigation 63 | 62,Sophia Harris,4000,2027-02-10,Home Insurance,Rejected 64 | 63,Liam Walker,8000,2027-03-15,Auto Insurance,Approved 65 | 64,Olivia Richardson,1500,2027-04-20,Life Insurance,Approved 66 | 65,Noah Evans,6000,2027-05-25,Auto Insurance,Under Investigation 67 | 66,Ava Stewart,1000,2027-06-30,Health Insurance,Rejected 68 | 67,Lucas Bell,9000,2027-07-05,Auto Insurance,Approved 69 | 68,Emily Watson,2000,2027-08-10,Home Insurance,Rejected 70 | 69,William Phillips,4000,2027-09-15,Auto Insurance,Under Investigation 71 | 70,Sophia Anderson,12000,2027-10-20,Life Insurance,Approved 72 | 71,Liam Moore,3000,2027-11-25,Auto Insurance,Approved 73 | 72,Olivia Hill,5000,2027-12-30,Health Insurance,Rejected 74 | 73,Noah Thompson,10000,2028-01-05,Auto Insurance,Under Investigation 75 | 74,Ava Rodriguez,2000,2028-02-10,Home Insurance,Rejected 76 | 75,Lucas Adams,8000,2028-03-15,Auto Insurance,Approved 77 | 76,Emily Turner,1500,2028-04-20,Life Insurance,Approved 78 | 77,William Wilson,6000,2028-05-25,Auto Insurance,Under Investigation 79 | 78,Sophia Baker,1000,2028-06-30,Health Insurance,Rejected 80 | 79,Liam Gonzalez,7000,2028-07-05,Auto Insurance,Approved 81 | 80,Olivia Martinez,25000,2028-08-10,Home Insurance,Rejected 82 | 81,Noah Clark,12000,2028-09-15,Auto Insurance,Under Investigation 83 | 82,Ava Lewis,3000,2028-10-20,Life Insurance,Approved 84 | 83,Lucas Green,5000,2028-11-25,Auto Insurance,Approved 85 | 
84,Emily Brown,10000,2028-12-30,Health Insurance,Rejected 86 | 85,William Collins,2000,2029-01-05,Auto Insurance,Under Investigation 87 | 86,Sophia Johnson,4000,2029-02-10,Home Insurance,Rejected 88 | 87,Liam Davis,8000,2029-03-15,Auto Insurance,Approved 89 | 88,Olivia Wilson,1500,2029-04-20,Life Insurance,Approved 90 | 89,Noah Thompson,6000,2029-05-25,Auto Insurance,Under Investigation 91 | 90,Ava Turner,1000,2029-06-30,Health Insurance,Rejected 92 | 91,Lucas Hernandez,9000,2029-07-05,Auto Insurance,Approved 93 | 92,Emily Adams,2000,2029-08-10,Home Insurance,Rejected 94 | 93,William Moore,4000,2029-09-15,Auto Insurance,Under Investigation 95 | 94,Sophia Hill,12000,2029-10-20,Life Insurance,Approved 96 | 95,Liam Phillips,3000,2029-11-25,Auto Insurance,Approved 97 | 96,Olivia Wilson,5000,2029-12-30,Health Insurance,Rejected 98 | 97,Noah Baker,10000,2030-01-05,Auto Insurance,Under Investigation 99 | 98,Ava Gonzalez,2000,2030-02-10,Home Insurance,Rejected 100 | 99,Lucas Martinez,8000,2030-03-15,Auto Insurance,Approved 101 | 100,Emily Moore,1500,2030-04-20,Life Insurance,Approved 102 | -------------------------------------------------------------------------------- /Datasets/Insurance/Life Insurance.csv: -------------------------------------------------------------------------------- 1 | Policy ID,Date of Death,Date of Claim,Insured's Age,Gender,Smoker,Sum Assured,Claim Amount,Claim Status 2 | 101,2023-04-10,2023-04-15,45,Male,No,500000,250000,Approved 3 | 102,2023-05-12,2023-05-15,62,Female,Yes,750000,400000,Approved 4 | 103,2023-06-18,2023-06-20,32,Male,No,250000,100000,Approved 5 | 104,2023-07-25,2023-07-30,54,Male,Yes,1000000,500000,Rejected 6 | 105,2023-08-10,2023-08-15,38,Female,No,300000,150000,Approved 7 | 106,2023-09-10,2023-09-15,47,Male,Yes,500000,250000,Approved 8 | 107,2023-10-18,2023-10-20,55,Female,No,400000,200000,Approved 9 | 108,2023-11-22,2023-11-25,41,Male,No,300000,150000,Approved 10 | 109,2023-12-10,2023-12-15,29,Female,No,200000,100000,Approved 
11 | 110,2024-01-05,2024-01-10,63,Male,Yes,1000000,500000,Rejected 12 | 111,2024-02-15,2024-02-20,51,Male,Yes,750000,400000,Approved 13 | 112,2024-03-25,2024-03-30,39,Female,No,250000,100000,Approved 14 | 113,2024-04-10,2024-04-15,48,Male,Yes,500000,250000,Approved 15 | 114,2024-05-15,2024-05-20,36,Male,No,300000,150000,Approved 16 | 115,2024-06-22,2024-06-25,57,Female,Yes,1000000,500000,Rejected 17 | 116,2024-07-10,2024-07-15,42,Male,No,400000,200000,Approved 18 | 117,2024-08-12,2024-08-15,31,Female,No,200000,100000,Approved 19 | 118,2024-09-18,2024-09-20,56,Male,Yes,750000,400000,Approved 20 | 119,2024-10-25,2024-10-30,43,Male,Yes,500000,250000,Approved 21 | 120,2024-11-10,2024-11-15,37,Female,No,300000,150000,Approved 22 | 121,2024-12-05,2024-12-10,52,Male,Yes,1000000,500000,Rejected 23 | 122,2025-01-08,2025-01-10,40,Female,No,400000,200000,Approved 24 | 123,2025-02-15,2025-02-20,58,Male,Yes,750000,400000,Approved 25 | 124,2025-03-20,2025-03-25,33,Male,No,250000,100000,Approved 26 | 125,2025-04-10,2025-04-15,46,Female,No,500000,250000,Approved 27 | 126,2025-05-12,2025-05-15,64,Male,Yes,1000000,500000,Rejected 28 | 127,2025-06-18,2025-06-20,50,Female,Yes,750000,400000,Approved 29 | 128,2025-07-25,2025-07-30,39,Male,No,300000,150000,Approved 30 | 129,2025-08-10,2025-08-15,53,Male,Yes,500000,250000,Approved 31 | 130,2025-09-10,2025-09-15,45,Female,Yes,400000,200000,Approved 32 | 131,2025-10-18,2025-10-20,35,Male,No,200000,100000,Approved 33 | 132,2025-11-22,2025-11-25,59,Female,No,750000,400000,Approved 34 | 133,2025-12-10,2025-12-15,41,Male,No,250000,100000,Approved 35 | 134,2026-01-05,2026-01-10,49,Male,Yes,500000,250000,Approved 36 | 135,2026-02-15,2026-02-20,38,Female,No,300000,150000,Approved 37 | 136,2026-03-25,2026-03-30,65,Male,Yes,1000000,500000,Rejected 38 | 137,2026-04-10,2026-04-15,54,Female,Yes,750000,400000,Approved 39 | 138,2026-05-15,2026-05-20,34,Male,No,250000,100000,Approved 40 | 139,2026-06-22,2026-06-25,47,Male,Yes,500000,250000,Approved 41 | 
140,2026-07-10,2026-07-15,57,Female,No,400000,200000,Approved 42 | 141,2026-08-12,2026-08-15,43,Male,No,300000,150000,Approved 43 | 142,2026-09-18,2026-09-20,30,Female,No,200000,100000,Approved 44 | 143,2026-10-25,2026-10-30,60,Male,Yes,1000000,500000,Rejected 45 | 144,2026-11-10,2026-11-15,50,Male,Yes,750000,400000,Approved 46 | 145,2026-12-05,2026-12-10,36,Female,No,250000,100000,Approved 47 | 146,2027-01-08,2027-01-10,45,Male,No,500000,250000,Approved 48 | 147,2027-02-15,2027-02-20,63,Female,Yes,1000000,500000,Rejected 49 | 148,2027-03-20,2027-03-25,32,Male,No,250000,100000,Approved 50 | 149,2027-04-10,2027-04-15,55,Male,Yes,1000000,500000,Rejected 51 | 150,2027-05-12,2027-05-15,37,Female,No,300000,150000,Approved 52 | 151,2027-06-18,2027-06-20,48,Male,No,500000,250000,Approved 53 | 152,2027-07-25,2027-07-30,59,Male,Yes,750000,400000,Approved 54 | 153,2027-08-10,2027-08-15,33,Female,No,250000,100000,Approved 55 | 154,2027-09-10,2027-09-15,46,Male,Yes,500000,250000,Approved 56 | 155,2027-10-18,2027-10-20,52,Female,Yes,1000000,500000,Rejected 57 | 156,2027-11-22,2027-11-25,40,Male,No,400000,200000,Approved 58 | 157,2027-12-10,2027-12-15,61,Female,No,750000,400000,Approved 59 | 158,2028-01-05,2028-01-10,39,Male,No,250000,100000,Approved 60 | 159,2028-02-15,2028-02-20,56,Male,Yes,500000,250000,Approved 61 | 160,2028-03-25,2028-03-30,42,Female,No,300000,150000,Approved 62 | 161,2028-04-10,2028-04-15,34,Male,No,200000,100000,Approved 63 | 162,2028-05-15,2028-05-20,58,Female,Yes,1000000,500000,Rejected 64 | 163,2028-06-22,2028-06-25,45,Male,Yes,750000,400000,Approved 65 | 164,2028-07-10,2028-07-15,31,Female,No,250000,100000,Approved 66 | 165,2028-08-12,2028-08-15,53,Male,Yes,500000,250000,Approved 67 | 166,2028-09-18,2028-09-20,47,Female,Yes,400000,200000,Approved 68 | 167,2028-10-25,2028-10-30,35,Male,No,200000,100000,Approved 69 | 168,2028-11-10,2028-11-15,62,Female,No,750000,400000,Approved 70 | 169,2028-12-05,2028-12-10,38,Male,No,250000,100000,Approved 71 | 
170,2029-01-08,2029-01-10,49,Male,Yes,500000,250000,Approved 72 | 171,2029-02-15,2029-02-20,36,Female,No,300000,150000,Approved 73 | 172,2029-03-20,2029-03-25,64,Male,Yes,1000000,500000,Rejected 74 | 173,2029-04-10,2029-04-15,54,Female,Yes,750000,400000,Approved 75 | 174,2029-05-15,2029-05-20,33,Male,No,250000,100000,Approved 76 | 175,2029-06-22,2029-06-25,46,Male,Yes,500000,250000,Approved 77 | 176,2029-07-10,2029-07-15,56,Female,No,400000,200000,Approved 78 | 177,2029-08-12,2029-08-15,43,Male,No,300000,150000,Approved 79 | 178,2029-09-18,2029-09-20,30,Female,No,200000,100000,Approved 80 | 179,2029-10-25,2029-10-30,59,Male,Yes,1000000,500000,Rejected 81 | 180,2029-11-10,2029-11-15,51,Male,Yes,750000,400000,Approved 82 | 181,2029-12-05,2029-12-10,37,Female,No,250000,100000,Approved 83 | 182,2030-01-08,2030-01-10,44,Male,No,500000,250000,Approved 84 | 183,2030-02-15,2030-02-20,62,Female,Yes,1000000,500000,Rejected 85 | 184,2030-03-20,2030-03-25,31,Male,No,250000,100000,Approved 86 | 185,2030-04-10,2030-04-15,57,Male,Yes,1000000,500000,Rejected 87 | 186,2030-05-12,2030-05-15,39,Female,No,300000,150000,Approved 88 | 187,2030-06-18,2030-06-20,48,Male,No,500000,250000,Approved 89 | 188,2030-07-25,2030-07-30,35,Male,No,200000,100000,Approved 90 | 189,2030-08-10,2030-08-15,54,Female,Yes,750000,400000,Approved 91 | 190,2030-09-10,2030-09-15,42,Male,No,300000,150000,Approved 92 | 191,2030-10-18,2030-10-20,33,Female,No,200000,100000,Approved 93 | 192,2030-11-22,2030-11-25,60,Male,Yes,1000000,500000,Rejected 94 | 193,2030-12-10,2030-12-15,50,Male,Yes,750000,400000,Approved 95 | 194,2031-01-05,2031-01-10,38,Female,No,250000,100000,Approved 96 | 195,2031-02-15,2031-02-20,55,Male,Yes,500000,250000,Approved 97 | 196,2031-03-25,2031-03-30,41,Female,No,300000,150000,Approved 98 | 197,2031-04-10,2031-04-15,34,Male,No,200000,100000,Approved 99 | 198,2031-05-15,2031-05-20,59,Female,Yes,1000000,500000,Rejected 100 | 199,2031-06-22,2031-06-25,47,Male,Yes,750000,400000,Approved 101 | 
200,2031-07-10,2031-07-15,32,Female,No,250000,100000,Approved -------------------------------------------------------------------------------- /Datasets/Insurance/Pricing and Risk.csv: -------------------------------------------------------------------------------- 1 | Policy_ID,Policy_Type,Policy_Start_Date,Policy_End_Date,Premium_Amount,Insured_Amount,Claim_Amount,Location,Risk_Score 2 | 101,Auto,2022-01-01,2023-01-01,1000,50000,0,New York,0.75 3 | 102,Home,2022-02-15,2023-02-15,1500,300000,0,Chicago,0.85 4 | 103,Health,2022-03-10,2023-03-10,500,0,0,Los Angeles,0.6 5 | 104,Auto,2022-04-05,2023-04-05,800,40000,0,Dallas,0.8 6 | 105,Life,2022-05-20,2023-05-20,2000,500000,0,Miami,0.9 7 | 106,Home,2022-06-15,2023-06-15,1200,250000,0,San Francisco,0.85 8 | 107,Health,2022-07-10,2023-07-10,600,0,0,Seattle,0.65 9 | 108,Auto,2022-08-05,2023-08-05,900,45000,0,Denver,0.75 10 | 109,Life,2022-09-20,2023-09-20,1800,400000,0,Houston,0.95 11 | 110,Auto,2022-10-15,2023-10-15,950,50000,0,Phoenix,0.7 12 | 111,Home,2022-11-10,2023-11-10,1400,350000,0,Atlanta,0.8 13 | 112,Health,2022-12-05,2023-12-05,550,0,0,Detroit,0.55 14 | 113,Auto,2023-01-01,2024-01-01,1050,55000,0,Philadelphia,0.8 15 | 114,Life,2023-02-15,2024-02-15,2200,600000,0,Boston,0.9 16 | 115,Home,2023-03-10,2024-03-10,1600,280000,0,San Diego,0.85 17 | 116,Health,2023-04-05,2024-04-05,700,0,0,Portland,0.7 18 | 117,Auto,2023-05-20,2024-05-20,850,40000,0,Austin,0.75 19 | 118,Life,2023-06-15,2024-06-15,2400,550000,0,Charlotte,0.95 20 | 119,Home,2023-07-10,2024-07-10,1300,320000,0,Washington D.C.,0.8 21 | 120,Health,2023-08-05,2024-08-05,650,0,0,San Antonio,0.6 22 | 121,Auto,2023-09-01,2024-09-01,1150,60000,0,Baltimore,0.75 23 | 122,Life,2023-10-15,2024-10-15,2000,450000,0,Dallas,0.85 24 | 123,Home,2023-11-10,2024-11-10,1550,280000,0,Seattle,0.8 25 | 124,Health,2023-12-05,2024-12-05,600,0,0,Los Angeles,0.7 26 | 125,Auto,2024-01-01,2025-01-01,1000,40000,0,Chicago,0.75 27 | 126,Life,2024-02-15,2025-02-15,2100,550000,0,New 
York,0.9 28 | 127,Home,2024-03-10,2025-03-10,1400,320000,0,Miami,0.85 29 | 128,Health,2024-04-05,2025-04-05,650,0,0,Dallas,0.65 30 | 129,Auto,2024-05-20,2025-05-20,900,45000,0,Denver,0.8 31 | 130,Life,2024-06-15,2025-06-15,1900,500000,0,Houston,0.95 32 | 131,Home,2024-07-10,2025-07-10,1200,280000,0,Atlanta,0.85 33 | 132,Health,2024-08-05,2025-08-05,550,0,0,Detroit,0.55 34 | 133,Auto,2024-09-01,2025-09-01,1050,55000,0,Philadelphia,0.8 35 | 134,Life,2024-10-15,2025-10-15,2300,600000,0,Boston,0.9 36 | 135,Home,2024-11-10,2025-11-10,1500,350000,0,San Diego,0.85 37 | 136,Health,2024-12-05,2025-12-05,700,0,0,Portland,0.7 38 | 137,Auto,2025-01-01,2026-01-01,950,50000,0,Austin,0.75 39 | 138,Life,2025-02-15,2026-02-15,2200,550000,0,Charlotte,0.95 40 | 139,Home,2025-03-10,2026-03-10,1300,320000,0,Washington D.C.,0.8 41 | 140,Health,2025-04-05,2026-04-05,600,0,0,San Antonio,0.6 42 | 141,Auto,2025-05-20,2026-05-20,1000,40000,0,New York,0.75 43 | 142,Life,2025-06-15,2026-06-15,2000,500000,0,Chicago,0.9 44 | 143,Home,2025-07-10,2026-07-10,1500,300000,0,Miami,0.85 45 | 144,Health,2025-08-05,2026-08-05,550,0,0,Dallas,0.7 46 | 145,Auto,2025-09-01,2026-09-01,900,45000,0,Denver,0.8 47 | 146,Life,2025-10-15,2026-10-15,1900,400000,0,Houston,0.95 48 | 147,Home,2025-11-10,2026-11-10,1400,350000,0,Atlanta,0.85 49 | 148,Health,2025-12-05,2026-12-05,650,0,0,Detroit,0.6 50 | 149,Auto,2026-01-01,2027-01-01,1000,55000,0,Philadelphia,0.8 51 | 150,Life,2026-02-15,2027-02-15,2200,600000,0,Boston,0.9 52 | 151,Home,2026-03-10,2027-03-10,1600,280000,0,San Diego,0.85 53 | 152,Health,2026-04-05,2027-04-05,700,0,0,Portland,0.7 54 | 153,Auto,2026-05-20,2027-05-20,850,40000,0,Austin,0.75 55 | 154,Life,2026-06-15,2027-06-15,2400,550000,0,Charlotte,0.95 56 | 155,Home,2026-07-10,2027-07-10,1300,320000,0,Washington D.C.,0.8 57 | 156,Health,2026-08-05,2027-08-05,650,0,0,San Antonio,0.6 58 | 157,Auto,2026-09-01,2027-09-01,1100,60000,0,Baltimore,0.75 59 | 158,Life,2026-10-15,2027-10-15,2000,450000,0,Dallas,0.85 
60 | 159,Home,2026-11-10,2027-11-10,1500,350000,0,New York,0.85 61 | 160,Health,2026-12-05,2027-12-05,600,0,0,Los Angeles,0.7 62 | 161,Auto,2027-01-01,2028-01-01,1000,40000,0,Chicago,0.75 63 | 162,Life,2027-02-15,2028-02-15,2100,550000,0,New York,0.9 64 | 163,Home,2027-03-10,2028-03-10,1400,320000,0,Miami,0.85 65 | 164,Health,2027-04-05,2028-04-05,650,0,0,Dallas,0.65 66 | 165,Auto,2027-05-20,2028-05-20,900,45000,0,Denver,0.8 67 | 166,Life,2027-06-15,2028-06-15,1900,500000,0,Houston,0.95 68 | 167,Home,2027-07-10,2028-07-10,1200,280000,0,Atlanta,0.85 69 | 168,Health,2027-08-05,2028-08-05,550,0,0,Detroit,0.55 70 | 169,Auto,2027-09-01,2028-09-01,1050,55000,0,Philadelphia,0.8 71 | 170,Life,2027-10-15,2028-10-15,2300,600000,0,Boston,0.9 72 | 171,Home,2027-11-10,2028-11-10,1500,350000,0,San Diego,0.85 73 | 172,Health,2027-12-05,2028-12-05,700,0,0,Portland,0.7 74 | 173,Auto,2028-01-01,2029-01-01,950,50000,0,Austin,0.75 75 | 174,Life,2028-02-15,2029-02-15,2200,550000,0,Charlotte,0.95 76 | 175,Home,2028-03-10,2029-03-10,1300,320000,0,Washington D.C.,0.8 77 | 176,Health,2028-04-05,2029-04-05,600,0,0,San Antonio,0.6 78 | 177,Auto,2028-05-20,2029-05-20,1000,40000,0,New York,0.75 79 | 178,Life,2028-06-15,2029-06-15,2000,500000,0,Chicago,0.9 80 | 179,Home,2028-07-10,2029-07-10,1500,300000,0,Miami,0.85 81 | 180,Health,2028-08-05,2029-08-05,550,0,0,Dallas,0.7 82 | 181,Auto,2028-09-01,2029-09-01,900,45000,0,Denver,0.8 83 | 182,Life,2028-10-15,2029-10-15,1900,400000,0,Houston,0.95 84 | 183,Home,2028-11-10,2029-11-10,1400,350000,0,Atlanta,0.85 85 | 184,Health,2028-12-05,2029-12-05,650,0,0,Detroit,0.6 86 | 185,Auto,2029-01-01,2030-01-01,1000,55000,0,Philadelphia,0.8 87 | 186,Life,2029-02-15,2030-02-15,2200,600000,0,Boston,0.9 88 | 187,Home,2029-03-10,2030-03-10,1600,280000,0,San Diego,0.85 89 | 188,Health,2029-04-05,2030-04-05,700,0,0,Portland,0.7 90 | 189,Auto,2029-05-20,2030-05-20,850,40000,0,Austin,0.75 91 | 190,Life,2029-06-15,2030-06-15,2400,550000,0,Charlotte,0.95 92 | 
191,Home,2029-07-10,2030-07-10,1300,320000,0,Washington D.C.,0.8 93 | 192,Health,2029-08-05,2030-08-05,650,0,0,San Antonio,0.6 94 | 193,Auto,2029-09-01,2030-09-01,1150,60000,0,Baltimore,0.75 95 | 194,Life,2029-10-15,2030-10-15,2000,450000,0,Dallas,0.85 96 | 195,Home,2029-11-10,2030-11-10,1500,350000,0,New York,0.85 97 | 196,Health,2029-12-05,2030-12-05,600,0,0,Los Angeles,0.7 98 | 197,Auto,2030-01-01,2031-01-01,1000,40000,0,Chicago,0.75 99 | 198,Life,2030-02-15,2031-02-15,2100,550000,0,New York,0.9 100 | 199,Home,2030-03-10,2031-03-10,1400,320000,0,Miami,0.85 101 | 200,Health,2030-04-05,2031-04-05,650,0,0,Dallas,0.65 102 | -------------------------------------------------------------------------------- /Datasets/Marketing/A-B Testing.csv: -------------------------------------------------------------------------------- 1 | ID,time,control treatment,conversion 2 | 1,2023-05-31 08:15:22,1,0 3 | 2,2023-05-31 09:10:37,0,1 4 | 3,2023-05-31 10:05:42,1,0 5 | 4,2023-05-31 11:00:58,0,1 6 | 5,2023-05-31 11:56:01,1,0 7 | 6,2023-05-31 12:51:19,0,0 8 | 7,2023-05-31 13:46:27,1,1 9 | 8,2023-05-31 14:41:33,0,1 10 | 9,2023-05-31 15:36:40,1,0 11 | 10,2023-05-31 16:31:58,0,1 12 | 11,2023-05-31 17:27:04,1,0 13 | 12,2023-05-31 18:22:09,0,1 14 | 13,2023-05-31 19:17:19,1,0 15 | 14,2023-05-31 20:12:22,0,1 16 | 15,2023-05-31 21:07:30,1,1 17 | 16,2023-05-31 22:02:44,0,0 18 | 17,2023-05-31 22:57:52,1,0 19 | 18,2023-05-31 23:53:03,0,1 20 | 19,2023-06-01 00:48:18,1,1 21 | 20,2023-06-01 01:43:24,0,0 22 | 21,2023-06-01 02:38:31,1,0 23 | 22,2023-06-01 03:33:48,0,1 24 | 23,2023-06-01 04:29:03,1,0 25 | 24,2023-06-01 05:24:17,0,1 26 | 25,2023-06-01 06:19:24,1,1 27 | 26,2023-06-01 07:14:41,0,0 28 | 27,2023-06-01 08:09:56,1,1 29 | 28,2023-06-01 09:05:11,0,1 30 | 29,2023-06-01 10:00:27,1,0 31 | 30,2023-06-01 10:55:32,0,1 32 | 31,2023-06-01 11:50:46,1,0 33 | 32,2023-06-01 12:46:00,0,1 34 | 33,2023-06-01 13:41:18,1,0 35 | 34,2023-06-01 14:36:23,0,1 36 | 35,2023-06-01 15:31:35,1,1 37 | 36,2023-06-01 
16:26:48,0,0 38 | 37,2023-06-01 17:22:04,1,0 39 | 38,2023-06-01 18:17:19,0,1 40 | 39,2023-06-01 19:12:31,1,1 41 | 40,2023-06-01 20:07:42,0,0 42 | 41,2023-06-01 21:02:58,1,0 43 | 42,2023-06-01 21:58:12,0,1 44 | 43,2023-06-01 22:53:26,1,0 45 | 44,2023-06-01 23:48:39,0,1 46 | 45,2023-06-02 00:43:50,1,0 47 | 46,2023-06-02 01:39:02,0,1 48 | 47,2023-06-02 02:34:18,1,0 49 | 48,2023-06-02 03:29:29,0,0 50 | 49,2023-06-02 04:24:42,1,1 51 | 50,2023-06-02 05:19:54,0,0 52 | 51,2023-06-02 06:15:10,1,1 53 | 52,2023-06-02 07:10:22,0,0 54 | 53,2023-06-02 08:05:38,1,1 55 | 54,2023-06-02 09:00:52,0,1 56 | 55,2023-06-02 09:56:07,1,0 57 | 56,2023-06-02 10:51:21,0,0 58 | 57,2023-06-02 11:46:36,1,0 59 | 58,2023-06-02 12:41:49,0,1 60 | 59,2023-06-02 13:37:03,1,0 61 | 60,2023-06-02 14:32:18,0,1 62 | 61,2023-06-02 15:27:32,1,1 63 | 62,2023-06-02 16:22:47,0,0 64 | 63,2023-06-02 17:18:03,1,0 65 | 64,2023-06-02 18:13:17,0,1 66 | 65,2023-06-02 19:08:33,1,1 67 | 66,2023-06-02 20:03:47,0,0 68 | 67,2023-06-02 20:59:01,1,1 69 | 68,2023-06-02 21:54:15,0,0 70 | 69,2023-06-02 22:49:29,1,1 71 | 70,2023-06-02 23:44:44,0,0 72 | 71,2023-06-03 00:39:57,1,1 73 | 72,2023-06-03 01:35:11,0,0 74 | 73,2023-06-03 02:30:25,1,0 75 | 74,2023-06-03 03:25:39,0,1 76 | 75,2023-06-03 04:20:53,1,0 77 | 76,2023-06-03 05:16:07,0,1 78 | 77,2023-06-03 06:11:22,1,0 79 | 78,2023-06-03 07:06:36,0,1 80 | 79,2023-06-03 08:01:50,1,1 81 | 80,2023-06-03 08:57:03,0,0 82 | 81,2023-06-03 09:52:17,1,1 83 | 82,2023-06-03 10:47:31,0,1 84 | 83,2023-06-03 11:42:46,1,0 85 | 84,2023-06-03 12:37:59,0,1 86 | 85,2023-06-03 13:33:12,1,0 87 | 86,2023-06-03 14:28:27,0,1 88 | 87,2023-06-03 15:23:41,1,1 89 | 88,2023-06-03 16:18:55,0,0 90 | 89,2023-06-03 17:14:09,1,0 91 | 90,2023-06-03 18:09:23,0,1 92 | 91,2023-06-03 19:04:37,1,0 93 | 92,2023-06-03 20:00:52,0,1 94 | 93,2023-06-03 20:56:06,1,0 95 | 94,2023-06-03 21:51:20,0,1 96 | 95,2023-06-03 22:46:35,1,0 97 | 96,2023-06-03 23:41:50,0,1 98 | 97,2023-06-04 00:37:04,1,1 99 | 98,2023-06-04 01:32:19,0,0 
100 | 99,2023-06-04 02:27:32,1,1 101 | 100,2023-06-04 03:22:46,0,0 102 | -------------------------------------------------------------------------------- /Datasets/Marketing/Consumer Data.csv: -------------------------------------------------------------------------------- 1 | customer_id,age,gender,city,state,country,product_category,purchase_date 2 | 1,32,Male,New York,NY,United States,Electronics,2022-05-10 3 | 2,45,Female,Los Angeles,CA,United States,Clothing,2022-06-15 4 | 3,28,Male,Chicago,IL,United States,Home & Kitchen,2022-07-20 5 | 4,39,Female,Houston,TX,United States,Beauty,2022-08-25 6 | 5,41,Male,Phoenix,AZ,United States,Sports & Outdoors,2022-09-30 7 | 6,35,Female,Philadelphia,PA,United States,Electronics,2022-10-05 8 | 7,48,Male,San Antonio,TX,United States,Clothing,2022-11-10 9 | 8,29,Female,San Diego,CA,United States,Home & Kitchen,2022-12-15 10 | 9,37,Male,Dallas,TX,United States,Beauty,2023-01-20 11 | 10,33,Female,San Jose,CA,United States,Sports & Outdoors,2023-02-25 12 | 11,26,Male,Austin,TX,United States,Electronics,2023-03-31 13 | 12,42,Female,Indianapolis,IN,United States,Clothing,2023-04-05 14 | 13,31,Male,Jacksonville,FL,United States,Home & Kitchen,2023-05-10 15 | 14,36,Female,San Francisco,CA,United States,Beauty,2023-06-15 16 | 15,43,Male,Columbus,OH,United States,Sports & Outdoors,2023-07-20 17 | 16,30,Female,Fort Worth,TX,United States,Electronics,2023-08-25 18 | 17,44,Male,Charlotte,NC,United States,Clothing,2023-09-30 19 | 18,34,Female,Detroit,MI,United States,Home & Kitchen,2023-10-05 20 | 19,47,Male,El Paso,TX,United States,Beauty,2023-11-10 21 | 20,27,Female,Memphis,TN,United States,Sports & Outdoors,2023-12-15 22 | 21,38,Male,Boston,MA,United States,Electronics,2024-01-20 23 | 22,39,Female,Seattle,WA,United States,Clothing,2024-02-25 24 | 23,25,Male,Denver,CO,United States,Home & Kitchen,2024-03-31 25 | 24,46,Female,Washington,DC,United States,Beauty,2024-04-05 26 | 25,32,Male,Nashville,TN,United States,Sports & 
Outdoors,2024-05-10 27 | 26,41,Female,Baltimore,MD,United States,Electronics,2024-06-15 28 | 27,29,Male,Oklahoma City,OK,United States,Clothing,2024-07-20 29 | 28,40,Female,Louisville,KY,United States,Home & Kitchen,2024-08-25 30 | 29,35,Male,Portland,OR,United States,Beauty,2024-09-30 31 | 30,48,Female,Milwaukee,WI,United States,Sports & Outdoors,2024-10-05 32 | 31,33,Male,Las Vegas,NV,United States,Electronics,2024-11-10 33 | 32,37,Female,Albuquerque,NM,United States,Clothing,2024-12-15 34 | 33,26,Male,Tucson,AZ,United States,Home & Kitchen,2025-01-20 35 | 34,42,Female,Fresno,CA,United States,Beauty,2025-02-25 36 | 35,31,Male,Sacramento,CA,United States,Sports & Outdoors,2025-03-31 37 | 36,36,Female,Long Beach,CA,United States,Electronics,2025-04-05 38 | 37,43,Male,Kansas City,MO,United States,Clothing,2025-05-10 39 | 38,30,Female,Mesa,AZ,United States,Home & Kitchen,2025-06-15 40 | 39,44,Male,Virginia Beach,VA,United States,Beauty,2025-07-20 41 | 40,27,Female,Atlanta,GA,United States,Sports & Outdoors,2025-08-25 42 | 41,38,Male,Colorado Springs,CO,United States,Electronics,2025-09-30 43 | 42,39,Female,Omaha,NE,United States,Clothing,2025-10-05 44 | 43,25,Male,Raleigh,NC,United States,Home & Kitchen,2025-11-10 45 | 44,46,Female,Miami,FL,United States,Beauty,2025-12-15 46 | 45,32,Male,Oakland,CA,United States,Sports & Outdoors,2026-01-20 47 | 46,41,Female,Tulsa,OK,United States,Electronics,2026-02-25 48 | 47,29,Male,Cleveland,OH,United States,Clothing,2026-03-31 49 | 48,40,Female,Kansas City,KS,United States,Home & Kitchen,2026-04-05 50 | 49,35,Male,Arlington,TX,United States,Beauty,2026-05-10 51 | 50,48,Female,New Orleans,LA,United States,Sports & Outdoors,2026-06-15 52 | 51,33,Male,Tampa,FL,United States,Electronics,2026-07-20 53 | 52,37,Female,Honolulu,HI,United States,Clothing,2026-08-25 54 | 53,26,Male,Anchorage,AK,United States,Home & Kitchen,2026-09-30 55 | 54,42,Female,Anaheim,CA,United States,Beauty,2026-10-05 56 | 55,31,Male,Pittsburgh,PA,United 
States,Sports & Outdoors,2026-11-10 57 | 56,36,Female,Saint Paul,MN,United States,Electronics,2026-12-15 58 | 57,43,Male,Cincinnati,OH,United States,Clothing,2027-01-20 59 | 58,30,Female,Henderson,NV,United States,Home & Kitchen,2027-02-25 60 | 59,44,Male,Greensboro,NC,United States,Beauty,2027-03-31 61 | 60,27,Female,Plano,TX,United States,Sports & Outdoors,2027-04-05 62 | 61,38,Male,Newark,NJ,United States,Electronics,2027-05-10 63 | 62,39,Female,Lincoln,NE,United States,Clothing,2027-06-15 64 | 63,25,Male,Louisville,KY,United States,Home & Kitchen,2027-07-20 65 | 64,46,Female,Riverside,CA,United States,Beauty,2027-08-25 66 | 65,32,Male,Jersey City,NJ,United States,Sports & Outdoors,2027-09-30 67 | 66,41,Female,Baton Rouge,LA,United States,Electronics,2027-10-05 68 | 67,29,Male,Chandler,AZ,United States,Clothing,2027-11-10 69 | 68,40,Female,Fort Wayne,IN,United States,Home & Kitchen,2027-12-15 70 | 69,35,Male,Orlando,FL,United States,Beauty,2028-01-20 71 | 70,48,Female,St. Petersburg,FL,United States,Sports & Outdoors,2028-02-25 72 | 71,33,Male,Laredo,TX,United States,Electronics,2028-03-31 73 | 72,37,Female,Chula Vista,CA,United States,Clothing,2028-04-05 74 | 73,26,Male,Norfolk,VA,United States,Home & Kitchen,2028-05-10 75 | 74,42,Female,Chandler,AZ,United States,Beauty,2028-06-15 76 | 75,31,Male,Glendale,CA,United States,Sports & Outdoors,2028-07-20 77 | 76,36,Female,Lubbock,TX,United States,Electronics,2028-08-25 78 | 77,43,Male,Garland,TX,United States,Clothing,2028-09-30 79 | 78,30,Female,Irvine,CA,United States,Home & Kitchen,2028-10-05 80 | 79,44,Male,Rochester,NY,United States,Beauty,2028-11-10 81 | 80,27,Female,Akron,OH,United States,Sports & Outdoors,2028-12-15 82 | 81,38,Male,Irving,TX,United States,Electronics,2029-01-20 83 | 82,39,Female,Fremont,CA,United States,Clothing,2029-02-25 84 | 83,25,Male,Richmond,VA,United States,Home & Kitchen,2029-03-31 85 | 84,46,Female,Baton Rouge,LA,United States,Beauty,2029-04-05 86 | 85,32,Male,Spokane,WA,United 
States,Sports & Outdoors,2029-05-10 87 | 86,41,Female,Des Moines,IA,United States,Electronics,2029-06-15 88 | 87,29,Male,Tacoma,WA,United States,Clothing,2029-07-20 89 | 88,40,Female,Fontana,CA,United States,Home & Kitchen,2029-08-25 90 | 89,35,Male,Oxnard,CA,United States,Beauty,2029-09-30 91 | 90,48,Female,Moreno Valley,CA,United States,Sports & Outdoors,2029-10-05 92 | 91,33,Male,Richmond,CA,United States,Electronics,2029-11-10 93 | 92,37,Female,Boise,ID,United States,Clothing,2029-12-15 94 | 93,26,Male,Baton Rouge,LA,United States,Home & Kitchen,2030-01-20 95 | 94,42,Female,Santa Ana,CA,United States,Beauty,2030-02-25 96 | 95,31,Male,Spokane,WA,United States,Sports & Outdoors,2030-03-31 97 | 96,36,Female,Mobile,AL,United States,Electronics,2030-04-05 98 | 97,43,Male,Amarillo,TX,United States,Clothing,2030-05-10 99 | 98,30,Female,Huntington Beach,CA,United States,Home & Kitchen,2030-06-15 100 | 99,44,Male,Little Rock,AR,United States,Beauty,2030-07-20 101 | 100,27,Female,Glendale,CA,United States,Sports & Outdoors,2030-08-25 102 | -------------------------------------------------------------------------------- /Datasets/Pandas Essentials/time_series_data.csv: -------------------------------------------------------------------------------- 1 | timestamp,likes,shares,comments 2 | 2023-01-01 09:00:00,100,20,5 3 | 2023-01-02 10:00:00,150,30,8 4 | 2023-01-03 11:00:00,200,40,10 5 | 2023-01-04 12:00:00,250,50,12 6 | 2023-01-05 13:00:00,300,60,15 7 | 2023-01-06 14:00:00,350,70,18 8 | 2023-01-07 15:00:00,400,80,20 9 | 2023-01-08 16:00:00,450,90,22 10 | 2023-01-09 17:00:00,500,100,25 11 | 2023-01-10 18:00:00,550,110,28 12 | 2023-01-11 19:00:00,600,120,30 13 | 2023-01-12 20:00:00,650,130,32 14 | 2023-01-13 21:00:00,700,140,35 15 | 2023-01-14 22:00:00,750,150,38 16 | 2023-01-15 23:00:00,800,160,40 17 | 2023-01-16 00:00:00,850,170,42 18 | 2023-01-17 01:00:00,900,180,45 19 | 2023-01-18 02:00:00,950,190,48 20 | 2023-01-19 03:00:00,1000,200,50 21 | 2023-01-20 
04:00:00,1050,210,53 22 | 2023-01-21 05:00:00,1100,220,56 23 | 2023-01-22 06:00:00,1150,230,58 24 | 2023-01-23 07:00:00,1200,240,60 25 | 2023-01-24 08:00:00,1250,250,63 26 | 2023-01-25 09:00:00,1300,260,66 27 | 2023-01-26 10:00:00,1350,270,68 28 | 2023-01-27 11:00:00,1400,280,70 29 | 2023-01-28 12:00:00,1450,290,73 30 | 2023-01-29 13:00:00,1500,300,76 31 | 2023-01-30 14:00:00,1550,310,78 32 | 2023-01-31 15:00:00,1600,320,80 33 | 2023-02-01 16:00:00,1650,330,83 34 | 2023-02-02 17:00:00,1700,340,86 35 | 2023-02-03 18:00:00,1750,350,88 36 | 2023-02-04 19:00:00,1800,360,90 37 | 2023-02-05 20:00:00,1850,370,93 38 | 2023-02-06 21:00:00,1900,380,96 39 | 2023-02-07 22:00:00,1950,390,98 40 | 2023-02-08 23:00:00,2000,400,100 41 | 2023-02-09 00:00:00,2050,410,103 42 | 2023-02-10 01:00:00,2100,420,106 43 | 2023-02-11 02:00:00,2150,430,108 44 | 2023-02-12 03:00:00,2200,440,110 45 | 2023-02-13 04:00:00,2250,450,113 46 | 2023-02-14 05:00:00,2300,460,116 47 | 2023-02-15 06:00:00,2350,470,118 48 | 2023-02-16 07:00:00,2400,480,120 49 | 2023-02-17 08:00:00,2450,490,123 50 | -------------------------------------------------------------------------------- /Datasets/Pandas Essentials/time_series_data.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "timestamp": "2023-01-01 09:00:00", 4 | "likes": 100, 5 | "shares": 20, 6 | "comments": 5 7 | }, 8 | { 9 | "timestamp": "2023-01-02 10:00:00", 10 | "likes": 150, 11 | "shares": 30, 12 | "comments": 8 13 | }, 14 | { 15 | "timestamp": "2023-01-03 11:00:00", 16 | "likes": 200, 17 | "shares": 40, 18 | "comments": 10 19 | }, 20 | { 21 | "timestamp": "2023-01-04 12:00:00", 22 | "likes": 250, 23 | "shares": 50, 24 | "comments": 12 25 | }, 26 | { 27 | "timestamp": "2023-01-05 13:00:00", 28 | "likes": 300, 29 | "shares": 60, 30 | "comments": 15 31 | }, 32 | { 33 | "timestamp": "2023-01-06 14:00:00", 34 | "likes": 350, 35 | "shares": 70, 36 | "comments": 18 37 | }, 38 | { 39 | "timestamp": 
"2023-01-07 15:00:00", 40 | "likes": 400, 41 | "shares": 80, 42 | "comments": 20 43 | }, 44 | { 45 | "timestamp": "2023-01-08 16:00:00", 46 | "likes": 450, 47 | "shares": 90, 48 | "comments": 22 49 | }, 50 | { 51 | "timestamp": "2023-01-09 17:00:00", 52 | "likes": 500, 53 | "shares": 100, 54 | "comments": 25 55 | }, 56 | { 57 | "timestamp": "2023-01-10 18:00:00", 58 | "likes": 550, 59 | "shares": 110, 60 | "comments": 28 61 | }, 62 | { 63 | "timestamp": "2023-01-11 19:00:00", 64 | "likes": 600, 65 | "shares": 120, 66 | "comments": 30 67 | }, 68 | { 69 | "timestamp": "2023-01-12 20:00:00", 70 | "likes": 650, 71 | "shares": 130, 72 | "comments": 32 73 | }, 74 | { 75 | 76 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License\n\nCopyright (c) [year] [fullname]\n\nPermission is hereby granted... -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Guidelines 2 | 3 | ## Secret Scanning 4 | 5 | This repository is configured with both local and GitHub-based secret scanning to prevent credentials and other sensitive information from being committed and pushed to the repository. 6 | 7 | ### Local Pre-commit Hook 8 | 9 | A pre-commit hook is installed that scans all staged changes for potential secrets before allowing a commit. This provides immediate feedback during development. 10 | 11 | To ensure you have the hook enabled: 12 | 13 | 1. Make sure Python is installed on your system 14 | 2. Install the detect-secrets package: 15 | ``` 16 | pip install detect-secrets 17 | ``` 18 | 3. The pre-commit hook should be automatically enabled. 
If not, you can install it with the pre-commit framework: 19 | ``` 20 | pip install pre-commit 21 | pre-commit install 22 | ``` 23 | 24 | ### GitHub Actions Workflow 25 | 26 | A GitHub Actions workflow (.github/workflows/secret-scanning.yml) runs on all pull requests and pushes to main/master branches. This provides an additional layer of protection using: 27 | 28 | 1. detect-secrets - For detecting a broad range of secret patterns 29 | 2. Gitleaks - For comprehensive secret scanning with an extensive ruleset 30 | 31 | ### Managing False Positives 32 | 33 | If you're getting false positives with detect-secrets: 34 | 35 | 1. Review the baseline file: 36 | ``` 37 | python -m detect_secrets audit .secrets.baseline 38 | ``` 39 | 2. Update the baseline after verifying false positives: 40 | ``` 41 | python -m detect_secrets scan --baseline .secrets.baseline 42 | ``` 43 | 44 | ### Common Secret Patterns 45 | 46 | See `.gitignore-patterns.txt` for a list of common patterns that may indicate secrets in your code. 47 | 48 | ### Best Practices 49 | 50 | 1. **Never** commit credentials, API keys, or other secrets to the repository 51 | 2. Use environment variables or a secure secrets manager for all sensitive values 52 | 3. Consider using template files (e.g., `.env.example`) to document required environment variables without values 53 | 4. 
Rotate any credentials that have been accidentally committed, even if removed later (Git history preserves them) -------------------------------------------------------------------------------- /chat_app/app/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/timothywarner/chatgptclass/93786f110785ece14306ec13656f108d24929645/chat_app/app/__init__.py -------------------------------------------------------------------------------- /chat_app/app/static/css/style.css: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/timothywarner/chatgptclass/93786f110785ece14306ec13656f108d24929645/chat_app/app/static/css/style.css -------------------------------------------------------------------------------- /chat_app/app/templates/index.html: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/timothywarner/chatgptclass/93786f110785ece14306ec13656f108d24929645/chat_app/app/templates/index.html -------------------------------------------------------------------------------- /chat_app/prompt.txt: -------------------------------------------------------------------------------- 1 | Contact info: 2 | Tim Warner (timothywarner316@gmail.com; tim-warner@pluralsight.com) 3 | LinkedIn: timw.info/li 4 | Course files: timw.info/edo 5 | 6 | System: You are expert in Python and OpenAI development. Your code is always documented and follows industry best practices. You never hallucinate, and your references are to the latest content. You think in a procedural, step-by-step manner. You never expose secrets in plain text. 7 | 8 | User: Please make a one-page Python web application using Flask. The simple chatbot interface looks like this: 9 | 10 | Title: OpenAI GPT-3 Chatbot 11 | 12 | Prompt: 13 | 14 | Submit button 15 | 16 | Response: 17 | 18 | The user enters a prompt in the prompt text box. 
The user clicks the Submit button. The request is sent to the Chat Completion endpoint (not Completion) using gpt-3.5-turbo model. 19 | 20 | The response text box displays the response from the OpenAI GPT-3 chatbot. The response is nicely formatted with line breaks and paragraphs. The response is limited to 1000 characters. -------------------------------------------------------------------------------- /chat_app/requirements.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/timothywarner/chatgptclass/93786f110785ece14306ec13656f108d24929645/chat_app/requirements.txt -------------------------------------------------------------------------------- /chat_app/run.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/timothywarner/chatgptclass/93786f110785ece14306ec13656f108d24929645/chat_app/run.py -------------------------------------------------------------------------------- /chat_app/stub.txt: -------------------------------------------------------------------------------- 1 | User 2 | I want a single-page web app that has a basic but functional UI: 3 | 4 | Title is "Ye Olde Anagram Maykere" 5 | 6 | What's yer word? 7 | 8 | Anagram. Scram! 9 | 10 | I need to minimize dependencies and complexity. -------------------------------------------------------------------------------- /chatgpt-cover-slide.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/timothywarner/chatgptclass/93786f110785ece14306ec13656f108d24929645/chatgpt-cover-slide.png -------------------------------------------------------------------------------- /chatgptclass.code-workspace: -------------------------------------------------------------------------------- 1 | { 2 | "folders": [ 3 | { 4 | "path": "." 
5 | } 6 | ], 7 | "settings": { 8 | // Example GitHub Copilot Settings (commented out for teaching purposes) 9 | // These settings demonstrate best practices for GitHub Copilot configuration 10 | /* 11 | "github.copilot": { 12 | // Enable inline suggestions 13 | "inlineSuggest.enable": true, 14 | 15 | // Configure suggestion behavior 16 | "suggestMode": "inline", 17 | "suggestionDelay": 100, 18 | "suggestionTimeout": 5000, 19 | 20 | // Language-specific settings 21 | "language": { 22 | "typescript": { 23 | "enableAutoCompletions": true, 24 | "enableGhostText": true 25 | }, 26 | "python": { 27 | "enableAutoCompletions": true, 28 | "enableGhostText": true 29 | } 30 | }, 31 | 32 | // Editor-specific settings 33 | "editor": { 34 | "inlineSuggest.enable": true, 35 | "inlineSuggest.showToolbar": true, 36 | "inlineSuggest.showIcons": true 37 | }, 38 | 39 | // Advanced settings 40 | "advanced": { 41 | "debug.overrideEngine": "davinci", 42 | "debug.testOverrideProxyUrl": false, 43 | "debug.overrideProxyUrl": "", 44 | "debug.overrideProxyUrlWith": "" 45 | }, 46 | 47 | // Security settings 48 | "security": { 49 | "enableTelemetry": true, 50 | "enableCrashReporting": true, 51 | "enableUsageData": true 52 | }, 53 | 54 | // Performance settings 55 | "performance": { 56 | "maxCompletionsPerRequest": 5, 57 | "maxTokensPerCompletion": 100, 58 | "requestTimeout": 10000 59 | } 60 | }, 61 | */ 62 | 63 | // Example Copilot Chat settings (commented out for teaching purposes) 64 | /* 65 | "github.copilot.chat": { 66 | // Chat window settings 67 | "window": { 68 | "position": "right", 69 | "width": 400, 70 | "height": 600 71 | }, 72 | 73 | // Chat behavior settings 74 | "behavior": { 75 | "autoScroll": true, 76 | "showTimestamps": true, 77 | "showUserAvatars": true 78 | }, 79 | 80 | // Code generation settings 81 | "codeGeneration": { 82 | "includeComments": true, 83 | "includeTests": true, 84 | "includeDocumentation": true 85 | }, 86 | 87 | // Context settings 88 | "context": { 89 | 
"includeFileContent": true, 90 | "includeSelection": true, 91 | "includeCursorPosition": true 92 | } 93 | }, 94 | */ 95 | 96 | // Example Copilot Ignore settings (commented out for teaching purposes) 97 | /* 98 | "github.copilot.ignore": { 99 | // File patterns to ignore 100 | "files": [ 101 | "**/*.min.js", 102 | "**/*.map", 103 | "**/node_modules/**", 104 | "**/dist/**", 105 | "**/build/**" 106 | ], 107 | 108 | // Directory patterns to ignore 109 | "directories": [ 110 | "node_modules", 111 | "dist", 112 | "build", 113 | ".git", 114 | "coverage" 115 | ], 116 | 117 | // Language-specific ignores 118 | "languages": { 119 | "typescript": { 120 | "ignorePatterns": [ 121 | "**/*.d.ts", 122 | "**/*.min.js" 123 | ] 124 | }, 125 | "python": { 126 | "ignorePatterns": [ 127 | "**/*.pyc", 128 | "**/__pycache__/**" 129 | ] 130 | } 131 | } 132 | } 133 | */ 134 | } 135 | } -------------------------------------------------------------------------------- /course-plan.md: -------------------------------------------------------------------------------- 1 | # 🚀 ChatGPT + GitHub Copilot [4 HOURS] 2 | 3 | ## 🟦 HOUR 1: ChatGPT Foundations + Visual AI 4 | 5 | ### ⚡ Setup (5m) 6 | ``` 7 | Free → Plus ($20) → Pro ($200) 8 | GPT-4o → o1 → o1 pro 9 | Canvas = coding workspace 10 | ``` 11 | 12 | ### 🎨 Image Projects (15m) 13 | ``` 14 | DALL-E 3 Generation: 15 | → "Logo with gradient" 16 | → "Modify: add shadow" 17 | → Consistent characters 18 | 19 | Image Analysis: 20 | → Upload screenshot 21 | → "Extract the code" 22 | → "Create HTML from this mockup" 23 | ``` 24 | 25 | ### 🤖 Custom GPTs (20m) 26 | ``` 27 | GPT Store 🏪 28 | Build Your Own: 29 | 1. Configure persona 30 | 2. Upload knowledge 31 | 3. Add capabilities 32 | 4. 
Test & publish 33 | 34 | Popular GPTs: 35 | → Grimoire (coding) 36 | → Consensus (research) 37 | → Canva (design) 38 | ``` 39 | 40 | ### 🧪 LAB 1 (20m) 41 | ``` 42 | PROJECT 1: Image → Code 43 | → Upload UI mockup 44 | → Generate HTML/CSS 45 | → Iterate design 46 | 47 | PROJECT 2: Data GPT 48 | → Upload employee.csv 49 | → Create "HR Assistant" 50 | → Test queries 51 | ``` 52 | 53 | --- 54 | 55 | ## 🟧 HOUR 2: Advanced ChatGPT + New Frontiers 56 | 57 | ### 🎬 Sora (15m) 58 | ``` 59 | VIDEO GENERATION AI 60 | → Text to video (20s) 61 | → Image animation 62 | → Video extension 63 | → $200/mo access 64 | → Commercial use ✓ 65 | 66 | Demo prompts: 67 | "Drone shot of waves" 68 | "Robot learning to paint" 69 | ``` 70 | 71 | ### 🤖 Operator (15m) 72 | ``` 73 | COMPUTER USE AGENT 74 | → Browser automation 75 | → Form filling 76 | → Research tasks 77 | → Multi-step workflows 78 | 79 | Examples: 80 | → "Book a flight to Seattle" 81 | → "Find best laptop deals" 82 | → "Fill out this form" 83 | ``` 84 | 85 | ### 💻 Codex & Canvas (20m) 86 | ``` 87 | Canvas Mode: 88 | → Real-time editing 89 | → Version control 90 | → Inline suggestions 91 | → Port to languages 92 | 93 | API Integration: 94 | → Function calling 95 | → Structured outputs 96 | → Tool use 97 | → JSON mode 98 | ``` 99 | 100 | ### 🧪 LAB 2 (10m) 101 | ``` 102 | → Sora: Create product demo 103 | → Operator: Automate task 104 | → Canvas: Refactor code 105 | → API: Build integration 106 | ``` 107 | 108 | --- 109 | 110 | ## 🟩 HOUR 3: GitHub Copilot Core + CLI 111 | 112 | ### 🔧 Setup & Models (10m) 113 | ``` 114 | VS Code + CLI install 115 | Pick your model: 116 | → GPT-4o (default) 117 | → Claude 3.5 Sonnet 118 | → o1-preview (thinking) 119 | → Gemini 1.5 Pro 120 | ``` 121 | 122 | ### ⌨️ Core Features (15m) 123 | ``` 124 | Tab → accept 125 | Alt+\ → trigger 126 | Ctrl+I → inline chat 127 | // comment → code 128 | 129 | Smart Features: 130 | → Multi-cursor edits 131 | → Test generation 132 | → Docstring creation 133 | → Commit 
messages 134 | ``` 135 | 136 | ### 🖥️ Copilot CLI (20m) 137 | ``` 138 | Installation: 139 | gh extension install github/gh-copilot 140 | 141 | Commands: 142 | gh copilot suggest "find large files" 143 | gh copilot explain "git rebase -i" 144 | 145 | Aliases: 146 | ?? → gh copilot suggest 147 | !? → gh copilot explain 148 | 149 | Power moves: 150 | → Complex git commands 151 | → Shell scripting 152 | → System diagnostics 153 | ``` 154 | 155 | ### 🧪 LAB 3 (15m) 156 | ``` 157 | VS Code: 158 | → Generate CRUD API 159 | → Add error handling 160 | → Create unit tests 161 | 162 | CLI: 163 | → ?? "compress images" 164 | → !? "awk '{print $2}'" 165 | → Build deployment script 166 | ``` 167 | 168 | --- 169 | 170 | ## 🟪 HOUR 4: GitHub Enterprise + Security 171 | 172 | ### 🛡️ GitHub Advanced Security (20m) 173 | ``` 174 | GHAS Features: 175 | → Code scanning 🔍 176 | → Secret scanning 🔐 177 | → Dependency review 📦 178 | → Security overview 📊 179 | 180 | Autofix (AI-powered): 181 | → Suggests fixes 182 | → Explains vulnerabilities 183 | → One-click remediation 184 | → Learn as you fix 185 | ``` 186 | 187 | ### 🏢 GitHub Enterprise Cloud (15m) 188 | ``` 189 | GHEC Exclusive: 190 | → SAML/SCIM SSO 191 | → IP allow lists 192 | → Audit log streaming 193 | → EMU (managed users) 194 | 195 | Copilot Enterprise: 196 | → Custom models 197 | → Fine-tuning 198 | → Knowledge bases 199 | → Usage analytics 200 | ``` 201 | 202 | ### 🤖 Copilot Workspace (15m) 203 | ``` 204 | AI Development Environment: 205 | → Issue → Implementation 206 | → Multi-repo context 207 | → Automated planning 208 | → Test generation 209 | 210 | Workflow: 211 | 1. Describe task 212 | 2. Review plan 213 | 3. Generate code 214 | 4. Run tests 215 | 5. 
Create PR 216 | ``` 217 | 218 | ### 🧪 LAB 4 (10m) 219 | ``` 220 | Security: 221 | → Enable code scanning 222 | → Fix with Copilot Autofix 223 | → Review security alerts 224 | 225 | Enterprise: 226 | → Copilot usage metrics 227 | → Knowledge base demo 228 | → Workspace planning 229 | ``` 230 | 231 | --- 232 | 233 | ## 🎓 KEY TAKEAWAYS 234 | - **ChatGPT Pro**: Sora + Operator = Future 235 | - **Copilot CLI**: Your shell companion 236 | - **GHAS**: Security built-in 237 | - **Autofix**: Learn security by doing 238 | - **Workspace**: Issues → Code automatically -------------------------------------------------------------------------------- /debug.log: -------------------------------------------------------------------------------- 1 | [0529/231520.526:ERROR:registration_protocol_win.cc(108)] CreateFile: The system cannot find the file specified. (0x2) 2 | [0529/231524.613:ERROR:registration_protocol_win.cc(108)] CreateFile: The system cannot find the file specified. (0x2) 3 | [0529/231532.701:ERROR:registration_protocol_win.cc(108)] CreateFile: The system cannot find the file specified. (0x2) 4 | [0529/231556.374:ERROR:registration_protocol_win.cc(108)] CreateFile: The system cannot find the file specified. (0x2) 5 | -------------------------------------------------------------------------------- /diagnose-performance.ps1: -------------------------------------------------------------------------------- 1 | # Surface Pro 3 System Diagnostics Script 2 | # Run as Administrator in PowerShell 3 | # Creates: SystemDiagnostics_[timestamp].txt on Desktop 4 | 5 | $timestamp = Get-Date -Format "yyyyMMdd_HHmmss" 6 | $outputFile = "$env:USERPROFILE\Desktop\SystemDiagnostics_$timestamp.txt" 7 | 8 | Write-Host "Starting Surface Pro 3 diagnostics..." 
-ForegroundColor Green 9 | Write-Host "Output file: $outputFile" -ForegroundColor Yellow 10 | 11 | # Start output file 12 | "Surface Pro 3 System Diagnostics Report" | Out-File $outputFile 13 | "Generated: $(Get-Date)" | Out-File $outputFile -Append 14 | "=" * 60 | Out-File $outputFile -Append 15 | 16 | try { 17 | # System Information 18 | "`n### SYSTEM OVERVIEW ###" | Out-File $outputFile -Append 19 | $sysInfo = Get-ComputerInfo | Select-Object WindowsVersion, WindowsBuildLabEx, TotalPhysicalMemory, CsProcessors, CsSystemType 20 | $sysInfo | Out-File $outputFile -Append 21 | 22 | # Top CPU processes 23 | "`n### TOP 10 CPU PROCESSES ###" | Out-File $outputFile -Append 24 | Get-Process | Sort-Object CPU -Descending | Select-Object -First 10 Name, CPU, WorkingSet, Id | Format-Table | Out-File $outputFile -Append 25 | 26 | # CPU Performance Sample 27 | "`n### CPU PERFORMANCE (5 samples) ###" | Out-File $outputFile -Append 28 | $cpuSamples = Get-Counter "\Processor(_Total)\% Processor Time" -SampleInterval 1 -MaxSamples 5 29 | $cpuSamples.CounterSamples | ForEach-Object { 30 | "CPU Usage: $([math]::Round($_.CookedValue, 2))% at $($_.Timestamp)" 31 | } | Out-File $outputFile -Append 32 | 33 | # Memory Status 34 | "`n### MEMORY STATUS ###" | Out-File $outputFile -Append 35 | $memCounters = Get-Counter "\Memory\Available MBytes", "\Memory\Pages/sec", "\Memory\Committed Bytes" -SampleInterval 1 -MaxSamples 3 36 | $memCounters.CounterSamples | Group-Object Path | ForEach-Object { 37 | $counterName = $_.Name.Split('\')[-1] 38 | $avgValue = ($_.Group | Measure-Object CookedValue -Average).Average 39 | "$counterName Average: $([math]::Round($avgValue, 2))" 40 | } | Out-File $outputFile -Append 41 | 42 | # Disk Information 43 | "`n### DISK SPACE ###" | Out-File $outputFile -Append 44 | Get-WmiObject -Class Win32_LogicalDisk | Select-Object DeviceID, 45 | @{Name="Size(GB)";Expression={[math]::Round($_.Size/1GB,2)}}, 46 | 
@{Name="FreeSpace(GB)";Expression={[math]::Round($_.FreeSpace/1GB,2)}}, 47 | @{Name="FreePercent";Expression={[math]::Round(($_.FreeSpace/$_.Size)*100,2)}} | 48 | Format-Table | Out-File $outputFile -Append 49 | 50 | # Network Adapters 51 | "`n### NETWORK ADAPTERS ###" | Out-File $outputFile -Append 52 | Get-NetAdapter | Where-Object Status -eq "Up" | Select-Object Name, InterfaceDescription, LinkSpeed, FullDuplex | Format-Table | Out-File $outputFile -Append 53 | 54 | # Network Connectivity Test 55 | "`n### NETWORK CONNECTIVITY ###" | Out-File $outputFile -Append 56 | $netTest = Test-NetConnection -ComputerName google.com -Port 80 -WarningAction SilentlyContinue 57 | "Google.com connectivity: $($netTest.TcpTestSucceeded)" | Out-File $outputFile -Append 58 | "Ping result: $($netTest.PingSucceeded) - $($netTest.PingReplyDetails.RoundtripTime)ms" | Out-File $outputFile -Append 59 | 60 | # Graphics Information 61 | "`n### GRAPHICS HARDWARE ###" | Out-File $outputFile -Append 62 | Get-WmiObject Win32_VideoController | Select-Object Name, DriverVersion, DriverDate, VideoMemoryType, VideoModeDescription | Format-List | Out-File $outputFile -Append 63 | 64 | # Temperature Files 65 | "`n### TEMP FILES ANALYSIS ###" | Out-File $outputFile -Append 66 | $userTemp = Get-ChildItem $env:TEMP -ErrorAction SilentlyContinue | Measure-Object -Property Length -Sum 67 | $winTemp = Get-ChildItem "C:\Windows\Temp" -ErrorAction SilentlyContinue | Measure-Object -Property Length -Sum 68 | "User Temp Files: $($userTemp.Count) files, $([math]::Round($userTemp.Sum/1MB, 2)) MB" | Out-File $outputFile -Append 69 | "Windows Temp Files: $($winTemp.Count) files, $([math]::Round($winTemp.Sum/1MB, 2)) MB" | Out-File $outputFile -Append 70 | 71 | # Startup Programs 72 | "`n### STARTUP PROGRAMS ###" | Out-File $outputFile -Append 73 | Get-CimInstance Win32_StartupCommand | Select-Object Name, Command, Location | Format-Table | Out-File $outputFile -Append 74 | 75 | # Windows Update Status 76 | "`n### 
WINDOWS UPDATE ###" | Out-File $outputFile -Append 77 | $updateSession = New-Object -ComObject Microsoft.Update.Session -ErrorAction SilentlyContinue 78 | if ($updateSession) { 79 | $updateSearcher = $updateSession.CreateUpdateSearcher() 80 | $searchResult = $updateSearcher.Search("IsInstalled=0") 81 | "Pending Windows Updates: $($searchResult.Updates.Count)" | Out-File $outputFile -Append 82 | } else { 83 | "Could not check Windows Update status" | Out-File $outputFile -Append 84 | } 85 | 86 | } catch { 87 | "`nERROR: $($_.Exception.Message)" | Out-File $outputFile -Append 88 | } 89 | 90 | # Generate DXDiag separately (takes time) 91 | "`n### GENERATING DXDIAG REPORT ###" | Out-File $outputFile -Append 92 | $dxdiagFile = "$env:USERPROFILE\Desktop\dxdiag_$timestamp.txt" 93 | Start-Process "dxdiag" -ArgumentList "/t `"$dxdiagFile`"" -Wait -WindowStyle Hidden 94 | "DXDiag report saved to: $dxdiagFile" | Out-File $outputFile -Append 95 | 96 | "`n### DIAGNOSTICS COMPLETE ###" | Out-File $outputFile -Append 97 | "Report generated: $(Get-Date)" | Out-File $outputFile -Append 98 | 99 | Write-Host "`nDiagnostics complete!" -ForegroundColor Green 100 | Write-Host "Main report: $outputFile" -ForegroundColor Yellow 101 | Write-Host "DXDiag report: $dxdiagFile" -ForegroundColor Yellow 102 | Write-Host "`nUpload both files to Claude for analysis." -ForegroundColor Cyan 103 | -------------------------------------------------------------------------------- /exercises/README.md: -------------------------------------------------------------------------------- 1 | # AI Prompting Exercises 2 | 3 | This directory contains structured exercises for practicing prompting with both ChatGPT and GitHub Copilot, organized by roles and specializations. 
4 | 5 | ## Structure 6 | 7 | ``` 8 | exercises/ 9 | ├── chatgpt/ 10 | │ ├── dev/ # Software Development prompts 11 | │ ├── itops/ # IT Operations prompts 12 | │ ├── data/ # Data Science/Analytics prompts 13 | │ └── information-worker/ # General office/knowledge work prompts 14 | └── github-copilot/ 15 | ├── swe/ # Software Engineer exercises 16 | ├── qa-test/ # QA/Test Engineer exercises 17 | ├── security/ # Security Engineer exercises 18 | ├── cicd/ # CI/CD Engineer exercises 19 | └── compliance/ # Compliance Engineer exercises 20 | ``` 21 | 22 | ## ChatGPT Exercises by Role 23 | 24 | ### Developer Exercises 25 | - Code review and improvement 26 | - Architecture design discussions 27 | - API documentation 28 | - Debugging assistance 29 | - Code refactoring 30 | 31 | ### IT Ops Exercises 32 | - Infrastructure troubleshooting 33 | - System monitoring 34 | - Cloud resource management 35 | - Automation script creation 36 | - Incident response 37 | 38 | ### Data Exercises 39 | - Data cleaning and transformation 40 | - Analysis workflow design 41 | - Visualization suggestions 42 | - Statistical analysis 43 | - ETL pipeline design 44 | 45 | ### Information Worker Exercises 46 | - Document processing 47 | - Report generation 48 | - Email communication 49 | - Process documentation 50 | - Meeting management 51 | 52 | ## GitHub Copilot Exercises by Role 53 | 54 | ### Software Engineer 55 | - Test-driven development 56 | - API implementation 57 | - Code optimization 58 | - Design patterns 59 | - Error handling 60 | 61 | ### QA/Test Engineer 62 | - Test case generation 63 | - Test automation 64 | - Performance testing 65 | - Integration testing 66 | - Test coverage analysis 67 | 68 | ### Security Engineer 69 | - Security testing 70 | - Vulnerability scanning 71 | - Security pattern implementation 72 | - Secure coding practices 73 | - Compliance validation 74 | 75 | ### CI/CD Engineer 76 | - Pipeline automation 77 | - Deployment scripts 78 | - Infrastructure as Code 79 | - 
Build optimization 80 | - Release automation 81 | 82 | ### Compliance Engineer 83 | - Policy implementation 84 | - Audit logging 85 | - Compliance checking 86 | - Documentation generation 87 | - Control validation -------------------------------------------------------------------------------- /exercises/chatgpt/dev/01-code-review.md: -------------------------------------------------------------------------------- 1 | # Exercise: Code Review and Best Practices 2 | 3 | ## Scenario 4 | You're reviewing a pull request that implements a new API endpoint for user authentication. 5 | 6 | ## Exercise Goals 7 | - Practice crafting prompts for code review 8 | - Learn to identify security and performance issues 9 | - Generate constructive feedback 10 | 11 | ## Sample Code to Review 12 | ```python 13 | @app.route('/api/login', methods=['POST']) 14 | def login(): 15 | data = request.json 16 | user = db.query(f"SELECT * FROM users WHERE username='{data['username']}'") 17 | if user and user.password == data['password']: 18 | token = generate_token(user.id) 19 | return jsonify({'token': token}) 20 | return jsonify({'error': 'Invalid credentials'}), 401 21 | ``` 22 | 23 | ## Sample Prompts 24 | 25 | ### 1. Initial Review Prompt 26 | ``` 27 | Please review this Python code for a login endpoint with focus on: 28 | - Security vulnerabilities 29 | - Performance issues 30 | - Best practices 31 | - Code organization 32 | Format the response as a structured review with severity levels for each issue. 33 | ``` 34 | 35 | ### 2. Security-Focused Prompt 36 | ``` 37 | For this login endpoint code: 38 | 1. Identify all potential security vulnerabilities 39 | 2. Suggest secure alternatives for each issue 40 | 3. Provide example code snippets showing secure implementation 41 | Focus especially on SQL injection, password handling, and token generation. 42 | ``` 43 | 44 | ### 3. Improvement Prompt 45 | ``` 46 | Help me refactor this login endpoint to: 47 | 1. Follow REST best practices 48 | 2. 
Implement proper error handling 49 | 3. Add input validation 50 | 4. Use secure password comparison 51 | 5. Add appropriate logging 52 | Please provide the refactored code with comments explaining each improvement. 53 | ``` 54 | 55 | ## Practice Tasks 56 | 1. Review the code using each prompt 57 | 2. Create your own security-focused prompts 58 | 3. Generate prompts for specific aspects (logging, validation, etc.) 59 | 4. Create prompts for suggesting test cases 60 | 61 | ## Tips 62 | - Be specific about what aspects to review 63 | - Ask for severity levels and priorities 64 | - Request example code for improvements 65 | - Use follow-up prompts for deeper analysis 66 | - Always verify security-related suggestions -------------------------------------------------------------------------------- /exercises/chatgpt/information-worker/01-document-analysis.md: -------------------------------------------------------------------------------- 1 | # Exercise: Document Analysis and Summary 2 | 3 | ## Scenario 4 | You're a business analyst who needs to analyze a lengthy technical report and extract key insights for your team. 5 | 6 | ## Exercise Goals 7 | - Practice crafting prompts for document analysis 8 | - Learn to extract specific types of information 9 | - Generate structured summaries 10 | 11 | ## Sample Prompts 12 | 13 | ### 1. Initial Analysis Prompt 14 | ``` 15 | I have a technical report about [topic]. Please analyze it with focus on: 16 | - Key findings and recommendations 17 | - Technical specifications 18 | - Risk factors 19 | - Implementation timeline 20 | Please format the response as a structured summary with bullet points. 21 | ``` 22 | 23 | ### 2. Follow-up Detail Prompt 24 | ``` 25 | For the section about [specific aspect], please: 26 | 1. Extract all numerical data and metrics 27 | 2. Identify potential bottlenecks 28 | 3. Suggest areas needing clarification 29 | ``` 30 | 31 | ### 3. 
Executive Summary Prompt 32 | ``` 33 | Based on the analysis, create a 3-paragraph executive summary that: 34 | - Opens with the most significant finding 35 | - Highlights critical business implications 36 | - Concludes with actionable recommendations 37 | Use clear, concise language suitable for senior management. 38 | ``` 39 | 40 | ## Practice Tasks 41 | 1. Take any technical document and try these prompts 42 | 2. Modify the prompts to extract different types of information 43 | 3. Create your own prompt template for similar document analysis tasks 44 | 45 | ## Tips 46 | - Be specific about the type of information you need 47 | - Request structured formats when appropriate 48 | - Use follow-up prompts to drill down into specific areas 49 | - Always verify critical information manually -------------------------------------------------------------------------------- /exercises/chatgpt/itops/01-incident-response.md: -------------------------------------------------------------------------------- 1 | # Exercise: Incident Response and Troubleshooting 2 | 3 | ## Scenario 4 | You're on-call and receive an alert about high latency and intermittent 503 errors in your production Kubernetes cluster. 5 | 6 | ## Exercise Goals 7 | - Practice crafting prompts for incident analysis 8 | - Learn to generate systematic troubleshooting steps 9 | - Create clear incident documentation 10 | 11 | ## Sample Alert Data 12 | ``` 13 | Alert: High Latency Detected 14 | Severity: P1 15 | Time: 2024-01-13 15:30 UTC 16 | Details: 17 | - Response time > 2000ms (threshold: 500ms) 18 | - 503 errors rate: 15% 19 | - CPU usage spike across multiple nodes 20 | - Connection timeouts to database reported 21 | ``` 22 | 23 | ## Sample Prompts 24 | 25 | ### 1. Initial Analysis Prompt 26 | ``` 27 | Given this production incident alert: 28 | 1. What are the potential root causes? 29 | 2. List immediate investigation steps 30 | 3. Suggest commands to gather more information 31 | 4. 
Identify potential impact areas 32 | Prioritize steps based on severity and ease of investigation. 33 | ``` 34 | 35 | ### 2. Troubleshooting Steps Prompt 36 | ``` 37 | Help me investigate this Kubernetes cluster issue by: 38 | 1. Providing kubectl commands to check cluster health 39 | 2. Listing key metrics to monitor 40 | 3. Suggesting log patterns to search for 41 | 4. Identifying potential bottlenecks 42 | Format as step-by-step troubleshooting guide with commands and expected outputs. 43 | ``` 44 | 45 | ### 3. Documentation Prompt 46 | ``` 47 | Help me document this incident with: 48 | 1. Timeline of events 49 | 2. Impact assessment 50 | 3. Root cause analysis 51 | 4. Resolution steps taken 52 | 5. Prevention measures 53 | Format as a structured incident report suitable for stakeholders. 54 | ``` 55 | 56 | ## Practice Tasks 57 | 1. Use the prompts to analyze the incident 58 | 2. Create prompts for different types of alerts 59 | 3. Generate runbooks using ChatGPT 60 | 4. Practice incident communication prompts 61 | 62 | ## Tips 63 | - Include relevant metrics and logs 64 | - Ask for specific commands and tools 65 | - Request step-by-step procedures 66 | - Consider different stakeholder perspectives 67 | - Always validate suggested commands before execution -------------------------------------------------------------------------------- /exercises/github-copilot/01-test-generation.md: -------------------------------------------------------------------------------- 1 | # Exercise: Test Generation with GitHub Copilot 2 | 3 | ## Scenario 4 | You need to write unit tests for a new function that processes user data. 
5 | 6 | ## Exercise Goals 7 | - Learn to generate test cases with Copilot 8 | - Practice test-driven development 9 | - Understand how to guide Copilot for better test coverage 10 | 11 | ## Sample Function to Test 12 | ```python 13 | def validate_user_data(user_dict): 14 | """ 15 | Validates user data according to the following rules: 16 | - Username must be 3-20 characters 17 | - Email must be valid format 18 | - Age must be between 13 and 120 19 | Returns tuple of (is_valid: bool, errors: list) 20 | """ 21 | # Write this function using Copilot suggestions 22 | ``` 23 | 24 | ## Exercise Steps 25 | 26 | 1. **Initial Test Structure** 27 | ```python 28 | # Type: def test_validate_user_data 29 | # Let Copilot suggest test cases 30 | ``` 31 | 32 | 2. **Edge Cases** 33 | ```python 34 | # Type: def test_validate_user_data_edge_cases 35 | # Let Copilot suggest edge cases 36 | ``` 37 | 38 | 3. **Error Cases** 39 | ```python 40 | # Type: def test_validate_user_data_errors 41 | # Let Copilot suggest error cases 42 | ``` 43 | 44 | ## Tips for Working with Copilot 45 | - Start with clear function docstrings 46 | - Use descriptive test function names 47 | - Write comments about what you want to test 48 | - Press Tab to accept suggestions 49 | - Press Alt+] to see alternative suggestions 50 | - Type more specific comments for better suggestions 51 | 52 | ## Practice Tasks 53 | 1. Implement the `validate_user_data` function using Copilot 54 | 2. Generate test cases using the patterns above 55 | 3. Add more edge cases that Copilot might have missed 56 | 4. 
Create similar test patterns for your own functions -------------------------------------------------------------------------------- /exercises/github-copilot/security/01-secure-api.md: -------------------------------------------------------------------------------- 1 | # Exercise: Implementing Secure API Endpoints 2 | 3 | ## Scenario 4 | You need to implement a secure API endpoint for handling sensitive user data with proper authentication, authorization, and input validation. 5 | 6 | ## Exercise Goals 7 | - Practice secure coding with Copilot 8 | - Implement security best practices 9 | - Learn to guide Copilot for security-focused code 10 | 11 | ## Base Implementation 12 | 13 | Start with this basic endpoint structure: 14 | ```python 15 | from fastapi import FastAPI 16 | app = FastAPI() 17 | 18 | @app.post("/api/users/profile") 19 | async def update_user_profile(): 20 | # TODO: Implement secure profile update 21 | pass 22 | ``` 23 | 24 | ## Exercise Steps 25 | 26 | ### 1. Authentication Middleware 27 | ```python 28 | # Type the following comment and let Copilot suggest the implementation: 29 | # Implement JWT authentication middleware with rate limiting and token validation 30 | ``` 31 | 32 | ### 2. Input Validation 33 | ```python 34 | # Type these comments sequentially: 35 | # Create Pydantic model for user profile data with strict validation 36 | # Add validation for potentially dangerous fields like URLs and file paths 37 | ``` 38 | 39 | ### 3. Authorization Check 40 | ```python 41 | # Guide Copilot with: 42 | # Add role-based access control middleware 43 | # Implement function to verify user has permission to update profile 44 | ``` 45 | 46 | ### 4. Secure Data Handling 47 | ```python 48 | # Lead Copilot with: 49 | # Add input sanitization for XSS prevention 50 | # Implement secure file upload handling with virus scanning 51 | ``` 52 | 53 | ### 5. 
Audit Logging 54 | ```python 55 | # Direct Copilot: 56 | # Add structured audit logging for security events 57 | # Implement secure logging that masks sensitive data 58 | ``` 59 | 60 | ## Security Checklist 61 | - [ ] Input validation and sanitization 62 | - [ ] Authentication and authorization 63 | - [ ] Secure password handling 64 | - [ ] Rate limiting 65 | - [ ] Audit logging 66 | - [ ] Error handling without info disclosure 67 | - [ ] Security headers 68 | - [ ] CSRF protection 69 | 70 | ## Tips for Working with Copilot 71 | - Start with security-focused comments 72 | - Review each suggestion carefully 73 | - Use security-related keywords 74 | - Break down complex security features 75 | - Always validate generated security code 76 | - Add explicit security checks in comments 77 | 78 | ## Verification Steps 79 | 1. Test authentication bypass attempts 80 | 2. Verify input validation 81 | 3. Check authorization rules 82 | 4. Validate audit log contents 83 | 5. Test rate limiting 84 | 6. Verify error handling 85 | 7. 
Check security headers -------------------------------------------------------------------------------- /infra/create-repo-scaffold.ps1: -------------------------------------------------------------------------------- 1 | # Define repository structure; file contents use `n (PowerShell's newline escape) — a literal \n is NOT expanded inside double-quoted PowerShell strings 2 | $repoStructure = @{ 3 | "src" = @{} # Source code directory 4 | ".github" = @{ 5 | "workflows" = @{ 6 | "dependabot.yml" = "version: 2`nupdates:`n - package-ecosystem: ""npm""`n directory: ""/""`n schedule:`n interval: ""weekly""" 7 | "codeql-analysis.yml" = "# For more information, visit: https://docs.github.com/en/code-security/secure-coding/automatically-scanning-your-code-for-vulnerabilities-and-errors`nname: 'CodeQL'`non:`n push:`n branches: [ main ]`n pull_request:`n branches: [ main ]`n schedule:`n - cron: '0 14 * * 3'" 8 | } 9 | "CODEOWNERS" = "" # Example for code owners file 10 | "security" = @{ 11 | "SECURITY.md" = "## Security Policy`n`nPlease report any security vulnerabilities through the project's issues." 12 | } 13 | } 14 | "docs" = @{} # Documentation directory 15 | "tests" = @{} # Tests directory 16 | ".gitignore" = "/node_modules`n.DS_Store" # .gitignore file 17 | "README.md" = "# Project Title`n`nA brief description of what this project does and who it's for." 18 | "CONTRIBUTING.md" = "## Contributing Guidelines`n`nThank you for your interest in contributing to our project!" 19 | "LICENSE" = "MIT License`n`nCopyright (c) [year] [fullname]`n`nPermission is hereby granted..." 20 | "CODE_OF_CONDUCT.md" = "## Code of Conduct`n`nOur pledge to create a welcoming and safe environment."
21 | } 22 | 23 | # Helper function to create files and directories: a string value becomes a file (the string is its content); a hashtable value becomes a directory that is recursed into 24 | function Create-ItemFromStructure { 25 | param ( 26 | [string]$basePath, # parent directory under which the items are created 27 | [hashtable]$structure # keys are item names; values are file-content strings or nested hashtables 28 | ) 29 | 30 | foreach ($key in $structure.Keys) { 31 | $path = Join-Path -Path $basePath -ChildPath $key 32 | 33 | if ($structure[$key].GetType().Name -eq "String") { 34 | # It's a file, create with content (-Force overwrites an existing file) 35 | New-Item -Path $path -ItemType File -Force -Value $structure[$key] 36 | Write-Host "Created file: $path" 37 | } 38 | else { 39 | # It's a directory, create it, then recurse into its structure 40 | New-Item -Path $path -ItemType Directory -Force | Out-Null 41 | Write-Host "Created directory: $path" 42 | Create-ItemFromStructure -basePath $path -structure $structure[$key] 43 | } 44 | } 45 | } 46 | 47 | # Create repository structure rooted at the current working directory 48 | Create-ItemFromStructure -basePath (Get-Location) -structure $repoStructure 49 | -------------------------------------------------------------------------------- /knowledge/choose-an-agile-approach.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/timothywarner/chatgptclass/93786f110785ece14306ec13656f108d24929645/knowledge/choose-an-agile-approach.zip -------------------------------------------------------------------------------- /knowledge/choose-an-agile-approach/1-introduction.yml: -------------------------------------------------------------------------------- 1 | ### YamlMime:ModuleUnit 2 | uid: learn.azdo.choose-an-agile-approach.1-introduction 3 | title: Introduction 4 | metadata: 5 | title: Introduction 6 | description: Take your first DevOps steps by using an Agile approach and Azure Boards.
7 | ms.date: 10/11/2023 8 | ms.custom: devdivchpfy22 9 | author: chcomley 10 | ms.author: chcomley 11 | ms.topic: unit 12 | durationInMinutes: 1 13 | content: | 14 | [!include[](includes/1-introduction.md)] 15 | -------------------------------------------------------------------------------- /knowledge/choose-an-agile-approach/2-what-is-agile.yml: -------------------------------------------------------------------------------- 1 | ### YamlMime:ModuleUnit 2 | uid: learn.azdo.choose-an-agile-approach.2-what-is-agile 3 | title: What is Agile? 4 | metadata: 5 | title: What is Agile? 6 | description: Learn how Agile can offer a better approach to software development through some of its guiding principles. 7 | ms.date: 10/11/2023 8 | ms.custom: devdivchpfy22 9 | author: chcomley 10 | ms.author: chcomley 11 | ms.topic: unit 12 | durationInMinutes: 6 13 | content: | 14 | [!include[](includes/2-what-is-agile.md)] 15 | -------------------------------------------------------------------------------- /knowledge/choose-an-agile-approach/3-what-is-azure-boards.yml: -------------------------------------------------------------------------------- 1 | ### YamlMime:ModuleUnit 2 | uid: learn.azdo.choose-an-agile-approach.3-what-is-azure-boards 3 | title: What is Azure Boards? 4 | metadata: 5 | title: What is Azure Boards? 6 | description: Discover how Azure Boards can help you plan the work that needs to be done, and learn about the four processes you can choose from. 7 | ms.date: 10/11/2023 8 | ms.custom: devdivchpfy22 9 | author: chcomley 10 | ms.author: chcomley 11 | ms.topic: unit 12 | durationInMinutes: 15 13 | content: | 14 | [!include[](includes/3-what-is-azure-boards.md)] 15 | quiz: 16 | title: Check your knowledge 17 | questions: 18 | - content: "The _Agile Manifesto_ states:" 19 | choices: 20 | - content: "Processes and tools come before the individuals that use them." 21 | isCorrect: false 22 | explanation: "In fact, the opposite is true. 
The Agile Manifesto states that individuals and their interactions are more valuable than any process or tool." 23 | - content: "You need to fully document new features before you build them." 24 | isCorrect: false 25 | explanation: "Although documentation is an important part of the software development process, having working software is what comes first." 26 | - content: "Responding to change comes before following a plan." 27 | isCorrect: true 28 | explanation: "An agile mindset helps you respond to changing market conditions and customer needs, instead of simply following a rigid plan." 29 | - content: "Azure Boards is:" 30 | choices: 31 | - content: "A way for your customers to provide feedback." 32 | isCorrect: false 33 | explanation: "Although you can connect Azure Boards to your user feedback mechanism, it's a way for you and your team to plan and track work." 34 | - content: "A graphical way to plan and track your work." 35 | isCorrect: true 36 | explanation: "Azure Boards supports four popular planning types — CMMI, Agile, Scrum, and Basic." 37 | - content: "A way to list all of your projects." 38 | isCorrect: false 39 | explanation: "Your Azure DevOps organization can have multiple projects. You can set up Azure Boards to plan and track work for each one of them." 40 | - content: "A _sprint_ is:" 41 | choices: 42 | - content: "A fixed amount of time a team has to complete a set of tasks." 43 | isCorrect: true 44 | explanation: "When you plan your sprints, think about what's most important and what you can realistically accomplish during that time." 45 | - content: "Another name for a task board." 46 | isCorrect: false 47 | explanation: "The board holds the tasks you need to do or are currently working on. A sprint provides a time frame for a set amount of work." 48 | - content: "Time your team sets aside to work on bugs." 
49 | isCorrect: false 50 | explanation: "Although you can address bugs during your sprint, a sprint can include almost any sort of work." 51 | -------------------------------------------------------------------------------- /knowledge/choose-an-agile-approach/4-plan-work-azure-boards.yml: -------------------------------------------------------------------------------- 1 | ### YamlMime:ModuleUnit 2 | uid: learn.azdo.choose-an-agile-approach.4-plan-work-azure-boards 3 | title: Exercise - Plan work using Azure Boards 4 | metadata: 5 | title: Exercise - Plan work using Azure Boards 6 | description: Create a project, a team, and a board in Azure DevOps. 7 | ms.date: 10/11/2023 8 | ms.custom: devdivchpfy22 9 | author: chcomley 10 | ms.author: chcomley 11 | ms.topic: unit 12 | durationInMinutes: 15 13 | content: | 14 | [!include[](includes/4-plan-work-azure-boards.md)] 15 | -------------------------------------------------------------------------------- /knowledge/choose-an-agile-approach/5-summary.yml: -------------------------------------------------------------------------------- 1 | ### YamlMime:ModuleUnit 2 | uid: learn.azdo.choose-an-agile-approach.5-summary 3 | title: Summary 4 | metadata: 5 | title: Summary 6 | description: Learn how to continue your journey by configuring build pipelines that continuously build, test, and verify your applications. 7 | ms.date: 10/11/2023 8 | ms.custom: devdivchpfy22 9 | author: chcomley 10 | ms.author: chcomley 11 | ms.topic: unit 12 | durationInMinutes: 3 13 | content: | 14 | [!include[](includes/5-summary.md)] 15 | -------------------------------------------------------------------------------- /knowledge/choose-an-agile-approach/includes/1-introduction.md: -------------------------------------------------------------------------------- 1 | You've met the Tailspin team and learned a bit about their problems. 
Mara, the newest team member, is trying to convince her teammates that a DevOps approach, using the services in Azure DevOps, is a great way to solve them. She's taken it upon herself to do a *value stream mapping* (VSM) exercise, and she's shown everyone the results. 2 | 3 | Her next goal is to convince the Tailspin team to take their first DevOps steps by using an Agile approach and Azure Boards, a part of the Azure DevOps suite. Azure Boards helps teams collaborate and plan their work better. This module shows how the team creates its first board. 4 | 5 | After completing this module, you'll be able to: 6 | 7 | - Define the term Agile. 8 | - Begin to make recommendations for incorporating Agile practices into your organization. 9 | - Create a project in Azure DevOps. 10 | - Add work items to Azure Boards by using the Basic process. 11 | 12 | ## Prerequisites 13 | 14 | The modules in this learning path form a progression. We recommend you start at the beginning of the [Get started with Azure DevOps](../../../paths/evolve-your-devops-practices/index.yml?azure-portal=true) learning path before you work on this module. 15 | 16 | If you'd rather do only this module, go through [Introduction to Azure DevOps](/training/modules/get-started-with-devops?azure-portal=true) first to set up your Azure DevOps organization. 17 | 18 | ## Meet the team 19 | 20 | You met the *Space Game* web team at Tailspin Toys in previous modules. As a refresher, here's who you'll work with in this module: 21 | 22 | :::row::: 23 | 24 | [!include[](../../shared/includes/meet-andy-short-col.md)] 25 | 26 | [!include[](../../shared/includes/meet-amita-short-col.md)] 27 | 28 | [!include[](../../shared/includes/meet-tim-short-col.md)] 29 | 30 | [!include[](../../shared/includes/meet-mara-short-col.md)] 31 | 32 | :::row-end::: 33 | 34 | Mara has prior experience with DevOps and is helping the team adopt a more streamlined process by using Azure DevOps. 
35 | -------------------------------------------------------------------------------- /knowledge/choose-an-agile-approach/includes/3-what-is-azure-boards.md: -------------------------------------------------------------------------------- 1 | Azure Boards is a tool in Azure DevOps to help teams plan the work they need to do. The Tailspin team will use this tool to get a better idea of what work they need to do and how to prioritize it. 2 | 3 | Mara created her own project on Azure Boards using the Basic process. It shows the problems in the build process that she and Andy identified. Mara gets the team together for a quick demo. 4 | 5 | **Mara:** Hi everyone. I set up Azure Boards and wanted to show you some work items I came up with. 6 | 7 | **Andy:** What's a work item? 8 | 9 | **Mara:** Work items help us plan and manage a project. A work item can track all types of activities. Maybe it's a task to do, a bug to fix, or some other issue. We can assign them to people and keep track of their progress. 10 | 11 | Perhaps it's easier to show you. Here's Azure Boards using the Basic process: 12 | 13 | :::image type="content" source="../../shared/media/build-initial-tasks.png" alt-text="Screenshot of Azure Boards showing the initial three tasks. Each task is in the To Do column."::: 14 | 15 | **Amita:** Tell us about the Basic process. Are there other options? 16 | 17 | **Mara:** There are four processes from which to choose. We can use: 18 | 19 | * **Capability Maturity Model Integration (CMMI)**: This is really for large organizations and it's pretty complicated, so I didn't use it. 20 | * **Scrum**: Scrum depends on a Scrum master who leads the Scrum team. The Scrum master makes sure everybody understands Scrum theory, practices, and rules. We don't have a Scrum master; that's someone who usually receives some training and certification, so I didn't pick that one either.
21 | * **Agile**: This seemed like the obvious choice because I'm always talking about Agile, but it has a few more things to consider than the simplest option. 22 | * **Basic**: Basic is, well, basic. It's simple but gives us enough power to start doing effective planning right away, and that's why I picked it. In the Basic workflow, you move work from **To Do** to **Doing** to **Done**. 23 | 24 | **Amita:** OK, let's use it to get started. We can switch to something else, right? 25 | 26 | **Mara:** Right! So, let's pick a few work items we think we can fix in a couple of weeks. 27 | 28 | Andy can identify with these issues, but the rest of the team has questions. 29 | 30 | **Tim:** These are mostly dev problems. But while we're on the subject, other teams are talking about code vulnerabilities, and I've been asked to show that our code is secure. Is there a way we can add that? 31 | 32 | **Mara:** I know the list isn't complete. The problems on the board are the ones Andy and I talked about earlier. And even some of these problems really need to get broken down into smaller tasks. I understand your concerns about code vulnerabilities. Andy, what do you think? 33 | 34 | **Andy:** Right now, just getting a build out the door is hard. Let's start with some of the basic problems. I do like that we have a central place where we can keep track of our issues. We can add issues to the backlog and prioritize them once we're ready. 35 | 36 | **Mara:** Before we add any issues, let's talk a bit more about what everyone is working on. 37 | 38 | :::row::: 39 | :::column span="4"::: 40 | Each team member shares what they're working on and other concerns they have. As a brainstorming activity, they add sticky notes to a whiteboard. Their whiteboard fills up quickly. 41 | :::column-end::: 42 | :::column::: 43 | :::image type="content" source="../media/3-whiteboard.png" alt-text="Screenshot of a whiteboard containing sticky notes. 
The contents of the sticky notes are not legible."::: 44 | :::column-end::: 45 | :::row-end::: 46 | 47 | Eventually, the team settles on seven top issues. Andy volunteers to add tasks to Azure Boards while everyone watches. Here's what the board looks like: 48 | 49 | :::image type="content" source="../../shared/media/build-all-tasks.png" alt-text="Screenshot of Azure Boards showing a backlog of issues."::: 50 | 51 | **Amita:** Wow, that's a lot of problems. How are we ever going to fix all those? 52 | 53 | **Mara:** We don't have to fix them all right away. For now, we've identified a _backlog_, or list of work from which we could pull. When we plan work, we get to choose what's most urgent or important. 54 | 55 | After some more discussion, the team decides to take on the three issues Mara originally proposed: 56 | 57 | * Stabilize the build server 58 | * Create a Git-based workflow 59 | * Create unit tests 60 | 61 | **Mara:** These seem like the easiest issues to take on, and they address some recent challenges that came up. Let's set up a project, a team, and a sprint. Then we can decide who does what. 62 | 63 | **Tim:** What's a sprint? 64 | 65 | **Mara:** Good question. A sprint is the amount of time we have to complete our tasks. Sprints help keep us focused. At the end, we can have a short retrospective meeting to share what we've accomplished. After that, we can plan the next one. 66 | 67 | Everyone looks nervous. 68 | 69 | **Mara:** We're still learning. A sprint is typically two to four weeks long. Let's just say two weeks and see how that goes. These are mostly tasks Andy and I can tackle. We'll share our progress as we go. Then we can find ways to include everybody. 70 | 71 | Mara and the team are off to a good start. Next, you'll set up the project, team, and some tasks on Azure Boards. 
72 | -------------------------------------------------------------------------------- /knowledge/choose-an-agile-approach/includes/5-summary.md: -------------------------------------------------------------------------------- 1 | In this module, the Tailspin team took their first steps towards adopting DevOps practices. You worked with them and learned how to use Azure Boards to get started with Agile work planning. A board gives you an easy way to see what's going on with a project and to manage your work. Some of the things you learned to do with Azure Boards include how to: 2 | 3 | * Create projects. 4 | * Create work items. 5 | * Associate work items with a sprint, or iteration. 6 | 7 | ## Learn more 8 | 9 | This module touches on Agile and Agile processes, but there's a lot more to learn. 10 | 11 | If you're interested in learning more about the benefits of Agile, check out [What is Agile Development?](/devops/plan/what-is-agile-development?azure-portal=true) 12 | 13 | In this module, you followed the Basic process. You'll continue using this process in upcoming modules. For your own projects, learn how to [choose a process](/azure/devops/boards/work-items/guidance/choose-process?azure-portal=true) that best fits your team. You can also learn more about each process Azure Boards supports. 14 | 15 | * [Agile process](/azure/devops/boards/work-items/guidance/agile-process?azure-portal=true) 16 | * [Scrum process](/azure/devops/boards/work-items/guidance/scrum-process?azure-portal=true) 17 | * [CMMI process](/azure/devops/boards/work-items/guidance/cmmi-process?azure-portal=true) 18 | 19 | Also in this module, you added fictitious team members to your project. Learn more about how to [add users to your organization or project](/azure/devops/organizations/accounts/add-organization-users?azure-portal=true). 
20 | 21 | As you plan and track your work with Azure Boards, you can refer to our complete [Azure Boards Documentation](/azure/devops/boards/?azure-portal=true) to get the most out of them. 22 | -------------------------------------------------------------------------------- /knowledge/choose-an-agile-approach/index.yml: -------------------------------------------------------------------------------- 1 | ### YamlMime:Module 2 | uid: learn.azdo.choose-an-agile-approach 3 | metadata: 4 | title: Choose an Agile approach to software development 5 | description: Learn with the Space Game web team how to use Azure Boards to implement Agile software practices along with DevOps transparency and collaboration. 6 | ms.date: 10/11/2023 7 | ms.custom: devdivchpfy22 8 | author: chcomley 9 | ms.author: chcomley 10 | ms.topic: module-standard-task-based 11 | ms.service: azure-devops 12 | manager: mijacobs 13 | title: Choose an Agile approach to software development 14 | summary: Learn to foster the DevOps values of transparency and team cooperation with Azure Boards. 15 | abstract: | 16 | After completing this module, you'll be able to: 17 | - Define the term Agile. 18 | - Begin to make recommendations for incorporating Agile practices into your organization. 19 | - Create a project in Azure DevOps. 20 | - Add work items to Azure Boards by using the Basic process. 
21 | prerequisites: An Azure DevOps organization 22 | iconUrl: /training/achievements/azure-devops/choose-an-agile-approach-to-software-development.svg 23 | ratingEnabled: true 24 | levels: 25 | - beginner 26 | roles: 27 | - devops-engineer 28 | - administrator 29 | - developer 30 | - solution-architect 31 | products: 32 | - azure-devops 33 | subjects: 34 | - devops 35 | - process-workflow 36 | units: 37 | - learn.azdo.choose-an-agile-approach.1-introduction 38 | - learn.azdo.choose-an-agile-approach.2-what-is-agile 39 | - learn.azdo.choose-an-agile-approach.3-what-is-azure-boards 40 | - learn.azdo.choose-an-agile-approach.4-plan-work-azure-boards 41 | - learn.azdo.choose-an-agile-approach.5-summary 42 | badge: 43 | uid: learn.azdo.choose-an-agile-approach.badge 44 | -------------------------------------------------------------------------------- /knowledge/choose-an-agile-approach/media/3-assign-owner.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/timothywarner/chatgptclass/93786f110785ece14306ec13656f108d24929645/knowledge/choose-an-agile-approach/media/3-assign-owner.png -------------------------------------------------------------------------------- /knowledge/choose-an-agile-approach/media/3-assign-sprint.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/timothywarner/chatgptclass/93786f110785ece14306ec13656f108d24929645/knowledge/choose-an-agile-approach/media/3-assign-sprint.png -------------------------------------------------------------------------------- /knowledge/choose-an-agile-approach/media/3-blank-board.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/timothywarner/chatgptclass/93786f110785ece14306ec13656f108d24929645/knowledge/choose-an-agile-approach/media/3-blank-board.png 
-------------------------------------------------------------------------------- /knowledge/choose-an-agile-approach/media/3-whiteboard.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/timothywarner/chatgptclass/93786f110785ece14306ec13656f108d24929645/knowledge/choose-an-agile-approach/media/3-whiteboard.png -------------------------------------------------------------------------------- /knowledge/choose-an-agile-approach/media/4-boards-sprints-menu.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/timothywarner/chatgptclass/93786f110785ece14306ec13656f108d24929645/knowledge/choose-an-agile-approach/media/4-boards-sprints-menu.png -------------------------------------------------------------------------------- /knowledge/combine-ppt.ps1: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/timothywarner/chatgptclass/93786f110785ece14306ec13656f108d24929645/knowledge/combine-ppt.ps1 -------------------------------------------------------------------------------- /knowledge/combine_ppt.py: -------------------------------------------------------------------------------- 1 | import os 2 | from pptx import Presentation 3 | 4 | def extract_text_from_pptx(directory, output_file): 5 | all_text = [] 6 | 7 | files = [f for f in os.listdir(directory) if f.endswith('.pptx')] 8 | 9 | for file in files: 10 | file_path = os.path.join(directory, file) 11 | presentation = Presentation(file_path) 12 | 13 | for slide in presentation.slides: 14 | for shape in slide.shapes: 15 | if shape.has_text_frame: 16 | for paragraph in shape.text_frame.paragraphs: 17 | for run in paragraph.runs: 18 | all_text.append(run.text) 19 | 20 | with open(output_file, 'w', encoding='utf-8') as f: 21 | f.write('\n'.join(all_text)) 22 | 23 | print(f"Combined text saved as '{output_file}'") 24 | 25 | 
output_file_path = os.path.join('d:\\work', 'combined_text.txt') 26 | extract_text_from_pptx('d:\\work', output_file_path) 27 | -------------------------------------------------------------------------------- /mcp-demos/MCP_DEMO_GUIDE.md: -------------------------------------------------------------------------------- 1 | # MCP Server Demo Guide 2 | 3 | ## Quick Setup 4 | 5 | 1. **Install the weather server dependencies:** 6 | ```bash 7 | cd mcp-demos/weather-server 8 | npm install 9 | ``` 10 | 11 | 2. **Restart VS Code** to load the MCP configuration 12 | 13 | 3. **In Claude (VS Code)**, you can now use weather commands! 14 | 15 | ## Demo Script 16 | 17 | ### Basic Weather Query 18 | "What's the weather in Seattle?" 19 | 20 | ### List Available Cities 21 | "What cities do you have weather data for?" 22 | 23 | ### Compare Weather 24 | "Compare the weather between Miami and Chicago" 25 | 26 | ### Error Handling Demo 27 | "What's the weather in Paris?" (Shows graceful error handling) 28 | 29 | ## How MCP Works 30 | 31 | 1. **Configuration**: `.vscode/mcp.json` tells VS Code which MCP servers to run 32 | 2. **Server**: The Node.js server implements the MCP protocol 33 | 3. **Tools**: The server exposes tools that Claude can call 34 | 4. **Integration**: Claude automatically discovers and uses these tools 35 | 36 | ## Teaching Points 37 | 38 | - **Extensibility**: Show how easy it is to add new cities or weather properties 39 | - **Protocol**: Explain the request/response pattern 40 | - **Real-world Use**: Discuss how this could connect to real weather APIs 41 | - **Error Handling**: Demonstrate robustness with invalid inputs 42 | 43 | ## Troubleshooting 44 | 45 | - If tools don't appear, restart VS Code 46 | - Check the Output panel (View > Output > "MCP") for server logs 47 | - Ensure Node.js is installed (`node --version`) 48 | 49 | ## Next Steps 50 | 51 | 1. Try enabling the filesystem MCP server (set `"disabled": false`) 52 | 2. 
Add more weather properties (UV index, precipitation, etc.) 53 | 3. Create your own MCP server for a different domain! -------------------------------------------------------------------------------- /mcp-demos/weather-server/README.md: -------------------------------------------------------------------------------- 1 | # Weather MCP Server Demo 2 | 3 | A simple Model Context Protocol (MCP) server that provides weather information for teaching and demonstration purposes. 4 | 5 | ## What is MCP? 6 | 7 | MCP (Model Context Protocol) allows AI assistants like Claude to interact with external tools and services through a standardized protocol. This weather server demonstrates how to build a simple MCP server. 8 | 9 | ## Available Tools 10 | 11 | 1. **get_weather** - Get current weather for a specific city 12 | - Input: `city` (string) - The city name 13 | - Returns: Temperature, conditions, humidity, wind, and forecast 14 | 15 | 2. **list_cities** - List all available cities 16 | - No input required 17 | - Returns: List of cities with available weather data 18 | 19 | ## Setup Instructions 20 | 21 | 1. Install dependencies: 22 | ```bash 23 | cd mcp-demos/weather-server 24 | npm install 25 | ``` 26 | 27 | 2. The server is configured in `.vscode/mcp.json` and will be available in VS Code with Claude 28 | 29 | ## How It Works 30 | 31 | The server: 32 | - Uses the MCP SDK to create a server that communicates via stdio 33 | - Defines two tools that Claude can call 34 | - Returns mock weather data for demonstration (no API key needed!) 35 | 36 | ## Teaching Points 37 | 38 | 1. **Simple Implementation** - Shows basic MCP server structure 39 | 2. **No External Dependencies** - Uses hardcoded data for reliability in demos 40 | 3. **Clear Tool Design** - Demonstrates good tool naming and descriptions 41 | 4. 
**Error Handling** - Shows how to handle missing cities gracefully 42 | 43 | ## Example Usage in Claude 44 | 45 | Once configured, you can ask Claude: 46 | - "What's the weather in Seattle?" 47 | - "Show me all available cities" 48 | - "Compare weather between New York and Miami" -------------------------------------------------------------------------------- /mcp-demos/weather-server/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "weather-mcp-server", 3 | "version": "1.0.0", 4 | "description": "Simple MCP server for weather lookups - great for demos and teaching", 5 | "main": "server.js", 6 | "type": "module", 7 | "scripts": { 8 | "start": "node server.js" 9 | }, 10 | "dependencies": { 11 | "@modelcontextprotocol/sdk": "^0.5.0" 12 | } 13 | } -------------------------------------------------------------------------------- /mcp-demos/weather-server/server.js: -------------------------------------------------------------------------------- 1 | import { Server } from '@modelcontextprotocol/sdk/server/index.js'; 2 | import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js'; 3 | import { 4 | CallToolRequestSchema, 5 | ListToolsRequestSchema, 6 | } from '@modelcontextprotocol/sdk/types.js'; 7 | 8 | // Simple weather data for demo purposes 9 | const WEATHER_DATA = { 10 | 'seattle': { 11 | temp: 55, 12 | condition: 'Rainy', 13 | humidity: 85, 14 | wind: '10 mph SW', 15 | forecast: 'Rain continuing throughout the week' 16 | }, 17 | 'los angeles': { 18 | temp: 75, 19 | condition: 'Sunny', 20 | humidity: 40, 21 | wind: '5 mph W', 22 | forecast: 'Clear skies for the next 5 days' 23 | }, 24 | 'new york': { 25 | temp: 62, 26 | condition: 'Partly Cloudy', 27 | humidity: 60, 28 | wind: '15 mph NE', 29 | forecast: 'Scattered clouds, possible rain tomorrow' 30 | }, 31 | 'miami': { 32 | temp: 82, 33 | condition: 'Humid', 34 | humidity: 75, 35 | wind: '8 mph E', 36 | forecast: 'Hot and humid with 
afternoon thunderstorms' 37 | }, 38 | 'chicago': { 39 | temp: 48, 40 | condition: 'Windy', 41 | humidity: 55, 42 | wind: '20 mph NW', 43 | forecast: 'Cold front moving in, temperatures dropping' 44 | }, 45 | 'denver': { 46 | temp: 45, 47 | condition: 'Clear', 48 | humidity: 30, 49 | wind: '12 mph W', 50 | forecast: 'Clear and cold, possible snow in mountains' 51 | } 52 | }; 53 | 54 | class WeatherServer { 55 | constructor() { 56 | this.server = new Server( 57 | { 58 | name: 'weather-server', 59 | version: '1.0.0', 60 | }, 61 | { 62 | capabilities: { 63 | tools: {}, 64 | }, 65 | } 66 | ); 67 | 68 | this.setupHandlers(); 69 | } 70 | 71 | setupHandlers() { 72 | // List available tools 73 | this.server.setRequestHandler(ListToolsRequestSchema, async () => ({ 74 | tools: [ 75 | { 76 | name: 'get_weather', 77 | description: 'Get current weather for a city', 78 | inputSchema: { 79 | type: 'object', 80 | properties: { 81 | city: { 82 | type: 'string', 83 | description: 'The city name (e.g., "Seattle", "New York")', 84 | }, 85 | }, 86 | required: ['city'], 87 | }, 88 | }, 89 | { 90 | name: 'list_cities', 91 | description: 'List all available cities with weather data', 92 | inputSchema: { 93 | type: 'object', 94 | properties: {}, 95 | }, 96 | }, 97 | ], 98 | })); 99 | 100 | // Handle tool calls 101 | this.server.setRequestHandler(CallToolRequestSchema, async (request) => { 102 | const { name, arguments: args } = request.params; 103 | 104 | switch (name) { 105 | case 'get_weather': { 106 | const city = args.city?.toLowerCase(); 107 | 108 | if (!city) { 109 | return { 110 | content: [ 111 | { 112 | type: 'text', 113 | text: 'Error: City name is required', 114 | }, 115 | ], 116 | }; 117 | } 118 | 119 | const weather = WEATHER_DATA[city]; 120 | 121 | if (!weather) { 122 | return { 123 | content: [ 124 | { 125 | type: 'text', 126 | text: `Weather data not available for "${args.city}". 
Available cities: ${Object.keys(WEATHER_DATA).join(', ')}`, 127 | }, 128 | ], 129 | }; 130 | } 131 | 132 | return { 133 | content: [ 134 | { 135 | type: 'text', 136 | text: `Weather in ${args.city}: 137 | 🌡️ Temperature: ${weather.temp}°F 138 | ☁️ Condition: ${weather.condition} 139 | 💧 Humidity: ${weather.humidity}% 140 | 💨 Wind: ${weather.wind} 141 | 📅 Forecast: ${weather.forecast}`, 142 | }, 143 | ], 144 | }; 145 | } 146 | 147 | case 'list_cities': { 148 | const cities = Object.keys(WEATHER_DATA) 149 | .map(city => city.charAt(0).toUpperCase() + city.slice(1)) 150 | .join(', '); 151 | 152 | return { 153 | content: [ 154 | { 155 | type: 'text', 156 | text: `Available cities with weather data: ${cities}`, 157 | }, 158 | ], 159 | }; 160 | } 161 | 162 | default: 163 | return { 164 | content: [ 165 | { 166 | type: 'text', 167 | text: `Unknown tool: ${name}`, 168 | }, 169 | ], 170 | }; 171 | } 172 | }); 173 | } 174 | 175 | async run() { 176 | const transport = new StdioServerTransport(); 177 | await this.server.connect(transport); 178 | console.error('Weather MCP server running on stdio'); 179 | } 180 | } 181 | 182 | // Start the server 183 | const server = new WeatherServer(); 184 | server.run().catch(console.error); -------------------------------------------------------------------------------- /projects-custom-gpt/From_Zero_to_ChatGPT_-_Tutorial.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/timothywarner/chatgptclass/93786f110785ece14306ec13656f108d24929645/projects-custom-gpt/From_Zero_to_ChatGPT_-_Tutorial.pdf -------------------------------------------------------------------------------- /src/CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | ## Code of Conduct\n\nOur pledge to create a welcoming and safe environment. 
-------------------------------------------------------------------------------- /src/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | ## Contributing Guidelines\n\nThank you for your interest in contributing to our project! -------------------------------------------------------------------------------- /src/Dockerfile: -------------------------------------------------------------------------------- 1 | # Use an official Python runtime as a parent image 2 | FROM python:3.8-slim 3 | 4 | # Set the working directory in the container 5 | WORKDIR /usr/src/app 6 | 7 | # Copy the src directory contents into the container at /usr/src/app 8 | COPY src/ . 9 | 10 | # Install any needed packages specified in requirements.txt 11 | RUN pip install --no-cache-dir -r requirements.txt 12 | 13 | # Make port 5000 available to the world outside this container 14 | EXPOSE 5000 15 | 16 | # Run app.py when the container launches 17 | CMD ["python", "app.py"] 18 | -------------------------------------------------------------------------------- /src/alerts_to_sarif.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import json 3 | import os 4 | 5 | # Fetch GitHub Personal Access Token from environment variable 6 | TOKEN = os.getenv("TIM_GITHUB_PAT") 7 | REPO_OWNER = "timothywarner-org" 8 | REPO_NAME = "matrix" 9 | ALERT_ID = 641 # Example alert ID 10 | 11 | if not TOKEN: 12 | print("TIM_GITHUB_PAT is not set.") 13 | exit(1) 14 | 15 | 16 | def fetch_alert(owner, repo, alert_id, token): 17 | """Fetch a specific alert from GitHub repository.""" 18 | print("Fetching alert data...") 19 | headers = { 20 | "Authorization": f"token {token}", 21 | "Accept": "application/vnd.github.v3+json", 22 | } 23 | url = f"https://api.github.com/repos/{owner}/{repo}/code-scanning/alerts/{alert_id}" 24 | try: 25 | response = requests.get(url, headers=headers) 26 | if response.status_code == 200: 27 | 
print("Alert data fetched successfully.") 28 | return response.json() 29 | else: 30 | print( 31 | f"Failed to fetch alert: HTTP {response.status_code} - {response.text}" 32 | ) 33 | except Exception as e: 34 | print(f"Error during API call: {e}") 35 | return None 36 | 37 | 38 | def create_sarif_report(alert): 39 | """Convert the alert details into SARIF format.""" 40 | print("Creating SARIF report...") 41 | if alert is None: 42 | print("No alert data provided.") 43 | return None 44 | 45 | # Basic SARIF template structure 46 | sarif_template = { 47 | "$schema": "https://json.schemastore.org/sarif-2.1.0.json", 48 | "version": "2.1.0", 49 | "runs": [ 50 | { 51 | "tool": { 52 | "driver": { 53 | "name": "GitHub Code Scanning Alert", 54 | "organization": "GitHub", 55 | "semanticVersion": "1.0.0", 56 | "rules": [ 57 | { 58 | "id": alert.get("rule", {}).get("id", "unknown_rule"), 59 | "shortDescription": { 60 | "text": alert.get("rule", {}).get( 61 | "description", "No description available" 62 | ) 63 | }, 64 | "helpUri": alert.get("html_url", "No URL available"), 65 | } 66 | ], 67 | } 68 | }, 69 | "results": [ 70 | { 71 | "ruleId": alert.get("rule", {}).get("id", "unknown_rule"), 72 | "message": { 73 | "text": alert.get("rule", {}).get( 74 | "description", "No description available" 75 | ) 76 | }, 77 | "locations": [ 78 | { 79 | "physicalLocation": { 80 | "artifactLocation": { 81 | "uri": alert.get("html_url", "No URL available") 82 | } 83 | } 84 | } 85 | ], 86 | } 87 | ], 88 | } 89 | ], 90 | } 91 | print("SARIF report created successfully.") 92 | return sarif_template 93 | 94 | 95 | def main(): 96 | alert_data = fetch_alert(REPO_OWNER, REPO_NAME, ALERT_ID, TOKEN) 97 | if alert_data: 98 | sarif_report = create_sarif_report(alert_data) 99 | if sarif_report: 100 | output_filename = f"alert_{ALERT_ID}.sarif" 101 | with open(output_filename, "w") as file: 102 | json.dump(sarif_report, file, indent=4) 103 | print(f"SARIF report generated and saved as {output_filename}") 104 | 
else: 105 | print("Failed to create SARIF report.") 106 | else: 107 | print("Failed to fetch alert data or alert data is None.") 108 | 109 | 110 | if __name__ == "__main__": 111 | main() 112 | -------------------------------------------------------------------------------- /src/app.py: -------------------------------------------------------------------------------- 1 | from flask import Flask, request 2 | import os 3 | 4 | app = Flask(__name__) 5 | 6 | 7 | # Intentional vulnerability: Insecure use of eval() 8 | @app.route("/eval") 9 | def index(): 10 | return str(eval(request.args.get("input", ""))) 11 | 12 | 13 | # Outdated dependency usage and intentional error 14 | # Note: You should have an older Flask version specified in requirements.txt 15 | @app.route("/hello") 16 | def hello(): 17 | # Intentional coding error: Undefined variable 'name' used 18 | return "Hello " + name 19 | 20 | 21 | if __name__ == "__main__": 22 | app.run(debug=True) 23 | -------------------------------------------------------------------------------- /src/bad-python.py: -------------------------------------------------------------------------------- 1 | 2 | import random 3 | 4 | def generate_random_number(): 5 | return random.randint(0, 100000) 6 | 7 | if __name__ == "__main__": 8 | print(generate_random_number()) 9 | 10 | # python text adventure game 11 | # by: github.com/techwithtim 12 | 13 | import os 14 | import time 15 | import random 16 | 17 | 18 | 19 | 20 | 21 | import math, sys; 22 | 23 | def RandomNumberGenerator(): 24 | return 4; # chosen by fair dice roll 25 | 26 | # generate a pseudo-random number 27 | 28 | 29 | def badFunction1(x, y = 2): 30 | return x+y; 31 | 32 | def CalculateArea(radius): 33 | return math.pi*radius**2; 34 | 35 | def print_hello(): 36 | print('Hello, world!') 37 | 38 | # Global variables 39 | global_var = 42 40 | 41 | def another_bad_function(): 42 | global global_var 43 | global_var += 1 44 | print(global_var) 45 | 46 | if __name__=="__main__": 47 | 
print_hello() 48 | x = 5 49 | print(badFunction1(x)) 50 | print(CalculateArea(x)); 51 | another_bad_function() 52 | another_bad_function() 53 | print("Done") 54 | -------------------------------------------------------------------------------- /src/build-chat-app.md: -------------------------------------------------------------------------------- 1 | # Build sample Node.js apps 2 | 3 | ## Set up directory 4 | 5 | mkdir gpt-node-app 6 | cd gpt-node-app 7 | 8 | ## Initialize the project 9 | 10 | npm init -y 11 | 12 | ## Install the OpenAI library 13 | 14 | npm install openai 15 | 16 | ## Add this code to `app.js` 17 | 18 | const axios = require('axios'); 19 | const readline = require('readline'); 20 | 21 | const rl = readline.createInterface({ 22 | input: process.stdin, 23 | output: process.stdout 24 | }); 25 | 26 | // Replace with your OpenAI API key 27 | const API_KEY = 'your_openai_api_key_here'; 28 | 29 | // Configure axios instance for GPT API 30 | const gptAxios = axios.create({ 31 | baseURL: 'https://api.openai.com/v1', 32 | headers: { 33 | 'Content-Type': 'application/json', 34 | 'Authorization': `Bearer ${API_KEY}` 35 | } 36 | }); 37 | 38 | // Function to call the GPT API 39 | async function generateText(prompt) { 40 | try { 41 | const response = await gptAxios.post('/engines/davinci-codex/completions', { 42 | prompt: prompt, 43 | max_tokens: 50, 44 | n: 1, 45 | stop: null, 46 | temperature: 1.0, 47 | }); 48 | 49 | return response.data.choices[0].text; 50 | } catch (error) { 51 | console.error('Error calling GPT API:', error); 52 | return null; 53 | } 54 | } 55 | 56 | // Main function to run the app 57 | async function main() { 58 | rl.question('Enter your prompt: ', async (prompt) => { 59 | const generatedText = await generateText(prompt); 60 | console.log('Generated text:', generatedText); 61 | rl.close(); 62 | }); 63 | } 64 | 65 | main(); 66 | 67 | 68 | ## Run the app 69 | 70 | node app.js 71 | 72 | 
-------------------------------------------------------------------------------- /src/call_openai_api.py: -------------------------------------------------------------------------------- 1 | """ 2 | Write a function that accepts string input and sends it to the OpenAI API.The function 3 | should return the API's response formatted in a visually appealing way. Give me the 4 | entire function at once, NOT piece by piece. I want to see the whole thing. 5 | """ 6 | -------------------------------------------------------------------------------- /src/chaggpt-prompt-examples.md: -------------------------------------------------------------------------------- 1 | # ChatGPT Prompt Examples 2 | 3 | [Awesome Prompts](https://github.com/f/awesome-chatgpt-prompts) 4 | 5 | ## Position interviewer 6 | 7 | I want you to act as an interviewer. I will be the candidate and you will ask me the interview questions for the position Azure Cloud Solution Architect. I want you to only reply as the interviewer. Do not write all the conservation at once. I want you to only do the interview with me. Ask me the questions and wait for my answers. Do not write explanations. Ask me the questions one by one like an interviewer does and wait for my answers. My first sentence is "Hi" 8 | 9 | ## Plagiarism checker 10 | 11 | I want you to act as a plagiarism checker. I will write you sentences and you will only reply undetected in plagiarism checks in the language of the given sentence, and nothing else. Do not write explanations on replies. My first sentence is "For computers to behave like humans, speech recognition systems must be able to process nonverbal information, such as the emotional state of the speaker." 12 | 13 | ## Cybersecurity specialist 14 | 15 | I want you to act as a cyber security specialist. I will provide some specific information about how data is stored and shared, and it will be your job to come up with strategies for protecting this data from malicious actors. 
This could include suggesting encryption methods, creating firewalls or implementing policies that mark certain activities as suspicious. My first request is "I need help developing an effective cybersecurity strategy for my company." 16 | 17 | ## Chef 18 | 19 | I require someone who can suggest delicious recipes that includes foods which are nutritionally beneficial but also easy & not time consuming enough therefore suitable for busy people like us among other factors such as cost effectiveness so overall dish ends up being healthy yet economical at same time! My first request – “Something light yet fulfilling that could be cooked quickly during lunch break” 20 | 21 | ## Prompt generator 22 | 23 | I want you to act as a prompt generator. Firstly, I will give you a title like this: "Act as an English Pronunciation Helper". Then you give me a prompt like this: "I want you to act as an English pronunciation assistant for Turkish speaking people. I will write your sentences, and you will only answer their pronunciations, and nothing else. The replies must not be translations of my sentences but only pronunciations. Pronunciations should use Turkish Latin letters for phonetics. Do not write explanations on replies. My first sentence is "how the weather is in Istanbul?"." (You should adapt the sample prompt according to the title I gave. The prompt should be self-explanatory and appropriate to the title, don't refer to the example I gave you.). My first title is "Act as a Code Review Helper" (Give me prompt only) 24 | 25 | ## Domain name generator 26 | 27 | I want you to act as a smart domain name generator. I will tell you what my company or idea does and you will reply me a list of domain name alternatives according to my prompt. You will only reply the domain list, and nothing else. Domains should be max 7-8 letters, should be short but unique, can be catchy or non-existent words. Do not write explanations. Reply "OK" to confirm. 
28 | 29 | ## Marketing planner 30 | 31 | I want you to act as an advertiser. You will create a campaign to promote a product or service of your choice. You will choose a target audience, develop key messages and slogans, select the media channels for promotion, and decide on any additional activities needed to reach your goals. My first suggestion request is, "I need help creating an advertising campaign for (insert description of service or product)" 32 | 33 | ## Content generation 34 | 35 | Write a technology blog post on ___ in the style of Tim Warner. 36 | 37 | ## Code generation 1 38 | 39 | Write a PowerShell script that does the following: 40 | 41 | - Lists whether the current session is elevated 42 | - How long the system has been up 43 | - Whether a system reboot is pending 44 | 45 | Use standard PowerShell cmdlets and syntax. 46 | 47 | ## Code generation 2 48 | 49 | Write a Node.js console application that does the following: 50 | 51 | - Asks the user for a prompt 52 | - Sends the prompt to the OpenAI Completion API 53 | - Returns a natural language completion 54 | 55 | Generate only the formatted completion text. Do not include any other JSON data. Make sure to explain what the code is doing and how to actually install and run the application on a Windows machine. 56 | 57 | 58 | -------------------------------------------------------------------------------- /src/chatgptclass.code-workspace: -------------------------------------------------------------------------------- 1 | { 2 | "folders": [ 3 | { 4 | "path": "." 5 | } 6 | ], 7 | "settings": {} 8 | } -------------------------------------------------------------------------------- /src/codex-prompt-examples.md: -------------------------------------------------------------------------------- 1 | # Codex Prompt Examples 2 | 3 | ## sketch.js 4 | 5 | /* 6 | Draw a white house with a red roof and a blue door. Brown roof. Two grey windows. Green grass. Yellow sun. Blue sky. 
7 | */ 8 | */ 9 | 10 | 11 | 12 | ## math.js 13 | 14 | # Create a list of the first 10 prime numbers 15 | primes = [] 16 | 17 | for num in range(2, 100): 18 | if all(num % i != 0 for i in range(2, num)): 19 | primes.append(num) 20 | print(primes) 21 | 22 | 23 | ## Demonstrate project folder awareness 24 | 25 | -------------------------------------------------------------------------------- /src/combine_ppt.py: -------------------------------------------------------------------------------- 1 | import os 2 | from pptx import Presentation 3 | 4 | def extract_text_from_pptx(directory, output_file): 5 | all_text = [] 6 | 7 | files = [f for f in os.listdir(directory) if f.endswith('.pptx')] 8 | 9 | for file in files: 10 | file_path = os.path.join(directory, file) 11 | presentation = Presentation(file_path) 12 | 13 | for slide in presentation.slides: 14 | for shape in slide.shapes: 15 | if shape.has_text_frame: 16 | for paragraph in shape.text_frame.paragraphs: 17 | for run in paragraph.runs: 18 | all_text.append(run.text) 19 | 20 | with open(output_file, 'w', encoding='utf-8') as f: 21 | f.write('\n'.join(all_text)) 22 | 23 | print(f"Combined text saved as '{output_file}'") 24 | 25 | output_file_path = os.path.join('d:\\work', 'combined_text.txt') 26 | extract_text_from_pptx('d:\\work', output_file_path) 27 | s -------------------------------------------------------------------------------- /src/completion.py: -------------------------------------------------------------------------------- 1 | import openai 2 | 3 | openai.api_key = "" 4 | 5 | completion = openai.ChatCompletion.create( 6 | model="fine-tuned-model-name", 7 | messages=[ 8 | {"role": "system", "content": "You are a helpful customer service chatbot."}, 9 | {"role": "user", "content": "What are some things I can do with the WonderWidget?"} 10 | ] 11 | ) 12 | 13 | print(completion.choices[0].message) -------------------------------------------------------------------------------- /src/context-scope.py: 
-------------------------------------------------------------------------------- 1 | contextSpecificVariable = 42 -------------------------------------------------------------------------------- /src/convert-ps-to-cli.ps1: -------------------------------------------------------------------------------- 1 | # Get all Key Vaults in the subscription 2 | vaults=$(az keyvault list --query "[].name" -o tsv) 3 | 4 | # Loop through each Key Vault 5 | for vault in $vaults; do 6 | # Get all secrets in the Key Vault 7 | secrets=$(az keyvault secret list --vault-name $vault --query "[].name" -o tsv) 8 | 9 | # Loop through each secret 10 | for secret in $secrets; do 11 | # Check if the secret name includes "vm" 12 | if [[ $secret == *"vm"* ]]; then 13 | # Get the secret's last accessed time 14 | lastAccessed=$(az keyvault secret show --vault-name $vault --name $secret --query "attributes.updated" -o tsv) 15 | 16 | # Create a custom object and output it 17 | output="{\"Vault\":\"$vault\",\"Secret\":\"$secret\",\"Last_accessed\":\"$lastAccessed\"}" 18 | 19 | # Output the custom object 20 | echo $output 21 | fi 22 | done 23 | done 24 | -------------------------------------------------------------------------------- /src/copilot-dev-cover.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/timothywarner/chatgptclass/93786f110785ece14306ec13656f108d24929645/src/copilot-dev-cover.png -------------------------------------------------------------------------------- /src/count_tokens.py: -------------------------------------------------------------------------------- 1 | # From OpenAI Cookbook: https://timw.info/k71 2 | 3 | import tiktoken 4 | import os 5 | 6 | def num_tokens_from_string(string: str, encoding_name: str) -> int: 7 | """Returns the number of tokens in a text string.""" 8 | encoding = tiktoken.get_encoding(encoding_name) 9 | num_tokens = len(encoding.encode(string)) 10 | return num_tokens 11 | 12 | # Clear 
the console screen 13 | os.system('cls' if os.name == 'nt' else 'clear') 14 | 15 | print("Token count: " + str(num_tokens_from_string("The rain in Spain falls mainly on the plain.", "cl100k_base"))) -------------------------------------------------------------------------------- /src/create_unit_tests.py: -------------------------------------------------------------------------------- 1 | # Test these functions during every build! 2 | 3 | def add_numbers(a, b): 4 | return a + b 5 | 6 | def multiply_numbers(a, b): 7 | return a * b 8 | 9 | -------------------------------------------------------------------------------- /src/data_analysis.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import matplotlib.pyplot as plt 3 | import seaborn as sns 4 | 5 | # Load the iris dataset 6 | df = sns.load_dataset('iris') 7 | 8 | # Display the first few rows of the dataset 9 | print(df.head()) 10 | 11 | # Calculate the mean of each column 12 | mean_values = df.mean() 13 | print(mean_values) 14 | 15 | # Create a histogram of the sepal_length column 16 | plt.hist(df['sepal_length'], bins=10) 17 | plt.xlabel('Sepal Length') 18 | plt.ylabel('Frequency') 19 | plt.title('Histogram of Sepal Length') 20 | plt.show() -------------------------------------------------------------------------------- /src/dependabot-github-cli.sh: -------------------------------------------------------------------------------- 1 | ############## 2 | # GITHUB CLI # 3 | ############## 4 | 5 | # Set up Dependabot CLI (https://github.com/dependabot/cli) 6 | gh gist view --raw e09e1ecd76d5573e0517a7622009f06f | bash 7 | 8 | # Set variables for personalization: 9 | owner="timothywarner-org" 10 | repo="openai-chat" 11 | teams_channel="GHAS_Alerts" 12 | teams_webhook_url="" 13 | 14 | # List all Dependabot alerts for the specified repository: 15 | gh api /repos/timothywarner-org/openai-chat/dependabot/alerts | Out-File -FilePath .\dependabot-alerts.json | 
code .\dependabot-alerts.json 16 | 17 | # Get details of a specific alert (replace :alert_id with the actual ID): 18 | gh api /repos/timothywarner-org/openai-chat/dependabot/alerts/1 19 | 20 | # List vulnerable dependencies in a repository 21 | gh api /repos/timothywarner-org/openai-chat/vulnerability-alerts 22 | 23 | # Disable Dependabot alerts for a repository 24 | gh api -X DELETE /repos/timothywarner-org/openai-chat/vulnerability-alerts 25 | 26 | 27 | ################## 28 | # DEPENDABOT CLI # 29 | ################## 30 | 31 | # Dependabot CLI Examples for Python Project (Flask) 32 | # Project: github.com/timothywarner-org/openai-chat 33 | # Dependency File: ./requirements.txt 34 | # Source: https://github.com/dependabot/cli 35 | # Ensure Dependabot CLI is installed and configured according to the GitHub repository instructions. 36 | 37 | # 1. Update Flask dependency in the project 38 | dependabot update /path/to/openai-chat --ecosystem=pip --dep=Flask 39 | 40 | # 2. Dry run to simulate updates for Flask without applying them 41 | # Useful for testing what changes Dependabot would make 42 | dependabot update /path/to/openai-chat --ecosystem=pip --dep=Flask --dry-run 43 | 44 | # 3. Configure Dependabot to ignore a specific version of Flask 45 | # Replace 'VERSION' with the version to ignore (e.g., '1.1.0') 46 | dependabot config set ignore-condition --ecosystem=pip --dep=Flask --version='VERSION' /path/to/openai-chat 47 | 48 | # 4. Set custom configuration for Dependabot updates 49 | # This example sets a custom schedule for Dependabot updates 50 | dependabot config set update-schedule "daily" /path/to/openai-chat --ecosystem=pip 51 | 52 | # 5. Print debug logs for Dependabot operations on the Flask dependency 53 | dependabot update /path/to/openai-chat --ecosystem=pip --dep=Flask --debug 54 | 55 | # Note: Replace /path/to/openai-chat with the actual local path to your repository. 56 | # Replace 'VERSION' with the specific version of Flask you want to ignore. 
57 | 58 | 59 | -------------------------------------------------------------------------------- /src/dependabot-report.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | import csv 3 | import json 4 | 5 | # Repository details 6 | repo = "timothywarner-org/openai-chat" 7 | 8 | # GitHub CLI command to get Dependabot alerts in JSON format 9 | gh_cli_command = "gh api /repos/timothywarner-org/openai-chat/dependabot/alerts" 10 | 11 | 12 | def run_gh_cli_command(command): 13 | """Run GitHub CLI command and return the output""" 14 | result = subprocess.run(command, shell=True, capture_output=True, text=True) 15 | if result.returncode != 0: 16 | raise Exception(f"Error in GitHub CLI command: {result.stderr}") 17 | return result.stdout 18 | 19 | 20 | def get_dependabot_alerts(): 21 | """Get Dependabot alerts from GitHub repository""" 22 | output = run_gh_cli_command(gh_cli_command) 23 | return json.loads(output) 24 | 25 | 26 | def write_csv(data, filename="dependabot_alerts.csv"): 27 | """Write Dependabot alerts data to a CSV file""" 28 | keys = data[0].keys() if data else [] 29 | with open(filename, "w", newline="") as file: 30 | writer = csv.DictWriter(file, fieldnames=keys) 31 | writer.writeheader() 32 | for row in data: 33 | writer.writerow(row) 34 | 35 | 36 | def main(): 37 | try: 38 | alerts = get_dependabot_alerts() 39 | write_csv(alerts) 40 | print(f"Dependabot alerts report generated: dependabot_alerts.csv") 41 | except Exception as e: 42 | print(f"An error occurred: {e}") 43 | 44 | 45 | if __name__ == "__main__": 46 | main() 47 | -------------------------------------------------------------------------------- /src/deploy.azcli: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/timothywarner/chatgptclass/93786f110785ece14306ec13656f108d24929645/src/deploy.azcli -------------------------------------------------------------------------------- 
/src/fine-tuning-dataset.jsonl: -------------------------------------------------------------------------------- 1 | {"prompt": "Translate the following English text to French: 'Hello, how are you?'", "completion": "Traduisez le texte anglais suivant en français : 'Bonjour, comment ça va ?'"} 2 | {"prompt": "What is the capital of France?", "completion": "The capital of France is Paris."} 3 | {"prompt": "Who won the world series in 2020?", "completion": "The Los Angeles Dodgers won the World Series in 2020."} -------------------------------------------------------------------------------- /src/fine-tuning.py: -------------------------------------------------------------------------------- 1 | import openai 2 | import time 3 | 4 | openai.api_key = 'your-api-key' 5 | 6 | # Create a fine-tuning job 7 | fine_tuning = openai.FineTuning.create( 8 | model="your-model-id", 9 | dataset="file://fine_tune.jsonl", 10 | description="Fine-tuning on my dataset", 11 | ) 12 | 13 | # Get the ID of the fine-tuning job 14 | job_id = fine_tuning['id'] 15 | 16 | # Monitor the status of the fine-tuning job 17 | while True: 18 | fine_tuning = openai.FineTuning.retrieve(job_id) 19 | print(f"Job status: {fine_tuning['status']}") 20 | 21 | if fine_tuning['status'] in ['succeeded', 'failed']: 22 | break 23 | 24 | time.sleep(60) 25 | 26 | # Validate the fine-tuned model 27 | validation = openai.Validation.create( 28 | model=fine_tuning['model'], 29 | dataset="file://validation.jsonl", 30 | ) 31 | 32 | print(f"Validation result: {validation['result']}") -------------------------------------------------------------------------------- /src/fine-tuning2.py: -------------------------------------------------------------------------------- 1 | import openai 2 | 3 | openai.api_key = "" 4 | 5 | # Upload training data file 6 | openai.File.create( 7 | file=open(r"source_data.jsonl", "rb"), 8 | purpose='fine-tune' 9 | ) 10 | 11 | """ 12 | OUTPUT 13 | 14 | JSON: { 15 | "object": "file", 16 | "id": 
"file-id", 17 | "purpose": "fine-tune", 18 | "filename": "file", 19 | "bytes": 12528, 20 | "created_at": 1697811492, 21 | "status": "uploaded", 22 | "status_details": null 23 | } 24 | """ 25 | # Create a fine-tuned model 26 | openai.FineTuningJob.create(training_file="file-id", model="gpt-3.5-turbo") -------------------------------------------------------------------------------- /src/gh-cli-code-scanning.ps1: -------------------------------------------------------------------------------- 1 | # Command to set the default repository for GitHub CLI operations 2 | gh repo set-default timothywarner-org/matrix 3 | 4 | # For working with code scanning alerts or downloading SARIF files, use the GitHub API via the gh api command 5 | # Example to list code scanning alerts using GitHub API (Note: Adjust according to actual API endpoints and requirements) 6 | gh api repos/timothywarner-org/matrix/code-scanning/alerts 7 | 8 | # Example to get details of a specific code scanning alert by its number using GitHub API 9 | gh api repos/timothywarner-org/matrix/code-scanning/alerts/641 10 | 11 | # Upload a SARIF file to a repository using the GitHub API 12 | gh api -X POST /repos/ { owner }/ { repo }/code-scanning/sarifs \ 13 | -H "Accept: application/vnd.github.v3+json" \ 14 | -F "commit_sha=$(git rev-parse HEAD)" \ 15 | -F "ref=refs/heads/main" \ 16 | -F "sarif=@analysis.sarif" \ 17 | -F "checkout_uri=https://github.com/{owner}/{repo}" \ 18 | -F "started_at=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" \ 19 | -F "tool_name=YourStaticAnalysisTool" 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | -------------------------------------------------------------------------------- /src/gpt-metrics-davinci.py: -------------------------------------------------------------------------------- 1 | import openai 2 | import time 3 | import os 4 | 5 | # Set up OpenAI API credentials 6 | openai.api_key = "" 7 | 8 | # Set up the prompt and parameters for the chat completion 9 | prompt = "Hello, how are you?" 
10 | model = "text-davinci-002" 11 | temperature = 0.2 12 | max_tokens = 50 13 | 14 | # Calculate the prompt length 15 | prompt_length = len(prompt.split()) 16 | 17 | # Generate the chat completion and time how long it takes 18 | start_time = time.time() 19 | response = openai.Completion.create( 20 | engine=model, 21 | prompt=prompt, 22 | temperature=temperature, 23 | max_tokens=max_tokens 24 | ) 25 | end_time = time.time() 26 | 27 | # Calculate the completion length and total token length 28 | completion_length = len(response.choices[0].text.split()) 29 | total_token_length = prompt_length + completion_length 30 | cost = total_token_length * 0.00006 # Assume a cost of $0.00006 per token 31 | 32 | # Clear the console screen 33 | os.system('cls' if os.name == 'nt' else 'clear') 34 | 35 | # Display the response, prompt length, completion length, and total token length 36 | print(response.choices[0].text) 37 | print(f"Prompt length: {prompt_length}") 38 | print(f"Completion length: {completion_length}") 39 | print(f"Total token length: {total_token_length}") 40 | print(f"Time taken: {end_time - start_time:.2f} seconds") 41 | print(f"Estimated cost: ${cost:.4f}") -------------------------------------------------------------------------------- /src/gpt-metrics-gpt35.py: -------------------------------------------------------------------------------- 1 | import openai 2 | import time 3 | 4 | openai.api_key = "" 5 | 6 | messages = [ 7 | {"role": "system", "content": "You are a helpful assistant."}, 8 | {"role": "user", "content": "Translate the following English text to French: 'Hello, world'"}, 9 | ] 10 | 11 | start_time = time.time() 12 | 13 | response = openai.ChatCompletion.create( 14 | model="gpt-3.5-turbo", 15 | messages=messages, 16 | ) 17 | 18 | end_time = time.time() 19 | 20 | print(f"Prompt: {messages}") 21 | print(f"Completion: {response['choices'][0]['message']['content']}") 22 | 23 | prompt_tokens = response['usage']['prompt_tokens'] 24 | completion_tokens = 
response['usage']['total_tokens'] - prompt_tokens 25 | 26 | print(f"Prompt token length: {prompt_tokens}") 27 | print(f"Completion token length: {completion_tokens}") 28 | print(f"Overall token length: {prompt_tokens + completion_tokens}") 29 | print(f"Time taken to generate the completion: {end_time - start_time} seconds") 30 | print(f"Cost: {response['usage']['total_tokens'] * 0.0002} USD") 31 | -------------------------------------------------------------------------------- /src/inline_suggestions.py: -------------------------------------------------------------------------------- 1 | # Function stub 2 | def calculate_sum(a, b): 3 | pass 4 | 5 | def calculate_difference(a, b): 6 | 7 | 8 | def number_guessing_game(): 9 | 10 | 11 | # Class stub 12 | class MyClass: 13 | def __init__(self): 14 | pass 15 | 16 | def method1(self): 17 | pass 18 | 19 | def method2(self, param1): 20 | pass 21 | 22 | # If statement stub 23 | if __name__ == "__main__": 24 | pass -------------------------------------------------------------------------------- /src/interact-with-docker-image.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Step 1: Log in to GitHub Container Registry 4 | docker login ghcr.io -u timothywarner 5 | # Note: You'll be prompted for a password, use your personal access token with read:packages scope 6 | 7 | # Step 2: Pull the Docker image 8 | docker pull ghcr.io/timothywarner/my-flask-app:latest 9 | # Replace 'my-flask-app:latest' with your actual image name and tag 10 | 11 | # Step 3: Run the Docker image 12 | docker run -p 5000:5000 ghcr.io/timothywarner/my-flask-app:latest 13 | # This maps port 5000 of the container to port 5000 on your host machine 14 | 15 | # Step 4: Access the application 16 | # If it's a web application, access it via http://localhost:5000 in a web browser 17 | -------------------------------------------------------------------------------- /src/main.py: 
-------------------------------------------------------------------------------- 1 | import openai 2 | import os 3 | 4 | 5 | def get_etymology(name): 6 | try: 7 | response = openai.ChatCompletion.create( 8 | model="gpt-3.5-turbo", 9 | messages=[ 10 | {"role": "system", 11 | "content": "You are an etymology specialist. You provide the etymology of a name, the meaning of the name, and it's trend in popularity over time."}, 12 | { 13 | "role": "user", 14 | "content": f"What is the etymology of the name {name}?", 15 | }, 16 | ], 17 | ) 18 | try: 19 | return response.choices[0].message["content"] 20 | except Exception as e: 21 | return f"An error occurred: {e}" 22 | 23 | 24 | def main(): 25 | openai.api_key = os.environ.get("OPENAI_API_KEY") 26 | 27 | if not openai.api_key: 28 | print( 29 | "OpenAI API key not found. Please set the OPENAI_API_KEY environment variable." 30 | ) 31 | return 32 | 33 | name = input("Enter a first name: ") 34 | etymology = get_etymology(name) 35 | 36 | print("\nEtymology:") 37 | print(etymology) 38 | 39 | 40 | if __name__ == "__main__": 41 | main() 42 | -------------------------------------------------------------------------------- /src/march_2024.py: -------------------------------------------------------------------------------- 1 | color_codes = [ 2 | "red": "#FF0000", 3 | "green": "#00FF00", 4 | "blue": "#0000FF", 5 | "yellow": "#FFFF00" 6 | ] 7 | 8 | 9 | 10 | # regex for US telephone numbers 11 | us_tel_num = r'\(?([2-9][0-8][0-9])\)?[-.●]?([2-9][0-9]{2})[-.●]?([0-9]{4})' 12 | 13 | 14 | -------------------------------------------------------------------------------- /src/microsoft-LLMLingua.url: -------------------------------------------------------------------------------- 1 | [InternetShortcut] 2 | URL=https://github.com/microsoft/LLMLingua 3 | -------------------------------------------------------------------------------- /src/monitor.py: -------------------------------------------------------------------------------- 1 | import 
openai 2 | 3 | openai.api_key = "" 4 | 5 | # Retrieve fine-tuning job list 6 | openai.FineTuningJob.list(limit=10) 7 | 8 | # Retrieve the fine-tuning job details 9 | job_details = openai.FineTuningJob.retrieve("job-id") 10 | print(job_details) 11 | 12 | """ 13 | OUTPUT 14 | { 15 | "object": "fine_tuning.job", 16 | "id": "job-id", 17 | "model": "gpt-3.5-turbo-0613", 18 | "created_at": 1697812068, 19 | "finished_at": null, 20 | "fine_tuned_model": null, 21 | "organization_id": "org-3ZZndK1ciLfJ79fmFEaDGTzz", 22 | "result_files": [], 23 | "status": "validating_files", 24 | "validation_file": null, 25 | "training_file": "file-eeyhE6C0I84pkuu3kr9ZFgCU", 26 | "hyperparameters": { 27 | "n_epochs": "auto" 28 | }, 29 | "trained_tokens": null, 30 | "error": null 31 | } 32 | """ 33 | -------------------------------------------------------------------------------- /src/powershell.ps1: -------------------------------------------------------------------------------- 1 | # Install IIS 2 | Install-WindowsFeature -Name Web-Server -IncludeManagementTools 3 | 4 | # Create a website 5 | $websiteName = "MyWebsite" 6 | $websitePath = "C:\inetpub\wwwroot\$websiteName" 7 | New-Item -ItemType Directory -Path $websitePath 8 | 9 | # Create a default webpage 10 | $defaultPagePath = "$websitePath\index.html" 11 | @" 12 | 13 | 14 | 15 | Welcome to $websiteName 16 | 17 | 18 |

Welcome to $websiteName

19 |

This is a simple website created using PowerShell and IIS.

20 | 21 | 22 | "@ | Out-File -FilePath $defaultPagePath 23 | 24 | # Configure the website in IIS 25 | $site = New-WebSite -Name $websiteName -PhysicalPath $websitePath -Port 80 26 | $binding = Get-WebBinding -Name $websiteName -Port 80 27 | $binding.AddSslCertificate("0.0.0.0", "MyCertificateThumbprint") 28 | Set-WebBinding -Name $websiteName -BindingInformation $binding.BindingInformation 29 | 30 | # Start the website 31 | Start-WebSite -Name $websiteName 32 | 33 | -------------------------------------------------------------------------------- /src/project_specific_variable.py: -------------------------------------------------------------------------------- 1 | projectSpecificVariable = "I am a global variable" -------------------------------------------------------------------------------- /src/prompt.txt: -------------------------------------------------------------------------------- 1 | Contact info: 2 | Tim Warner (tim-warner@pluralsight.com) 3 | LinkedIn: timw.info/li 4 | Course files: timw.info/edo 5 | 6 | 7 | 8 | System: You are expert in Python and OpenAI development. Your code is always documented and follows industry best practices. You never hallucinate, and your references are to the latest content. You think in a procedural, step-by-step manner. You never expose secrets in plain text. 9 | 10 | User: Please make a one-page Python web application using Flask. The simple chatbot interface looks like this: 11 | 12 | Title: OpenAI GPT-3 Chatbot 13 | 14 | Prompt: 15 | 16 | Submit button 17 | 18 | Response: 19 | 20 | The user enters a prompt in the prompt text box. The user clicks the Submit button. The request is sent to the Chat Completion endpoint (not Completion) using gpt-35-turbo model. 21 | 22 | The response text box displays the response from the OpenAI GPT-3 chatbot. The response is nicely formatted with line breaks and paragraphs. The response is limited to 1000 characters. 23 | 24 | Make the web page simple but aesthetically pleasing. 
Use the official OpenAI Python library. 25 | 26 | 27 | 28 | 29 | 30 | openai==0.10.2 31 | Flask==0.12.3 # Outdated, latest is Flask 2.x 32 | Werkzeug==0.14.1 # Outdated, latest is Werkzeug 2.x 33 | Jinja2==2.10 # Outdated, latest is Jinja2 3.x 34 | itsdangerous==0.24 # Outdated, latest is itsdangerous 2.x 35 | click==6.7 # Outdated, latest is click 8.x 36 | SQLAlchemy==1.3.0 # Outdated, latest is SQLAlchemy 1.4.x 37 | requests==2.20.0 # Outdated, has known vulnerabilities -------------------------------------------------------------------------------- /src/py.py: -------------------------------------------------------------------------------- 1 | import openai 2 | import json 3 | 4 | # Set up OpenAI API credentials 5 | openai.api_key = "" 6 | 7 | # Set up the API endpoint 8 | endpoint = "https://api.openai.com/v1/chat/completions" 9 | 10 | # Send the request to the API endpoint 11 | response = openai.ChatCompletion.create( 12 | model="gpt-3.5-turbo", 13 | temperature = 0.5, 14 | messages=[ 15 | {"role": "system", "content": "You are a helpful assistant."}, 16 | {"role": "user", "content": "Who won the world series in 2020?"}, 17 | {"role": "assistant", "content": "The Los Angeles Dodgers won the World Series in 2020."}, 18 | {"role": "user", "content": "Where was it played?"} 19 | ] 20 | ) 21 | 22 | # Print the response 23 | print(json.dumps(response, indent=4)) -------------------------------------------------------------------------------- /src/python.py: -------------------------------------------------------------------------------- 1 | def GetUbuntuIPAddress(): 2 | import socket 3 | import fcntl 4 | import struct 5 | s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) 6 | return socket.inet_ntoa(fcntl.ioctl( 7 | s.fileno(), 8 | 0x8915, # SIOCGIFADDR 9 | struct.pack('256s', 'eth0'[:15]) 10 | )[20:24]) 11 | 12 | import socket 13 | import fcntl 14 | import struct 15 | 16 | def get_ip_address(): 17 | s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) 18 | 
return socket.inet_ntoa(fcntl.ioctl( 19 | s.fileno(), 20 | 0x8915, # SIOCGIFADDR 21 | struct.pack('256s', 'eth0'[:15]) 22 | )[20:24]) 23 | 24 | 25 | 26 | 27 | 28 | # function that says "Hello world," personalized to the user's name 29 | def hello(name): 30 | print("Hello " + name + "!") 31 | print("Your IP address is " + GetUbuntuIPAddress() + ".") 32 | print("Your computer name is " + socket.gethostname() + ".") 33 | print("Your operating system is " + platform.system() + ".") 34 | print("Your Python version is " + platform.python_version() + ".") 35 | print("Your Python executable is " + sys.executable + ".") 36 | print("Your Python path is " + sys.path[0] + ".") 37 | 38 | 39 | -------------------------------------------------------------------------------- /src/refactor_code.py: -------------------------------------------------------------------------------- 1 | class student: 2 | def __init__(self,n,r): 3 | self.name=n 4 | self.roll=r 5 | self.grades = [90, 85, 77, 92, 88] 6 | 7 | def get_average(self): 8 | total = 0 9 | for i in self.grades: 10 | total += i 11 | return total / len(self.grades) 12 | 13 | def get_grade(self): 14 | avg = self.get_average() 15 | if avg >= 90: return 'A' 16 | elif avg >= 80: return 'B' 17 | elif avg >= 70: return 'C' 18 | elif avg >= 60: return 'D' 19 | else: return 'F' 20 | 21 | def printStudent(self): 22 | print('Name:', self.name) 23 | print('Roll:', self.roll) 24 | print('Average:', self.get_average()) 25 | print('Grade:', self.get_grade()) 26 | 27 | s1 = student('John', 1) 28 | s1.printStudent() -------------------------------------------------------------------------------- /src/req-azureopenai.py: -------------------------------------------------------------------------------- 1 | 2 | sum = 42 + 3 | print(sum) 4 | 5 | pip install --upgrade openai 6 | 7 | import os 8 | from openai import AzureOpenAI 9 | 10 | client = AzureOpenAI( 11 | api_key=os.getenv("AZURE_OPENAI_KEY"), 12 | api_version="2023-10-01-preview", 13 | 
azure_endpoint = os.getenv("AZURE_OPENAI_ENDPOINT") 14 | ) 15 | 16 | deployment_name='REPLACE_WITH_YOUR_DEPLOYMENT_NAME' #This will correspond to the custom name you chose for your deployment when you deployed a model. 17 | 18 | # Send a completion call to generate an answer 19 | print('Sending a test completion job') 20 | start_phrase = 'Write a tagline for an ice cream shop. ' 21 | response = client.completions.create(model=deployment_name, prompt=start_phrase, max_tokens=10) 22 | print(response.choices[0].text) 23 | 24 | 25 | 26 | # Old approaches 27 | import openai 28 | 29 | # Set your API key from Azure 30 | openai.api_key = "your-api-key" 31 | 32 | # Define the chat completion request 33 | response = openai.ChatCompletion.create( 34 | model="gpt-35-turbo", # Specify your deployment model name 35 | messages=[ 36 | {"role": "system", "content": "You are a helpful assistant."}, 37 | {"role": "user", "content": "Hello world!"}, 38 | ], 39 | ) 40 | 41 | # Print the response 42 | print(response) 43 | 44 | 45 | import openai 46 | 47 | # Initialize the OpenAI object with your API key 48 | openai_obj = openai.OpenAI(api_key="your-api-key") 49 | 50 | # Define the chat completion request 51 | response = openai_obj.ChatCompletion.create( 52 | model="gpt-35-turbo", # Specify your deployment model name 53 | messages=[ 54 | {"role": "system", "content": "You are a helpful assistant."}, 55 | {"role": "user", "content": "Hello world!"}, 56 | ], 57 | ) 58 | 59 | # Print the response 60 | print(response) 61 | -------------------------------------------------------------------------------- /src/requirements.txt: -------------------------------------------------------------------------------- 1 | openai==0.10.2 2 | Flask==3.0.2 # Outdated, latest is Flask 2.x 3 | Werkzeug==0.14.1 # Outdated, latest is Werkzeug 2.x 4 | Jinja2==2.10 # Outdated, latest is Jinja2 3.x 5 | itsdangerous==0.24 # Outdated, latest is itsdangerous 2.x 6 | click==6.7 # Outdated, latest is click 8.x 7 | 
SQLAlchemy==2.0.25 # Outdated, latest is SQLAlchemy 1.4.x 8 | requests==2.20.0 # Outdated, has known vulnerabilities 9 | Pillow==10.2.0 # CVE-2020-10379, CVE-2020-10177 10 | numpy==1.16.0 # CVE-2019-6446 11 | pandas==2.2.0 12 | -------------------------------------------------------------------------------- /src/run.py: -------------------------------------------------------------------------------- 1 | from app import app 2 | 3 | if __name__ == "__main__": 4 | app.run(debug=True) 5 | -------------------------------------------------------------------------------- /src/sample_openai_chat.py: -------------------------------------------------------------------------------- 1 | import openai 2 | import os 3 | 4 | 5 | def get_etymology(name): 6 | try: 7 | response = openai.ChatCompletion.create( 8 | model="gpt-3.5-turbo", 9 | messages=[ 10 | {"role": "system", 11 | "content": "You are an etymology specialist. You provide the etymology of a name, the meaning of the name, and it's trend in popularity over time."}, 12 | { 13 | "role": "user", 14 | "content": f"What is the etymology of the name {name}?", 15 | }, 16 | ], 17 | ) 18 | return response.choices[0].message["content"] 19 | except Exception as e: 20 | return f"An error occurred: {e}" 21 | 22 | 23 | def main(): 24 | openai.api_key = os.getenv( 25 | "OPENAI_API_KEY" 26 | ) # Retrieves the API key from environment variable 27 | 28 | if not openai.api_key: 29 | print( 30 | "OpenAI API key not found. Please set the OPENAI_API_KEY environment variable. 
31 | ) 32 | return 33 | 34 | name = input("Enter a first name: ") 35 | etymology = get_etymology(name) 36 | 37 | print("\nEtymology:") 38 | print(etymology) 39 | 40 | 41 | if __name__ == "__main__": 42 | main() 43 | -------------------------------------------------------------------------------- /src/self-hosted-runner.ps1: -------------------------------------------------------------------------------- 1 | # Configure self-hosted (Windows) runner 2 | # Reference: https://timw.info/zpe 3 | 4 | # Create a folder under the drive root 5 | mkdir c:\actions-runner ; cd c:\actions-runner 6 | 7 | function calculateDaysBetweenDates { 8 | param( 9 | [Parameter(Mandatory=$true)][DateTime]$startDate, 10 | [Parameter(Mandatory=$true)][DateTime]$endDate 11 | ) 12 | $days = New-TimeSpan -Start $startDate -End $endDate 13 | return $days.Days 14 | } 15 | 16 | # Download the latest runner package 17 | Invoke-WebRequest -Uri https://github.com/actions/runner/releases/download/v2.311.0/actions-runner-win-x64-2.311.0.zip -OutFile actions-runner-win-x64-2.311.0.zip 18 | 19 | # Optional: Validate the hash 20 | $ if((Get-FileHash -Path actions-runner-win-x64-2.311.0.zip -Algorithm SHA256).Hash.ToUpper() -ne 'e629628ce25c1a7032d845f12dfe3dced630ca13a878b037dde77f5683b039dd'.ToUpper()) { throw 'Computed checksum did not match' } 21 | 22 | # Extract the installer 23 | Add-Type -AssemblyName System.IO.Compression.FileSystem ; [System.IO.Compression.ZipFile]::ExtractToDirectory("$PWD/actions-runner-win-x64-2.311.0.zip", "$PWD") 24 | 25 | # Create the runner and start the configuration experience 26 | ./config.cmd --url https://github.com/timothywarner/actions-cert-prep --token ADAK7RYK5IY4BC5GMVII7KDFTQWIW 27 | 28 | # Run it! 
29 | ./run.cmd -------------------------------------------------------------------------------- /src/test.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Net.Http; 3 | using System.Text; 4 | using System.Threading.Tasks; 5 | 6 | class Program 7 | { 8 | private static readonly HttpClient client = new HttpClient(); 9 | 10 | 11 | 12 | static async Task Main(string[] args) 13 | { 14 | var requestBody = new { input = "Hello, world!" }; 15 | 16 | var content = new StringContent( 17 | Newtonsoft.Json.JsonConvert.SerializeObject(requestBody), 18 | Encoding.UTF8, 19 | "application/json"); 20 | 21 | client.DefaultRequestHeaders.Add("Authorization", "Bearer YOUR_API_KEY"); 22 | 23 | var response = await client.PostAsync("YOUR_ENDPOINT", content); 24 | 25 | var responseString = await response.Content.ReadAsStringAsync(); 26 | 27 | Console.WriteLine(responseString); 28 | } 29 | } -------------------------------------------------------------------------------- /src/test.ps1: -------------------------------------------------------------------------------- 1 | 2 | function Test-Function { 3 | [CmdletBinding()] 4 | param ( 5 | [Parameter(Mandatory=$true)] 6 | [string]$Name 7 | ) 8 | 9 | Write-Host "Hello, $Name" 10 | } 11 | 12 | # create a new Active Directory domain global group 13 | function New-ADGroup { 14 | [CmdletBinding()] 15 | param ( 16 | [Parameter(Mandatory=$true)] 17 | [string]$Name, 18 | # parameter 19 | ) 20 | 21 | Write-Host "Creating group $Name" 22 | } 23 | 24 | -------------------------------------------------------------------------------- /src/test.py: -------------------------------------------------------------------------------- 1 | def number_guessing_game() 2 | -------------------------------------------------------------------------------- /src/testchat-azure-openai.py: -------------------------------------------------------------------------------- 1 | import os 2 | import requests 3 
| import json 4 | import openai 5 | 6 | openai.api_key = "" 7 | # os.getenv("AZURE_OPENAI_KEY") 8 | openai.api_base = "" 9 | # os.getenv("AZURE_OPENAI_ENDPOINT") 10 | openai.api_type = 'azure' 11 | openai.api_version = '2023-05-15' # this may change in the future 12 | 13 | deployment_name='gpt-35-turbo' # Azure OpenAI Studio > Management > Deployments 14 | 15 | # Send a completion call to generate an answer 16 | print('Sending a test completion job to Azure...') 17 | start_phrase = 'Who won the Kentucky Derby in 1966 ? ' 18 | response = openai.Completion.create(engine=deployment_name, prompt=start_phrase, max_tokens=50, temperature=0.7) 19 | text = response['choices'][0]['text'].replace('\n', '').replace(' .', '.').strip() 20 | # text = response['choices'][0]['text'].strip() 21 | print(start_phrase+text) -------------------------------------------------------------------------------- /src/testchat-openai.py: -------------------------------------------------------------------------------- 1 | import openai 2 | 3 | openai.api_key = ""  # SECURITY: hard-coded sk- key removed from source; rotate the leaked key and load via os.environ["OPENAI_API_KEY"] 4 | 5 | response = openai.ChatCompletion.create( 6 | model="gpt-3.5-turbo", 7 | messages=[ 8 | {"role": "system", "content": "You are a helpful assistant."}, 9 | {"role": "user", "content": "What's the weather like today?"}, 10 | {"role": "assistant", "content": "I'm an AI and I don't have access to real-time data. 
However, you can check the weather on a weather website or app."}, 11 | {"role": "user", "content": "Translate the following English text to French: 'Hello, how are you?'"}, 12 | ] 13 | ) 14 | 15 | print(response['choices'][0]['message']['content']) -------------------------------------------------------------------------------- /tame-defender.ps1: -------------------------------------------------------------------------------- 1 | # Run as Admin - Add performance exclusions 2 | Add-MpPreference -ExclusionPath "C:\Users\tim\AppData\Local\Temp" 3 | Add-MpPreference -ExclusionPath "C:\Windows\Temp" 4 | Add-MpPreference -ExclusionPath "C:\Users\tim\Downloads" 5 | Add-MpPreference -ExclusionProcess "chrome.exe" 6 | Add-MpPreference -ExclusionProcess "msedgewebview2.exe" 7 | Add-MpPreference -ExclusionProcess "node.exe" 8 | Add-MpPreference -ExclusionProcess "python.exe" 9 | Add-MpPreference -ExclusionProcess "code.exe" 10 | 11 | # Reduce real-time scanning aggressiveness 12 | Set-MpPreference -ScanAvgCPULoadFactor 25 13 | Set-MpPreference -DisableCpuThrottleOnIdleScans $false 14 | 15 | # Disable cloud-based protection (major CPU hog) 16 | Set-MpPreference -MAPSReporting 0 17 | Set-MpPreference -SubmitSamplesConsent 2 18 | -------------------------------------------------------------------------------- /test-secrets.js: -------------------------------------------------------------------------------- 1 | // This is a test file for secret scanning 2 | // The following line contains a fake secret that should be detected 3 | 4 | const config = { 5 | apiKey: "AKIAIOSFODNN7EXAMPLE", 6 | endpoint: "https://api.example.com", 7 | username: "admin" 8 | }; 9 | 10 | function getData() { 11 | // This function would use the API key to authenticate 12 | console.log("Fetching data with API key"); 13 | } 14 | 15 | getData(); -------------------------------------------------------------------------------- /test-secrets2.js: 
-------------------------------------------------------------------------------- 1 | // This is another test file for secret scanning 2 | // The following line contains a fake secret that should be detected 3 | 4 | const config = { 5 | awsSecretKey: "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY", 6 | endpoint: "https://api.example.com", 7 | password: "SuperSecretPassword123!" 8 | }; 9 | 10 | function authenticate() { 11 | // This function would use the secret key to authenticate 12 | console.log("Authenticating with AWS Secret Key"); 13 | } 14 | 15 | authenticate(); -------------------------------------------------------------------------------- /tim-dev/ChatGPT and GitHub Copilot in 4 Hours.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/timothywarner/chatgptclass/93786f110785ece14306ec13656f108d24929645/tim-dev/ChatGPT and GitHub Copilot in 4 Hours.pdf -------------------------------------------------------------------------------- /tim-dev/convert_pdf.py: -------------------------------------------------------------------------------- 1 | import PyPDF2 2 | import os 3 | 4 | def convert_pdf_to_text(pdf_path, output_path): 5 | try: 6 | # Open the PDF file 7 | with open(pdf_path, 'rb') as file: 8 | # Create a PDF reader object 9 | pdf_reader = PyPDF2.PdfReader(file) 10 | 11 | # Get the number of pages 12 | num_pages = len(pdf_reader.pages) 13 | 14 | # Extract text from each page 15 | with open(output_path, 'w', encoding='utf-8') as output_file: 16 | for page_num in range(num_pages): 17 | # Get the page 18 | page = pdf_reader.pages[page_num] 19 | 20 | # Extract text from the page 21 | text = page.extract_text() 22 | 23 | # Write to output file with page number 24 | output_file.write(f"\n--- Page {page_num + 1} ---\n") 25 | output_file.write(text) 26 | output_file.write("\n") 27 | 28 | print(f"Successfully converted {pdf_path} to {output_path}") 29 | print(f"Total pages processed: {num_pages}") 30 | 
31 | except Exception as e: 32 | print(f"Error converting PDF: {str(e)}") 33 | 34 | # Convert the sell sheet PDF 35 | pdf_path = "ChatGPT and GitHub Copilot in 4 Hours.pdf" 36 | output_path = "course-sell-sheet.txt" 37 | 38 | if os.path.exists(pdf_path): 39 | convert_pdf_to_text(pdf_path, output_path) 40 | else: 41 | print(f"PDF file not found at: {pdf_path}") -------------------------------------------------------------------------------- /tim-dev/course-reg-page-copy.txt: -------------------------------------------------------------------------------- 1 | 2 | --- Page 1 --- 3 | 1/12/25, 6:11 PM ChatGPT and GitHub Copilot in 4 Hours 4 | https://www.oreilly.com/live-events/chatgpt-and-github-copilot-in-4-hours/0636920090248/ 1/6 5 | 6 | --- Page 2 --- 7 | 1/12/25, 6:11 PM ChatGPT and GitHub Copilot in 4 Hours 8 | https://www.oreilly.com/live-events/chatgpt-and-github-copilot-in-4-hours/0636920090248/ 2/6 9 | 10 | --- Page 3 --- 11 | 1/12/25, 6:11 PM ChatGPT and GitHub Copilot in 4 Hours 12 | https://www.oreilly.com/live-events/chatgpt-and-github-copilot-in-4-hours/0636920090248/ 3/6 13 | 14 | --- Page 4 --- 15 | 1/12/25, 6:11 PM ChatGPT and GitHub Copilot in 4 Hours 16 | https://www.oreilly.com/live-events/chatgpt-and-github-copilot-in-4-hours/0636920090248/ 4/6 17 | 18 | --- Page 5 --- 19 | 1/12/25, 6:11 PM ChatGPT and GitHub Copilot in 4 Hours 20 | https://www.oreilly.com/live-events/chatgpt-and-github-copilot-in-4-hours/0636920090248/ 5/6 21 | 22 | --- Page 6 --- 23 | 1/12/25, 6:11 PM ChatGPT and GitHub Copilot in 4 Hours 24 | https://www.oreilly.com/live-events/chatgpt-and-github-copilot-in-4-hours/0636920090248/ 6/6 25 | -------------------------------------------------------------------------------- /tim-dev/secret-scanning.yml.disabled: -------------------------------------------------------------------------------- 1 | # DISABLED: This workflow is currently disabled 2 | # To re-enable it, remove the ".disabled" extension from the filename 3 | # and 
uncomment the configuration below 4 | 5 | # name: Secret Scanning 6 | # 7 | # on: 8 | # push: 9 | # branches: [ main, master ] 10 | # pull_request: 11 | # branches: [ main, master ] 12 | # # Run manually from the Actions tab 13 | # workflow_dispatch: 14 | # 15 | # jobs: 16 | # pre-commit: 17 | # runs-on: ubuntu-latest 18 | # steps: 19 | # - name: Checkout code 20 | # uses: actions/checkout@v3 21 | # with: 22 | # fetch-depth: 0 23 | # 24 | # - name: Set up Python 25 | # uses: actions/setup-python@v4 26 | # with: 27 | # python-version: '3.10' 28 | # 29 | # - name: Install pre-commit 30 | # run: pip install pre-commit 31 | # 32 | # - name: Run detect-secrets scanner 33 | # run: | 34 | # pip install detect-secrets 35 | # pre-commit run detect-secrets --all-files 36 | # 37 | # gitleaks: 38 | # runs-on: ubuntu-latest 39 | # steps: 40 | # - name: Checkout code 41 | # uses: actions/checkout@v3 42 | # with: 43 | # fetch-depth: 0 44 | # 45 | # - name: Run Gitleaks 46 | # uses: gitleaks/gitleaks-action@v2 47 | # env: 48 | # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} -------------------------------------------------------------------------------- /warner-chatgpt-github-copilot.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/timothywarner/chatgptclass/93786f110785ece14306ec13656f108d24929645/warner-chatgpt-github-copilot.pptx --------------------------------------------------------------------------------