├── .gitignore ├── .travis.yml ├── CONTRIBUTING.md ├── LICENSE ├── README-zh_CN.md ├── README.md ├── certificates ├── aws-cloud-practitioner.md ├── aws-solutions-architect-associate.md ├── azure-fundamentals-az-900.md ├── cka.md └── ckad.md ├── coding └── python │ └── binary_search.py ├── credits.md ├── exercises ├── ansible │ ├── README.md │ ├── my_first_playbook.md │ ├── my_first_task.md │ ├── solutions │ │ ├── my_first_playbook.md │ │ ├── my_first_task.md │ │ └── update_upgrade_task.md │ └── update_upgrade_task.md ├── aws │ ├── README.md │ ├── access_advisor.md │ ├── create_role.md │ ├── create_user.md │ ├── hello_function.md │ ├── password_policy_and_mfa.md │ ├── solutions │ │ ├── access_advisor.md │ │ ├── create_role.md │ │ ├── create_user.md │ │ ├── credential_report.md │ │ ├── hello_function.md │ │ ├── password_policy_and_mfa.md │ │ └── url_function.md │ └── url_function.md ├── cicd │ ├── README.md │ ├── ci_for_open_source_project.md │ ├── deploy_to_kubernetes.md │ ├── remove_builds.md │ ├── remove_jobs.md │ └── solutions │ │ ├── deploy_to_kubernetes │ │ ├── Jenkinsfile │ │ ├── README.md │ │ ├── deploy.yml │ │ ├── helloworld.yml │ │ ├── html │ │ │ ├── css │ │ │ │ ├── normalize.css │ │ │ │ └── skeleton.css │ │ │ ├── images │ │ │ │ └── favicon.png │ │ │ └── index.html │ │ └── inventory │ │ ├── remove_builds_solution.groovy │ │ └── remove_jobs_solution.groovy ├── cloud │ └── README.md ├── cloud_slack_bot.md ├── containers │ ├── README.md │ ├── image_layers.md │ ├── multi_stage_builds.md │ ├── run_forest_run.md │ ├── running_containers.md │ ├── solutions │ │ ├── image_layers.md │ │ ├── multi_stage_builds.md │ │ ├── run_forest_run.md │ │ ├── running_containers.md │ │ └── working_with_images.md │ ├── working_with_images.md │ └── write_dockerfile_run_container.md ├── databases │ ├── solutions │ │ └── table_for_message_board_system.md │ └── table_for_message_board_system.md ├── devops │ ├── README.md │ ├── containerize_app.md │ ├── ha_hello_world.md │ └── solutions 
│ │ ├── containerize_app.md │ │ └── ha_hello_world.md ├── eflk.md ├── flask_container_ci │ ├── README.md │ ├── app │ │ ├── __init__.py │ │ ├── config.py │ │ ├── main.py │ │ └── tests.py │ ├── requirements.txt │ ├── tests.py │ └── users.json ├── flask_container_ci2 │ ├── README.md │ ├── app │ │ ├── __init__.py │ │ ├── config.py │ │ ├── main.py │ │ └── tests.py │ ├── requirements.txt │ └── tests.py ├── git │ ├── README.md │ ├── branch_01.md │ ├── commit_01.md │ ├── solutions │ │ ├── branch_01_solution.md │ │ ├── commit_01_solution.md │ │ └── squashing_commits.md │ └── squashing_commits.md ├── jenkins_pipelines.md ├── jenkins_scripts.md ├── kubernetes │ ├── README.md │ ├── killing_containers.md │ ├── pods_01.md │ ├── replicaset_01.md │ ├── replicaset_02.md │ ├── replicaset_03.md │ ├── services_01.md │ └── solutions │ │ ├── killing_containers.md │ │ ├── pods_01_solution.md │ │ ├── replicaset_01_solution.md │ │ ├── replicaset_02_solution.md │ │ ├── replicaset_03_solution.md │ │ └── services_01_solution.md ├── misc │ └── elk_kibana_aws.md ├── openshift │ ├── projects_101.md │ └── solutions │ │ └── projects_101.md ├── os │ ├── fork_101.md │ ├── fork_102.md │ └── solutions │ │ ├── fork_101_solution.md │ │ └── fork_102_solution.md ├── pipeline_deploy_image_to_k8.md ├── programming │ ├── grep_berfore_and_after.md │ └── web_scraper.md ├── python │ ├── advanced_data_types.md │ ├── compress_string.md │ ├── data_types.md │ ├── reverse_string.md │ └── solutions │ │ ├── advanced_data_types_solution.md │ │ ├── data_types_solution.md │ │ └── reverse_string.md ├── shell │ ├── argument_check.md │ ├── basic_date.md │ ├── count_chars.md │ ├── directories_comparison.md │ ├── empty_files.md │ ├── factors.md │ ├── files_size.md │ ├── great_day.md │ ├── hello_world.md │ ├── host_status.md │ ├── num_of_args.md │ ├── print_arguments.md │ ├── solutions │ │ ├── basic_date.md │ │ ├── count_chars.md │ │ ├── empty_files.md │ │ ├── factors.md │ │ ├── files_size.md │ │ ├── great_day.md │ │ ├── 
hello_world.md │ │ ├── host_status.md │ │ ├── num_of_args.md │ │ └── sum.md │ └── sum.md ├── sql │ ├── improve_query.md │ └── solutions │ │ └── improve_query.md └── terraform │ └── README.md ├── faq.md ├── images ├── Go.png ├── HR.png ├── ansible.png ├── aws.png ├── azure.png ├── bash.png ├── big-data.png ├── certificates.png ├── cicd.png ├── cloud.png ├── containers.png ├── databases.png ├── design.png ├── design │ ├── cdn-no-downtime.png │ ├── input-process-output.png │ ├── producers_consumers_fix.png │ └── producers_consumers_issue.png ├── devops.png ├── devops_exercises.png ├── devops_resources.png ├── distributed.png ├── distributed │ ├── distributed_design_lb.png │ └── distributed_design_standby.png ├── dns.png ├── elastic.png ├── exercises.png ├── general.png ├── git.png ├── googlecloud.png ├── hardware.png ├── how_they_devops.png ├── infraverse.png ├── jenkins.png ├── jenkins │ └── jenkins-to-kibana.png ├── kubernetes.png ├── kubernetes │ ├── kubernetes_components.png │ └── kubernetes_components_solution.png ├── linux.png ├── linux_master.jpeg ├── mongo.png ├── monitoring.png ├── network.png ├── openshift.png ├── openstack.png ├── os.png ├── programming.png ├── prometheus.png ├── puppet.png ├── python.png ├── regex.png ├── security.png ├── sql.png ├── storage.png ├── system_design_notebook.png ├── terraform.png ├── testing.png ├── virtualization.png └── you.png ├── prepare_for_interview.md ├── scripts ├── count_questions.sh ├── question_utils.py ├── random_question.py ├── run_ci.sh └── update_question_number.py └── tests ├── scripts_question_utils_unittest.py ├── syntax_checker_unittest.py ├── syntax_lint.py └── testcases ├── testcase1.md ├── testcase2.md └── testcase3.md /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | 
develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | *.pyc 94 | 95 | #Jetbrain's ides. 96 | .idea -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: "python" 2 | python: 3 | - "3.8" 4 | install: 5 | - pip install flake8 6 | script: 7 | - flake8 --max-line-length=100 . 8 | - python tests/syntax_lint.py 9 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | ## How to contribute 2 | 3 | Use pull requests to contribute to the project. 
4 | 5 | Stick to the following format: 6 | 7 | \
8 | [Question]
9 | 10 | [Answer] 11 | \
12 | 13 | * If you added several questions and you would like to know how many questions are there you can use the script "count_questions.sh" in scripts directory. 14 | 15 | ## What to avoid 16 | 17 | * Avoid adding installation questions. Those are the worst type of questions... 18 | * Don't copy questions and answers from other sources. They probably worked hard for adding them. 19 | * If you add new images, make sure they are free and can be used. 20 | 21 | ## Before submitting the pull request 22 | 23 | You can test your changes locally with the script `run_ci.sh` in scripts directory. 24 | -------------------------------------------------------------------------------- /certificates/azure-fundamentals-az-900.md: -------------------------------------------------------------------------------- 1 | ## AZ-900 2 | 3 |
4 | What is cloud computing?
5 | 6 | [Wikipedia](https://en.wikipedia.org/wiki/Cloud_computing): "Cloud computing is the on-demand availability of computer system resources, especially data storage (cloud storage) and computing power, without direct active management by the user" 7 |
8 | 9 |
10 | What types of clouds (or cloud deployments) are there?
11 | 12 | * Public - Cloud services sharing computing resources among multiple customers 13 | * Private - Cloud services having computing resources limited to specific customer or organization, managed by third party or organizations itself 14 | * Hybrid - Combination of public and private clouds 15 |
16 | -------------------------------------------------------------------------------- /certificates/cka.md: -------------------------------------------------------------------------------- 1 | ## Certified Kubernetes Administrator (CKA) 2 | 3 | ### Pods 4 | 5 |
6 | Deploy a pod called web-1985 using the nginx:alpine image
7 | 8 | `kubectl run web-1985 --image=nginx:alpine --restart=Never` 9 |
10 | 11 |
12 | How to find out on which node a certain pod is running?
13 | 14 | `kubectl get po -o wide` 15 |
16 | -------------------------------------------------------------------------------- /certificates/ckad.md: -------------------------------------------------------------------------------- 1 | ## Certified Kubernetes Application Developer (CKAD) 2 | 3 | ### Core Concepts 4 | 5 | ### Pods 6 | 7 |
8 | Deploy a pod called web-1985 using the nginx:alpine image
9 | 10 | `kubectl run web-1985 --image=nginx:alpine --restart=Never` 11 |
12 | 13 |
14 | How to find out on which node a certain pod is running?
15 | 16 | `kubectl get po -o wide` 17 |
18 | 19 | ### Namespaces 20 | 21 |
22 | List all namespaces
23 | 24 | kubectl get ns 25 |
26 | 27 |
28 | List all the pods in the namespace 'neverland'
29 | 30 | kubectl get po -n neverland 31 |
32 | 33 |
34 | List all the pods in all the namespaces
35 | 36 | kubectl get po --all-namespaces 37 |
#!/usr/bin/env python
"""Binary search demo: locate a target value in a sorted list in O(log n)."""

import random


def binary_search(arr, lb, ub, target):
    """Recursively search the sorted sequence `arr` for `target`.

    Args:
        arr: sequence sorted in ascending order.
        lb: lower bound index of the search window (inclusive).
        ub: upper bound index of the search window (inclusive).
        target: value to locate.

    Returns:
        An index of `target` within `arr`, or -1 if it is not present.
    """
    if lb > ub:
        # Empty window: target is not in the sequence.
        return -1
    # BUG FIX: the original `ub + lb // 2` floor-divides only `lb` because
    # `//` binds tighter than `+`. That makes `mid` exceed `ub` when lb > 0
    # (IndexError / non-termination) and degrades the search to a linear
    # scan when lb == 0. Parenthesize to compute the true midpoint.
    mid = (lb + ub) // 2
    if arr[mid] == target:
        return mid
    if arr[mid] < target:
        # Target is strictly to the right of mid.
        return binary_search(arr, mid + 1, ub, target)
    # Target is strictly to the left of mid.
    return binary_search(arr, lb, mid - 1, target)


if __name__ == '__main__':
    # Demo: search for a random target in a random sorted list.
    rand_num_li = sorted([random.randint(1, 50) for _ in range(10)])
    target = random.randint(1, 50)
    print("List: {}\nTarget: {}\nIndex: {}".format(
        rand_num_li, target,
        binary_search(rand_num_li, 0, len(rand_num_li) - 1, target)))
4 | Git Logo by Jason Long is licensed under the Creative Commons Attribution 3.0 Unported License
5 | Terraform logo created by Hashicorp®
6 | Docker logo created by Docker®
7 | The Python logo is a trademark of the Python Software Foundation®
8 | Puppet logo created by Puppet®
9 | Bash logo created by Prospect One
10 | OpenStack logo created by and a trademark of The OpenStack Foundation®
11 | Linux, Kubernetes and Prometheus logos are trademarks of The Linux Foundation®
12 | Mongo logo is a trademark of Mongo®
13 | Distributed logo by Flatart
14 | Challenge icon by Elizabeth Arostegui in Technology Mix 15 | "Question you ask" (man raising hand) and "Database" icons by [Webalys](https://www.iconfinder.com/webalys) 16 | Testing logo by [Flatart](https://www.iconfinder.com/Flatart)
17 | Google Cloud Plataform Logo created by Google®
18 | VirtualBox Logo created by dAKirby309, under the Creative Commons Attribution-Noncommercial 4.0 License. 19 | Certificates logo by Flatart
20 | Storage icon by Dinosoftlab
21 | CI/CD icon made made by Freepik from www.flaticon.com 22 | -------------------------------------------------------------------------------- /exercises/ansible/my_first_playbook.md: -------------------------------------------------------------------------------- 1 | ## Ansible - My First Playbook 2 | 3 | 1. Write a playbook that will: 4 | a. Install the package zlib 5 | b. Create the file `/tmp/some_file` 6 | 2. Run the playbook on a remote host 7 | -------------------------------------------------------------------------------- /exercises/ansible/my_first_task.md: -------------------------------------------------------------------------------- 1 | ## Ansible - My First Task 2 | 3 | 1. Write a task to create the directory ‘/tmp/new_directory’ 4 | -------------------------------------------------------------------------------- /exercises/ansible/solutions/my_first_playbook.md: -------------------------------------------------------------------------------- 1 | ## My first playbook - Solution 2 | 3 | 1. `vi first_playbook.yml` 4 | 5 | ``` 6 | - name: Install zlib and create a file 7 | hosts: some_remote_host 8 | tasks: 9 | - name: Install zlib 10 | package: 11 | name: zlib 12 | state: present 13 | become: yes 14 | - name: Create the file /tmp/some_file 15 | path: '/tmp/some_file' 16 | state: touch 17 | ``` 18 | 19 | 2. 
First, edit the inventory file: `vi /etc/ansible/hosts` 20 | 21 | ``` 22 | [some_remote_host] 23 | some.remoted.host.com 24 | ``` 25 | 26 | Run the playbook 27 | 28 | `ansible-playbook first_playbook.yml` 29 | -------------------------------------------------------------------------------- /exercises/ansible/solutions/my_first_task.md: -------------------------------------------------------------------------------- 1 | ## My First Task - Solution 2 | 3 | ``` 4 | - name: Create a new directory 5 | file: 6 | path: "/tmp/new_directory" 7 | state: directory 8 | ``` 9 | -------------------------------------------------------------------------------- /exercises/ansible/solutions/update_upgrade_task.md: -------------------------------------------------------------------------------- 1 | ## Update and Upgrade apt packages task - Solution 2 | 3 | ``` 4 | - name: "update and upgrade apt packages." 5 | become: yes 6 | apt: 7 | upgrade: yes 8 | update_cache: yes 9 | ``` 10 | -------------------------------------------------------------------------------- /exercises/ansible/update_upgrade_task.md: -------------------------------------------------------------------------------- 1 | ## Ansible - Update and upgrade APT packages task 2 | 3 | 1. Write a task to update and upgrade apt packages 4 | -------------------------------------------------------------------------------- /exercises/aws/access_advisor.md: -------------------------------------------------------------------------------- 1 | ## AWS IAM - Access Advisor 2 | 3 | ### Objectives 4 | 5 | Go to the Access Advisor and answer the following questions regarding one of the users: 6 | 7 | 1. Are there services this user never accessed? 8 | 2. What was the last service the user has accessed? 9 | 3. What the Access Advisor is used/good for? 
10 | -------------------------------------------------------------------------------- /exercises/aws/create_role.md: -------------------------------------------------------------------------------- 1 | ## AWS - Create a Role 2 | 3 | ### Objectives 4 | 5 | Create a basic role to provide EC2 service with Full IAM access permissions.
6 | In the end, run from the CLI (or CloudShell) the command to verify the role was created. 7 | 8 | ### Solution 9 | 10 | 1. Go to AWS console -> IAM 11 | 2. Click in the left side menu on "Access Manamgement" -> Roles 12 | 3. Click on "Create role" 13 | 3. Choose "AWS service" as the type of trusted entity and then choose "EC2" as a use case. Click on "Next" 14 | 4. In permissions page, check "IAMFullAccess" and click on "Next" until you get to "Review" page 15 | 5. In the "Review" page, give the role a name (e.g. IAMFullAcessEC2), provide a short description and click on "Create role" 16 | 6. `aws iam list-roles` will list all the roles in the account, including the one we've just created. 17 | -------------------------------------------------------------------------------- /exercises/aws/create_user.md: -------------------------------------------------------------------------------- 1 | ## IAM AWS - Create a User 2 | 3 | ### Objectives 4 | 5 | As you probably know at this point, it's not recommended to work with the root account in AWS. For this reason you are going to create a new account which you'll use regularly as the admin account. 6 | 7 | 1. Create a user with password credentials 8 | 2. Add the newly created user to a group called "admin" and attach to it the policy called "Administrator Access" 9 | 3. 
Make sure the user has a tag called with the key `Role` and the value `DevOps` 10 | -------------------------------------------------------------------------------- /exercises/aws/hello_function.md: -------------------------------------------------------------------------------- 1 | ## Hello Function 2 | 3 | Create a basic AWS Lambda function that when given a name, will return "Hello " 4 | -------------------------------------------------------------------------------- /exercises/aws/password_policy_and_mfa.md: -------------------------------------------------------------------------------- 1 | ## AWS IAM - Password Policy & MFA 2 | 3 | Note: DON'T perform this exercise unless you understand what you are doing and what is the outcome of applying these changes to your account 4 | 5 | ### Objectives 6 | 7 | 1. Create password policy with the following settings: 8 | 1. At least minimum 8 characters 9 | 2. At least one number 10 | 3. Prevent password reuse 11 | 12 | 2. Then enable MFA for the account. 13 | -------------------------------------------------------------------------------- /exercises/aws/solutions/access_advisor.md: -------------------------------------------------------------------------------- 1 | ## AWS IAM - Access Advisor 2 | 3 | ### Objectives 4 | 5 | Go to the Access Advisor and answer the following questions regarding one of the users: 6 | 7 | 1. Are there services this user never accessed? 8 | 2. What was the last service the user has accessed? 9 | 3. What the Access Advisor is used/good for? 10 | 11 | ### Solution 12 | 13 | 1. Go to AWS IAM service and click on "Users" under "Access Management" 14 | 2. Click on one of the users 15 | 3. Click on the "Access Advisor" tab 16 | 4. Check which service was last accessed and which was never accessed 17 | 18 | Access Advisor can be good to evaluate whether there are services the user is not accessing (as in never or not frequently). 
This can be help in deciding whether some permissions should be revoked or modified. 19 | -------------------------------------------------------------------------------- /exercises/aws/solutions/create_role.md: -------------------------------------------------------------------------------- 1 | ## AWS - Create a Role 2 | 3 | ### Objectives 4 | 5 | Create a basic role to provide EC2 service with Full IAM access permissions.
6 | In the end, run from the CLI (or CloudShell) the command to verify the role was created. 7 | 8 | ### Solution 9 | 10 | 1. Go to AWS console -> IAM 11 | 2. Click in the left side menu on "Access Manamgement" -> Roles 12 | 3. Click on "Create role" 13 | 3. Choose "AWS service" as the type of trusted entity and then choose "EC2" as a use case. Click on "Next" 14 | 4. In permissions page, check "IAMFullAccess" and click on "Next" until you get to "Review" page 15 | 5. In the "Review" page, give the role a name (e.g. IAMFullAcessEC2), provide a short description and click on "Create role" 16 | 6. `aws iam list-roles` will list all the roles in the account, including the one we've just created. 17 | -------------------------------------------------------------------------------- /exercises/aws/solutions/create_user.md: -------------------------------------------------------------------------------- 1 | ## IAM AWS - Create a User 2 | 3 | ### Objectives 4 | 5 | As you probably know at this point, it's not recommended to work with the root account in AWS. For this reason you are going to create a new account which you'll use regularly as the admin account. 6 | 7 | 1. Create a user with password credentials 8 | 2. Add the newly created user to a group called "admin" and attach to it the policy called "Administrator Access" 9 | 3. Make sure the user has a tag called with the key `Role` and the value `DevOps` 10 | 11 | 12 | ### Solution 13 | 14 | 1. Go to the AWS IAM service 15 | 2. Click on "Users" in the right side menu (right under "Access Management") 16 | 3. Click on the button "Add users" 17 | 4. Insert the user name (e.g. mario) 18 | 5. Select the credential type: "Password" 19 | 6. Set console password to custom and click on "Next" 20 | 7. Click on "Add user to group" 21 | 8. Insert "admin" as group name 22 | 9. Check the "AdministratorAccess" policy and click on "Create group" 23 | 10. Click on "Next: Tags" 24 | 11. 
Add a tag with the key `Role` and the value `DevOps` 25 | 12. Click on "Review" and then create on "Create user" 26 | -------------------------------------------------------------------------------- /exercises/aws/solutions/credential_report.md: -------------------------------------------------------------------------------- 1 | ## AWS - Credential Report 2 | 3 | ### Objectives 4 | 5 | 1. Create/Download a credential report 6 | 2. Answer the following questions based on the report: 7 | 1. Are there users with MFA not activated? 8 | 2. Are there users with password enabled that didn't 9 | 3. Explain the use case for using the credential report 10 | 11 | ### Solution 12 | 13 | 1. Go to the AWS IAM service 14 | 2. Under "Access Reports" click on "Credential report" 15 | 3. Click on "Download Report" and open it once it's downloaded 16 | 4. Answer the questions in this exercises by inspecting the report 17 | 18 | The credential report is useful to identify whether there any users who need assistance or attention in regards to their security. For example a user who didn't change his password for a long time and didn't activate MFA. 19 | -------------------------------------------------------------------------------- /exercises/aws/solutions/hello_function.md: -------------------------------------------------------------------------------- 1 | ## Hello Function - Solution 2 | 3 | ### Exercise 4 | 5 | Create a basic AWS Lambda function that when given a name, will return "Hello " 6 | 7 | ### Solution 8 | 9 | #### Define a function 10 | 11 | 1. Go to Lambda console panel and click on `Create function` 12 | 1. Give the function a name like `BasicFunction` 13 | 2. Select `Python3` runtime 14 | 3. Now to handle function's permissions, we can attach IAM role to our function either by setting a role or creating a new role. I selected "Create a new role from AWS policy templates" 15 | 4. In "Policy Templates" select "Simple Microservice Permissions" 16 | 17 | 1. 
Next, you should see a text editor where you will insert a code similar to the following 18 | 19 | #### Function's code 20 | ``` 21 | import json 22 | 23 | 24 | def lambda_handler(event, context): 25 | firstName = event['name'] 26 | return 'Hello ' + firstName 27 | ``` 28 | 2. Click on "Create Function" 29 | 30 | #### Define a test 31 | 32 | 1. Now let's test the function. Click on "Test". 33 | 2. Select "Create new test event" 34 | 3. Set the "Event name" to whatever you'd like. For example "TestEvent" 35 | 4. Provide keys to test 36 | 37 | ``` 38 | { 39 | "name": 'Spyro' 40 | } 41 | ``` 42 | 5. Click on "Create" 43 | 44 | #### Test the function 45 | 46 | 1. Choose the test event you've create (`TestEvent`) 47 | 2. Click on the `Test` button 48 | 3. You should see something similar to `Execution result: succeeded` 49 | 4. If you'll go to AWS CloudWatch, you should see a related log stream 50 | -------------------------------------------------------------------------------- /exercises/aws/solutions/password_policy_and_mfa.md: -------------------------------------------------------------------------------- 1 | ## AWS IAM - Password Policy & MFA 2 | 3 | Note: DON'T perform this exercise unless you understand what you are doing and what is the outcome of applying these changes to your account 4 | 5 | ### Objectives 6 | 7 | 1. Create password policy with the following settings: 8 | 1. At least minimum 8 characters 9 | 2. At least one number 10 | 3. Prevent password reuse 11 | 12 | 2. Then enable MFA for the account. 13 | 14 | ### Solution 15 | 16 | Password Policy: 17 | 18 | 1. Go to IAM service in AWS 19 | 2. Click on "Account settings" under "Access management" 20 | 3. Click on "Change password policy" 21 | 1. Check "Enforce minimum password length" and set it to 8 characters 22 | 1. Check "Require at least one number" 23 | 1. Check "Prevent password reuse" 24 | 4. Click on "Save changes" 25 | 26 | MFA: 27 | 28 | 1. Click on the account name 29 | 2. 
Click on "My Security Credentials" 30 | 3. Expand "Multi-factor authentication (MFA)" and click on "Activate MFA" 31 | 4. Choose one of the devices 32 | 5. Follow the instructions to set it up and click on "Assign MFA" 33 | -------------------------------------------------------------------------------- /exercises/aws/solutions/url_function.md: -------------------------------------------------------------------------------- 1 | ## URL Function 2 | 3 | Create a basic AWS Lambda function that will be triggered when you enter a URL in the browser 4 | 5 | ### Solution 6 | 7 | #### Define a function 8 | 9 | 1. Go to Lambda console panel and click on `Create function` 10 | 1. Give the function a name like `urlFunction` 11 | 2. Select `Python3` runtime 12 | 3. Now to handle function's permissions, we can attach IAM role to our function either by setting a role or creating a new role. I selected "Create a new role from AWS policy templates" 13 | 4. In "Policy Templates" select "Simple Microservice Permissions" 14 | 15 | 1. Next, you should see a text editor where you will insert a code similar to the following 16 | 17 | #### Function's code 18 | ``` 19 | import json 20 | 21 | 22 | def lambda_handler(event, context): 23 | firstName = event['name'] 24 | return 'Hello ' + firstName 25 | ``` 26 | 2. Click on "Create Function" 27 | 28 | #### Define a test 29 | 30 | 1. Now let's test the function. Click on "Test". 31 | 2. Select "Create new test event" 32 | 3. Set the "Event name" to whatever you'd like. For example "TestEvent" 33 | 4. Provide keys to test 34 | 35 | ``` 36 | { 37 | "name": 'Spyro' 38 | } 39 | ``` 40 | 5. Click on "Create" 41 | 42 | #### Test the function 43 | 44 | 1. Choose the test event you've create (`TestEvent`) 45 | 2. Click on the `Test` button 46 | 3. You should see something similar to `Execution result: succeeded` 47 | 4. 
If you'll go to AWS CloudWatch, you should see a related log stream 48 | 49 | #### Define a trigger 50 | 51 | We'll define a trigger in order to trigger the function when inserting the URL in the browser 52 | 53 | 1. Go to "API Gateway console" and click on "New API Option" 54 | 2. Insert the API name, description and click on "Create" 55 | 3. Click on Action -> Create Resource 56 | 4. Insert resource name and path (e.g. the path can be /hello) and click on "Create Resource" 57 | 5. Select the resource we've created and click on "Create Method" 58 | 6. For "integration type" choose "Lambda Function" and insert the lambda function name we've given to the function we previously created. Make sure to also use the same region 59 | 7. Confirm settings and any required permissions 60 | 8. Now click again on the resource and modify "Body Mapping Templates" so the template includes this: 61 | 62 | ``` 63 | { "name": "$input.params('name')" } 64 | ``` 65 | 9. Finally save and click on Actions -> Deploy API 66 | 67 | #### Running the function 68 | 69 | 1. In the API Gateway console, in stages menu, select the API we've created and click on the GET option 70 | 2. You'll see an invoke URL you can click on. You might have to modify it to include the input so it looks similar to this: `.../hello?name=mario` 71 | 3. 
You should see in your browser `Hello Mario` 72 | -------------------------------------------------------------------------------- /exercises/aws/url_function.md: -------------------------------------------------------------------------------- 1 | ## URL Function 2 | 3 | Create a basic AWS Lambda function that will be triggered when you enter a URL in the browser 4 | -------------------------------------------------------------------------------- /exercises/cicd/README.md: -------------------------------------------------------------------------------- 1 | ## CI/CD 2 | 3 | ### CI/CD Exercises 4 | 5 | |Name|Topic|Objective & Instructions|Solution|Comments| 6 | |--------|--------|------|----|----| 7 | | Set up a CI pipeline | CI | [Exercise](ci_for_open_source_project.md) | | | 8 | | Deploy to Kubernetes | Deployment | [Exercise](deploy_to_kubernetes.md) | [Solution](solutions/deploy_to_kubernetes/README.md) | | 9 | | Jenkins - Remove Jobs | Jenkins Scripts | [Exercise](remove_jobs.md) | [Solution](solutions/remove_jobs_solution.groovy) | | 10 | | Jenkins - Remove Builds | Jenkins Sripts | [Exercise](remove_builds.md) | [Solution](solutions/remove_builds_solution.groovy) | | 11 | 12 | ### CI/CD Self Assessment 13 | 14 |
15 | What is Continuous Integration?
16 | 17 | A development practice where developers integrate code into a shared repository frequently. It can range from a couple of changes every day or a week to a couple of changes in one hour in larger scales. 18 | 19 | Each piece of code (change/patch) is verified, to make sure the change is safe to merge. Today, it's a common practice to test the change using an automated build that makes sure the code can be integrated. It can be one build which runs several tests in different levels (unit, functional, etc.) or several separate builds, of which all or some have to pass in order for the change to be merged into the repository. 20 |
21 | 22 |
23 | What is Continuous Deployment?
24 | 25 | A development strategy used by developers to release software automatically into production where any code commit must pass through an automated testing phase. Only when this is successful is the release considered production worthy. This eliminates any human interaction and should be implemented only after production-ready pipelines have been set with real-time monitoring and reporting of deployed assets. If any issues are detected in production it should be easy to rollback to previous working state. 26 | 27 | For more info please read [here](https://www.atlassian.com/continuous-delivery/continuous-deployment) 28 |
29 | 30 |
31 | Can you describe an example of a CI (and/or CD) process starting the moment a developer submitted a change/PR to a repository?
32 | 33 | There are many answers for such a question, as CI processes vary, depending on the technologies used and the type of the project to where the change was submitted. 34 | Such processes can include one or more of the following stages: 35 | 36 | * Compile 37 | * Build 38 | * Install 39 | * Configure 40 | * Update 41 | * Test 42 | 43 | An example of one possible answer: 44 | 45 | A developer submitted a pull request to a project. The PR (pull request) triggered two jobs (or one combined job). One job for running lint test on the change and the second job for building a package which includes the submitted change, and running multiple api/scenario tests using that package. Once all tests passed and the change was approved by a maintainer/core, it's merged/pushed to the repository. If some of the tests failed, the change will not be allowed to be merged/pushed to the repository. 46 | 47 | A completely different answer or CI process, can describe how a developer pushes code to a repository, a workflow is then triggered to build a container image and push it to the registry. Once in the registry, the k8s cluster is applied with the new changes. 48 |
49 | 50 |
51 | What is Continuous Delivery?
52 | 53 | A development strategy used to frequently deliver code to QA and Ops for testing. This entails having a staging area that has production-like features where changes can only be accepted for production after a manual review. Because of this human entanglement there is usually a time lag between release and review making it slower and error prone as compared to continuous deployment. 54 | 55 | For more info please read [here](https://www.atlassian.com/continuous-delivery/continuous-deployment) 56 |
57 | 58 |
59 | What is the difference between Continuous Delivery and Continuous Deployment?
60 | 61 | Both encapsulate the same process of deploying the changes which were compiled and/or tested in the CI pipelines.
62 | The difference between the two is that Continuous Delivery isn't a fully automated process as opposed to Continuous Deployment where every change that is tested in the process is eventually deployed to production. In continuous delivery someone is either approving the deployment process or the deployment process is based on constraints and conditions (like time constraint of deploying every week/month/...) 63 |
64 | 65 |
66 | What CI/CD best practices are you familiar with? Or what do you consider as CI/CD best practice?
67 | 68 | * Commit and test often. 69 | * Testing/Staging environment should be a clone of production environment. 70 | * Clean up your environments (e.g. your CI/CD pipelines may create a lot of resources. They should also take care of cleaning up everything they create) 71 | * The CI/CD pipelines should provide the same results when executed locally or remotely 72 | * Treat CI/CD as another application in your organization. Not as a glue code. 73 | * On demand environments instead of pre-allocated resources for CI/CD purposes 74 | * Stages/Steps/Tasks of pipelines should be shared between applications or microservices (don't re-invent common tasks like "cloning a project") 75 |
76 | 77 |
78 | You are given a pipeline and a pool with 3 workers: virtual machine, baremetal and a container. How will you decide on which one of them to run the pipeline?
79 |
80 | 81 |
82 | Where do you store CI/CD pipelines? Why?
83 | 84 | There are multiple approaches as to where to store the CI/CD pipeline definitions: 85 | 86 | 1. App Repository - store them in the same repository of the application they are building or testing (perhaps the most popular one) 87 | 2. Central Repository - store all organization's/project's CI/CD pipelines in one separate repository (perhaps the best approach when multiple teams test the same set of projects and they end up having many pipelines) 88 | 3. CI repo for every app repo - you separate CI related code from app code but you don't put everything in one place (perhaps the worst option due to the maintenance) 89 | 4. The platform where the CI/CD pipelines are being executed (e.g. Kubernetes Cluster in case of Tekton/OpenShift Pipelines). 90 |
91 | 92 |
93 | How do you perform capacity planning for your CI/CD resources? (e.g. servers, storage, etc.)
94 |
95 | 96 |
97 | How would you structure/implement CD for an application which depends on several other applications?
98 |
99 | 100 |
101 | How do you measure your CI/CD quality? Are there any metrics or KPIs you are using for measuring the quality?
102 |
103 | 104 | #### CI/CD - Jenkins 105 | 106 |
107 | What is Jenkins? What have you used it for?
108 | 109 | Jenkins is an open source automation tool written in Java with plugins built for Continuous Integration purpose. Jenkins is used to build and test your software projects continuously making it easier for developers to integrate changes to the project, and making it easier for users to obtain a fresh build. It also allows you to continuously deliver your software by integrating with a large number of testing and deployment technologies. 110 | 111 | Jenkins integrates development life-cycle processes of all kinds, including build, document, test, package, stage, deploy, static analysis and much more. 112 | 113 |
114 | 115 |
116 | What are the advantages of Jenkins over its competitors? Can you compare it to one of the following systems? 117 | 118 | * Travis 119 | * Bamboo 120 | * Teamcity 121 | * CircleCI
122 |
123 | 124 |
125 | What are the limitations or disadvantages of Jenkins?
126 | 127 | This might be considered to be an opinionated answer: 128 | 129 | * Old fashioned dashboards with not many options to customize it 130 | * Containers readiness (this has improved with Jenkins X) 131 | * By itself, it doesn't have many features. On the other hand, there are many plugins created by the community to expand its abilities 132 | * Managing Jenkins and its pipelines as code can be one hell of a nightmare 133 |
134 | 135 |
136 | Explain the following: 137 | 138 | - Job 139 | - Build 140 | - Plugin 141 | - Node or Worker 142 | - Executor
143 | - Job is an automation definition = what and where to execute once the user clicks on "build" 144 | - Build is a running instance of a job. You can have one or more builds at any given point of time (unless limited by configuration) 145 | - A worker is the machine/instance on which the build is running. When a build starts, it "acquires" a worker out of a pool to run on it. 146 | - An executor is a variable of the worker, defining how many builds can run on that worker in parallel. An executor value of 3 means that 3 builds can run at any point on that executor (not necessarily of the same job. Any builds) 147 |
148 | 149 |
150 | What plugins have you used in Jenkins?
151 |
152 | 153 |
154 | Have you used Jenkins for CI or CD processes? Can you describe them?
155 |
156 | 157 |
158 | What type of jobs are there? Which types have you used?
159 |
160 | 161 |
162 | How did you report build results to users? What ways are there to report the results?
163 | 164 | You can report via: 165 | * Emails 166 | * Messaging apps 167 | * Dashboards 168 | 169 | Each has its own disadvantages and advantages. Emails for example, if sent too often, can be eventually disregarded or ignored. 170 |
171 | 172 |
173 | You need to run unit tests every time a change submitted to a given project. Describe in details how your pipeline would look like and what will be executed in each stage
174 | 175 | The pipelines will have multiple stages: 176 | 177 | * Clone the project 178 | * Install test dependencies (for example, if I need tox package to run the tests, I will install it in this stage) 179 | * Run unit tests 180 | * (Optional) report results (For example an email to the users) 181 | * Archive the relevant logs/files 182 |
183 | 184 |
185 | How to secure Jenkins?
186 | 187 | [Jenkins documentation](https://www.jenkins.io/doc/book/security/securing-jenkins/) provides some basic intro for securing your Jenkins server. 188 |
189 | 190 |
191 | Describe how do you add new nodes (agents) to Jenkins
192 | 193 | You can describe the UI way to add new nodes but better to explain how to do in a way that scales like a script or using dynamic source for nodes like one of the existing clouds. 194 |
195 | 196 |
197 | How to acquire multiple nodes for one specific build?
198 |
199 | 200 |
201 | Whenever a build fails, you would like to notify the team owning the job regarding the failure and provide failure reason. How would you do that?
202 |
203 | 204 |
205 | There are four teams in your organization. How to prioritize the builds of each team? So the jobs of team x will always run before team y for example
206 |
207 | 208 |
209 | If you are managing a dozen of jobs, you can probably use the Jenkins UI. But how do you manage the creation and deletion of hundreds of jobs every week/month?
210 |
211 | 212 |
213 | What are some of Jenkins limitations?
214 | 215 | * Testing cross-dependencies (changes from multiple projects together) 216 | * Starting builds from any stage (although Cloudbees implemented something called checkpoints) 217 |
218 | 219 |
220 | What is the difference between a scripted pipeline and a declarative pipeline? Which type are you using?
221 |
222 | 223 |
224 | How would you implement an option of starting a build from a certain stage and not from the beginning?
225 |
226 | 227 |
228 | Do you have experience with developing a Jenkins plugin? Can you describe this experience?
229 |
230 | 231 |
232 | Have you written Jenkins scripts? If yes, what for and how do they work?
233 |
234 | 235 | #### CI/CD - GitHub Actions 236 | 237 |
238 | What is a Workflow in GitHub Actions?
239 | 240 | A YAML file that defines the automation actions and instructions to execute upon a specific event.
241 | The file is placed in the repository itself. 242 | 243 | A Workflow can be anything - running tests, compiling code, building packages, ... 244 |
245 | 246 |
247 | What is a Runner in GitHub Actions?
248 | 249 | A workflow has to be executed somewhere. The environment where the workflow is executed is called Runner.
250 | A Runner can be an on-premise host or GitHub hosted 251 |
252 | 253 |
254 | What is a Job in GitHub Actions?
255 | 256 | A job is a series of steps which are executed on the same runner/environment.
257 | A workflow must include at least one job. 258 |
259 | 260 |
261 | What is an Action in GitHub Actions?
262 | 263 | An action is the smallest unit in a workflow. It includes the commands to execute as part of the job. 264 |
265 | 266 |
267 | In GitHub Actions workflow, what the 'on' attribute/directive is used for?
268 | 269 | Specify upon which events the workflow will be triggered.
270 | For example, you might configure the workflow to trigger every time a change is pushed to the repository. 271 |
272 | 273 |
274 | True or False? In GitHub Actions, jobs are executed in parallel by default
275 | 276 | True 277 |
278 | 279 |
280 | How to create dependencies between jobs so one job runs after another?
281 | 282 | Using the "needs" attribute/directive. 283 | 284 | ``` 285 | jobs: 286 | job1: 287 | job2: 288 | needs: job1 289 | ``` 290 | 291 | In the above example, job1 must complete successfully before job2 runs 292 |
293 | 294 |
295 | How to add a Workflow to a repository?
296 | CLI: 297 | 298 | 1. Create the directory `.github/workflows` in the repository 299 | 2. Add a YAML file 300 | 301 | UI: 302 | 303 | 1. In the repository page, click on "Actions" 304 | 2. Choose workflow and click on "Set up this workflow" 305 |
306 | -------------------------------------------------------------------------------- /exercises/cicd/ci_for_open_source_project.md: -------------------------------------------------------------------------------- 1 | ## CI for Open Source Project 2 | 3 | 1. Choose an open source project from Github and fork it 4 | 2. Create a CI pipeline/workflow for the project you forked 5 | 3. The CI pipeline/workflow will include anything that is relevant to the project you forked. For example: 6 | * If it's a Python project, you will run PEP8 7 | * If the project has unit tests directory, you will run these unit tests as part of the CI 8 | 4. In a separate file, describe what is running as part of the CI and why you chose to include it. You can also describe any thoughts, dilemmas, challenge you had 9 | 10 | ### Bonus 11 | 12 | Containerize the app of the project you forked using any container engine you would like (e.g. Docker, Podman).
13 | Once you successfully ran the application in a container, submit the Dockerfile to the original project (but be prepared that the maintainer might not need/want that). 14 | 15 | ### Suggestions for Projects 16 | 17 | The following is a list of projects without CI (at least at the moment): 18 | 19 | Note: I wrote a script to find these (except the first project on the list, of course) based on some parameters in case you wonder why these projects specifically are listed. 20 | 21 | * [This one](https://github.com/bregman-arie/devops-exercises) - We don't have CI! help! :) 22 | * [image retrieval platform](https://github.com/skx6/image_retrieval_platform) 23 | * [FollowSpot](https://github.com/jenbrissman/FollowSpot) 24 | * [Pyrin](https://github.com/mononobi/pyrin) 25 | * [food-detection-yolov5](https://github.com/lannguyen0910/food-detection-yolov5) 26 | * [Lifely](https://github.com/sagnik1511/Lifely) 27 | -------------------------------------------------------------------------------- /exercises/cicd/deploy_to_kubernetes.md: -------------------------------------------------------------------------------- 1 | ## Deploy to Kubernetes 2 | 3 | * Write a pipeline that will deploy a "hello world" web app to Kubernetes 4 | * The CI/CD system (where the pipeline resides) and the Kubernetes cluster should be on separate systems 5 | * The web app should be accessible remotely and only with HTTPS 6 | -------------------------------------------------------------------------------- /exercises/cicd/remove_builds.md: -------------------------------------------------------------------------------- 1 | ### Jenkins - Remove Builds 2 | 3 | #### Objective 4 | 5 | Learn how to write a Jenkins script that interacts with builds by removing builds older than X days. 6 | 7 | #### Instructions 8 | 9 | 1. Pick up (or create) a job which has builds older than X days 10 | 2. 
Write a script to remove only the builds that are older than X days 11 | 12 | #### Hints 13 | 14 | X can be anything. For example, remove builds that are older than 3 days. Just make sure that you don't simply remove all the builds (since that's different from the objective). 15 | -------------------------------------------------------------------------------- /exercises/cicd/remove_jobs.md: -------------------------------------------------------------------------------- 1 | ### Jenkins - Remove Jobs 2 | 3 | #### Objective 4 | 5 | Learn how to write a Jenkins script to remove Jenkins jobs 6 | 7 | #### Instructions 8 | 9 | 1. Create three jobs called: test-job, test2-job and prod-job 10 | 2. Write a script to remove all the jobs that include the string "test" 11 | -------------------------------------------------------------------------------- /exercises/cicd/solutions/deploy_to_kubernetes/Jenkinsfile: -------------------------------------------------------------------------------- 1 | pipeline { 2 | 3 | agent any 4 | 5 | stages { 6 | 7 | stage('Checkout Source') { 8 | steps { 9 | git url:'https://github.com//.git', 10 | // credentialsId: 'creds_github', 11 | branch:'master' 12 | } 13 | } 14 | 15 | stage("Build image") { 16 | steps { 17 | script { 18 | myapp = docker.build("/helloworld:${env.BUILD_ID}") 19 | } 20 | } 21 | } 22 | 23 | stage("Push image") { 24 | steps { 25 | script { 26 | docker.withRegistry('https://registry.hub.docker.com', 'dockerhub') { 27 | myapp.push("latest") 28 | myapp.push("${env.BUILD_ID}") 29 | } 30 | } 31 | } 32 | } 33 | 34 | 35 | stage('Deploy App') { 36 | steps { 37 | script { 38 | sh 'ansible-playbook deploy.yml' 39 | } 40 | } 41 | } 42 | 43 | } 44 | 45 | } 46 | -------------------------------------------------------------------------------- /exercises/cicd/solutions/deploy_to_kubernetes/README.md: -------------------------------------------------------------------------------- 1 | ## Deploy to Kubernetes 2 | 3 | Note: this exercise can 
be solved in various ways. The solution described here is just one possible way. 4 | 5 | 1. Install Jenkins on one system (follow up the standard Jenkins installation procedure) 6 | 2. Deploy Kubernetes on a remote host (minikube can be an easy way to achieve it) 7 | 3. Create a simple web app or [page](html) 8 | 9 | 4. Create Kubernetes [resoruces](helloworld.yml) - Deployment, Service and Ingress (for HTTPS access) 10 | 5. Create an [Ansible inventory](inventory) and insert the address of the Kubernetes cluster 11 | 6. Write [Ansible playbook](deploy.yml) to deploy the Kubernetes resources and also generate 12 | 7. Create a [pipeline](Jenkinsfile) 13 | 14 | 8. Run the pipeline :) 15 | 9. Try to access the web app remotely 16 | -------------------------------------------------------------------------------- /exercises/cicd/solutions/deploy_to_kubernetes/deploy.yml: -------------------------------------------------------------------------------- 1 | - name: Apply Kubernetes YAMLs 2 | hosts: kubernetes 3 | tasks: 4 | - name: Ensure SSL related directories exist 5 | file: 6 | path: "{{ item }}" 7 | state: directory 8 | loop: 9 | - "/etc/ssl/crt" 10 | - "/etc/ssl/csr" 11 | - "/etc/ssl/private" 12 | 13 | - name: Generate an OpenSSL private key. 
14 | openssl_privatekey: 15 | path: /etc/ssl/private/privkey.pem 16 | 17 | - name: generate openssl certficate signing requests 18 | openssl_csr: 19 | path: /etc/ssl/csr/hello-world.app.csr 20 | privatekey_path: /etc/ssl/private/privkey.pem 21 | common_name: hello-world.app 22 | 23 | - name: Generate a Self Signed OpenSSL certificate 24 | openssl_certificate: 25 | path: /etc/ssl/crt/hello-world.app.crt 26 | privatekey_path: /etc/ssl/private/privkey.pem 27 | csr_path: /etc/ssl/csr/hello-world.app.csr 28 | provider: selfsigned 29 | 30 | - name: Create k8s secret 31 | command: "kubectl create secret tls tls-secret --cert=/etc/ssl/crt/hello-world.app.crt --key=/etc/ssl/private/privkey.pem" 32 | register: result 33 | failed_when: 34 | - result.rc == 2 35 | 36 | - name: Deploy web app 37 | k8s: 38 | state: present 39 | definition: "{{ lookup('file', './helloworld.yml') }}" 40 | kubeconfig: '/home/abregman/.kube/config' 41 | namespace: 'default' 42 | wait: true 43 | -------------------------------------------------------------------------------- /exercises/cicd/solutions/deploy_to_kubernetes/helloworld.yml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: apps/v1 3 | kind: Deployment 4 | metadata: 5 | name: hello-blue-whale 6 | spec: 7 | replicas: 3 8 | selector: 9 | matchLabels: 10 | app: hello-world-app 11 | version: blue 12 | template: 13 | metadata: 14 | name: hello-blue-whale-pod 15 | labels: 16 | app: hello-world-app 17 | version: blue 18 | spec: 19 | containers: 20 | - name: hello-whale-container 21 | image: abregman2/helloworld:latest 22 | imagePullPolicy: Always 23 | ports: 24 | - containerPort: 80 25 | - containerPort: 443 26 | --- 27 | apiVersion: v1 28 | kind: Service 29 | metadata: 30 | name: hello-world 31 | labels: 32 | app: hello-world-app 33 | spec: 34 | ports: 35 | - port: 80 36 | targetPort: 80 37 | protocol: TCP 38 | name: http 39 | selector: 40 | app: hello-world-app 41 | --- 42 | apiVersion: 
networking.k8s.io/v1 43 | kind: Ingress 44 | metadata: 45 | name: example-ingress 46 | annotations: 47 | cert-manager.io/cluster-issuer: selfsigned-issuer 48 | nginx.ingress.kubernetes.io/rewrite-target: / 49 | kubernetes.io/ingress.class: nginx 50 | spec: 51 | tls: 52 | - hosts: 53 | - hello-world.app 54 | secretName: shhh 55 | rules: 56 | - host: hello-world.app 57 | http: 58 | paths: 59 | - path: / 60 | pathType: Prefix 61 | backend: 62 | service: 63 | name: hello-world 64 | port: 65 | number: 80 66 | -------------------------------------------------------------------------------- /exercises/cicd/solutions/deploy_to_kubernetes/html/css/normalize.css: -------------------------------------------------------------------------------- 1 | /*! normalize.css v3.0.2 | MIT License | git.io/normalize */ 2 | 3 | /** 4 | * 1. Set default font family to sans-serif. 5 | * 2. Prevent iOS text size adjust after orientation change, without disabling 6 | * user zoom. 7 | */ 8 | 9 | html { 10 | font-family: sans-serif; /* 1 */ 11 | -ms-text-size-adjust: 100%; /* 2 */ 12 | -webkit-text-size-adjust: 100%; /* 2 */ 13 | } 14 | 15 | /** 16 | * Remove default margin. 17 | */ 18 | 19 | body { 20 | margin: 0; 21 | } 22 | 23 | /* HTML5 display definitions 24 | ========================================================================== */ 25 | 26 | /** 27 | * Correct `block` display not defined for any HTML5 element in IE 8/9. 28 | * Correct `block` display not defined for `details` or `summary` in IE 10/11 29 | * and Firefox. 30 | * Correct `block` display not defined for `main` in IE 11. 31 | */ 32 | 33 | article, 34 | aside, 35 | details, 36 | figcaption, 37 | figure, 38 | footer, 39 | header, 40 | hgroup, 41 | main, 42 | menu, 43 | nav, 44 | section, 45 | summary { 46 | display: block; 47 | } 48 | 49 | /** 50 | * 1. Correct `inline-block` display not defined in IE 8/9. 51 | * 2. Normalize vertical alignment of `progress` in Chrome, Firefox, and Opera. 
52 | */ 53 | 54 | audio, 55 | canvas, 56 | progress, 57 | video { 58 | display: inline-block; /* 1 */ 59 | vertical-align: baseline; /* 2 */ 60 | } 61 | 62 | /** 63 | * Prevent modern browsers from displaying `audio` without controls. 64 | * Remove excess height in iOS 5 devices. 65 | */ 66 | 67 | audio:not([controls]) { 68 | display: none; 69 | height: 0; 70 | } 71 | 72 | /** 73 | * Address `[hidden]` styling not present in IE 8/9/10. 74 | * Hide the `template` element in IE 8/9/11, Safari, and Firefox < 22. 75 | */ 76 | 77 | [hidden], 78 | template { 79 | display: none; 80 | } 81 | 82 | /* Links 83 | ========================================================================== */ 84 | 85 | /** 86 | * Remove the gray background color from active links in IE 10. 87 | */ 88 | 89 | a { 90 | background-color: transparent; 91 | } 92 | 93 | /** 94 | * Improve readability when focused and also mouse hovered in all browsers. 95 | */ 96 | 97 | a:active, 98 | a:hover { 99 | outline: 0; 100 | } 101 | 102 | /* Text-level semantics 103 | ========================================================================== */ 104 | 105 | /** 106 | * Address styling not present in IE 8/9/10/11, Safari, and Chrome. 107 | */ 108 | 109 | abbr[title] { 110 | border-bottom: 1px dotted; 111 | } 112 | 113 | /** 114 | * Address style set to `bolder` in Firefox 4+, Safari, and Chrome. 115 | */ 116 | 117 | b, 118 | strong { 119 | font-weight: bold; 120 | } 121 | 122 | /** 123 | * Address styling not present in Safari and Chrome. 124 | */ 125 | 126 | dfn { 127 | font-style: italic; 128 | } 129 | 130 | /** 131 | * Address variable `h1` font-size and margin within `section` and `article` 132 | * contexts in Firefox 4+, Safari, and Chrome. 133 | */ 134 | 135 | h1 { 136 | font-size: 2em; 137 | margin: 0.67em 0; 138 | } 139 | 140 | /** 141 | * Address styling not present in IE 8/9. 
142 | */ 143 | 144 | mark { 145 | background: #ff0; 146 | color: #000; 147 | } 148 | 149 | /** 150 | * Address inconsistent and variable font size in all browsers. 151 | */ 152 | 153 | small { 154 | font-size: 80%; 155 | } 156 | 157 | /** 158 | * Prevent `sub` and `sup` affecting `line-height` in all browsers. 159 | */ 160 | 161 | sub, 162 | sup { 163 | font-size: 75%; 164 | line-height: 0; 165 | position: relative; 166 | vertical-align: baseline; 167 | } 168 | 169 | sup { 170 | top: -0.5em; 171 | } 172 | 173 | sub { 174 | bottom: -0.25em; 175 | } 176 | 177 | /* Embedded content 178 | ========================================================================== */ 179 | 180 | /** 181 | * Remove border when inside `a` element in IE 8/9/10. 182 | */ 183 | 184 | img { 185 | border: 0; 186 | } 187 | 188 | /** 189 | * Correct overflow not hidden in IE 9/10/11. 190 | */ 191 | 192 | svg:not(:root) { 193 | overflow: hidden; 194 | } 195 | 196 | /* Grouping content 197 | ========================================================================== */ 198 | 199 | /** 200 | * Address margin not present in IE 8/9 and Safari. 201 | */ 202 | 203 | figure { 204 | margin: 1em 40px; 205 | } 206 | 207 | /** 208 | * Address differences between Firefox and other browsers. 209 | */ 210 | 211 | hr { 212 | -moz-box-sizing: content-box; 213 | box-sizing: content-box; 214 | height: 0; 215 | } 216 | 217 | /** 218 | * Contain overflow in all browsers. 219 | */ 220 | 221 | pre { 222 | overflow: auto; 223 | } 224 | 225 | /** 226 | * Address odd `em`-unit font size rendering in all browsers. 227 | */ 228 | 229 | code, 230 | kbd, 231 | pre, 232 | samp { 233 | font-family: monospace, monospace; 234 | font-size: 1em; 235 | } 236 | 237 | /* Forms 238 | ========================================================================== */ 239 | 240 | /** 241 | * Known limitation: by default, Chrome and Safari on OS X allow very limited 242 | * styling of `select`, unless a `border` property is set. 
243 | */ 244 | 245 | /** 246 | * 1. Correct color not being inherited. 247 | * Known issue: affects color of disabled elements. 248 | * 2. Correct font properties not being inherited. 249 | * 3. Address margins set differently in Firefox 4+, Safari, and Chrome. 250 | */ 251 | 252 | button, 253 | input, 254 | optgroup, 255 | select, 256 | textarea { 257 | color: inherit; /* 1 */ 258 | font: inherit; /* 2 */ 259 | margin: 0; /* 3 */ 260 | } 261 | 262 | /** 263 | * Address `overflow` set to `hidden` in IE 8/9/10/11. 264 | */ 265 | 266 | button { 267 | overflow: visible; 268 | } 269 | 270 | /** 271 | * Address inconsistent `text-transform` inheritance for `button` and `select`. 272 | * All other form control elements do not inherit `text-transform` values. 273 | * Correct `button` style inheritance in Firefox, IE 8/9/10/11, and Opera. 274 | * Correct `select` style inheritance in Firefox. 275 | */ 276 | 277 | button, 278 | select { 279 | text-transform: none; 280 | } 281 | 282 | /** 283 | * 1. Avoid the WebKit bug in Android 4.0.* where (2) destroys native `audio` 284 | * and `video` controls. 285 | * 2. Correct inability to style clickable `input` types in iOS. 286 | * 3. Improve usability and consistency of cursor style between image-type 287 | * `input` and others. 288 | */ 289 | 290 | button, 291 | html input[type="button"], /* 1 */ 292 | input[type="reset"], 293 | input[type="submit"] { 294 | -webkit-appearance: button; /* 2 */ 295 | cursor: pointer; /* 3 */ 296 | } 297 | 298 | /** 299 | * Re-set default cursor for disabled elements. 300 | */ 301 | 302 | button[disabled], 303 | html input[disabled] { 304 | cursor: default; 305 | } 306 | 307 | /** 308 | * Remove inner padding and border in Firefox 4+. 309 | */ 310 | 311 | button::-moz-focus-inner, 312 | input::-moz-focus-inner { 313 | border: 0; 314 | padding: 0; 315 | } 316 | 317 | /** 318 | * Address Firefox 4+ setting `line-height` on `input` using `!important` in 319 | * the UA stylesheet. 
320 | */ 321 | 322 | input { 323 | line-height: normal; 324 | } 325 | 326 | /** 327 | * It's recommended that you don't attempt to style these elements. 328 | * Firefox's implementation doesn't respect box-sizing, padding, or width. 329 | * 330 | * 1. Address box sizing set to `content-box` in IE 8/9/10. 331 | * 2. Remove excess padding in IE 8/9/10. 332 | */ 333 | 334 | input[type="checkbox"], 335 | input[type="radio"] { 336 | box-sizing: border-box; /* 1 */ 337 | padding: 0; /* 2 */ 338 | } 339 | 340 | /** 341 | * Fix the cursor style for Chrome's increment/decrement buttons. For certain 342 | * `font-size` values of the `input`, it causes the cursor style of the 343 | * decrement button to change from `default` to `text`. 344 | */ 345 | 346 | input[type="number"]::-webkit-inner-spin-button, 347 | input[type="number"]::-webkit-outer-spin-button { 348 | height: auto; 349 | } 350 | 351 | /** 352 | * 1. Address `appearance` set to `searchfield` in Safari and Chrome. 353 | * 2. Address `box-sizing` set to `border-box` in Safari and Chrome 354 | * (include `-moz` to future-proof). 355 | */ 356 | 357 | input[type="search"] { 358 | -webkit-appearance: textfield; /* 1 */ 359 | -moz-box-sizing: content-box; 360 | -webkit-box-sizing: content-box; /* 2 */ 361 | box-sizing: content-box; 362 | } 363 | 364 | /** 365 | * Remove inner padding and search cancel button in Safari and Chrome on OS X. 366 | * Safari (but not Chrome) clips the cancel button when the search input has 367 | * padding (and `textfield` appearance). 368 | */ 369 | 370 | input[type="search"]::-webkit-search-cancel-button, 371 | input[type="search"]::-webkit-search-decoration { 372 | -webkit-appearance: none; 373 | } 374 | 375 | /** 376 | * Define consistent border, margin, and padding. 377 | */ 378 | 379 | fieldset { 380 | border: 1px solid #c0c0c0; 381 | margin: 0 2px; 382 | padding: 0.35em 0.625em 0.75em; 383 | } 384 | 385 | /** 386 | * 1. Correct `color` not being inherited in IE 8/9/10/11. 387 | * 2. 
Remove padding so people aren't caught out if they zero out fieldsets. 388 | */ 389 | 390 | legend { 391 | border: 0; /* 1 */ 392 | padding: 0; /* 2 */ 393 | } 394 | 395 | /** 396 | * Remove default vertical scrollbar in IE 8/9/10/11. 397 | */ 398 | 399 | textarea { 400 | overflow: auto; 401 | } 402 | 403 | /** 404 | * Don't inherit the `font-weight` (applied by a rule above). 405 | * NOTE: the default cannot safely be changed in Chrome and Safari on OS X. 406 | */ 407 | 408 | optgroup { 409 | font-weight: bold; 410 | } 411 | 412 | /* Tables 413 | ========================================================================== */ 414 | 415 | /** 416 | * Remove most spacing between table cells. 417 | */ 418 | 419 | table { 420 | border-collapse: collapse; 421 | border-spacing: 0; 422 | } 423 | 424 | td, 425 | th { 426 | padding: 0; 427 | } -------------------------------------------------------------------------------- /exercises/cicd/solutions/deploy_to_kubernetes/html/css/skeleton.css: -------------------------------------------------------------------------------- 1 | /* 2 | * Skeleton V2.0.4 3 | * Copyright 2014, Dave Gamache 4 | * www.getskeleton.com 5 | * Free to use under the MIT license. 
6 | * http://www.opensource.org/licenses/mit-license.php 7 | * 12/29/2014 8 | */ 9 | 10 | 11 | /* Table of contents 12 | –––––––––––––––––––––––––––––––––––––––––––––––––– 13 | - Grid 14 | - Base Styles 15 | - Typography 16 | - Links 17 | - Buttons 18 | - Forms 19 | - Lists 20 | - Code 21 | - Tables 22 | - Spacing 23 | - Utilities 24 | - Clearing 25 | - Media Queries 26 | */ 27 | 28 | 29 | /* Grid 30 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 31 | .container { 32 | position: relative; 33 | width: 100%; 34 | max-width: 960px; 35 | margin: 0 auto; 36 | padding: 0 20px; 37 | box-sizing: border-box; } 38 | .column, 39 | .columns { 40 | width: 100%; 41 | float: left; 42 | box-sizing: border-box; } 43 | 44 | /* For devices larger than 400px */ 45 | @media (min-width: 400px) { 46 | .container { 47 | width: 85%; 48 | padding: 0; } 49 | } 50 | 51 | /* For devices larger than 550px */ 52 | @media (min-width: 550px) { 53 | .container { 54 | width: 80%; } 55 | .column, 56 | .columns { 57 | margin-left: 4%; } 58 | .column:first-child, 59 | .columns:first-child { 60 | margin-left: 0; } 61 | 62 | .one.column, 63 | .one.columns { width: 4.66666666667%; } 64 | .two.columns { width: 13.3333333333%; } 65 | .three.columns { width: 22%; } 66 | .four.columns { width: 30.6666666667%; } 67 | .five.columns { width: 39.3333333333%; } 68 | .six.columns { width: 48%; } 69 | .seven.columns { width: 56.6666666667%; } 70 | .eight.columns { width: 65.3333333333%; } 71 | .nine.columns { width: 74.0%; } 72 | .ten.columns { width: 82.6666666667%; } 73 | .eleven.columns { width: 91.3333333333%; } 74 | .twelve.columns { width: 100%; margin-left: 0; } 75 | 76 | .one-third.column { width: 30.6666666667%; } 77 | .two-thirds.column { width: 65.3333333333%; } 78 | 79 | .one-half.column { width: 48%; } 80 | 81 | /* Offsets */ 82 | .offset-by-one.column, 83 | .offset-by-one.columns { margin-left: 8.66666666667%; } 84 | .offset-by-two.column, 85 | .offset-by-two.columns { margin-left: 
17.3333333333%; } 86 | .offset-by-three.column, 87 | .offset-by-three.columns { margin-left: 26%; } 88 | .offset-by-four.column, 89 | .offset-by-four.columns { margin-left: 34.6666666667%; } 90 | .offset-by-five.column, 91 | .offset-by-five.columns { margin-left: 43.3333333333%; } 92 | .offset-by-six.column, 93 | .offset-by-six.columns { margin-left: 52%; } 94 | .offset-by-seven.column, 95 | .offset-by-seven.columns { margin-left: 60.6666666667%; } 96 | .offset-by-eight.column, 97 | .offset-by-eight.columns { margin-left: 69.3333333333%; } 98 | .offset-by-nine.column, 99 | .offset-by-nine.columns { margin-left: 78.0%; } 100 | .offset-by-ten.column, 101 | .offset-by-ten.columns { margin-left: 86.6666666667%; } 102 | .offset-by-eleven.column, 103 | .offset-by-eleven.columns { margin-left: 95.3333333333%; } 104 | 105 | .offset-by-one-third.column, 106 | .offset-by-one-third.columns { margin-left: 34.6666666667%; } 107 | .offset-by-two-thirds.column, 108 | .offset-by-two-thirds.columns { margin-left: 69.3333333333%; } 109 | 110 | .offset-by-one-half.column, 111 | .offset-by-one-half.columns { margin-left: 52%; } 112 | 113 | } 114 | 115 | 116 | /* Base Styles 117 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 118 | /* NOTE 119 | html is set to 62.5% so that all the REM measurements throughout Skeleton 120 | are based on 10px sizing. 
So basically 1.5rem = 15px :) */ 121 | html { 122 | font-size: 62.5%; } 123 | body { 124 | font-size: 1.5em; /* currently ems cause chrome bug misinterpreting rems on body element */ 125 | line-height: 1.6; 126 | font-weight: 400; 127 | font-family: "Raleway", "HelveticaNeue", "Helvetica Neue", Helvetica, Arial, sans-serif; 128 | color: #222; } 129 | 130 | 131 | /* Typography 132 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 133 | h1, h2, h3, h4, h5, h6 { 134 | margin-top: 0; 135 | margin-bottom: 2rem; 136 | font-weight: 300; } 137 | h1 { font-size: 4.0rem; line-height: 1.2; letter-spacing: -.1rem;} 138 | h2 { font-size: 3.6rem; line-height: 1.25; letter-spacing: -.1rem; } 139 | h3 { font-size: 3.0rem; line-height: 1.3; letter-spacing: -.1rem; } 140 | h4 { font-size: 2.4rem; line-height: 1.35; letter-spacing: -.08rem; } 141 | h5 { font-size: 1.8rem; line-height: 1.5; letter-spacing: -.05rem; } 142 | h6 { font-size: 1.5rem; line-height: 1.6; letter-spacing: 0; } 143 | 144 | /* Larger than phablet */ 145 | @media (min-width: 550px) { 146 | h1 { font-size: 5.0rem; } 147 | h2 { font-size: 4.2rem; } 148 | h3 { font-size: 3.6rem; } 149 | h4 { font-size: 3.0rem; } 150 | h5 { font-size: 2.4rem; } 151 | h6 { font-size: 1.5rem; } 152 | } 153 | 154 | p { 155 | margin-top: 0; } 156 | 157 | 158 | /* Links 159 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 160 | a { 161 | color: #1EAEDB; } 162 | a:hover { 163 | color: #0FA0CE; } 164 | 165 | 166 | /* Buttons 167 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 168 | .button, 169 | button, 170 | input[type="submit"], 171 | input[type="reset"], 172 | input[type="button"] { 173 | display: inline-block; 174 | height: 38px; 175 | padding: 0 30px; 176 | color: #555; 177 | text-align: center; 178 | font-size: 11px; 179 | font-weight: 600; 180 | line-height: 38px; 181 | letter-spacing: .1rem; 182 | text-transform: uppercase; 183 | text-decoration: none; 184 | white-space: nowrap; 185 | background-color: 
transparent; 186 | border-radius: 4px; 187 | border: 1px solid #bbb; 188 | cursor: pointer; 189 | box-sizing: border-box; } 190 | .button:hover, 191 | button:hover, 192 | input[type="submit"]:hover, 193 | input[type="reset"]:hover, 194 | input[type="button"]:hover, 195 | .button:focus, 196 | button:focus, 197 | input[type="submit"]:focus, 198 | input[type="reset"]:focus, 199 | input[type="button"]:focus { 200 | color: #333; 201 | border-color: #888; 202 | outline: 0; } 203 | .button.button-primary, 204 | button.button-primary, 205 | input[type="submit"].button-primary, 206 | input[type="reset"].button-primary, 207 | input[type="button"].button-primary { 208 | color: #FFF; 209 | background-color: #33C3F0; 210 | border-color: #33C3F0; } 211 | .button.button-primary:hover, 212 | button.button-primary:hover, 213 | input[type="submit"].button-primary:hover, 214 | input[type="reset"].button-primary:hover, 215 | input[type="button"].button-primary:hover, 216 | .button.button-primary:focus, 217 | button.button-primary:focus, 218 | input[type="submit"].button-primary:focus, 219 | input[type="reset"].button-primary:focus, 220 | input[type="button"].button-primary:focus { 221 | color: #FFF; 222 | background-color: #1EAEDB; 223 | border-color: #1EAEDB; } 224 | 225 | 226 | /* Forms 227 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 228 | input[type="email"], 229 | input[type="number"], 230 | input[type="search"], 231 | input[type="text"], 232 | input[type="tel"], 233 | input[type="url"], 234 | input[type="password"], 235 | textarea, 236 | select { 237 | height: 38px; 238 | padding: 6px 10px; /* The 6px vertically centers text on FF, ignored by Webkit */ 239 | background-color: #fff; 240 | border: 1px solid #D1D1D1; 241 | border-radius: 4px; 242 | box-shadow: none; 243 | box-sizing: border-box; } 244 | /* Removes awkward default styles on some inputs for iOS */ 245 | input[type="email"], 246 | input[type="number"], 247 | input[type="search"], 248 | input[type="text"], 
249 | input[type="tel"], 250 | input[type="url"], 251 | input[type="password"], 252 | textarea { 253 | -webkit-appearance: none; 254 | -moz-appearance: none; 255 | appearance: none; } 256 | textarea { 257 | min-height: 65px; 258 | padding-top: 6px; 259 | padding-bottom: 6px; } 260 | input[type="email"]:focus, 261 | input[type="number"]:focus, 262 | input[type="search"]:focus, 263 | input[type="text"]:focus, 264 | input[type="tel"]:focus, 265 | input[type="url"]:focus, 266 | input[type="password"]:focus, 267 | textarea:focus, 268 | select:focus { 269 | border: 1px solid #33C3F0; 270 | outline: 0; } 271 | label, 272 | legend { 273 | display: block; 274 | margin-bottom: .5rem; 275 | font-weight: 600; } 276 | fieldset { 277 | padding: 0; 278 | border-width: 0; } 279 | input[type="checkbox"], 280 | input[type="radio"] { 281 | display: inline; } 282 | label > .label-body { 283 | display: inline-block; 284 | margin-left: .5rem; 285 | font-weight: normal; } 286 | 287 | 288 | /* Lists 289 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 290 | ul { 291 | list-style: circle inside; } 292 | ol { 293 | list-style: decimal inside; } 294 | ol, ul { 295 | padding-left: 0; 296 | margin-top: 0; } 297 | ul ul, 298 | ul ol, 299 | ol ol, 300 | ol ul { 301 | margin: 1.5rem 0 1.5rem 3rem; 302 | font-size: 90%; } 303 | li { 304 | margin-bottom: 1rem; } 305 | 306 | 307 | /* Code 308 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 309 | code { 310 | padding: .2rem .5rem; 311 | margin: 0 .2rem; 312 | font-size: 90%; 313 | white-space: nowrap; 314 | background: #F1F1F1; 315 | border: 1px solid #E1E1E1; 316 | border-radius: 4px; } 317 | pre > code { 318 | display: block; 319 | padding: 1rem 1.5rem; 320 | white-space: pre; } 321 | 322 | 323 | /* Tables 324 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 325 | th, 326 | td { 327 | padding: 12px 15px; 328 | text-align: left; 329 | border-bottom: 1px solid #E1E1E1; } 330 | th:first-child, 331 | td:first-child { 332 | 
padding-left: 0; } 333 | th:last-child, 334 | td:last-child { 335 | padding-right: 0; } 336 | 337 | 338 | /* Spacing 339 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 340 | button, 341 | .button { 342 | margin-bottom: 1rem; } 343 | input, 344 | textarea, 345 | select, 346 | fieldset { 347 | margin-bottom: 1.5rem; } 348 | pre, 349 | blockquote, 350 | dl, 351 | figure, 352 | table, 353 | p, 354 | ul, 355 | ol, 356 | form { 357 | margin-bottom: 2.5rem; } 358 | 359 | 360 | /* Utilities 361 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 362 | .u-full-width { 363 | width: 100%; 364 | box-sizing: border-box; } 365 | .u-max-full-width { 366 | max-width: 100%; 367 | box-sizing: border-box; } 368 | .u-pull-right { 369 | float: right; } 370 | .u-pull-left { 371 | float: left; } 372 | 373 | 374 | /* Misc 375 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 376 | hr { 377 | margin-top: 3rem; 378 | margin-bottom: 3.5rem; 379 | border-width: 0; 380 | border-top: 1px solid #E1E1E1; } 381 | 382 | 383 | /* Clearing 384 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 385 | 386 | /* Self Clearing Goodness */ 387 | .container:after, 388 | .row:after, 389 | .u-cf { 390 | content: ""; 391 | display: table; 392 | clear: both; } 393 | 394 | 395 | /* Media Queries 396 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 397 | /* 398 | Note: The best way to structure the use of media queries is to create the queries 399 | near the relevant code. For example, if you wanted to change the styles for buttons 400 | on small devices, paste the mobile query code up in the buttons section and style it 401 | there. 
402 | */ 403 | 404 | 405 | /* Larger than mobile */ 406 | @media (min-width: 400px) {} 407 | 408 | /* Larger than phablet (also point when grid becomes active) */ 409 | @media (min-width: 550px) {} 410 | 411 | /* Larger than tablet */ 412 | @media (min-width: 750px) {} 413 | 414 | /* Larger than desktop */ 415 | @media (min-width: 1000px) {} 416 | 417 | /* Larger than Desktop HD */ 418 | @media (min-width: 1200px) {} 419 | -------------------------------------------------------------------------------- /exercises/cicd/solutions/deploy_to_kubernetes/html/images/favicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/exercises/cicd/solutions/deploy_to_kubernetes/html/images/favicon.png -------------------------------------------------------------------------------- /exercises/cicd/solutions/deploy_to_kubernetes/html/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 7 | 8 | Hello World :) 9 | 10 | 11 | 12 | 14 | 15 | 16 | 18 | 19 | 20 | 22 | 23 | 24 | 25 | 27 | 28 | 29 | 30 | 31 | 32 | 34 |
35 |
36 |
37 |

Hello World :)

38 |
39 |
40 |
41 | 42 | 44 | 45 | 46 | -------------------------------------------------------------------------------- /exercises/cicd/solutions/deploy_to_kubernetes/inventory: -------------------------------------------------------------------------------- 1 | [kubernetes] 2 | x.x.x.x 3 | -------------------------------------------------------------------------------- /exercises/cicd/solutions/remove_builds_solution.groovy: -------------------------------------------------------------------------------- 1 | def removeOldBuilds(buildDirectory, days = 14) { 2 | 3 | def wp = new File("${buildDirectory}") 4 | def currentTime = new Date() 5 | def backTime = currentTime - days 6 | 7 | wp.list().each { fileName -> 8 | folder = new File("${buildDirectory}/${fileName}") 9 | if (folder.isDirectory()) { 10 | def timeStamp = new Date(folder.lastModified()) 11 | if (timeStamp.before(backTime)) { 12 | folder.delete() 13 | } 14 | } 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /exercises/cicd/solutions/remove_jobs_solution.groovy: -------------------------------------------------------------------------------- 1 | def jobs = Jenkins.instance.items.findAll { job -> job.name =~ /"test"/ } 2 | 3 | jobs.each { job -> 4 | println job.name 5 | //job.delete() 6 | } 7 | -------------------------------------------------------------------------------- /exercises/cloud/README.md: -------------------------------------------------------------------------------- 1 | ## Cloud 2 | 3 |
4 | What is Cloud Computing? What is a Cloud Provider?
5 | 6 | Cloud computing refers to the delivery of on-demand computing services 7 | over the internet on a pay-as-you-go basis. 8 | 9 | In simple words, Cloud computing is a service that lets you use any computing 10 | service such as a server, storage, networking, databases, and intelligence, 11 | right through your browser without owning anything. You can do almost anything you 12 | can think of, as long as it doesn’t require you to stay close to your hardware. 13 | 14 | Cloud service providers are companies that establish public clouds, manage private clouds, or offer on-demand cloud computing components (also known as cloud computing services) like Infrastructure-as-a-Service (IaaS), Platform-as-a-Service (PaaS), and Software-as-a-Service (SaaS). Cloud services can reduce business process costs when compared to on-premise IT. 15 |
16 | 17 |
18 | What are the advantages of cloud computing? Mention at least 3 advantages
19 | 20 | * Pay as you go: you are paying only for what you are using. No upfront payments and payment stops when resources are no longer used. 21 | * Scalable: resources are scaled down or up based on demand 22 | * High availability: resources and applications provide seamless experience, even when some services are down 23 | * Disaster recovery 24 |
25 | 26 |
27 | True or False? Cloud computing is a consumption-based model (users only pay for resources they use)
28 | 29 | True 30 |
31 | 32 |
33 | What types of Cloud Computing services are there?
34 | 35 | IAAS - Infrastructure as a Service 36 | PAAS - Platform as a Service 37 | SAAS - Software as a Service 38 |
39 | 40 |
41 | Explain each of the following and give an example: 42 | 43 | * IAAS 44 | * PAAS 45 | * SAAS
46 | * IAAS - Users have control over the complete Operating System and don't need to worry about the physical resources, which are managed by the Cloud Service Provider. 47 | * PAAS - The Cloud Service Provider takes care of the Operating System and middleware; users only need to focus on their data and applications. 48 | * SAAS - A cloud-based method to provide software to users; the software logic runs in the cloud and can be run on-premises or managed by the Cloud Service Provider. 49 |
50 | 51 |
52 | What types of clouds (or cloud deployments) are there?
53 | 54 | * Public - Cloud services sharing computing resources among multiple customers 55 | * Private - Cloud services having computing resources limited to specific customer or organization, managed by third party or organizations itself 56 | * Hybrid - Combination of public and private clouds 57 |
58 | 59 |
60 | What are the differences between Cloud Providers and On-Premise solution?
61 | 62 | In cloud providers, someone else owns and manages the hardware, hires the relevant infrastructure teams and pays for real-estate (for both hardware and people). You can focus on your business. 63 | 64 | In an On-Premise solution, it's quite the opposite. You need to take care of hardware, infrastructure teams and pay for everything, which can be quite expensive. On the other hand, it's tailored to your needs. 65 |
66 | 67 |
68 | What is Serverless Computing?
69 | 70 | The main idea behind serverless computing is that you don't need to manage the creation and configuration of servers. All you need to focus on is splitting your app into multiple functions which will be triggered by some actions. 71 | 72 | It's important to note that: 73 | 74 | * Serverless Computing is still using servers. So saying there are no servers in serverless computing is completely wrong 75 | * Serverless Computing allows you to have a different payment model. You basically pay only when your functions are running and not when the VM or containers are running as in other payment models 76 |
77 | 78 |
79 | Can we replace any type of computing on servers with serverless?
80 |
81 | 82 |
83 | Is there a difference between managed service to SaaS or is it the same thing?
84 |
85 | 86 |
87 | What is auto scaling?
88 | 89 | AWS definition: "AWS Auto Scaling monitors your applications and automatically adjusts capacity to maintain steady, predictable performance at the lowest possible cost" 90 | 91 | Read more about auto scaling [here](https://aws.amazon.com/autoscaling) 92 |
93 | 94 |
95 | True or False? Auto Scaling is about adding resources (such as instances) and not about removing resources
96 | 97 | False. Auto scaling adjusts capacity and this can mean removing some resources based on usage and performances. 98 |
99 | 100 | #### Cloud - Security 101 | 102 |
103 | How to secure instances in the cloud?
104 | 105 | * Instance should have minimal permissions needed. You don't want an instance-level incident to become an account-level incident 106 | * Instances should be accessed through load balancers or bastion hosts. In other words, they should be off the internet (in a private subnet behind a NAT). 107 | * Using latest OS images with your instances (or at least apply latest patches) 108 |
109 | -------------------------------------------------------------------------------- /exercises/cloud_slack_bot.md: -------------------------------------------------------------------------------- 1 | ## Cloud Slack Bot 2 | 3 | Create a slack bot to manage cloud instances. You can choose whatever cloud provider you want (e.g. Openstack, AWS, GCP, Azure) 4 | You should provide: 5 | 6 | * Instructions on how to use it 7 | * Source code of the slack bot 8 | * A running slack bot account or a deployment script so we can test it 9 | 10 | The bot should be able to support: 11 | 12 | * Creating new instances 13 | * Removing existing instances 14 | * Starting an instance 15 | * Stopping an instance 16 | * Displaying the status of an instance 17 | * List all available instances 18 | 19 | The bot should also be able to show help message. 20 | -------------------------------------------------------------------------------- /exercises/containers/image_layers.md: -------------------------------------------------------------------------------- 1 | ## Layer by Layer 2 | 3 | ### Objective 4 | 5 | Learn about image layers 6 | 7 | ### Requirements 8 | 9 | Make sure Docker is installed on your system and the service is started 10 | 11 | ``` 12 | # Fedora/RHEL/CentOS 13 | rpm -qa | grep docker 14 | systemctl status docker 15 | ``` 16 | 17 | ### Instructions 18 | 19 | 1. Write a Dockefile. Any Dockefile! :) (just make sure it's a valid one) 20 | 2. Build an image using the Dockerfile you've wrote 21 | 3. Which of the instructions you've used, created new layers and which added image metadata? 22 | 4. What ways are there to confirm your answer to the last question? 23 | 5. Can you reduce the size of the image you've created? 
24 | -------------------------------------------------------------------------------- /exercises/containers/multi_stage_builds.md: -------------------------------------------------------------------------------- 1 | ## Multi-Stage Builds 2 | 3 | ### Objective 4 | 5 | Learn about multi-stage builds 6 | 7 | ### Instructions 8 | 9 | 1. Without actually building an image or running any container, use the following Dockerfile and convert it to use multi-stage: 10 | 11 | ``` 12 | FROM nginx 13 | RUN apt-get update \ 14 | && apt-get install -y curl python build-essential \ 15 | && apt-get install -y nodejs \ 16 | && apt-get clean -y 17 | RUN mkdir -p /my_app 18 | ADD ./config/nginx/docker.conf /etc/nginx/nginx.conf 19 | ADD ./config/nginx/k8s.conf /etc/nginx/nginx.conf.k8s 20 | ADD app/ /my_cool_app 21 | WORKDIR /my_cool_app 22 | RUN npm install -g ember-cli 23 | RUN npm install -g bower 24 | RUN apt-get update && apt-get install -y git \ 25 | && npm install \ 26 | && bower install \ 27 | RUN ember build — environment=prod 28 | CMD [ “/root/nginx-app.sh”, “nginx”, “-g”, “daemon off;” ] 29 | ``` 30 | 31 | 2. What are the benefits of using multi-stage builds? 32 | -------------------------------------------------------------------------------- /exercises/containers/run_forest_run.md: -------------------------------------------------------------------------------- 1 | ## Run, Forest, Run! 2 | 3 | ### Objective 4 | 5 | Learn what restart policies do and how to use them 6 | 7 | ### Requirements 8 | 9 | Make sure Docker is installed on your system and the service is started 10 | 11 | ``` 12 | # Fedora/RHEL/CentOS 13 | rpm -qa | grep docker 14 | systemctl status docker 15 | ``` 16 | 17 | ### Instructions 18 | 19 | 1. Run a container with the following properties: 20 | * image: alpine 21 | * name: forest 22 | * restart policy: always 23 | * command to execute: sleep 15 24 | 2. Run `docker container ls` - Is the container running? What about after 15 seconds, is it still running? 
why? 25 | 3. How then can we stop the container from running? 26 | 4. Remove the container you've created 27 | 5. Run the same container again but this time with `sleep 600` and verify it runs 28 | 6. Restart the Docker service. Is the container still running? why? 29 | 8. Update the policy to `unless-stopped` 30 | 9. Stop the container 31 | 10. Restart the Docker service. Is the container running? why? 32 | -------------------------------------------------------------------------------- /exercises/containers/running_containers.md: -------------------------------------------------------------------------------- 1 | ## Running Containers 2 | 3 | ### Objective 4 | 5 | Learn how to run containers 6 | 7 | ### Requirements 8 | 9 | Make sure Podman or Docker (or any other containers engine) is installed on your system 10 | 11 | ### Instructions 12 | 13 | 1. Run a container using the latest nginx image 14 | 2. List the containers to make sure the container is running 15 | 3. Run another container but this time use ubuntu latest and attach to the terminal of the container 16 | 4. List again the containers. How many containers are running? 17 | 5. Stop the containers 18 | 6. Remove the containers 19 | -------------------------------------------------------------------------------- /exercises/containers/solutions/image_layers.md: -------------------------------------------------------------------------------- 1 | ## Layer by Layer 2 | 3 | ### Objective 4 | 5 | Learn about image layers 6 | 7 | ### Requirements 8 | 9 | Make sure Docker is installed on your system and the service is started 10 | 11 | ``` 12 | # Fedora/RHEL/CentOS 13 | rpm -qa | grep docker 14 | systemctl status docker 15 | ``` 16 | 17 | ### Instructions 18 | 19 | 1. Write a Dockefile. Any Dockefile! 
:) (just make sure it's a valid one) 20 | 21 | ``` 22 | FROM ubuntu 23 | EXPOSE 212 24 | ENV foo=bar 25 | WORKDIR /tmp 26 | RUN dd if=/dev/zero of=some_file bs=1024 count=0 seek=1024 27 | RUN dd if=/dev/zero of=some_file bs=1024 count=0 seek=1024 28 | RUN dd if=/dev/zero of=some_file bs=1024 count=0 seek=1024 29 | ``` 30 | 31 | 2. Build an image using the Dockerfile you've wrote 32 | 33 | `docker image build -t super_cool_app:latest .` 34 | 35 | 3. Which of the instructions you've used, created new layers and which added image metadata? 36 | 37 | ``` 38 | FROM, RUN -> new layer 39 | EXPOSE, ENV, WORKDIR -> metadata 40 | ``` 41 | 42 | 4. What ways are there to confirm your answer to the last question? 43 | 44 | You can run `docker image history super_cool_app`. It will show you each instruction and its size. Usually instructions that create new layers has non-zero size, but this is not something you can rely on by itself since, some run commands can have size of zero in `docker image history` output (e.g. `ls -l`). 45 | 46 | You can also use `docker image inspect super_cool_appl` and see if in the output, under "RootFS", there are the number of layers that matches the instructions that should create new layers. 47 | 48 | 5. Can you reduce the size of the image you've created? 49 | 50 | yes, for example, use all the RUN instructions as a single RUN instruction this way: 51 | 52 | `RUN dd if=/dev/zero of=some_file bs=1024 count=0 seek=1024 && dd if=/dev/zero of=some_file bs=1024 count=0 seek=1024 && dd if=/dev/zero of=some_file bs=1024 count=0 seek=1024` 53 | 54 | The change in size might not be dramatic in this case, but in some cases it will make a big impact on the image size. 
55 | -------------------------------------------------------------------------------- /exercises/containers/solutions/multi_stage_builds.md: -------------------------------------------------------------------------------- 1 | ## Multi-Stage Builds 2 | 3 | ### Objective 4 | 5 | Learn about multi-stage builds 6 | 7 | ### Instructions 8 | 9 | 1. Without actually building an image or running any container, use the following Dockerfile and convert it to use multi-stage: 10 | 11 | ``` 12 | FROM nginx 13 | RUN apt-get update \ 14 | && apt-get install -y curl python build-essential \ 15 | && apt-get install -y nodejs \ 16 | && apt-get clean -y 17 | RUN mkdir -p /my_app 18 | ADD ./config/nginx/docker.conf /etc/nginx/nginx.conf 19 | ADD ./config/nginx/k8s.conf /etc/nginx/nginx.conf.k8s 20 | ADD app/ /my_cool_app 21 | WORKDIR /my_cool_app 22 | RUN npm install -g ember-cli 23 | RUN npm install -g bower 24 | RUN apt-get update && apt-get install -y git \ 25 | && npm install \ 26 | && bower install \ 27 | RUN ember build — environment=prod 28 | CMD [ “/root/nginx-app.sh”, “nginx”, “-g”, “daemon off;” ] 29 | ``` 30 | 31 | 2. What are the benefits of using multi-stage builds? 32 | 33 | ### Solution 34 | 35 | 1. One possible solution (the emphasize is on passing the app from the first stage): 36 | 37 | ``` 38 | FROM node:6 39 | RUN mkdir -p /my_cool_app 40 | RUN npm install -g ember-cli 41 | RUN npm install -g bower 42 | WORKDIR /my_cool_app 43 | RUN npm install 44 | ADD app/ /my_cool_app 45 | RUN bower install 46 | RUN ember build — environment=prod 47 | 48 | FROM nginx 49 | RUN mkdir -p /my_cool_app 50 | ADD ./config/nginx/docker.conf /etc/nginx/nginx.conf 51 | ADD ./config/nginx/k8s.conf /etc/nginx/nginx.conf.k8s 52 | # Copy build artifacts from the first stage 53 | COPY — from=0 /my_cool_app/dist /my_cool_app/dist 54 | WORKDIR /my_cool_app 55 | CMD [ “/root/nginx-app.sh”, “nginx”, “-g”, “daemon off;” ] 56 | ``` 57 | 58 | 2. 
Multi-stages builds allow you to produce smaller container images by splitting the build process into multiple stages as we did above. The app image doesn't contain anything related to the build process except the actual app. 59 | -------------------------------------------------------------------------------- /exercises/containers/solutions/run_forest_run.md: -------------------------------------------------------------------------------- 1 | ## Run, Forest, Run! 2 | 3 | ### Objective 4 | 5 | Learn what restart policies do and how to use them 6 | 7 | ### Requirements 8 | 9 | Make sure Docker is installed on your system and the service is started 10 | 11 | ``` 12 | # Fedora/RHEL/CentOS 13 | rpm -qa | grep docker 14 | systemctl status docker 15 | ``` 16 | 17 | ### Instructions 18 | 19 | 1. Run a container with the following properties: 20 | * image: alpine 21 | * name: forest 22 | * restart policy: always 23 | * command to execute: sleep 15 24 | 25 | `docker run --restart always --name forest alpine sleep 15` 26 | 27 | 2. Run `docker container ls` - Is the container running? What about after 15 seconds, is it still running? why? 28 | 29 | 30 | It runs even after it completes to run `sleep 15` because the restart policy is "always". This means that Docker will keep restarting the **same** container even after it exists. 31 | 32 | 33 | 3. How then can we stop the container from running? 34 | 35 | The restart policy doesn't apply when the container is stopped with the command `docker container stop` 36 | 37 | 4. Remove the container you've created 38 | 39 | ``` 40 | docker container stop forest 41 | docker container rm forest 42 | ``` 43 | 44 | 5. Run the same container again but this time with `sleep 600` and verify it runs 45 | 46 | ``` 47 | docker run --restart always --name forest alpine sleep 600 48 | docker container ls 49 | ``` 50 | 51 | 6. Restart the Docker service. Is the container still running? why? 
52 | 53 | ``` 54 | sudo systemctl restart docker 55 | ``` 56 | Yes, it's still running due to the restart policy `always` which means Docker will always bring up the container after it exists or stopped (not with the stop command). 57 | 58 | 8. Update the policy to `unless-stopped` 59 | 60 | `docker update --restart unless-stopped forest` 61 | 62 | 9. Stop the container 63 | 64 | `docker container stop forest` 65 | 66 | 10. Restart the Docker service. Is the container running? why? 67 | 68 | ``` 69 | sudo systemctl restart docker 70 | ``` 71 | No, the container is not running. This is because we changed the policy to `unless-stopped` which will run the container unless it was in stopped status. Since before the restart we stopped the container, Docker didn't continue running it after the restart. 72 | -------------------------------------------------------------------------------- /exercises/containers/solutions/running_containers.md: -------------------------------------------------------------------------------- 1 | ## Running Containers 2 | 3 | ### Objective 4 | 5 | Learn how to run containers 6 | 7 | ### Requirements 8 | 9 | Make sure Podman or Docker (or any other containers engine) is installed on your system 10 | 11 | ### Instructions 12 | 13 | 1. Run a container using the latest nginx image - `podman container run nginx:latest` 14 | 2. List the containers to make sure the container is running - `podman container ls` 15 | 3. Run another container but this time use ubuntu latest and attach to the terminal of the container - `podman container run -it ubuntu:latest /bin/bash` 16 | 4. List again the containers. How many containers are running? - `podman container ls` -> 2 17 | 5. Stop the containers - WARNING: the following will stop all the containers on the host: `podman stop $(podman container ls -q)` or for each container `podman stop [container id/name]` 18 | 6. 
Remove the containers - WARNING: the following will remove other containers as well if such are running: `podman rm $(podman container ls -q -a)` or for each container `podman rm [container id/name]` 19 | -------------------------------------------------------------------------------- /exercises/containers/solutions/working_with_images.md: -------------------------------------------------------------------------------- 1 | ## Working with Images - Solution 2 | 3 | ### Objective 4 | 5 | Learn how to work with containers images 6 | 7 | ### Requirements 8 | 9 | Make sure Podman, Docker (or any other containers engine) is installed on your system 10 | 11 | ### Instructions 12 | 13 | 1. List the containers images in your environment - `podman image ls` 14 | 2. Pull the latest ubuntu image - `podman image pull ubuntu:latest` 15 | 3. Run a container with the image you just pulled - `podman container run -it ubuntu:latest /bin/bash` 16 | 4. Remove the image. Did it work? - No. There is a running container which is using the image we try to remove 17 | 5. Do whatever is needed in order to remove the image - `podman rm ; podman image rm ubuntu` 18 | -------------------------------------------------------------------------------- /exercises/containers/working_with_images.md: -------------------------------------------------------------------------------- 1 | ## Working with Images 2 | 3 | ### Objective 4 | 5 | Learn how to work with containers images 6 | 7 | ### Requirements 8 | 9 | Make sure Podman or Docker (or any other containers engine) is installed on your system 10 | 11 | ### Instructions 12 | 13 | 1. List the containers images in your environment 14 | 2. Pull the latest ubuntu image 15 | 3. Run a container with the image you just pulled 16 | 4. Remove the image. Did it work? 17 | 5. 
Do whatever is needed in order to remove the image 18 | -------------------------------------------------------------------------------- /exercises/containers/write_dockerfile_run_container.md: -------------------------------------------------------------------------------- 1 | # Write a Dockerfile and run a container 2 | 3 | Your task is as follows: 4 | 5 | 1. Create a Docker image: 6 | * Use centos or ubuntu as the base image 7 | * Install apache web server 8 | * Deploy any web application you want 9 | * Add https support (using HAProxy as reverse-proxy) 10 | 2. Once you wrote the Dockerfile and created an image, run the container and test the application. Describe how did you test it and provide output 11 | 3. Describe one or more weakness of your Dockerfile. Is it ready to be used in production? 12 | -------------------------------------------------------------------------------- /exercises/databases/solutions/table_for_message_board_system.md: -------------------------------------------------------------------------------- 1 | ## Database Table for Message Board System 2 | 3 | ### Instructions 4 | 5 | Design a database table for a message board system. It should include the following information: 6 | 7 | * Personal details 8 | * Who saw the message and when 9 | * Replies 10 | * Tagged people in the message 11 | * Message categories 12 | 13 | Notes: 14 | 15 | * No SQL is needed 16 | * You should include: table names, field names, data types and mention the foreign keys used. 
17 | 18 | ### Solution 19 | 20 | Note: This is just one possible design 21 | 2nd Note: PK = primary key, FK = Foreign key 22 | 23 | ----- People ----- 24 | ID int PK 25 | FirstName varchar(255) 26 | LastName varchar(255) 27 | DOB date 28 | Gender varchar(1) 29 | Phone varchar(10) 30 | 31 | | \ 32 | | \ 33 | | \ 34 | v \ 35 | \ 36 | --- Messages --- v 37 | ID int PK 38 | MessageBoardID FK --- MessageTags --- 39 | --- MessageBoards --- PeopleID int FK ID int PK 40 | ID int PK ----> MsgDate datetime ---> MessageID FK 41 | Board text Message text PeopleID int Fk 42 | MessageID (FK) 43 | ^ | 44 | | | 45 | |______| 46 | 47 | -------------------------------------------------------------------------------- /exercises/databases/table_for_message_board_system.md: -------------------------------------------------------------------------------- 1 | ## Database Table for Message Board System 2 | 3 | ### Instructions 4 | 5 | Design a database table for a message board system. It should include the following information: 6 | 7 | * Personal details 8 | * Who saw the message and when 9 | * Replies 10 | * Tagged people in the message 11 | * Message categories 12 | 13 | Notes: 14 | 15 | * No SQL is needed 16 | * You should include: table names, field names, data types and mention the foreign keys used. 17 | -------------------------------------------------------------------------------- /exercises/devops/containerize_app.md: -------------------------------------------------------------------------------- 1 | ## Containerize an Application 2 | 3 | 1. Clone an open source project you would like to containerize. A couple of suggestions: 4 | 5 | ``` 6 | https://github.com/bregman-arie/node-hello-world 7 | https://github.com/bregman-arie/flask-hello-world 8 | ``` 9 | 2. Write a Dockerfile you'll use for building an image of the application (you can use any base image you would like) 10 | 3. Build an image using the Dockerfile you've just wrote 11 | 4. Verify the image exists 12 | 5. 
[Optional] Push the image you've just built to a registry 13 | 6. Run the application 14 | 7. Verify the app is running 15 | -------------------------------------------------------------------------------- /exercises/devops/ha_hello_world.md: -------------------------------------------------------------------------------- 1 | ## Highly Available "Hello World" 2 | 3 | Set up an highly available "Hello World" application with the following instructions: 4 | 5 | * Use a containerized Load Balancer 6 | * Provision two virtual machines (this is where the app will run) 7 | * The page, when visited, should show "Hello World! I'm host X" - X should be the name of the virtual machine 8 | -------------------------------------------------------------------------------- /exercises/devops/solutions/containerize_app.md: -------------------------------------------------------------------------------- 1 | ## Containerize an Application 2 | 3 | 1. Clone an open source project you would like to containerize. A couple of suggestions: 4 | 5 | ``` 6 | https://github.com/bregman-arie/node-hello-world 7 | https://github.com/bregman-arie/flask-hello-world 8 | ``` 9 | 10 | `git clone https://github.com/bregman-arie/node-hello-world` 11 | 12 | 2. Write a Dockerfile you'll use for building an image of the application (you can use any base image you would like) 13 | 14 | ``` 15 | FROM alpine 16 | LABEL maintainer="your name/email" 17 | RUN apk add --update nodejs npm 18 | COPY . /src 19 | WORKDIR /src 20 | RUN npm install 21 | EXPOSE 3000 22 | ENTRYPOINT ["node", "./app.js"] 23 | ``` 24 | 25 | 3. Build an image using the Dockerfile you've just wrote 26 | 27 | `docker image build -t web_app:latest .` 28 | 29 | 4. Verify the image exists 30 | 31 | `docker image ls` 32 | 33 | 5. 
[Optional] Push the image you've just built to a registry 34 | 35 | ``` 36 | docker login 37 | docker image tag web_app:latest /web_app:latest 38 | # Verify with "docker image ls" 39 | docker image push /web_app:latest 40 | ``` 41 | 42 | 6. Run the application 43 | 44 | ``` 45 | docker container run -d -p 80:3000 web_app:latest 46 | ``` 47 | 48 | 7. Verify the app is running 49 | 50 | ``` 51 | docker container ls 52 | docker logs 53 | # In the browser, go to 127.0.0.1:80 54 | ``` 55 | -------------------------------------------------------------------------------- /exercises/devops/solutions/ha_hello_world.md: -------------------------------------------------------------------------------- 1 | ## Highly Available "Hello World" 2 | 3 | Set up an highly available "Hello World" application with the following instructions: 4 | 5 | * Use a containerized Load Balancer 6 | * Provision two virtual machines (this is where the app will run) 7 | * The page, when visited, should show "Hello World! I'm host X" - X should be the name of the virtual machine 8 | 9 | ### Solution 10 | 11 | 1. Provision two VMs 12 | -------------------------------------------------------------------------------- /exercises/eflk.md: -------------------------------------------------------------------------------- 1 | ## ELK + Filebeat 2 | 3 | Set up the following using any log you would like: 4 | 5 | * Run the following: elasticsearch, logstash, kibana and filebeat (each running in its own container) 6 | * Make filebeat transfer a log to logstash for process 7 | * Once logstash is done, index with elasticsearch 8 | * Finally, make sure data is available in Kibana 9 | -------------------------------------------------------------------------------- /exercises/flask_container_ci/README.md: -------------------------------------------------------------------------------- 1 | Your mission, should you choose to accept it, involves fixing the app in this directory, containerize it and set up a CI for it. 
2 | Please read carefully all the instructions. 3 | 4 | If any of the following steps is not working, it is expected from you to fix them 5 | 6 | ## Installation 7 | 8 | 1. Create a virtual environment with `python3 -m venv challenge_venv` 9 | 2. Activate it with `source challenge_venv/bin/activate` 10 | 3. Install the requirements in this directory `pip install -r requirements.txt` 11 | 12 | ## Run the app 13 | 14 | 1. Move to `challenges/flask_container_ci` directory, if you are not already there 15 | 1. Run `export FLASK_APP=app/main.py` 16 | 1. To run the app execute `flask run`. If it doesn't work, fix it 17 | 3. Access `http://127.0.0.1:5000`. You should see the following: 18 | 19 | ``` 20 | { 21 | "resources_uris": { 22 | "user": "/users/\", 23 | "users": "/users" 24 | }, 25 | "current_uri": "/" 26 | } 27 | ``` 28 | 29 | 4. You should be able to access any of the resources and get the following data: 30 | 31 | * /users - all users data 32 | * /users/ - data on the specific chosen user 33 | 34 | 5. When accessing /users, the data returned should not include the id of the user, only its name and description. Also, the data should be ordered by usernames. 35 | 36 | ## Containers 37 | 38 | Using Docker or Podman, containerize the flask app so users can run the following two commands: 39 | 40 | ``` 41 | docker build -t app:latest /path/to/Dockerfile 42 | docker run -d -p 5000:5000 app 43 | ``` 44 | 45 | 1. You can use any image base you would like 46 | 2. Containerize only what you need for running the application, nothing else. 47 | 48 | ## CI 49 | 50 | Great, now that we have a working app and also can run it in a container, let's set up a CI for it so it won't break again in the future 51 | In current directory you have a file called tests.py which includes the tests for the app. What is required from you, is: 52 | 53 | 1. The CI should run the app tests. You are free to choose whatever CI system or service you prefer. 
Use `python tests.py` for running the tests. 54 | 2. There should be some kind of test for the Dockerfile you wrote 55 | 3. Add additional unit test (or another level of tests) for testing the app 56 | 57 | ### Guidelines 58 | 59 | * Except the app functionality, you can change whatever you want - structure, tooling, libraries, ... If possible add `notes.md` file which explains reasons, logic, thoughts and anything else you would like to share 60 | * The CI part should include the source code for the pipeline definition 61 | -------------------------------------------------------------------------------- /exercises/flask_container_ci/app/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding=utf-8 3 | -------------------------------------------------------------------------------- /exercises/flask_container_ci/app/config.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding=utf-8 3 | 4 | import os 5 | 6 | basedir = os.path.abspath(os.path.dirname(__file__)) 7 | 8 | SECRET_KEY = 'shhh' 9 | CSRF_ENABLED = True 10 | 11 | SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db') 12 | -------------------------------------------------------------------------------- /exercises/flask_container_ci/app/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding=utf-8 3 | 4 | from flask import Flask 5 | from flask import make_response 6 | 7 | import json 8 | from werkzeug.exceptions import NotFound 9 | 10 | 11 | app = Flask(__name__) 12 | 13 | with open("./users.json", "r") as f: 14 | users = json.load(f) 15 | 16 | 17 | @app.route("/", methods=['GET']) 18 | def index(): 19 | return pretty_json({ 20 | "resources": { 21 | "users": "/users", 22 | "user": "/users/", 23 | }, 24 | "current_uri": "/" 25 | }) 26 | 27 | 28 | @app.route("/users", 
methods=['GET']) 29 | def all_users(): 30 | return pretty_json(users) 31 | 32 | 33 | @app.route("/users/", methods=['GET']) 34 | def user_data(username): 35 | if username not in users: 36 | raise NotFound 37 | 38 | return pretty_json(users[username]) 39 | 40 | 41 | @app.route("/users//something", methods=['GET']) 42 | def user_something(username): 43 | raise NotImplementedError() 44 | 45 | 46 | def pretty_json(arg): 47 | response = make_response(json.dumps(arg, sort_keys=True, indent=4)) 48 | response.headers['Content-type'] = "application/json" 49 | return response 50 | 51 | 52 | def create_test_app(): 53 | app = Flask(__name__) 54 | return app 55 | 56 | 57 | if __name__ == "__main__": 58 | app.run(port=5000) 59 | -------------------------------------------------------------------------------- /exercises/flask_container_ci/app/tests.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding=utf-8 3 | 4 | import os 5 | import unittest 6 | 7 | from config import basedir 8 | from app import app 9 | from app import db 10 | 11 | 12 | class TestCase(unittest.TestCase): 13 | 14 | def setUp(self): 15 | app.config['TESTING'] = True 16 | app.config['WTF_CSRF_ENABLED'] = False 17 | app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.join( 18 | basedir, 'test.db') 19 | self.app = app.test_client() 20 | db.create_all() 21 | 22 | def tearDown(self): 23 | db.session.remove() 24 | db.drop_all() 25 | 26 | 27 | if __name__ == '__main__': 28 | unittest.main() 29 | -------------------------------------------------------------------------------- /exercises/flask_container_ci/requirements.txt: -------------------------------------------------------------------------------- 1 | flask 2 | -------------------------------------------------------------------------------- /exercises/flask_container_ci/tests.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 
2 | # coding=utf-8 3 | 4 | import unittest 5 | 6 | from app import main 7 | 8 | 9 | class TestCase(unittest.TestCase): 10 | 11 | def setUp(self): 12 | self.app = main.app.test_client() 13 | 14 | def test_main_page(self): 15 | response = self.app.get('/', follow_redirects=True) 16 | self.assertEqual(response.status_code, 200) 17 | 18 | def test_users_page(self): 19 | response = self.app.get('/users', follow_redirects=True) 20 | self.assertEqual(response.status_code, 200) 21 | 22 | 23 | if __name__ == '__main__': 24 | unittest.main() 25 | -------------------------------------------------------------------------------- /exercises/flask_container_ci/users.json: -------------------------------------------------------------------------------- 1 | { 2 | "geralt" : { 3 | "id": "whitewolf", 4 | "name": "Geralt of Rivia", 5 | "description": "Traveling monster slayer for hire" 6 | }, 7 | "lara_croft" : { 8 | "id": "m31a3n6sion", 9 | "name": "Lara Croft", 10 | "description": "Highly intelligent and athletic English archaeologist" 11 | }, 12 | "mario" : { 13 | "id": "smb3igiul", 14 | "name": "Mario", 15 | "description": "Italian plumber who really likes mushrooms" 16 | }, 17 | "gordon_freeman" : { 18 | "id": "nohalflife3", 19 | "name": "Gordon Freeman", 20 | "description": "Physicist with great shooting skills" 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /exercises/flask_container_ci2/README.md: -------------------------------------------------------------------------------- 1 | Your mission, should you choose to accept it, involves developing an app, containerize it and set up a CI for it. 2 | Please read carefully all the instructions. 3 | 4 | If any of the following steps is not working, it is expected from you to fix them 5 | 6 | ## Installation 7 | 8 | 1. Create a virtual environment with `python3 -m venv challenge_venv` 9 | 2. Activate it with `source challenge_venv/bin/activate` 10 | 3. 
Install the requirements in this directory `pip install -r requirements.txt` 11 | 12 | ## Run the app 13 | 14 | 1. Move to `challenges/flask_container_ci` directory, if you are not already there 15 | 1. Run `export FLASK_APP=app/main.py` 16 | 1. To run the app execute `flask run`. If it doesn't works, fix it 17 | 3. Access `http://127.0.0.1:5000`. You should see the following 18 | 19 | ``` 20 | { 21 | "current_uri": "/", 22 | "example": "/matrix/'123n456n789'", 23 | "resources": { 24 | "column": "/columns//", 25 | "matrix": "/matrix/", 26 | "row": "/rows//" 27 | } 28 | } 29 | ``` 30 | 31 | 4. You should be able to access any of the resources and get the following data: 32 | 33 | * /matrix/\ 34 | 35 | for example, for /matrix/123n456n789 the user will get: 36 | 37 | 1 2 3 38 | 4 5 6 39 | 7 8 9 40 | 41 | * /matrix/\/\ 42 | 43 | for example, for /matrix/123n456n789/2 the user will get: 44 | 45 | 2 46 | 5 47 | 8 48 | 49 | * /matrix/\/\ 50 | 51 | for example, for /matrix/123n456n789/1 the user will get: 52 | 53 | 1 2 3 54 | 55 | ## Containers 56 | 57 | Using Docker or Podman, containerize the flask app so users can run the following two commands: 58 | 59 | ``` 60 | docker build -t app:latest /path/to/Dockerfile 61 | docker run -d -p 5000:5000 app 62 | ``` 63 | 64 | 1. You can use any image base you would like 65 | 2. Containerize only what you need for running the application, nothing else. 66 | 67 | ## CI 68 | 69 | Great, now that we have a working app and also can run it in a container, let's set up a CI for it so it won't break again in the future 70 | In current directory you have a file called tests.py which includes the tests for the app. What is required from you, is: 71 | 72 | 1. Write a CI pipeline that will run the app tests. You are free to choose whatever CI system or service you prefer. Use `python tests.py` for running the tests. 73 | 2. There should be some kind of test for the Dockerfile you wrote 74 | 3. 
Add additional unit test (or any other level of tests) for testing the app 75 | 76 | ### Guidelines 77 | 78 | * Except the app functionality, you can change whatever you want - structure, tooling, libraries, ... If possible, add `notes.md` file which explains reasons, logic, thoughts and anything else you would like to share 79 | * The CI part should include the source code for the pipeline definition 80 | -------------------------------------------------------------------------------- /exercises/flask_container_ci2/app/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding=utf-8 3 | -------------------------------------------------------------------------------- /exercises/flask_container_ci2/app/config.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding=utf-8 3 | 4 | import os 5 | 6 | basedir = os.path.abspath(os.path.dirname(__file__)) 7 | 8 | SECRET_KEY = 'shhh' 9 | CSRF_ENABLED = True 10 | 11 | SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db') 12 | -------------------------------------------------------------------------------- /exercises/flask_container_ci2/app/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding=utf-8 3 | 4 | from flask import Flask 5 | from flask import make_response 6 | 7 | import json 8 | 9 | app = Flask(__name__) 10 | 11 | 12 | @app.routee("/", methods=['GET']) 13 | def index(): 14 | return pretty_json({ 15 | "resources": { 16 | "matrix": "/matrix/", 17 | "column": "/columns//", 18 | "row": "/rows//", 19 | }, 20 | "current_uri": "/", 21 | "example": "/matrix/'123n456n789'", 22 | }) 23 | 24 | 25 | @app.route("/matrix/", methods=['GET']) 26 | def matrix(matrix): 27 | # TODO: return matrix, each row in a new line 28 | pass 29 | 30 | 31 | @app.route("/matrix//", methods=['GET']) 32 | def 
column(matrix, column_number): 33 | # TODO: return column based on given column number 34 | pass 35 | 36 | 37 | @app.route("/matrix//", methods=['GET']) 38 | def row(matrix, row_number): 39 | # TODO: return row based on given row number 40 | pass 41 | 42 | 43 | def pretty_json(arg): 44 | response = make_response(json.dumps(arg, sort_keys=True, indent=4)) 45 | response.headers['Content-type'] = "application/json" 46 | return response 47 | 48 | 49 | if __name__ == "__main__": 50 | app.run(port=5000) 51 | -------------------------------------------------------------------------------- /exercises/flask_container_ci2/app/tests.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding=utf-8 3 | 4 | import os 5 | import unittest 6 | 7 | from config import basedir 8 | from app import app 9 | from app import db 10 | 11 | 12 | class TestCase(unittest.TestCase): 13 | 14 | def setUp(self): 15 | app.config['TESTING'] = True 16 | app.config['WTF_CSRF_ENABLED'] = False 17 | app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.join( 18 | basedir, 'test.db') 19 | self.app = app.test_client() 20 | db.create_all() 21 | 22 | def tearDown(self): 23 | db.session.remove() 24 | db.drop_all() 25 | 26 | 27 | if __name__ == '__main__': 28 | unittest.main() 29 | -------------------------------------------------------------------------------- /exercises/flask_container_ci2/requirements.txt: -------------------------------------------------------------------------------- 1 | flask 2 | -------------------------------------------------------------------------------- /exercises/flask_container_ci2/tests.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding=utf-8 3 | 4 | import unittest 5 | 6 | from app import main 7 | 8 | 9 | class TestCase(unittest.TestCase): 10 | 11 | def setUp(self): 12 | self.app = main.app.test_client() 13 | 14 | def 
test_main_page(self): 15 | response = self.app.get('/', follow_redirects=True) 16 | self.assertEqual(response.status_code, 200) 17 | 18 | def test_matrix(self): 19 | response = self.app.get('/matrix/123n459,789', follow_redirects=True) 20 | # Change when the matrix route is fixed and returning the actual matrix 21 | self.assertEqual(response.status_code, 500) 22 | 23 | 24 | if __name__ == '__main__': 25 | unittest.main() 26 | -------------------------------------------------------------------------------- /exercises/git/README.md: -------------------------------------------------------------------------------- 1 | ## Git 2 | 3 | |Name|Topic|Objective & Instructions|Solution|Comments| 4 | |--------|--------|------|----|----| 5 | | My first Commit | Commit | [Exercise](exercises/git/commit_01.md) | [Solution](exercises/git/solutions/commit_01_solution.md) | | 6 | | Time to Branch | Branch | [Exercise](exercises/git/branch_01.md) | [Solution](exercises/git/solutions/branch_01_solution.md) | | 7 | | Squashing Commits | Commit | [Exercise](exercises/git/squashing_commits.md) | [Solution](exercises/git/solutions/squashing_commits.md) | | 8 | 9 |
10 | How do you know if a certain directory is a git repository?
11 | 12 | You can check if there is a ".git" directory. 13 |
14 | 15 |
16 | Explain the following: git directory, working directory and staging area
17 | 18 | This answer taken from [git-scm.com](https://git-scm.com/book/en/v1/Getting-Started-Git-Basics#_the_three_states) 19 | 20 | "The Git directory is where Git stores the meta data and object database for your project. This is the most important part of Git, and it is what is copied when you clone a repository from another computer. 21 | 22 | The working directory is a single checkout of one version of the project. These files are pulled out of the compressed database in the Git directory and placed on disk for you to use or modify. 23 | 24 | The staging area is a simple file, generally contained in your Git directory, that stores information about what will go into your next commit. It’s sometimes referred to as the index, but it’s becoming standard to refer to it as the staging area." 25 |
26 | 27 |
28 | What is the difference between git pull and git fetch?
29 | 30 | In short, git pull = git fetch + git merge 31 | 32 | When you run git pull, it gets all the changes from the remote or central 33 | repository and attaches them to your corresponding branch in your local repository. 34 | 35 | git fetch gets all the changes from the remote repository, stores the changes in 36 | a separate branch in your local repository 37 | 
38 | 39 |
40 | How to check if a file is tracked and if not, then track it?
41 | 42 | There are different ways to check whether a file is tracked or not: 43 | 44 | - `git ls-files --error-unmatch ` -> exit code of 0 means it's tracked 45 | - `git blame ` 46 | ... 47 | 
48 | 49 |
50 | How can you see which changes have done before committing them?
51 | 52 | `git diff` 53 | 
54 | 55 |
56 | What git status does?
57 |
58 | 59 |
60 | You have two branches - main and devel. How do you make sure devel is in sync with main?
61 | 62 | ``` 63 | git checkout main 64 | git pull 65 | git checkout devel 66 | git merge main 67 | ``` 68 |
69 | 70 | #### Git - Merge 71 | 72 |
73 | You have two branches - main and devel. How do you put devel into main?
74 | 75 | git checkout main 76 | git merge devel 77 | git push origin main 78 |
79 | 80 |
81 | How to resolve git merge conflicts?
82 | 83 |

84 | First, you open the files which are in conflict and identify what the conflicts are. 85 | Next, based on what is accepted in your company or team, you either discuss the conflicts with your 86 | colleagues or resolve them by yourself 87 | After resolving the conflicts, you add the files with `git add ` 88 | Finally, you run `git merge --continue` (or simply `git commit`) to complete the merge 89 | 

90 |
91 | 92 |
93 | What merge strategies are you familiar with?
94 | 95 | Mentioning two or three should be enough and it's probably good to mention that 'recursive' is the default one. 96 | 97 | recursive 98 | resolve 99 | ours 100 | theirs 101 | 102 | This page explains it the best: https://git-scm.com/docs/merge-strategies 103 |
104 | 105 |
106 | Explain Git octopus merge
107 | 108 | Probably good to mention that it's: 109 | 110 | * It's good for cases of merging more than one branch (and also the default of such use cases) 111 | * It's primarily meant for bundling topic branches together 112 | 113 | This is a great article about Octopus merge: http://www.freblogg.com/2016/12/git-octopus-merge.html 114 |
115 | 116 |
117 | What is the difference between git reset and git revert?
118 | 119 |

120 | 121 | `git revert` creates a new commit which undoes the changes from last commit. 122 | 123 | `git reset` depends on the usage, can modify the index or change the commit which the branch head 124 | is currently pointing at. 125 |

126 |
127 | 128 | #### Git - Rebase 129 | 130 |
131 | You would like to move forth commit to the top. How would you achieve that?
132 | 133 | Using the `git rebase` command 134 |
135 | 136 |
137 | In what situations are you using git rebase?
138 |
139 | 140 |
141 | How do you revert a specific file to previous commit?
142 | 143 | ``` 144 | git checkout HEAD~1 -- /path/of/the/file 145 | ``` 146 |
147 | 148 |
149 | How to squash last two commits?
150 |
151 | 152 |
153 | What is the .git directory? What can you find there?
154 | The .git folder contains all the information that is necessary for your project in version control and all the information about commits, remote repository address, etc. All of them are present in this folder. It also contains a log that stores your commit history so that you can roll back to history. 155 | 156 | 157 | This info copied from [https://stackoverflow.com/questions/29217859/what-is-the-git-folder](https://stackoverflow.com/questions/29217859/what-is-the-git-folder) 158 |
159 | 160 |
161 | What are some Git anti-patterns? Things that you shouldn't do
162 | 163 | * Waiting too long between commits 164 | * Removing the .git directory :) 165 | 
166 | 167 |
168 | How do you remove a remote branch?
169 | 170 | You delete a remote branch with this syntax: 171 | 172 | git push origin :[branch_name] 173 |
174 | 175 |
176 | Are you familiar with gitattributes? When would you use it?
177 | 178 | gitattributes allow you to define attributes per pathname or path pattern.
179 | 180 | You can use it for example to control endlines in files. In Windows and Unix based systems, you have different characters for new lines (\r\n and \n respectively). So using gitattributes we can align it for both Windows and Unix with `* text=auto` in .gitattributes for anyone working with git. This way, if you use the Git project in Windows you'll get \r\n and if you are using Unix or Linux, you'll get \n. 181 | 
182 | 183 |
184 | How do you discard local file changes? (before commit)
185 | 186 | `git checkout -- ` 187 |
188 | 189 |
190 | How do you discard local commits?
191 | 192 | `git reset HEAD~1` for removing the last commit 193 | If you would like to also discard the changes, run `git reset --hard` 194 | 
195 | 196 |
197 | True or False? To remove a file from git but not from the filesystem, one should use git rm
198 | 199 | False. Plain `git rm` removes the file from the filesystem as well. To remove a file from Git but keep it on the filesystem, use `git rm --cached ` 200 | 
201 | -------------------------------------------------------------------------------- /exercises/git/branch_01.md: -------------------------------------------------------------------------------- 1 | ## Git Commit 01 2 | 3 | ### Objective 4 | 5 | Learn how to work with Git Branches 6 | 7 | ### Instructions 8 | 9 | 1. Pick up a Git repository (or create a new one) with at least one commit 10 | 2. Create a new branch called "dev" 11 | 3. Modify one of the files in the repository 12 | 4. Create a new commit 13 | 5. Verify the commit you created is only in "dev" branch 14 | 15 | ### After you complete the exercise 16 | 17 | Answer the following: 18 | 19 | 1. Why branches are useful? Give an example of one real-world scenario for using branches 20 | -------------------------------------------------------------------------------- /exercises/git/commit_01.md: -------------------------------------------------------------------------------- 1 | ## Git Commit 01 2 | 3 | ### Objective 4 | 5 | Learn how to commit changes in Git repositories 6 | 7 | ### Instructions 8 | 9 | 1. Create a new directory 10 | 2. Make it a git repository 11 | 3. Create a new file called `file` with the content "hello commit" 12 | 4. Commit your new file 13 | 5. Run a git command to verify your commit was recorded 14 | 15 | ### After you complete the exercise 16 | 17 | Answer the following: 18 | 19 | * What are the benefits of commits? 20 | * Is there another way to verify a commit was created? 
21 | -------------------------------------------------------------------------------- /exercises/git/solutions/branch_01_solution.md: -------------------------------------------------------------------------------- 1 | ## Branch 01 - Solution 2 | 3 | ``` 4 | cd some_repository 5 | echo "master branch" > file1 6 | git add file1 7 | git commit -a -m "added file1" 8 | git checkout -b dev 9 | echo "dev branch" > file2 10 | git add file2 11 | git commit -a -m "added file2" 12 | ``` 13 | 14 | Verify: 15 | 16 | ``` 17 | git log (you should see two commits) 18 | git checkout master 19 | git log (you should see one commit) 20 | ``` 21 | -------------------------------------------------------------------------------- /exercises/git/solutions/commit_01_solution.md: -------------------------------------------------------------------------------- 1 | ## Git Commit 01 - Solution 2 | 3 | ``` 4 | mkdir my_repo && cd my_repo 5 | git init 6 | echo "hello_commit" > file 7 | git add file 8 | git commit -a -m "It's my first commit. Exciting!" 9 | git log 10 | ``` 11 | -------------------------------------------------------------------------------- /exercises/git/solutions/squashing_commits.md: -------------------------------------------------------------------------------- 1 | ## Git - Squashing Commits - Solution 2 | 3 | 4 | 1. In a git repository, create a new file with the content "Mario" and commit the change 5 | 6 | ``` 7 | git add new_file 8 | echo "Mario" -> new_file 9 | git commit -a -m "New file" 10 | ``` 11 | 12 | 2. Make change to the content of the file you just created so the content is "Mario & Luigi" and create another commit 13 | 14 | ``` 15 | echo "Mario & Luigi" > new_file 16 | git commit -a -m "Added Luigi" 17 | ``` 18 | 19 | 3. Verify you have two separate commits - `git log` 20 | 21 | 4. 
Squash the two commits you've created into one commit 22 | 23 | ``` 24 | git rebase -i HEAD~2 25 | ``` 26 | 27 | You should see something similar to: 28 | 29 | ``` 30 | pick 5412076 New file 31 | pick 4016808 Added Luigi 32 | ``` 33 | 34 | Change `pick` to `squash` 35 | 36 | 37 | ``` 38 | pick 5412076 New file 39 | squash 4016808 Added Luigi 40 | ``` 41 | 42 | Save it and provide a commit message for the squashed commit 43 | 44 | ### After you complete the exercise 45 | 46 | Answer the following: 47 | 48 | * What is the reason for squashing commits? - history becomes cleaner and it's easier to track changes without commit like "removed a character" for example. 49 | * Is it possible to squash more than 2 commits? - yes 50 | -------------------------------------------------------------------------------- /exercises/git/squashing_commits.md: -------------------------------------------------------------------------------- 1 | ## Git - Squashing Commits 2 | 3 | ### Objective 4 | 5 | Learn how to squash commits 6 | 7 | ### Instructions 8 | 9 | 1. In a git repository, create a new file with the content "Mario" and create a new commit 10 | 2. Make change to the content of the file you just created so the content is "Mario & Luigi" and create another commit 11 | 3. Verify you have two separate commits 12 | 4. Squash the latest two commits into one commit 13 | 14 | ### After you complete the exercise 15 | 16 | Answer the following: 17 | 18 | * What is the reason for squashing commits? 19 | * Is it possible to squash more than 2 commits? 
20 | -------------------------------------------------------------------------------- /exercises/jenkins_pipelines.md: -------------------------------------------------------------------------------- 1 | ## Jenkins Pipelines 2 | 3 | Write/Create the following Jenkins pipelines: 4 | 5 | * A pipeline which will run unit tests upon git push to a certain repository 6 | * A pipeline which will do to the following: 7 | 8 | * Provision an instance (can also be a container) 9 | * Configure the instance as Apache web server 10 | * Deploy a web application on the provisioned instance 11 | -------------------------------------------------------------------------------- /exercises/jenkins_scripts.md: -------------------------------------------------------------------------------- 1 | ## Jenkins Scripts 2 | 3 | Write the following scripts: 4 | 5 | * Remove all the jobs which include the string "REMOVE_ME" in their name 6 | * Remove builds older than 14 days 7 | 8 | ### Answer 9 | 10 | * [Remove jobs which include specific string](jenkins/scripts/jobs_with_string.groovy) 11 | * [Remove builds older than 14 days](jenkins/scripts/old_builds.groovy) 12 | -------------------------------------------------------------------------------- /exercises/kubernetes/killing_containers.md: -------------------------------------------------------------------------------- 1 | ## "Killing" Containers 2 | 3 | 1. Run Pod with a web service (e.g. httpd) 4 | 2. Verify the web service is running with the `ps` command 5 | 3. Check how many restarts the pod has performed 6 | 4. Kill the web service process 7 | 5. Check how many restarts the pod has performed 8 | 6. Verify again the web service is running 9 | 10 | ## After you complete the exercise 11 | 12 | * Why did the "RESTARTS" count raised? 
13 | -------------------------------------------------------------------------------- /exercises/kubernetes/pods_01.md: -------------------------------------------------------------------------------- 1 | ## Pods 01 2 | 3 | #### Objective 4 | 5 | Learn how to create pods 6 | 7 | #### Instructions 8 | 9 | 1. Choose a container image (e.g. redis, nginx, mongo, etc.) 10 | 2. Create a pod (in the default namespace) using the image you chose 11 | 3. Verify the pod is running 12 | -------------------------------------------------------------------------------- /exercises/kubernetes/replicaset_01.md: -------------------------------------------------------------------------------- 1 | ## ReplicaSet 101 2 | 3 | #### Objective 4 | 5 | Learn how to create and view ReplicaSets 6 | 7 | #### Instructions 8 | 9 | 1. Create a ReplicaSet with 2 replicas. The app can be anything. 10 | 2. Verify a ReplicaSet was created and there are 2 replicas 11 | 3. Delete one of the Pods the ReplicaSet has created 12 | 4. If you'll list all the Pods now, what will you see? 13 | 5. Remove the ReplicaSet you've created 14 | 6. Verify you've deleted the ReplicaSet 15 | -------------------------------------------------------------------------------- /exercises/kubernetes/replicaset_02.md: -------------------------------------------------------------------------------- 1 | ## ReplicaSet 102 2 | 3 | #### Objective 4 | 5 | Learn how to operate ReplicaSets 6 | 7 | #### Instructions 8 | 9 | 1. Create a ReplicaSet with 2 replicas. The app can be anything. 10 | 2. Verify a ReplicaSet was created and there are 2 replicas 11 | 3. Remove the ReplicaSet but NOT the pods it created 12 | 4. 
Verify you've deleted the ReplicaSet but the Pods are still running 13 | -------------------------------------------------------------------------------- /exercises/kubernetes/replicaset_03.md: -------------------------------------------------------------------------------- 1 | ## ReplicaSet 103 2 | 3 | #### Objective 4 | 5 | Learn how labels are used by ReplicaSets 6 | 7 | #### Instructions 8 | 9 | 1. Create a ReplicaSet with 2 replicas. Make sure the label used for the selector and in the Pods is "type=web" 10 | 2. Verify a ReplicaSet was created and there are 2 replicas 11 | 3. List the Pods running 12 | 4. Remove the label (type=web) from one of the Pods created by the ReplicaSet 13 | 5. List the Pods running. Are there more Pods running after removing the label? Why? 14 | 6. Verify the ReplicaSet indeed created a new Pod 15 | -------------------------------------------------------------------------------- /exercises/kubernetes/services_01.md: -------------------------------------------------------------------------------- 1 | ## Services 01 2 | 3 | #### Objective 4 | 5 | Learn how to create services 6 | 7 | #### Instructions 8 | 9 | 1. Create a pod running nginx 10 | 2. Create a service for the pod you've just created 11 | 3. Verify the app is reachable 12 | -------------------------------------------------------------------------------- /exercises/kubernetes/solutions/killing_containers.md: -------------------------------------------------------------------------------- 1 | ## "Killing" Containers - Solution 2 | 3 | 1. Run Pod with a web service (e.g. httpd) - `kubectl run web --image registry.redhat.io/rhscl/httpd-24-rhel7` 4 | 2. Verify the web service is running with the `ps` command - `kubectl exec web -- ps` 5 | 3. Check how many restarts the pod has performed - `kubectl get po web` 6 | 4. Kill the web service process - `kubectl exec web -- kill 1` 7 | 5. Check how many restarts the pod has performed - `kubectl get po web` 8 | 6.
Verify again the web service is running - `kubectl exec web -- ps` 9 | 10 | ## After you complete the exercise 11 | 12 | * Why did the "RESTARTS" count raised? - `because we killed the process and Kubernetes identified the container isn't running proprely so it performed restart to the Pod` 13 | -------------------------------------------------------------------------------- /exercises/kubernetes/solutions/pods_01_solution.md: -------------------------------------------------------------------------------- 1 | ## Pods 01 - Solution 2 | 3 | ``` 4 | kubectl run nginx --image=nginx --restart=Never 5 | kubectl get pods 6 | ``` 7 | -------------------------------------------------------------------------------- /exercises/kubernetes/solutions/replicaset_01_solution.md: -------------------------------------------------------------------------------- 1 | ## ReplicaSet 01 - Solution 2 | 3 | 1. Create a ReplicaSet with 2 replicas. The app can be anything. 4 | 5 | ``` 6 | cat >> rs.yaml < 43 | ``` 44 | 45 | 4. If you'll list all the Pods now, what will you see? 46 | 47 | ``` 48 | The same number of Pods. Since we defined 2 replicas, the ReplicaSet will make sure to create another Pod that will replace the one you've deleted. 49 | ``` 50 | 51 | 5. Remove the ReplicaSet you've created 52 | 53 | ``` 54 | kubectl delete -f rs.yaml 55 | ``` 56 | 57 | 6. Verify you've deleted the ReplicaSet 58 | 59 | ``` 60 | kubectl get rs 61 | # OR a more specific way: kubectl get -f rs.yaml 62 | ``` 63 | -------------------------------------------------------------------------------- /exercises/kubernetes/solutions/replicaset_02_solution.md: -------------------------------------------------------------------------------- 1 | ## ReplicaSet 02 - Solution 2 | 3 | 1. Create a ReplicaSet with 2 replicas. The app can be anything. 4 | 5 | ``` 6 | cat >> rs.yaml <> rs.yaml < running_pods.txt 43 | ``` 44 | 45 | 4. 
Remove the label (type=web) from one of the Pods created by the ReplicaSet 46 | 47 | ``` 48 | kubectl label pod type- 49 | ``` 50 | 51 | 5. List the Pods running. Are there more Pods running after removing the label? Why? 52 | 53 | ``` 54 | Yes, there is an additional Pod running because once the label (used as a matching selector) was removed, the Pod became independant meaning, it's not controlled by the ReplicaSet anymore and the ReplicaSet was missing replicas based on its definition so, it created a new Pod. 55 | ``` 56 | 57 | 6. Verify the ReplicaSet indeed created a new Pod 58 | 59 | ``` 60 | kubectl describe rs web 61 | ``` 62 | -------------------------------------------------------------------------------- /exercises/kubernetes/solutions/services_01_solution.md: -------------------------------------------------------------------------------- 1 | ## Services 01 - Solution 2 | 3 | ``` 4 | kubectl run nginx --image=nginx --restart=Never --port=80 --labels="app=dev-nginx" 5 | 6 | cat << EOF > nginx-service.yaml 7 | apiVersion: v1 8 | kind: Service 9 | metadata: 10 | name: nginx-service 11 | spec: 12 | selector: 13 | app: dev-nginx 14 | ports: 15 | - protocol: TCP 16 | port: 80 17 | targetPort: 9372 18 | EOF 19 | ``` 20 | -------------------------------------------------------------------------------- /exercises/misc/elk_kibana_aws.md: -------------------------------------------------------------------------------- 1 | # Elasticsearch, Kibana and AWS 2 | 3 | Your task is to build an elasticsearch cluster along with Kibana dashboard on one of the following clouds: 4 | 5 | * AWS 6 | * OpenStack 7 | * Azure 8 | * GCP 9 | 10 | You have to describe in details (preferably with some drawings) how you are going to set it up. 
11 | Please describe in detail: 12 | 13 | - How you scale it up or down 14 | - How you quickly (less 20 minutes) provision the cluster 15 | - How you apply security policy for access control 16 | - How you transfer the logs from the app to ELK 17 | - How you deal with multi apps running in different regions 18 | 19 | # Solution 20 | 21 | This one out of many possible solutions. This solution is relying heavily on AWS. 22 | 23 | * Create a VPC with subnet so we can place Elasticsearch node(s) in internal environment only. 24 | If required, we will also setup NAT for public access. 25 | 26 | * Create an IAM role for the access to the cluster. Also, create a separate role for admin access. 27 | 28 | * To provision the solution quickly, we will use the elasticsearch service directly from AWS for production deployment. 29 | This way we also cover multiple AZs. As for authentication, we either use Amazon cognito or the organization LDAP server. 30 | 31 | * To transfer data, we will have to install logstash agent on the instances. The agent will be responsible 32 | for pushing the data to elasticsearch. 33 | 34 | * For monitoring we will use: 35 | 36 | * Cloud watch to monitor cluster resource utilization 37 | * Cloud metrics dashboard 38 | 39 | * If access required from multiple regions we will transfer all the data to S3 which will allow us to view the data 40 | from different regions and consolidate it in one dashboard 41 | -------------------------------------------------------------------------------- /exercises/openshift/projects_101.md: -------------------------------------------------------------------------------- 1 | ## OpenShift - Projects 101 2 | 3 | In a newly deployed cluster (preferably) perform and answer the following instructions and questions, using CLI only 4 | 5 | 1. Login to the OpenShift cluster 6 | 2. List all the projects 7 | 3. Create a new project called 'neverland' 8 | 4. 
Check the overview status of the current project 9 | -------------------------------------------------------------------------------- /exercises/openshift/solutions/projects_101.md: -------------------------------------------------------------------------------- 1 | ## Projects 101 - Solution 2 | 3 | 1. Login to the OpenShift cluster -> `oc login -u YOUR_USER -p YOUR_PASSWORD_OR_TOKEN` 4 | 2. List all the projects -> `oc get projects`(The output should be empty in a newly created cluster) 5 | 3. Create a new project called 'neverland' -> `oc new-project neverland` 6 | 4. Check the overview status of the current project -> `oc status` 7 | -------------------------------------------------------------------------------- /exercises/os/fork_101.md: -------------------------------------------------------------------------------- 1 | ## Fork 101 2 | 3 | Answer the questions given the following program (without running it): 4 | 5 | ``` 6 | #include 7 | #include 8 | int main() 9 | { 10 | fork(); 11 | printf("\nyay\n"); 12 | return 0; 13 | } 14 | ``` 15 | 16 | 1. How many times the word "yay" will be printed? 17 | 2. How many processes will be created? 18 | -------------------------------------------------------------------------------- /exercises/os/fork_102.md: -------------------------------------------------------------------------------- 1 | ## Fork 101 2 | 3 | Answer the questions given the following program (without running it): 4 | 5 | ``` 6 | #include 7 | #include 8 | 9 | int main() 10 | { 11 | fork(); 12 | fork(); 13 | printf("\nyay\n"); 14 | return 0; 15 | } 16 | ``` 17 | 18 | 1. How many times the word "yay" will be printed? 19 | 2. How many processes will be created? 20 | -------------------------------------------------------------------------------- /exercises/os/solutions/fork_101_solution.md: -------------------------------------------------------------------------------- 1 | ## Fork 101 - Solution 2 | 3 | 1. 2 4 | 2. 
2 5 | -------------------------------------------------------------------------------- /exercises/os/solutions/fork_102_solution.md: -------------------------------------------------------------------------------- 1 | ## Fork 102 - Solution 2 | 3 | 1. 4 4 | 2. 4 5 | -------------------------------------------------------------------------------- /exercises/pipeline_deploy_image_to_k8.md: -------------------------------------------------------------------------------- 1 | ## Build & Publish Docker Images to Kubernetes Cluster 2 | 3 | Write a pipeline, on any CI/CD system you prefer, that will build an image out of a given Dockerfile and will publish that image to a running Kubernetes cluster. 4 | -------------------------------------------------------------------------------- /exercises/programming/grep_berfore_and_after.md: -------------------------------------------------------------------------------- 1 | Implement the following grep command in Python (numbers can be different): `grep error -A 2 -B 2 some_file` 2 | -------------------------------------------------------------------------------- /exercises/programming/web_scraper.md: -------------------------------------------------------------------------------- 1 | ## Web Scraper 2 | 3 | 1. Pick a web site to scrape 4 | 2. Using any language you would like, write a web scraper to save some data from the site you chose 5 | 3. Save the results to a database (doesn't matter which database, just pick one) 6 | 7 | 8 | * Note: if you don't know which site to pick up have a look [here](http://toscrape.com) 9 | -------------------------------------------------------------------------------- /exercises/python/advanced_data_types.md: -------------------------------------------------------------------------------- 1 | ## (Advanced) Identify the data type 2 | 3 | For each of the following, identify what is the data type of the result variable 4 | 5 | 1. a = {'a', 'b', 'c'} 6 | 2. b = {'1': '2'} 7 | 4. c = ([1, 2, 3]) 8 | 4.
d = (1, 2, 3) 9 | 4. e = True+True 10 | -------------------------------------------------------------------------------- /exercises/python/compress_string.md: -------------------------------------------------------------------------------- 1 | ## Compress String 2 | 3 | 1. Write a function that gets a string and compresses it 4 | - 'aaaabbccc' -> 'a4b2c3' 5 | - 'abbbc' -> 'a1b3c1' 6 | 2. Write a function that decompresses a given string 7 | - 'a4b2c3' -> 'aaaabbccc' 8 | - 'a1b3c1' -> 'abbbc' 9 | -------------------------------------------------------------------------------- /exercises/python/data_types.md: -------------------------------------------------------------------------------- 1 | ## Data Types 2 | 3 | For each of the following, identify what is the data type of the result variable 4 | 5 | 1. a = [1, 2, 3, 4, 5] 6 | 2. b = "Hello, is it me you looking for?" 7 | 3. e = 100 8 | 4. f = '100' 9 | 5. i = 0.100 10 | 6. i = True 11 | 12 | Bonus question: how to find out in Python what is the data type of certain variable? 13 | -------------------------------------------------------------------------------- /exercises/python/reverse_string.md: -------------------------------------------------------------------------------- 1 | ## Reverse a String 2 | 3 | Write a code that reverses a string 4 | -------------------------------------------------------------------------------- /exercises/python/solutions/advanced_data_types_solution.md: -------------------------------------------------------------------------------- 1 | ## (Advanced) Identify the data type 2 | 3 | For each of the following, identify what is the data type of the result variable 4 | 5 | 1. a = {'a', 'b', 'c'} -> set 6 | 2. b = {'1': '2'} -> dict 7 | 4. c = ([1, 2, 3]) -> list 8 | 4. d = (1, 2, 3) -> tuple 9 | 4. 
e = True+True -> int 10 | -------------------------------------------------------------------------------- /exercises/python/solutions/data_types_solution.md: -------------------------------------------------------------------------------- 1 | ## Data Types - Solution 2 | 3 | 1. a = [1, 2, 3, 4, 5] -> list 4 | 2. b = "Hello, is it me you looking for?" -> string 5 | 3. e = 100 -> int 6 | 4. f = '100' -> string 7 | 5. i = 0.100 -> float 8 | 6. i = True -> bool 9 | 10 | ### Bonus question - Answer 11 | 12 | `type(...)` 13 | -------------------------------------------------------------------------------- /exercises/python/solutions/reverse_string.md: -------------------------------------------------------------------------------- 1 | ## Reverse a String - Solution 2 | 3 | ``` 4 | my_string[::-1] 5 | ``` 6 | 7 | A more visual way is:
8 | Careful: this is very slow 9 | 10 | ``` 11 | def reverse_string(string): 12 | temp = "" 13 | for char in string: 14 | temp = char + temp 15 | return temp 16 | ``` 17 | -------------------------------------------------------------------------------- /exercises/shell/argument_check.md: -------------------------------------------------------------------------------- 1 | ## Argument Check 2 | 3 | ### Objectives 4 | 5 | Note: assume the script is executed with an argument 6 | 7 | 1. Write a script that will check if a given argument is the string "pizza" 8 | 1. If it's the string "pizza" print "with pineapple?" 9 | 2. If it's not the string "pizza" print "I want pizza!" 10 | 11 | ### Solution 12 | 13 | ``` 14 | #!/usr/bin/env bash 15 | 16 | arg_value=${1:-default} 17 | 18 | if [ $arg_value = "pizza" ]; then 19 | echo "with pineapple?" 20 | else 21 | echo "I want pizza!" 22 | fi 23 | ``` 24 | -------------------------------------------------------------------------------- /exercises/shell/basic_date.md: -------------------------------------------------------------------------------- 1 | ## Basic Date 2 | 3 | ### Objectives 4 | 5 | 1. Write a script that will put the current date in a file called "the_date.txt" 6 | -------------------------------------------------------------------------------- /exercises/shell/count_chars.md: -------------------------------------------------------------------------------- 1 | ## Count Chars 2 | 3 | ### Objectives 4 | 5 | 1. Read input from the user until you get empty string 6 | 2. For each of the lines you read, count the number of characters and print it 7 | 8 | ### Constraints 9 | 10 | 1. You must use a while loop 11 | 2. Assume at least three lines of input 12 | -------------------------------------------------------------------------------- /exercises/shell/directories_comparison.md: -------------------------------------------------------------------------------- 1 | ## Directories Comparison 2 | 3 | ### Objectives 4 | 5 | 1.
You are given two directories as arguments and the output should be any difference between the two directories 6 | -------------------------------------------------------------------------------- /exercises/shell/empty_files.md: -------------------------------------------------------------------------------- 1 | ## Empty Files 2 | 3 | ### Objectives 4 | 5 | 1. Write a script to remove all the empty files in a given directory (including nested directories) 6 | -------------------------------------------------------------------------------- /exercises/shell/factors.md: -------------------------------------------------------------------------------- 1 | ## Shell Scripting - Factors 2 | 3 | ### Objectives 4 | 5 | Write a script that when given a number, will: 6 | 7 | * Check if the number has 2 as factor, if yes it will print "one factor" 8 | * Check if the number has 3 as factor, if yes it will print "one factor...actually two!" 9 | * If none of them (2 and 3) is a factor, print the number itself 10 | -------------------------------------------------------------------------------- /exercises/shell/files_size.md: -------------------------------------------------------------------------------- 1 | ## Files Size 2 | 3 | ### Objectives 4 | 5 | 1. Print the name and size of every file and directory in current path 6 | 7 | Note: use at least one for loop! 8 | -------------------------------------------------------------------------------- /exercises/shell/great_day.md: -------------------------------------------------------------------------------- 1 | ## Great Day 2 | 3 | ### Objectives 4 | 5 | 1. Write a script that will print "Today is a great day!" 
unless it's given a day name and then it should print "Today is " 6 | 7 | Note: no need to check whether the given argument is actually a valid day 8 | -------------------------------------------------------------------------------- /exercises/shell/hello_world.md: -------------------------------------------------------------------------------- 1 | ## Shell Scripting - Hello World 2 | 3 | ### Objectives 4 | 5 | 1. Define a variable with the string 'Hello World' 6 | 2. Print the value of the variable you've defined and redirect the output to the file "amazing_output.txt" 7 | -------------------------------------------------------------------------------- /exercises/shell/host_status.md: -------------------------------------------------------------------------------- 1 | ## It's Alive! 2 | 3 | ### Objectives 4 | 5 | 1. Write a script to determine whether a given host is down or up 6 | -------------------------------------------------------------------------------- /exercises/shell/num_of_args.md: -------------------------------------------------------------------------------- 1 | ## Number of Arguments 2 | 3 | ### Objectives 4 | 5 | * Write a script that will print "Got it: " in case of one argument 6 | * In case no arguments were provided, it will print "Usage: ./ " 7 | * In case of more than one argument, print "hey hey...too many!" 8 | -------------------------------------------------------------------------------- /exercises/shell/print_arguments.md: -------------------------------------------------------------------------------- 1 | ## Shell Scripting - Print Arguments 2 | 3 | ### Objectives 4 | 5 | You should include everything mentioned here in one shell script 6 | 7 | 1. Print the first argument passed to the script 8 | 2. Print the number of arguments passed to the script 9 | 3. 
10 | -------------------------------------------------------------------------------- /exercises/shell/solutions/basic_date.md: -------------------------------------------------------------------------------- 1 | ## Basic Date 2 | 3 | ### Objectives 4 | 5 | 1. Write a script that will put the current date in a file called "the_date.txt" 6 | 7 | ### Solution 8 | 9 | ``` 10 | #!/usr/bin/env bash 11 | 12 | echo $(date) > the_date.txt 13 | ``` 14 | -------------------------------------------------------------------------------- /exercises/shell/solutions/count_chars.md: -------------------------------------------------------------------------------- 1 | ## Count Chars 2 | 3 | ### Objectives 4 | 5 | 1. Read input from the user until you get empty string 6 | 2. For each of the lines you read, count the number of characters and print it 7 | 8 | ### Constraints 9 | 10 | 1. You must use a while loop 11 | 2. Assume at least three lines of input 12 | 13 | ### Solution 14 | 15 | ``` 16 | #!/usr/bin/env bash 17 | 18 | echo -n "Please insert your input: " 19 | 20 | while read line; do 21 | echo -n "$line" | wc -c 22 | echo -n "Please insert your input: " 23 | done 24 | ``` 25 | -------------------------------------------------------------------------------- /exercises/shell/solutions/empty_files.md: -------------------------------------------------------------------------------- 1 | ## Empty Files 2 | 3 | ### Objectives 4 | 5 | 1. Write a script to remove all the empty files in a given directory (including nested directories) 6 | 7 | ### Solution 8 | 9 | ``` 10 | #! 
/bin/bash 11 | for x in * 12 | do 13 | if [ -s $x ] 14 | then 15 | continue 16 | else 17 | rm -rf $x 18 | fi 19 | done 20 | ``` 21 | -------------------------------------------------------------------------------- /exercises/shell/solutions/factors.md: -------------------------------------------------------------------------------- 1 | ## Shell Scripting - Factors 2 | 3 | ### Objectives 4 | 5 | Write a script that when given a number, will: 6 | 7 | * Check if the number has 2 as factor, if yes it will print "one factor" 8 | * Check if the number has 3 as factor, if yes it will print "one factor...actually two!" 9 | * If none of them (2 and 3) is a factor, print the number itself 10 | 11 | ### Solution 12 | 13 | ``` 14 | #!/usr/bin/env bash 15 | 16 | (( $1 % 2 )) || res="one factor" 17 | (( $1 % 3 )) || res+="...actually two!" 18 | 19 | echo ${res:-$1} 20 | ``` 21 | -------------------------------------------------------------------------------- /exercises/shell/solutions/files_size.md: -------------------------------------------------------------------------------- 1 | ## Files Size 2 | 3 | ### Objectives 4 | 5 | 1. Print the name and size of every file and directory in current path 6 | 7 | Note: use at least one for loop! 8 | 9 | ### Solution 10 | 11 | ``` 12 | #!/usr/bin/env bash 13 | 14 | for i in $(ls -S1); do 15 | echo $i: $(du -sh "$i" | cut -f1) 16 | done 17 | ``` 18 | -------------------------------------------------------------------------------- /exercises/shell/solutions/great_day.md: -------------------------------------------------------------------------------- 1 | ## Great Day 2 | 3 | ### Objectives 4 | 5 | 1. Write a script that will print "Today is a great day!" 
unless it's given a day name and then it should print "Today is " 6 | 7 | Note: no need to check whether the given argument is actually a valid day 8 | 9 | ### Solution 10 | 11 | ``` 12 | #!/usr/bin/env bash 13 | 14 | echo "Today is ${1:-a great day!}" 15 | ``` 16 | -------------------------------------------------------------------------------- /exercises/shell/solutions/hello_world.md: -------------------------------------------------------------------------------- 1 | ## Shell Scripting - Hello World 2 | 3 | ### Objectives 4 | 5 | 1. Define a variable with the string 'Hello World' 6 | 2. Print the value of the variable you've defined and redirect the output to the file "amazing_output.txt" 7 | 8 | ### Solution 9 | 10 | ``` 11 | #!/usr/bin/env bash 12 | 13 | HW_STR="Hello World" 14 | echo $HW_STR > amazing_output.txt 15 | ``` 16 | -------------------------------------------------------------------------------- /exercises/shell/solutions/host_status.md: -------------------------------------------------------------------------------- 1 | ## It's Alive! 2 | 3 | ### Objectives 4 | 5 | 1. Write a script to determine whether a given host is down or up 6 | 7 | ### Solution 8 | 9 | ``` 10 | #!/usr/bin/env bash 11 | SERVERIP= 12 | NOTIFYEMAIL=test@example.com 13 | 14 | ping -c 3 $SERVERIP > /dev/null 2>&1 15 | if [ $? -ne 0 ] 16 | then 17 | # Use mailer here: 18 | mailx -s "Server $SERVERIP is down" -t "$NOTIFYEMAIL" < /dev/null 19 | fi 20 | ``` 21 | -------------------------------------------------------------------------------- /exercises/shell/solutions/num_of_args.md: -------------------------------------------------------------------------------- 1 | ## Number of Arguments 2 | 3 | ### Objectives 4 | 5 | * Write a script that will print "Got it: " in case of one argument 6 | * In case no arguments were provided, it will print "Usage: ./ " 7 | * In case of more than one argument, print "hey hey...too many!" 
8 | 9 | ### Solution 10 | 11 | ``` 12 | #!/usr/bin/env bash 13 | 14 | set -eu 15 | 16 | main() { 17 | case $# in 18 | 0) printf "%s" "Usage: ./ "; return 1 ;; 19 | 1) printf "%s" "Got it: $1"; return 0 ;; 20 | *) return 1 ;; 21 | esac 22 | } 23 | 24 | main "$@" 25 | ``` 26 | 27 | -------------------------------------------------------------------------------- /exercises/shell/solutions/sum.md: -------------------------------------------------------------------------------- 1 | ## Sum 2 | 3 | ### Objectives 4 | 5 | 1. Write a script that gets two numbers and prints their sum 6 | 3. Make sure the input is valid (= you got two numbers from the user) 7 | 2. Test the script by running and passing it two numbers as arguments 8 | 9 | ### Constraints 10 | 11 | 1. Use functions 12 | 13 | ### Solution 14 | 15 | ``` 16 | #!/usr/bin/env bash 17 | 18 | re='^[0-9]+$' 19 | 20 | if ! [[ $1 =~ $re && $2 =~ $re ]]; then 21 | echo "Oh no...I need two numbers" 22 | exit 2 23 | fi 24 | 25 | function sum { 26 | echo $(( $1 + $2 )) 27 | } 28 | 29 | sum $1 $2 30 | ``` 31 | -------------------------------------------------------------------------------- /exercises/shell/sum.md: -------------------------------------------------------------------------------- 1 | ## Sum 2 | 3 | ### Objectives 4 | 5 | 1. Write a script that gets two numbers and prints their sum 6 | 3. Make sure the input is valid (= you got two numbers from the user) 7 | 2. Test the script by running and passing it two numbers as arguments 8 | 9 | ### Constraints 10 | 11 | 1. Use functions 12 | -------------------------------------------------------------------------------- /exercises/sql/improve_query.md: -------------------------------------------------------------------------------- 1 | ## Comparisons vs. Functions 2 | 3 | 1. 
Improve the following query 4 | 5 | ``` 6 | SELECT count(*) 7 | FROM shawarma_purchases 8 | WHERE 9 | YEAR(purchased_at) == '2017' 10 | ``` 11 | -------------------------------------------------------------------------------- /exercises/sql/solutions/improve_query.md: -------------------------------------------------------------------------------- 1 | ## Comparisons vs. Functions - Solution 2 | 3 | ``` 4 | SELECT count(*) 5 | FROM shawarma_purchases 6 | WHERE 7 | purchased_at >= '2017-01-01' AND 8 | purchased_at <= '2017-12-31' 9 | ``` 10 | -------------------------------------------------------------------------------- /faq.md: -------------------------------------------------------------------------------- 1 | ## FAQ 2 | 3 | Most frequently asked questions. 4 | 5 | ### What is the purpose of this repository? 6 | 7 | Learning, of course. 8 | 9 | ### My goal is to prepare for DevOps interviews. Should I use this repository? 10 | 11 | Overall, this repository should help you learn some concepts but, don't assume at any point that your interview will include similar questions to those that are included in this repository. 12 | Regarding interviews, I've added a couple of suggestions [here](prepare_for_interview.md)
13 | 14 | ### Will you stop at some point adding questions and exercises? 15 | 16 | All good things come to an end... 17 | 18 | ### How do I become a better DevOps Engineer? 19 | 20 | That's a great question.
21 | I don't have a definitive answer for this question, I'm exploring it myself from time to time. What I believe helps is to: 22 | 23 | * Practice - Practicing DevOps practically should be the primary way to become a DevOps engineer in my opinion 24 | * Read - blogs, books, ... anything that can enrich your knowledge about DevOps or related DevOps topics 25 | * Participate - there are great DevOps communities. I personally like [Reddit DevOps community](https://www.reddit.com/r/devops). Visiting there, I learn quite a lot on different topics. 26 | * Share - This is one of the reasons I created this project. Primary goal was to help others but a secondary goal quickly became to learn more. By asking questions, you actually learn better a certain topic. Try it out, take a certain subject and try to come up with questions you would ask someone to test his/her skills about that topic. 27 | 28 | ### Why most of the questions don't have answers? 29 | 30 | 1. Because we need more contributors 31 | 2. Because often asking questions is easier than answering them 32 | 33 | ### Where can I find answers to some of the questions in this repository? 34 | 35 | 1. Search for them using search engines, documentation pages, ... this is part of being a DevOps engineer 36 | 2. Use the communities: many people will be happy to help and answer your questions 37 | 3. Ask us. If you want, you can contact me or even open an issue that is only a question, that's totally fine :) 38 | 39 | ### Where the questions and answers are coming from? 40 | 41 | Well, everywhere! - past experience, colleagues, contributors, ... but please note we do not allow copying interview questions from interview questions sites to here. There are people who worked hard on adding those to their sites and we respect that.
42 | As an evidence, we did deny pull requests with copied content from other sites. 43 | 44 | ### What are the top DevOps skills required for being a DevOps Engineer? 45 | 46 | It's a hard question and the reason is that if you'll ask 20 different people, you'll probably get at least 10 different answers but here is what I believe is common today: 47 | 48 | * OS - DevOps require you good understanding of operating system concepts. The level required is mainly depends on the company although in my opinion it should be the same level. You should understand how the operating system works, how to troubleshoot and debug issues, etc. 49 | * Programming is part of DevOps. The level again depends on the company. Some will require you to know basic level of scripting while others deep understanding of common algorithms, data structure, design patterns etc. 50 | * Cloud and Containers - while not 100% must in all companies/positions, this skill is on the rise every year and many (if not most) of the positions/companies require this skill. This specifically means: AWS/Azure/GCP, Docker/Podman, Kubernetes, ... 51 | * CI/CD - Be able to to answer questions like "Why do we need CI/CD?" and "What ways and models are there to perform CI/CD?". Eventually, practice assembling such processes and workflow, using whatever tools you feel comfortable with. 52 | 53 | ### I feel like there are some questions that shouldn't be included in this project 54 | 55 | Is that a question? :)
56 | If you don't like some of the questions or think that some questions should be removed you can open an issue or submit a PR and we can discuss it there. We don't have rules against deleting questions (for now :P) 57 | 58 | ### Can I copy the questions from here to my site? 59 | 60 | You can (although I have no idea why would you want to), but: 61 | 62 | * Not without attribution. Many people worked hard on adding these questions and they deserve a proper credit for their work 63 | * Not if you plan to make money out of it. Directly or indirectly (e.g. ADS) as this is a free content and we would like it to stay this way :) 64 | 65 | Same goes for copying questions from different sources to this repository. We saw it happened already with a couple of pull requests and we rejected them. We will not merge pull requests with copied questions and answers from other sources. 66 | 67 | ### Can I add questions and/or answers to this project? 68 | 69 | I'll simply imagine you didn't ask that on an open source project... :) 70 | 71 | ### Why can't I add installation questions? 72 | 73 | In general, I prefer questions added to this repository will have certain educational value for the user. Either regarding a certain concept or even a very general question, but one that will make the user research on a certain topic and will make him eventually more familiar with some of its core concepts.
74 | I know that this is not the case for every question in this repo as of today (e.g. questions about specific commands) but this is definitely something to aspire to. 75 | 76 | I see little to no value in what is known as "Installation Questions". Let's say I ask you "how to install Jenkins?". Should I conclude from your answer that you are familiar with what Jenkins is and/or how it works? In other words, is there value in knowing how to install Jenkins? In my opinion, no. 77 | 78 | ### Where can I practice coding? 79 | 80 | Personally, I really like the following sites: 81 | 82 | * [HackerRank](https://www.hackerrank.com) 83 | * [LeetCode](https://leetcode.com) 84 | * [Exercism](https://exercism.io) 85 | 86 | ### How to learn more DevOps? 87 | 88 | I listed some roadmaps in [devops-resources](https://github.com/bregman-arie/devops-resources) 89 | 90 | ### Why some questions repeat themselves? 91 | 92 | If you see two identical questions, that's a bug.
93 | If you see two similar questions, that's a feature :D (= it's intentional) 94 | 95 | For example: 96 | 97 | 1. What is horizontal scaling? 98 | 2. The act of adding additional instances to the pool to handle scaling is called ________ scaling 99 | 100 | You are right, both ask about horizontal scaling but it's done from a different angle in every question and in addition, I do believe repetition helps you to learn something in a way where you are not fixed on the way it's asked, rather you understand the concept itself. 101 | 102 | ### Are you open for making big changes in the repository? 103 | 104 | Absolutely. Don't be afraid to raise ideas and start discussions.
105 | I'll be more than happy to discuss any change you think we should make to improve the learning experience 106 | -------------------------------------------------------------------------------- /images/Go.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/Go.png -------------------------------------------------------------------------------- /images/HR.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/HR.png -------------------------------------------------------------------------------- /images/ansible.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/ansible.png -------------------------------------------------------------------------------- /images/aws.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/aws.png -------------------------------------------------------------------------------- /images/azure.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/azure.png -------------------------------------------------------------------------------- /images/bash.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/bash.png -------------------------------------------------------------------------------- /images/big-data.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/big-data.png -------------------------------------------------------------------------------- /images/certificates.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/certificates.png -------------------------------------------------------------------------------- /images/cicd.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/cicd.png -------------------------------------------------------------------------------- /images/cloud.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/cloud.png -------------------------------------------------------------------------------- /images/containers.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/containers.png -------------------------------------------------------------------------------- /images/databases.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/databases.png -------------------------------------------------------------------------------- /images/design.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/design.png -------------------------------------------------------------------------------- /images/design/cdn-no-downtime.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/design/cdn-no-downtime.png -------------------------------------------------------------------------------- /images/design/input-process-output.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/design/input-process-output.png -------------------------------------------------------------------------------- /images/design/producers_consumers_fix.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/design/producers_consumers_fix.png -------------------------------------------------------------------------------- /images/design/producers_consumers_issue.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/design/producers_consumers_issue.png -------------------------------------------------------------------------------- /images/devops.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/devops.png -------------------------------------------------------------------------------- /images/devops_exercises.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/devops_exercises.png -------------------------------------------------------------------------------- /images/devops_resources.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/devops_resources.png -------------------------------------------------------------------------------- /images/distributed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/distributed.png -------------------------------------------------------------------------------- /images/distributed/distributed_design_lb.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/distributed/distributed_design_lb.png -------------------------------------------------------------------------------- /images/distributed/distributed_design_standby.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/distributed/distributed_design_standby.png -------------------------------------------------------------------------------- /images/dns.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/dns.png -------------------------------------------------------------------------------- /images/elastic.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/elastic.png -------------------------------------------------------------------------------- /images/exercises.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/exercises.png -------------------------------------------------------------------------------- /images/general.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/general.png -------------------------------------------------------------------------------- /images/git.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/git.png -------------------------------------------------------------------------------- /images/googlecloud.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/googlecloud.png -------------------------------------------------------------------------------- /images/hardware.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/hardware.png -------------------------------------------------------------------------------- /images/how_they_devops.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/how_they_devops.png 
-------------------------------------------------------------------------------- /images/infraverse.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/infraverse.png -------------------------------------------------------------------------------- /images/jenkins.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/jenkins.png -------------------------------------------------------------------------------- /images/jenkins/jenkins-to-kibana.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/jenkins/jenkins-to-kibana.png -------------------------------------------------------------------------------- /images/kubernetes.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/kubernetes.png -------------------------------------------------------------------------------- /images/kubernetes/kubernetes_components.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/kubernetes/kubernetes_components.png -------------------------------------------------------------------------------- /images/kubernetes/kubernetes_components_solution.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/kubernetes/kubernetes_components_solution.png 
-------------------------------------------------------------------------------- /images/linux.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/linux.png -------------------------------------------------------------------------------- /images/linux_master.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/linux_master.jpeg -------------------------------------------------------------------------------- /images/mongo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/mongo.png -------------------------------------------------------------------------------- /images/monitoring.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/monitoring.png -------------------------------------------------------------------------------- /images/network.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/network.png -------------------------------------------------------------------------------- /images/openshift.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/openshift.png -------------------------------------------------------------------------------- /images/openstack.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/openstack.png -------------------------------------------------------------------------------- /images/os.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/os.png -------------------------------------------------------------------------------- /images/programming.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/programming.png -------------------------------------------------------------------------------- /images/prometheus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/prometheus.png -------------------------------------------------------------------------------- /images/puppet.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/puppet.png -------------------------------------------------------------------------------- /images/python.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/python.png -------------------------------------------------------------------------------- /images/regex.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/regex.png -------------------------------------------------------------------------------- /images/security.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/security.png -------------------------------------------------------------------------------- /images/sql.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/sql.png -------------------------------------------------------------------------------- /images/storage.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/storage.png -------------------------------------------------------------------------------- /images/system_design_notebook.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/system_design_notebook.png -------------------------------------------------------------------------------- /images/terraform.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/terraform.png -------------------------------------------------------------------------------- /images/testing.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/testing.png 
-------------------------------------------------------------------------------- /images/virtualization.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/virtualization.png -------------------------------------------------------------------------------- /images/you.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/AMINETN/devops-exercises/cf69197a44612a2b7f2732dce9365f974527cc46/images/you.png -------------------------------------------------------------------------------- /prepare_for_interview.md: -------------------------------------------------------------------------------- 1 | ## How to prepare for DevOps/SRE/Production Engineer interviews? 2 | 3 | Note: the following is opinionated. 4 | 5 | ### Skills you should have 6 | 7 | #### Linux 8 | 9 | Every DevOps Engineer should have a deep understanding of at least one operating system and if you have the option to choose then I would say it should definitely be Linux as I believe it's a requirement of at least 90% of the DevOps jobs postings out there. 10 | 11 | Usually, the followup question is "How extensive should my knowledge be?" Out of all the DevOps skills, I would say this, along with coding, should be your strongest skills. Be familiar with OS processes, debugging tools, filesystem, networking, ... know your operating system, understand how it works, how to manage issues, etc. 12 | 13 | Not long ago, I've created a list of Linux resources right [here](https://dev.to/abregman/collection-of-linux-resources-3nhk). There are some good sites there that you can use for learning more about Linux. 14 | 15 | #### Programming 16 | 17 | My personal belief is that any DevOps engineer should know programming, at least to some degree. 
Having this skill you can automate manual processes, improve some of the open source tools you are using today or build new tools & projects to provide a solution to existing problems. Knowing how to code = a lot of power. 18 | 19 | When it comes to interviews you'll notice that the level of knowledge very much depends on the company or position you are interviewing for. Some will require you just to be able to write simple scripts while others will deep dive into complex algorithms and data structures. 20 | 21 | The best way to practice this skill is by doing some actual coding - scripts, online challenges, CLI tools, web applications, ... just code :) 22 | 23 | Also, the following is probably clear to most people but let's still clarify it: when given the chance to choose any language for answering coding tasks/questions, choose the one you have experience with! Some candidates prefer to choose the language they think the company is using and this is a huge mistake since giving the right answer is always better than a wrong answer, no matter which language you have used :) 24 | 25 | I recommend the following sites for practicing coding: 26 | 27 | * [HackerRank](https://www.hackerrank.com) 28 | * [LeetCode](https://leetcode.com) 29 | * [Exercism](https://exercism.io) 30 | 31 | Starting your own project is also a good idea. More on that later on. 32 | 33 | #### Architecture and Design 34 | 35 | This is also an important aspect of DevOps. You should be able to describe how to design different systems, workflows, and architectures. Also, the scale is an important aspect of that. A design which might work for a dozen of hosts or X amount of data, will not necessarily work well with bigger scale. 
36 | 37 | Some ideas for you to explore: 38 | 39 | * How to design and implement a CI pipeline (or pipelines) for verifying PRs, run multiple different types of tests, package the project and deploy it somewhere 40 | * How to design and implement secured ELK architecture which will get logs from 10,000 apps and will display the data eventually to the user 41 | * Microservices designs are also quite popular these days 42 | 43 | I recommend going over the following GitHub projects as they are really deep-diving into System Design: 44 | 45 | * https://github.com/donnemartin/system-design-primer 46 | 47 | #### Tools 48 | 49 | Some interviews will focus on specific tools or technologies. Which tools? this is mainly based on a combination of what you mentioned in your C.V & those that are mentioned in the job posting and used in the company. Here are some questions I believe anyone should know to answer regarding the tools he/she is familiar with: 50 | 51 | * What the tool does? What it allows us to achieve that we couldn't do without it? 52 | * What its advantages over other tools in the same area, with the same purpose? Why you specifically using it? 53 | * How it works? 54 | * How to use it? 55 | 56 | Let's deep dive into practical preparation steps 57 | 58 | ### Scenarios || Challenges || Tasks 59 | 60 | This is a very common way to interview today for DevOps roles. The candidate is given a task which represents a common task of DevOps Engineers or a piece of common knowledge and the candidate has several hours or days to accomplish the task.
61 | 62 | This is a great way to prepare for interviews and I recommend to try it out before actually interviewing. How? Take requirements from job posts and convert them into scenarios. Let's see an example: 63 | 64 | "Knowledge in CI/CD" -> Scenario: create a CI/CD pipeline for a project. 65 | 66 | At this point, some people ask: "but what project?" and the answer is: what about GitHub? it has only 9125912851285192 projects...and a free way to set up CI to any of them (also a great way to learn how to collaborate with others :) ) 67 | 68 | Let's convert another scenario: 69 | 70 | "Experience with provisioning servers" -> Scenario: provision a server (to make it more interesting: create a web server). 71 | 72 | And the last example: 73 | 74 | "Experience with scripting" -> Scenario: write a script. Don't waste too much time thinking "what script should I write?". Simply automate something you are doing manually or even implement your own version of common small utils. 75 | 76 | ### Start your own DevOps project 77 | 78 | Starting a DevOps project is a good idea because: 79 | 80 | * It will make you practice coding 81 | * It will be something you can add to your resume and talk about with the interviewer 82 | * Depends on size and complexity, it can teach you something about design in general 83 | * Depends on adoption, it can teach you about managing Open Source projects 84 | 85 | Same here, don't overthink what your project should be about. Just go and build something :) 86 | 87 | ### Sample interview questions 88 | 89 | Make a sample list of interview questions on various topics/areas like technical, company, role, ... and try to answer them. 90 | See if you can manage to answer them in a fluent, detailed way. 91 | 92 | Better yet, ask a good friend/colleague to challenge you with some questions. 
Your self-awareness might be an obstacle in an objective self-review of your knowledge :) 93 | 94 | ### Networking 95 | 96 | For those who attend technical meetups and conferences, it can be a great opportunity to chat with people from other companies on their interviewing process. But don't start with it, it can be quite awkward. Say at least hello first... (: 97 | 98 | Doing so can give you a lot of information on what to expect from an interview at some companies or how to better prepare. 99 | 100 | ### Know your resume 101 | 102 | It may sound trivial but the idea here is simple: be ready to answer any question regarding any line you included in your resume. 103 | Sometimes candidates are surprised when they are asked about a skill or line which seems unrelated to the position but the simple truth is: if you mentioned something on your resume, it's only fair to ask you about it. 104 | 105 | 106 | ### Know the company 107 | 108 | Be familiar with the company you are interviewing at. Some ideas: 109 | 110 | * What the company does? 111 | * What products it has? 112 | * Why its products are unique (or better than other products)? This can also be a good question for you to ask 113 | 114 | ### Books 115 | 116 | From my experience, this is not done by many candidates but it's one of the best ways to deep dive into topics like operating systems, virtualization, scale, distributed systems, etc. 117 | 118 | In most cases, you will do fine without reading books but for the AAA interviews (hardest level) you'll want to read some books and overall if you aspire to be a better DevOps Engineer, books (also articles, blog posts) are a great way :) 119 | 120 | ### Consider starting in non-DevOps position 121 | 122 | While not a preparation step, you should know that landing DevOps as a first position can be challenging. No, it's not impossible but still, since DevOps covers many different practices, tools, ...
it can be quite challenging and also overwhelming for someone to try and achieve it as a first position.
123 | A possible path to becoming a DevOps engineer is to start with actually a different (but related) position and switch from there after 1-2 years or more. 124 | 125 | Some ideas: 126 | 127 | * System Administrator - This is perfect because every DevOps Engineer should have a solid understanding of the OS and sysadmins know their OS :) 128 | * Software Developer/Engineer - A DevOps should have coding skills and this position will provide more than the required knowledge in most cases 129 | * QA Engineer - This is a more tricky one because IMHO there are less overlapping areas/skills with DevOps Engineer. Sure, DevOps engineers should have some knowledge about testing but usually, it seems their solid skills/background is mainly composed out of system internals and coding skills. 130 | 131 | ### What to expect from a DevOps interview? 132 | 133 | DevOps interviews can be very different. Some will include design questions, some will focus on coding, others will include short technical questions and you might even have an interview where the interviewer only goes over your resume and discussing your past experience. 134 | 135 | There are a couple of things you can do about it so it will be a less overwhelming experience: 136 | 137 | 1. You can and probably should ask the HR (in some cases even the team lead) how the interview process looks like. Some will be kind enough to even tell you how to prepare. 138 | 2. Usually, the job posting gives more than a hint on where the focus will be and what you should focus on in your preparations so read it carefully. 139 | 3. There are plenty of sites that have notes or a summary of the interview process in different companies, especially big enterprises. 
140 | 141 | ### Don't forget to be an interviewer as well 142 | 143 | Some people tend to look at interviews as a one-way road of "Determining whether a candidate is qualified" but in reality, a candidate should also determine whether 144 | the company he/she is interviewing at, is the right place for him/her. 145 | 146 | * Do I care about team size? More specifically, do I care about being a one-man show or being part of a bigger team? 147 | * Do I care about work-life balance? 148 | * Do I care about personal growth and how it's practically done? 149 | * Do I care about knowing what are my responsibilities as part of the role? 150 | 151 | If you do, you should also play the interviewer role :) 152 | 153 | ### One Last Thing 154 | 155 | [Good luck](https://youtu.be/AFUrG1-BAt4?t=59) :) 156 | -------------------------------------------------------------------------------- /scripts/count_questions.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | echo $(( $(grep -E "\[Exercise\]|<summary>" -c README.md exercises/*/README.md | awk -F: '{ s+=$2 } END { print s }' ))) 4 | -------------------------------------------------------------------------------- /scripts/question_utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | Question utils functions 3 | """ 4 | 5 | import pathlib 6 | from random import choice 7 | from typing import List 8 | 9 | p = pathlib.Path(__file__).parent.parent.joinpath('README.md') 10 | 11 | 12 | def get_file_list(): 13 | with open(p, 'rb') as f: 14 | file_list = [line.rstrip() for line in f.readlines()] 15 | return file_list 16 | 17 | 18 | def get_question_list(file_list: List[bytes]) -> list: 19 | 20 | questions_list = [] 21 | temp = [] 22 | after_summary_tag = False 23 | 24 | for line in file_list: 25 | if line.startswith(b'<details>'):
26 | temp.append(line) 27 | after_summary_tag = True 28 | 29 | elif after_summary_tag and line != b'' and b'</details>' not in line:
30 | temp.append(line) 31 | 32 | elif after_summary_tag and b'</details>' in line: 33 | temp.append(line) 34 | after_summary_tag = False 35 | 36 | questions_list.append(temp) 37 | temp = [] 38 | 39 | return questions_list 40 | 41 | 42 | def get_answered_questions(question_list: List[List[bytes]]) -> list: 43 | """Don't let the type hint confuse you, problem of not using classes. 44 | 45 | It takes the result of get_question_list(file_list) 46 | 47 | Returns a list of questions that are answered. 48 | """ 49 | 50 | t = [] 51 | 52 | for q in question_list: 53 | 54 | index = 0 55 | 56 | for i in q: 57 | if b'</summary>' in i: 58 | index = q.index(i) 59 | 60 | if q[index+1: len(q) - 1]: 61 | t.append(q) 62 | 63 | return t 64 | 65 | 66 | def get_challenges_count() -> int: 67 | challenges_path = pathlib.Path(__file__).parent.parent.joinpath('exercises').glob('*.md') 68 | return len(list(challenges_path)) 69 | 70 | 71 | # WIP WAITING FEEDBACK 72 | def get_random_question(question_list: List[List[bytes]], with_answer=False): 73 | if with_answer: 74 | return choice(get_answered_questions(question_list)) 75 | return choice(question_list) 76 | 77 | 78 | """Use this question_list. Unless you have already opened/worked/need the file, then don't or 79 | you will end up doing the same thing twice. 80 | 81 | eg: 82 | 83 | #my_dir/main.py 84 | 85 | from scripts import question_utils 86 | 87 | print(question_utils.get_answered_questions(question_utils.question_list)) 88 | 89 | >> 123 90 | 91 | """ 92 | 93 | question_list = get_question_list(get_file_list()) 94 | -------------------------------------------------------------------------------- /scripts/random_question.py: -------------------------------------------------------------------------------- 1 | import random 2 | import optparse 3 | 4 | 5 | def main(): 6 | """Reads through README.md for question/answer pairs and adds them to a 7 | list to randomly select from and quiz yourself.
8 | Supports skipping quesitons with no documented answer with the -s flag 9 | """ 10 | parser = optparse.OptionParser() 11 | parser.add_option("-s", "--skip", action="store_true", 12 | help="skips questions without an answer.", 13 | default=False) 14 | options, args = parser.parse_args() 15 | 16 | with open('README.md', 'r') as f: 17 | text = f.read() 18 | 19 | questions = [] 20 | 21 | while True: 22 | question_start = text.find('') + 9 23 | question_end = text.find('') 24 | answer_end = text.find('') 25 | 26 | if answer_end == -1: 27 | break 28 | 29 | question = text[question_start: question_end].replace('
', '').replace('', '') 30 | answer = text[question_end + 17: answer_end] 31 | questions.append((question, answer)) 32 | text = text[answer_end + 1:] 33 | 34 | num_questions = len(questions) 35 | 36 | while True: 37 | try: 38 | question, answer = questions[random.randint(0, num_questions)] 39 | 40 | if options.skip and not answer.strip(): 41 | continue 42 | 43 | if input(f'Q: {question} ...Show answer? "y" for yes: ').lower() == 'y': 44 | print('A: ', answer) 45 | 46 | except KeyboardInterrupt: 47 | break 48 | 49 | print("\nGoodbye! See you next time.") 50 | 51 | 52 | if __name__ == '__main__': 53 | main() 54 | -------------------------------------------------------------------------------- /scripts/run_ci.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # These are the same steps we are running in Travis CI 3 | 4 | python tests/syntax_lint.py 5 | flake8 --max-line-length=100 . && echo "PEP8 Passed" 6 | -------------------------------------------------------------------------------- /scripts/update_question_number.py: -------------------------------------------------------------------------------- 1 | """ 2 | Meant to be used like this: 3 | 4 | python scripts/update_question_number.py 5 | 6 | """ 7 | import pathlib 8 | from scripts.question_utils import get_question_list, get_challenges_count 9 | 10 | LINE_FLAG = b":bar_chart:" 11 | 12 | p = pathlib.Path(__file__).parent.parent.joinpath('README.md') 13 | 14 | 15 | with open(p, 'rb') as f: 16 | file = f.readlines() 17 | 18 | 19 | file_list = [line.rstrip() for line in file] 20 | 21 | question_list = get_question_list(file_list) 22 | question_count = len(question_list) 23 | total_count = question_count + get_challenges_count() 24 | print(question_count) 25 | print(get_challenges_count()) 26 | print(total_count) 27 | for line in file: 28 | if LINE_FLAG in line: 29 | file[file.index(line)] = b':bar_chart:  There are currently **%s** questions\r\n' %\ 30 | 
str(total_count).encode() 31 | break 32 | 33 | with open(p, 'wb') as f: 34 | f.writelines(file) 35 | -------------------------------------------------------------------------------- /tests/scripts_question_utils_unittest.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from pathlib import Path 3 | from typing import List 4 | from scripts.question_utils import get_answered_questions, get_question_list 5 | 6 | 7 | def open_test_case_file(n: int) -> List[bytes]: 8 | tests_path = Path(__file__).parent.joinpath() 9 | 10 | with open(f'{tests_path}/testcases/testcase{n}.md', 'rb') as f: 11 | file_list = [line.rstrip() for line in f.readlines()] 12 | return file_list 13 | 14 | 15 | class QuestionCount(unittest.TestCase): 16 | 17 | def test_case_1(self): 18 | raw_list = open_test_case_file(1) 19 | question_list = get_question_list(raw_list) 20 | answers = get_answered_questions(question_list) 21 | 22 | self.assertEqual(len(question_list), 11) 23 | self.assertEqual(len(answers), 3) 24 | 25 | def test_case_2(self): 26 | raw_list = open_test_case_file(2) 27 | question_list = get_question_list(raw_list) 28 | answers = get_answered_questions(question_list) 29 | 30 | self.assertEqual(len(question_list), 16) 31 | self.assertEqual(len(answers), 11) 32 | -------------------------------------------------------------------------------- /tests/syntax_checker_unittest.py: -------------------------------------------------------------------------------- 1 | """ 2 | WIP 3 | 4 | Yes, we do write tests for our tests. 
5 | """ 6 | from pathlib import Path 7 | from typing import List 8 | from unittest import TestCase 9 | from tests import syntax_lint 10 | 11 | 12 | def open_test_case_file(n: int) -> List[bytes]: 13 | tests_path = Path(__file__).parent.joinpath() 14 | 15 | with open(f'{tests_path}/testcases/testcase{n}.md', 'rb') as f: 16 | file_list = [line.rstrip() for line in f.readlines()] 17 | return file_list 18 | 19 | 20 | test_case_1 = open_test_case_file(1) 21 | test_case_2 = open_test_case_file(2) 22 | test_case_3 = open_test_case_file(3) 23 | 24 | 25 | class TestSyntax(TestCase): 26 | 27 | def test_details_count_case1(self): 28 | self.assertTrue(syntax_lint.count_details(test_case_1)) 29 | 30 | def test_details_count_case2(self): 31 | self.assertTrue(syntax_lint.count_details(test_case_2)) 32 | 33 | def test_details_errors_1(self): 34 | syntax_lint.check_details_tag(test_case_1) 35 | self.assertFalse(syntax_lint.errors) 36 | 37 | def test_details_errors_2(self): 38 | syntax_lint.check_details_tag(test_case_2) 39 | self.assertFalse(syntax_lint.errors) 40 | # 41 | # def test_details_error_exist_1(self): 42 | # syntax_checker.check_details_tag(test_case_3) 43 | # print(syntax_checker.errors) 44 | # self.assertEqual(len(syntax_checker.errors), 3) 45 | -------------------------------------------------------------------------------- /tests/syntax_lint.py: -------------------------------------------------------------------------------- 1 | """ 2 | Testing suite for https://github.com/bregman-arie/devops-interview-questions 3 | written by surister 4 | 5 | Even though both check_details_tag and check_summary_tags are practically the 6 | same, due to readability and functionality it was decided to be split like 7 | that. 
8 | 9 | Usage: 10 | $ python tests/syntax_lint.py 11 | 12 | """ 13 | 14 | import pathlib 15 | 16 | p = pathlib.Path(__file__).parent.parent.joinpath('README.md') 17 | 18 | with open(p, 'rb') as f: 19 | file_list = [line.rstrip() for line in f.readlines()] 20 | 21 | errors = [] 22 | 23 | 24 | def count_details(file_list): 25 | """ 26 | Counts the total amount of
and
27 | 28 | Used for debugging purpose, not meant to be used in actual tests 29 | """ 30 | details_final_count = 0 31 | details_count = 0 32 | 33 | for line_number, line in enumerate(file_list): 34 | if b'
' in line: 35 | details_count += 1 36 | if b'
' in line: 37 | details_final_count += 1 38 | 39 | return details_count == details_final_count 40 | 41 | 42 | def count_summary(file_list): 43 | """ 44 | Counts the total amount of
and
45 | 46 | Used for debugging purpose, not meant to be used in actual tests 47 | """ 48 | details_final_count = 0 49 | details_count = 0 50 | 51 | for line_number, line in enumerate(file_list): 52 | if b'' in line: 53 | details_count += 1 54 | if b'' in line: 55 | details_final_count += 1 56 | 57 | return details_count == details_final_count 58 | 59 | 60 | def check_details_tag(file_list): 61 | """ 62 | Check whether the structure: 63 |
64 | ... 65 |
66 | 67 | Is correctly followed, if not generates an error. 68 | 69 | """ 70 | 71 | after_detail = False 72 | error = False 73 | err_message = '' 74 | for line_number, line in enumerate(file_list): 75 | if b'
' in line and b'
' in line: 76 | pass 77 | else: 78 | if b'
' in line and after_detail: 79 | err_message = f'Missing closing detail tag round line {line_number - 1}' 80 | error = True 81 | if b'
' in line and not after_detail: 82 | err_message = f'Missing opening detail tag round line {line_number - 1}' 83 | error = True 84 | 85 | if b'
' in line: 86 | after_detail = True 87 | 88 | if b'
' in line and after_detail: 89 | after_detail = False 90 | 91 | if error: 92 | errors.append(err_message) 93 | 94 | error = False 95 | 96 | 97 | def check_summary_tag(file_list): 98 | """ 99 | Check whether the structure: 100 | 101 | ... 102 | 103 | 104 | Is correctly followed, if not generates an error. 105 | 106 | """ 107 | 108 | after_summary = False 109 | error = False 110 | err_message = '' 111 | for line_number, line in enumerate(file_list): 112 | if b'' in line and b'' in line: 113 | pass 114 | else: 115 | if b'' in line and after_summary: 116 | err_message = f'Missing closing summary tag around line {line_number}' 117 | error = True 118 | if b'' in line and not after_summary: 119 | err_message = f'Missing opening summary tag around line {line_number}' 120 | error = True 121 | 122 | if b'' in line: 123 | after_summary = True 124 | 125 | if b'' in line and after_summary: 126 | after_summary = False 127 | 128 | if error: 129 | errors.append(err_message) 130 | 131 | error = False 132 | 133 | 134 | if __name__ == '__main__': 135 | check_details_tag(file_list) 136 | check_summary_tag(file_list) 137 | if errors: 138 | for error in errors: 139 | print(error) 140 | exit(1) 141 | 142 | print("Tests passed successfully.") 143 | -------------------------------------------------------------------------------- /tests/testcases/testcase1.md: -------------------------------------------------------------------------------- 1 |
2 | What is Docker? What are you using it for?
3 |
4 | 5 |
6 | How containers are different from VMs?
7 | 8 | The primary difference between containers and VMs is that containers allow you to virtualize 9 | multiple workloads on the operating system while in the case of VMs the hardware is being virtualized to 10 | run multiple machines each with its own OS. 11 |
12 | 13 |
14 | In which scenarios would you use containers and in which you would prefer to use VMs?
15 | 16 | You should choose VMs when: 17 | * you need run an application which requires all the resources and functionalities of an OS 18 | * you need full isolation and security 19 | 20 | You should choose containers when: 21 | * you need a lightweight solution 22 | * Running multiple versions or instances of a single application 23 |
24 | 25 |
26 | Explain Docker architecture
27 |
28 | 29 |
30 | Describe in detail what happens when you run `docker run hello-world`?
31 | 32 | Docker CLI passes your request to Docker daemon. 33 | Docker daemon downloads the image from Docker Hub 34 | Docker daemon creates a new container by using the image it downloaded 35 | Docker daemon redirects output from container to Docker CLI which redirects it to the standard output 36 |
37 | 38 |
39 | How do you run a container?
40 |
41 | 42 |
43 | What `docker commit` does?. When will you use it?
44 |
45 | 46 |
47 | How would you transfer data from one container into another?
48 |
49 | 50 |
51 | What happens to data of the container when a container exists?
52 |
53 | 54 |
55 | Explain what each of the following commands do: 56 | 57 | * docker run 58 | * docker rm 59 | * docker ps 60 | * docker pull 61 | * docker build 62 | * docker commit
63 |
64 | 65 |
66 | How do you remove old, non running, containers?
67 |
68 | -------------------------------------------------------------------------------- /tests/testcases/testcase2.md: -------------------------------------------------------------------------------- 1 |
2 | Explain the following code: 3 | 4 | :(){ :|:& };: 5 | 6 |
7 |
8 | 9 |
10 | Can you give an example to some Bash best practices?
11 |
12 | 13 |
14 | What is the ternary operator? How do you use it in bash?
15 | 16 | A short way of using if/else. An example: 17 | 18 | [[ $a = 1 ]] && b="yes, equal" || b="nope" 19 |
20 | 21 |
22 | What does the following code do and when would you use it? 23 | 24 | diff <(ls /tmp) <(ls /var/tmp) 25 | 26 |
27 | It is called 'process substitution'. It provides a way to pass the output of a command to another command when using a pipe | is not possible. It can be used when a command does not support STDIN or you need the output of multiple commands. 28 | https://superuser.com/a/1060002/167769 29 |
30 | 31 | 32 | ## SQL 33 | 34 | 35 | #### :baby: Beginner 36 | 37 |
38 | What does SQL stand for?
39 | 40 | Structured Query Language 41 | 42 |
43 | 44 |
45 | How is SQL Different from NoSQL
46 | 47 | The main difference is that SQL databases are structured (data is stored in the form of 48 | tables with rows and columns - like an excel spreadsheet table) while NoSQL is 49 | unstructured, and the data storage can vary depending on how the NoSQL DB is set up, such 50 | as key-value pair, document-oriented, etc. 51 |
52 | 53 |
54 | What does it mean when a database is ACID compliant?
55 | 56 | ACID stands for Atomicity, Consistency, Isolation, Durability. In order to be ACID compliant, the database much meet each of the four criteria 57 | 58 | **Atomicity** - When a change occurs to the database, it should either succeed or fail as a whole. 59 | 60 | For example, if you were to update a table, the update should completely execute. If it only partially executes, the 61 | update is considered failed as a whole, and will not go through - the DB will revert back to it's original 62 | state before the update occurred. It should also be mentioned that Atomicity ensures that each 63 | transaction is completed as it's own stand alone "unit" - if any part fails, the whole statement fails. 64 | 65 | **Consistency** - any change made to the database should bring it from one valid state into the next. 66 | 67 | For example, if you make a change to the DB, it shouldn't corrupt it. Consistency is upheld by checks and constraints that 68 | are pre-defined in the DB. For example, if you tried to change a value from a string to an int when the column 69 | should be of datatype string, a consistent DB would not allow this transaction to go through, and the action would 70 | not be executed 71 | 72 | **Isolation** - this ensures that a database will never be seen "mid-update" - as multiple transactions are running at 73 | the same time, it should still leave the DB in the same state as if the transactions were being run sequentially. 74 | 75 | For example, let's say that 20 other people were making changes to the database at the same time. At the 76 | time you executed your query, 15 of the 20 changes had gone through, but 5 were still in progress. You should 77 | only see the 15 changes that had completed - you wouldn't see the database mid-update as the change goes through. 78 | 79 | **Durability** - Once a change is committed, it will remain committed regardless of what happens 80 | (power failure, system crash, etc.). 
This means that all completed transactions 81 | must be recorded in non-volatile memory. 82 | 83 | Note that SQL is by nature ACID compliant. Certain NoSQL DB's can be ACID compliant depending on 84 | how they operate, but as a general rule of thumb, NoSQL DB's are not considered ACID compliant 85 |
86 | 87 |
88 | When is it best to use SQL? NoSQL?
89 | 90 | SQL - Best used when data integrity is crucial. SQL is typically implemented with many 91 | businesses and areas within the finance field due to it's ACID compliance. 92 | 93 | NoSQL - Great if you need to scale things quickly. NoSQL was designed with web applications 94 | in mind, so it works great if you need to quickly spread the same information around to 95 | multiple servers 96 | 97 | Additionally, since NoSQL does not adhere to the strict table with columns and rows structure 98 | that Relational Databases require, you can store different data types together. 99 |
100 | 101 |
102 | What is a Cartesian Product?
103 | 104 | A Cartesian product is when all rows from the first table are joined to all rows in the second 105 | table. This can be done implicitly by not defining a key to join, or explicitly by 106 | calling a CROSS JOIN on two tables, such as below: 107 | 108 | Select * from customers **CROSS JOIN** orders; 109 | 110 | Note that a Cartesian product can also be a bad thing - when performing a join 111 | on two tables in which both do not have unique keys, this could cause the returned information 112 | to be incorrect. 113 |
114 | 115 | ##### SQL Specific Questions 116 | 117 | For these questions, we will be using the Customers and Orders tables shown below: 118 | 119 | **Customers** 120 | 121 | Customer_ID | Customer_Name | Items_in_cart | Cash_spent_to_Date 122 | ------------ | ------------- | ------------- | ------------- 123 | 100204 | John Smith | 0 | 20.00 124 | 100205 | Jane Smith | 3 | 40.00 125 | 100206 | Bobby Frank | 1 | 100.20 126 | 127 | **ORDERS** 128 | 129 | Customer_ID | Order_ID | Item | Price | Date_sold 130 | ------------ | ------------- | ------------- | ------------- | ------------- 131 | 100206 | A123 | Rubber Ducky | 2.20 | 2019-09-18 132 | 100206 | A123 | Bubble Bath | 8.00 | 2019-09-18 133 | 100206 | Q987 | 80-Pack TP | 90.00 | 2019-09-20 134 | 100205 | Z001 | Cat Food - Tuna Fish | 10.00 | 2019-08-05 135 | 100205 | Z001 | Cat Food - Chicken | 10.00 | 2019-08-05 136 | 100205 | Z001 | Cat Food - Beef | 10.00 | 2019-08-05 137 | 100205 | Z001 | Cat Food - Kitty quesadilla | 10.00 | 2019-08-05 138 | 100204 | X202 | Coffee | 20.00 | 2019-04-29 139 | 140 |
141 | How would I select all fields from this table?
142 | 143 | Select *
144 | From Customers; 145 |
146 | 147 |
148 | How many items are in John's cart?
149 | 150 | Select Items_in_cart
151 | From Customers
152 | Where Customer_Name = "John Smith"; 153 |
154 | 155 |
156 | What is the sum of all the cash spent across all customers?
157 | 158 | Select SUM(Cash_spent_to_Date) as SUM_CASH
159 | From Customers; 160 |
161 | 162 |
163 | Tell me about your last big project/task you worked on
164 |
165 | 166 |
167 | What was most challenging part in the project you worked on?
168 |
169 | 170 |
171 | Why do you want to work here?
172 |
173 | 174 |
175 | How did you hear about us?
176 | 177 | Tell them how did you hear about them :D 178 | Relax, there is no wrong or right answer here...I think. 179 |
-------------------------------------------------------------------------------- /tests/testcases/testcase3.md: -------------------------------------------------------------------------------- 1 | 2 | You have a colleague you don‘t get along with. Tell us some strategies how you create a good work relationship with them anyway.
3 | 4 | Bad answer: I don't. 5 | Better answer: Every person has strengths and weaknesses. This is true also for colleagues I don't have good work relationship with and this is what helps me to create good work relationship with them. If I am able to highlight or recognize their strengths I'm able to focus mainly on that when communicating with them. 6 | 7 | 8 |
9 | What do you love about your work?
10 | 11 | You know the best, but some ideas if you find it hard to express yourself: 12 | 13 | * Diversity 14 | * Complexity 15 | * Challenging 16 | * Communication with several different teams 17 |
18 | 19 |
20 | What are your responsibilities in your current position?
21 | 22 | You know the best :) 23 |
24 | 25 | 26 | Why should we hire you for the role?
27 | 28 | You can use and elaborate on one or all of the following: 29 | 30 | * Passion 31 | * Motivation 32 | * Autodidact 33 | * Creativity (be able to support it with some actual examples) 34 | 35 | 36 | ## Questions you CAN ask 37 | 38 | A list of questions you as a candidate can ask the interviewer during or after the interview. 39 | These are only a suggestion, use them carefully. Not every interviewer will be able to answer these (or happy to) which should be perhaps a red flag warning for your regarding working in such place but that's really up to you. 40 | 41 |
42 | What do you like about working here?
43 |
44 | 45 |
46 | How does the company promote personal growth?
47 | 48 | 49 |
50 | What is the current level of technical debt you are dealing with?
51 | 52 | Be careful when asking this question - all companies, regardless of size, have some level of tech debt. 53 | Phrase the question in the light that all companies have the deal with this, but you want to see the current 54 | pain points they are dealing with
55 | 56 | This is a great way to figure how managers deal with unplanned work, and how good they are at 57 | setting expectations with projects. 58 |
--------------------------------------------------------------------------------