├── .gitignore ├── .travis.yml ├── CONTRIBUTING.md ├── LICENSE ├── README-zh_CN.md ├── README.md ├── certificates ├── cka.md ├── ckad.md └── cloud-practitioner.md ├── coding └── python │ └── binary_search.py ├── common-qa.md ├── credits.md ├── exercises ├── ansible │ ├── my_first_playbook.md │ ├── my_first_task.md │ └── solutions │ │ ├── my_first_playbook.md │ │ └── my_first_task.md ├── ansible_minikube_docker.md ├── aws │ ├── hello_function.md │ ├── solutions │ │ ├── hello_function.md │ │ └── url_function.md │ └── url_function.md ├── cloud_slack_bot.md ├── devops │ ├── ci_for_open_source_project.md │ ├── deploy_to_kubernetes.md │ └── solutions │ │ └── deploy_to_kubernetes │ │ ├── Jenkinsfile │ │ ├── README.md │ │ ├── deploy.yml │ │ ├── helloworld.yml │ │ ├── html │ │ ├── css │ │ │ ├── normalize.css │ │ │ └── skeleton.css │ │ ├── images │ │ │ └── favicon.png │ │ └── index.html │ │ └── inventory ├── eflk.md ├── flask_container_ci │ ├── README.md │ ├── app │ │ ├── __init__.py │ │ ├── config.py │ │ ├── main.py │ │ └── tests.py │ ├── requirements.txt │ ├── tests.py │ └── users.json ├── flask_container_ci2 │ ├── README.md │ ├── app │ │ ├── __init__.py │ │ ├── config.py │ │ ├── main.py │ │ └── tests.py │ ├── requirements.txt │ └── tests.py ├── git │ ├── branch_01.md │ ├── commit_01.md │ ├── solutions │ │ ├── branch_01_solution.md │ │ ├── commit_01_solution.md │ │ └── squashing_commits.md │ └── squashing_commits.md ├── jenkins │ ├── jobs_101.md │ ├── remove_builds.md │ ├── remove_jobs.md │ └── solutions │ │ ├── remove_builds_solution.groovy │ │ └── remove_jobs_solution.groovy ├── jenkins_pipelines.md ├── jenkins_scripts.md ├── kubernetes │ ├── killing_containers.md │ ├── pods_01.md │ ├── replicaset_01.md │ ├── replicaset_02.md │ ├── replicaset_03.md │ ├── services_01.md │ └── solutions │ │ ├── killing_containers.md │ │ ├── pods_01_solution.md │ │ ├── replicaset_01_solution.md │ │ ├── replicaset_02_solution.md │ │ ├── replicaset_03_solution.md │ │ └── 
services_01_solution.md ├── misc │ └── elk_kibana_aws.md ├── openshift │ ├── projects_101.md │ └── solutions │ │ └── projects_101.md ├── os │ ├── fork_101.md │ ├── fork_102.md │ └── solutions │ │ ├── fork_101_solution.md │ │ └── fork_102_solution.md ├── pipeline_deploy_image_to_k8.md ├── programming │ ├── grep_berfore_and_after.md │ └── web_scraper.md ├── python │ ├── advanced_data_types.md │ ├── compress_string.md │ ├── data_types.md │ ├── reverse_string.md │ └── solutions │ │ ├── advanced_data_types_solution.md │ │ ├── data_types_solution.md │ │ └── reverse_string.md ├── sql │ ├── improve_query.md │ └── solutions │ │ └── improve_query.md └── write_dockerfile_run_container.md ├── images ├── Go.png ├── HR.png ├── ansible.png ├── aws.png ├── azure.png ├── bash.png ├── big-data.png ├── certificates.png ├── cloud.png ├── containers.png ├── databases.png ├── design.png ├── design │ ├── cdn-no-downtime.png │ ├── input-process-output.png │ ├── producers_consumers_fix.png │ └── producers_consumers_issue.png ├── devops.png ├── devops_exercises.png ├── devops_resources.png ├── distributed.png ├── distributed │ ├── distributed_design_lb.png │ └── distributed_design_standby.png ├── dns.png ├── elastic.png ├── exercises.png ├── general.png ├── git.png ├── googlecloud.png ├── hardware.png ├── how_they_devops.png ├── infraverse.png ├── jenkins.png ├── jenkins │ └── jenkins-to-kibana.png ├── kubernetes.png ├── kubernetes │ ├── kubernetes_components.png │ └── kubernetes_components_solution.png ├── linux.png ├── linux_master.jpeg ├── mongo.png ├── monitoring.png ├── network.png ├── openshift.png ├── openstack.png ├── os.png ├── programming.png ├── prometheus.png ├── puppet.png ├── python.png ├── regex.png ├── security.png ├── sql.png ├── storage.png ├── system_design_notebook.png ├── terraform.png ├── testing.png ├── virtualization.png └── you.png ├── prepare_for_interview.md ├── scripts ├── count_questions.sh ├── question_utils.py ├── run_ci.sh └── update_question_number.py └── 
tests ├── scripts_question_utils_unittest.py ├── syntax_checker_unittest.py ├── syntax_lint.py └── testcases ├── testcase1.md ├── testcase2.md └── testcase3.md /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | *.pyc 94 | 95 | #Jetbrain's ides. 
96 | .idea -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: "python" 2 | python: 3 | - "3.8" 4 | install: 5 | - pip install flake8 6 | script: 7 | - flake8 --max-line-length=100 . 8 | - python tests/syntax_lint.py 9 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | ## How to contribute 2 | 3 | Use pull requests to contribute to the project. 4 | 5 | Stick to the following format: 6 | 7 | \
8 | [Question]
9 | 10 | [Answer] 11 | \
12 | 13 | * If you added several questions and you would like to know how many questions are there you can use the script "count_questions.sh" in scripts directory. 14 | 15 | ## What to avoid 16 | 17 | * Avoid adding installation questions. Those are the worst type of questions... 18 | * Don't copy questions and answers from other sources. They probably worked hard for adding them. 19 | * If you add new images, make sure they are free and can be used. 20 | 21 | ## Before submitting the pull request 22 | 23 | You can test your changes locally with the script `run_ci.sh` in scripts directory. 24 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | THE WORK (AS DEFINED BELOW) IS PROVIDED UNDER THE TERMS OF THIS CREATIVE COMMONS PUBLIC LICENSE ("CCPL" OR "LICENSE"). THE WORK IS PROTECTED BY COPYRIGHT AND/OR OTHER APPLICABLE LAW. ANY USE OF THE WORK OTHER THAN AS AUTHORIZED UNDER THIS LICENSE OR COPYRIGHT LAW IS PROHIBITED. 2 | 3 | BY EXERCISING ANY RIGHTS TO THE WORK PROVIDED HERE, YOU ACCEPT AND AGREE TO BE BOUND BY THE TERMS OF THIS LICENSE. TO THE EXTENT THIS LICENSE MAY BE CONSIDERED TO BE A CONTRACT, THE LICENSOR GRANTS YOU THE RIGHTS CONTAINED HERE IN CONSIDERATION OF YOUR ACCEPTANCE OF SUCH TERMS AND CONDITIONS. 4 | 5 | 1. Definitions 6 | "Adaptation" means a work based upon the Work, or upon the Work and other pre-existing works, such as a translation, adaptation, derivative work, arrangement of music or other alterations of a literary or artistic work, or phonogram or performance and includes cinematographic adaptations or any other form in which the Work may be recast, transformed, or adapted including in any form recognizably derived from the original, except that a work that constitutes a Collection will not be considered an Adaptation for the purpose of this License. 
For the avoidance of doubt, where the Work is a musical work, performance or phonogram, the synchronization of the Work in timed-relation with a moving image ("synching") will be considered an Adaptation for the purpose of this License. 7 | "Collection" means a collection of literary or artistic works, such as encyclopedias and anthologies, or performances, phonograms or broadcasts, or other works or subject matter other than works listed in Section 1(f) below, which, by reason of the selection and arrangement of their contents, constitute intellectual creations, in which the Work is included in its entirety in unmodified form along with one or more other contributions, each constituting separate and independent works in themselves, which together are assembled into a collective whole. A work that constitutes a Collection will not be considered an Adaptation (as defined above) for the purposes of this License. 8 | "Distribute" means to make available to the public the original and copies of the Work through sale or other transfer of ownership. 9 | "Licensor" means the individual, individuals, entity or entities that offer(s) the Work under the terms of this License. 10 | "Original Author" means, in the case of a literary or artistic work, the individual, individuals, entity or entities who created the Work or if no individual or entity can be identified, the publisher; and in addition (i) in the case of a performance the actors, singers, musicians, dancers, and other persons who act, sing, deliver, declaim, play in, interpret or otherwise perform literary or artistic works or expressions of folklore; (ii) in the case of a phonogram the producer being the person or legal entity who first fixes the sounds of a performance or other sounds; and, (iii) in the case of broadcasts, the organization that transmits the broadcast. 
11 | "Work" means the literary and/or artistic work offered under the terms of this License including without limitation any production in the literary, scientific and artistic domain, whatever may be the mode or form of its expression including digital form, such as a book, pamphlet and other writing; a lecture, address, sermon or other work of the same nature; a dramatic or dramatico-musical work; a choreographic work or entertainment in dumb show; a musical composition with or without words; a cinematographic work to which are assimilated works expressed by a process analogous to cinematography; a work of drawing, painting, architecture, sculpture, engraving or lithography; a photographic work to which are assimilated works expressed by a process analogous to photography; a work of applied art; an illustration, map, plan, sketch or three-dimensional work relative to geography, topography, architecture or science; a performance; a broadcast; a phonogram; a compilation of data to the extent it is protected as a copyrightable work; or a work performed by a variety or circus performer to the extent it is not otherwise considered a literary or artistic work. 12 | "You" means an individual or entity exercising rights under this License who has not previously violated the terms of this License with respect to the Work, or who has received express permission from the Licensor to exercise rights under this License despite a previous violation. 
13 | "Publicly Perform" means to perform public recitations of the Work and to communicate to the public those public recitations, by any means or process, including by wire or wireless means or public digital performances; to make available to the public Works in such a way that members of the public may access these Works from a place and at a place individually chosen by them; to perform the Work to the public by any means or process and the communication to the public of the performances of the Work, including by public digital performance; to broadcast and rebroadcast the Work by any means including signs, sounds or images. 14 | "Reproduce" means to make copies of the Work by any means including without limitation by sound or visual recordings and the right of fixation and reproducing fixations of the Work, including storage of a protected performance or phonogram in digital form or other electronic medium. 15 | 16 | 2. Fair Dealing Rights. 17 | Nothing in this License is intended to reduce, limit, or restrict any uses free from copyright or rights arising from limitations or exceptions that are provided for in connection with the copyright protection under copyright law or other applicable laws. 18 | 19 | 3. License Grant. 20 | Subject to the terms and conditions of this License, Licensor hereby grants You a worldwide, royalty-free, non-exclusive, perpetual (for the duration of the applicable copyright) license to exercise the rights in the Work as stated below: 21 | 22 | to Reproduce the Work, to incorporate the Work into one or more Collections, and to Reproduce the Work as incorporated in the Collections; and, 23 | to Distribute and Publicly Perform the Work including as incorporated in Collections. 24 | The above rights may be exercised in all media and formats whether now known or hereafter devised. 
The above rights include the right to make such modifications as are technically necessary to exercise the rights in other media and formats, but otherwise you have no rights to make Adaptations. Subject to 8(f), all rights not expressly granted by Licensor are hereby reserved, including but not limited to the rights set forth in Section 4(d). 25 | 26 | 4. Restrictions. 27 | The license granted in Section 3 above is expressly made subject to and limited by the following restrictions: 28 | 29 | You may Distribute or Publicly Perform the Work only under the terms of this License. You must include a copy of, or the Uniform Resource Identifier (URI) for, this License with every copy of the Work You Distribute or Publicly Perform. You may not offer or impose any terms on the Work that restrict the terms of this License or the ability of the recipient of the Work to exercise the rights granted to that recipient under the terms of the License. You may not sublicense the Work. You must keep intact all notices that refer to this License and to the disclaimer of warranties with every copy of the Work You Distribute or Publicly Perform. When You Distribute or Publicly Perform the Work, You may not impose any effective technological measures on the Work that restrict the ability of a recipient of the Work from You to exercise the rights granted to that recipient under the terms of the License. This Section 4(a) applies to the Work as incorporated in a Collection, but this does not require the Collection apart from the Work itself to be made subject to the terms of this License. If You create a Collection, upon notice from any Licensor You must, to the extent practicable, remove from the Collection any credit as required by Section 4(c), as requested. 30 | You may not exercise any of the rights granted to You in Section 3 above in any manner that is primarily intended for or directed toward commercial advantage or private monetary compensation. 
The exchange of the Work for other copyrighted works by means of digital file-sharing or otherwise shall not be considered to be intended for or directed toward commercial advantage or private monetary compensation, provided there is no payment of any monetary compensation in connection with the exchange of copyrighted works. 31 | If You Distribute, or Publicly Perform the Work or Collections, You must, unless a request has been made pursuant to Section 4(a), keep intact all copyright notices for the Work and provide, reasonable to the medium or means You are utilizing: (i) the name of the Original Author (or pseudonym, if applicable) if supplied, and/or if the Original Author and/or Licensor designate another party or parties (e.g., a sponsor institute, publishing entity, journal) for attribution ("Attribution Parties") in Licensor's copyright notice, terms of service or by other reasonable means, the name of such party or parties; (ii) the title of the Work if supplied; (iii) to the extent reasonably practicable, the URI, if any, that Licensor specifies to be associated with the Work, unless such URI does not refer to the copyright notice or licensing information for the Work. The credit required by this Section 4(c) may be implemented in any reasonable manner; provided, however, that in the case of a Collection, at a minimum such credit will appear, if a credit for all contributing authors of Collection appears, then as part of these credits and in a manner at least as prominent as the credits for the other contributing authors. 
For the avoidance of doubt, You may only use the credit required by this Section for the purpose of attribution in the manner set out above and, by exercising Your rights under this License, You may not implicitly or explicitly assert or imply any connection with, sponsorship or endorsement by the Original Author, Licensor and/or Attribution Parties, as appropriate, of You or Your use of the Work, without the separate, express prior written permission of the Original Author, Licensor and/or Attribution Parties. 32 | For the avoidance of doubt: 33 | 34 | Non-waivable Compulsory License Schemes. In those jurisdictions in which the right to collect royalties through any statutory or compulsory licensing scheme cannot be waived, the Licensor reserves the exclusive right to collect such royalties for any exercise by You of the rights granted under this License; 35 | Waivable Compulsory License Schemes. In those jurisdictions in which the right to collect royalties through any statutory or compulsory licensing scheme can be waived, the Licensor reserves the exclusive right to collect such royalties for any exercise by You of the rights granted under this License if Your exercise of such rights is for a purpose or use which is otherwise than noncommercial as permitted under Section 4(b) and otherwise waives the right to collect royalties through any statutory or compulsory licensing scheme; and, 36 | Voluntary License Schemes. The Licensor reserves the right to collect royalties, whether individually or, in the event that the Licensor is a member of a collecting society that administers voluntary licensing schemes, via that society, from any exercise by You of the rights granted under this License that is for a purpose or use which is otherwise than noncommercial as permitted under Section 4(b). 
37 | Except as otherwise agreed in writing by the Licensor or as may be otherwise permitted by applicable law, if You Reproduce, Distribute or Publicly Perform the Work either by itself or as part of any Collections, You must not distort, mutilate, modify or take other derogatory action in relation to the Work which would be prejudicial to the Original Author's honor or reputation. 38 | 39 | 5. Representations, Warranties and Disclaimer 40 | UNLESS OTHERWISE MUTUALLY AGREED BY THE PARTIES IN WRITING, LICENSOR OFFERS THE WORK AS-IS AND MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE WORK, EXPRESS, IMPLIED, STATUTORY OR OTHERWISE, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF TITLE, MERCHANTIBILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, ACCURACY, OR THE PRESENCE OF ABSENCE OF ERRORS, WHETHER OR NOT DISCOVERABLE. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OF IMPLIED WARRANTIES, SO SUCH EXCLUSION MAY NOT APPLY TO YOU. 41 | 42 | 6. Limitation on Liability. 43 | EXCEPT TO THE EXTENT REQUIRED BY APPLICABLE LAW, IN NO EVENT WILL LICENSOR BE LIABLE TO YOU ON ANY LEGAL THEORY FOR ANY SPECIAL, INCIDENTAL, CONSEQUENTIAL, PUNITIVE OR EXEMPLARY DAMAGES ARISING OUT OF THIS LICENSE OR THE USE OF THE WORK, EVEN IF LICENSOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 44 | 45 | 7. Termination 46 | This License and the rights granted hereunder will terminate automatically upon any breach by You of the terms of this License. Individuals or entities who have received Collections from You under this License, however, will not have their licenses terminated provided such individuals or entities remain in full compliance with those licenses. Sections 1, 2, 5, 6, 7, and 8 will survive any termination of this License. 47 | Subject to the above terms and conditions, the license granted here is perpetual (for the duration of the applicable copyright in the Work). 
Notwithstanding the above, Licensor reserves the right to release the Work under different license terms or to stop distributing the Work at any time; provided, however that any such election will not serve to withdraw this License (or any other license that has been, or is required to be, granted under the terms of this License), and this License will continue in full force and effect unless terminated as stated above. 48 | 49 | 8. Miscellaneous 50 | Each time You Distribute or Publicly Perform the Work or a Collection, the Licensor offers to the recipient a license to the Work on the same terms and conditions as the license granted to You under this License. 51 | If any provision of this License is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this License, and without further action by the parties to this agreement, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable. 52 | No term or provision of this License shall be deemed waived and no breach consented to unless such waiver or consent shall be in writing and signed by the party to be charged with such waiver or consent. 53 | This License constitutes the entire agreement between the parties with respect to the Work licensed here. There are no understandings, agreements or representations with respect to the Work not specified here. Licensor shall not be bound by any additional provisions that may appear in any communication from You. This License may not be modified without the mutual written agreement of the Licensor and You. 
54 | The rights granted under, and the subject matter referenced, in this License were drafted utilizing the terminology of the Berne Convention for the Protection of Literary and Artistic Works (as amended on September 28, 1979), the Rome Convention of 1961, the WIPO Copyright Treaty of 1996, the WIPO Performances and Phonograms Treaty of 1996 and the Universal Copyright Convention (as revised on July 24, 1971). These rights and subject matter take effect in the relevant jurisdiction in which the License terms are sought to be enforced according to the corresponding provisions of the implementation of those treaty provisions in the applicable national law. If the standard suite of rights granted under applicable copyright law includes additional rights not granted under this License, such additional rights are deemed to be included in the License; this License is not intended to restrict the license of any rights under applicable law. 55 | -------------------------------------------------------------------------------- /certificates/cka.md: -------------------------------------------------------------------------------- 1 | ## Certified Kubernetes Administrator (CKA) 2 | 3 | ### Pods 4 | 5 |
6 | Deploy a pod called web-1985 using the nginx:alpine image
7 | 8 | `kubectl run web-1985 --image=nginx:alpine --restart=Never` 9 |
10 | 11 |
12 | How to find out on which node a certain pod is running?
13 | 14 | `kubectl get po -o wide` 15 |
16 | -------------------------------------------------------------------------------- /certificates/ckad.md: -------------------------------------------------------------------------------- 1 | ## Certified Kubernetes Application Developer (CKAD) 2 | 3 | ### Core Concepts 4 | 5 | ### Pods 6 | 7 |
8 | Deploy a pod called web-1985 using the nginx:alpine image
9 | 10 | `kubectl run web-1985 --image=nginx:alpine --restart=Never` 11 |
12 | 13 |
14 | How to find out on which node a certain pod is running?
15 | 16 | `kubectl get po -o wide` 17 |
18 | 19 | ### Namespaces 20 | 21 |
22 | List all namespaces
23 | 24 | kubectl get ns 25 |
26 | 27 |
28 | List all the pods in the namespace 'neverland'
29 | 30 | kubectl get po -n neverland 31 | </b></details>
32 | 33 |
34 | List all the pods in all the namespaces
35 | 36 | kubectl get po --all-namespaces 37 |
38 | -------------------------------------------------------------------------------- /certificates/cloud-practitioner.md: -------------------------------------------------------------------------------- 1 | ## AWS - Cloud Practitioner 2 | 3 | A summary of what you need to know for the exam can be found [here](https://codingshell.com/aws-cloud-practitioner) 4 | 5 | #### Cloud 101 6 | 7 |
8 | What types of Cloud Computing services are there?
9 | 10 | IAAS 11 | PAAS 12 | SAAS 13 |
14 | 15 |
16 | Explain each of the following and give an example: 17 | 18 | * IAAS 19 | * PAAS 20 | * SAAS
21 |
22 | 23 |
24 | What types of clouds (or cloud deployments) are there?
25 | 26 | * Public 27 | * Hybrid 28 | * Private 29 |
30 | 31 |
32 | Explain each of the following Cloud Computing Deployments: 33 | 34 | * Public 35 | * Hybrid 36 | * Private
37 |
38 | 39 | #### AWS Global Infrastructure 40 | 41 |
42 | Explain the following 43 | 44 | * Availability zone 45 | * Region 46 | * Edge location
47 | AWS regions are data centers hosted across different geographical locations worldwide; each region is completely independent of the others.
48 | 49 | Within each region, there are multiple isolated locations known as Availability Zones. Multiple availability zones ensure high availability in case one of them goes down.
50 | 51 | Edge locations are basically content delivery networks which cache data and ensure lower latency and faster delivery to the users in any location. They are located in major cities around the world. 52 | </b></details>
53 | 54 | #### AWS Networking 55 | 56 |
57 | What is VPC?
58 | 59 | "A logically isolated section of the AWS cloud where you can launch AWS resources in a virtual network that you define" 60 | Read more about it [here](https://aws.amazon.com/vpc). 61 |
62 | 63 |
64 | True or False? VPC spans multiple regions
65 | 66 | False 67 |
68 | 69 |
70 | True or False? Subnets belong to the same VPC, can be in different availability zones
71 | 72 | True. Just to clarify, a subnet must reside entirely in one AZ. 73 |
74 | 75 |
76 | What is an Internet Gateway?
77 | 78 | "component that allows communication between instances in your VPC and the internet" (AWS docs). 79 | Read more about it [here](https://docs.aws.amazon.com/vpc/latest/userguide/VPC_Internet_Gateway.html) 80 |
81 | 82 |
83 | True or False? NACL allow or deny traffic on the subnet level
84 | 85 | True 86 |
87 | 88 |
89 | True or False? Multiple Internet Gateways can be attached to one VPC
90 | 91 | False. Only one internet gateway can be attached to a single VPC. 92 |
93 | 94 |
95 | True or False? Route Tables used to allow or deny traffic from the internet to AWS instances
96 | 97 | False. 98 |
99 | 100 |
101 | Explain Security Groups and Network ACLs
102 | 103 | * NACL - security layer on the subnet level. 104 | * Security Group - security layer on the instance level. 105 | 106 | Read more about it [here](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-security-groups.html) and [here](https://docs.aws.amazon.com/vpc/latest/userguide/VPC_SecurityGroups.html) 107 |
108 | 109 |
110 | What is AWS Direct Connect?
111 | 112 | Allows you to connect your corporate network to AWS network. 113 |
114 | 115 | #### AWS Compute 116 | 117 |
118 | What is EC2?
119 | 120 | "a web service that provides secure, resizable compute capacity in the cloud". 121 | Read more [here](https://aws.amazon.com/ec2) 122 |
123 | 124 |
125 | What is AMI?
126 | 127 | Amazon Machine Images is "An Amazon Machine Image (AMI) provides the information required to launch an instance". 128 | Read more [here](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/AMIs.html) 129 |
130 | 131 |
132 | <summary>What are the different sources for AMIs?</summary><br><b>
133 | 134 | * Personal AMIs - AMIs you create 135 | * AWS Marketplace for AMIs - Paid AMIs, usually bundled with licensed software 136 | * Community AMIs - Free 137 | </b></details>
138 | 139 |
140 | What is instance type?
141 | 142 | "the instance type that you specify determines the hardware of the host computer used for your instance" 143 | Read more about instance types [here](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-types.html) 144 |
145 | 146 |
147 | <summary>True or False? The following are instance types available for a user in AWS: 148 | 149 | * Compute optimized 150 | * Network optimized 151 | * Web optimized</summary><br><b>
152 | 153 | False. From the above list only compute optimized is available. 154 |
155 | 156 |
157 | What is EBS?
158 | 159 | "provides block level storage volumes for use with EC2 instances. EBS volumes behave like raw, unformatted block devices." 160 | More on EBS [here](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/AmazonEBS.html) 161 |
162 | 163 |
164 | What EC2 pricing models are there?
165 | 166 | On Demand - pay a fixed rate by the hour/second with no commitment. You can provision and terminate it at any given time. 167 | Reserved - you get capacity reservation, basically purchase an instance for a fixed time of period. The longer, the cheaper. 168 | Spot - Enables you to bid whatever price you want for instances or pay the spot price. 169 | Dedicated Hosts - physical EC2 server dedicated for your use. 170 |
171 | 172 |
173 | What are Security Groups?
174 | 175 | "A security group acts as a virtual firewall that controls the traffic for one or more instances" 176 | More on this subject [here](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-security-groups.html) 177 |
178 | 179 |
180 | What can you attach to an EC2 instance in order to store data?
181 | 182 | EBS 183 |
184 | 185 |
186 | What EC2 RI types are there?
187 | 188 | Standard RI - most significant discount + suited for steady-state usage 189 | Convertible RI - discount + change attribute of RI + suited for steady-state usage 190 | Scheduled RI - launch within time windows you reserve 191 | 192 | Learn more about EC2 RI [here](https://aws.amazon.com/ec2/pricing/reserved-instances) 193 |
194 | 195 | #### AWS Containers 196 | 197 |
198 | What is Amazon ECS?
199 | 200 | Amazon definition: "Amazon Elastic Container Service (Amazon ECS) is a fully managed container orchestration service. Customers such as Duolingo, Samsung, GE, and Cook Pad use ECS to run their most sensitive and mission critical applications because of its security, reliability, and scalability." 201 | 202 | Learn more [here](https://aws.amazon.com/ecs) 203 |
204 | 205 |
206 | What is Amazon ECR?
207 | 208 | Amazon definition: "Amazon Elastic Container Registry (ECR) is a fully-managed Docker container registry that makes it easy for developers to store, manage, and deploy Docker container images." 209 | 210 | Learn more [here](https://aws.amazon.com/ecr) 211 |
212 | 213 |
214 | What is AWS Fargate?
215 | 216 | Amazon definition: "AWS Fargate is a serverless compute engine for containers that works with both Amazon Elastic Container Service (ECS) and Amazon Elastic Kubernetes Service (EKS)." 217 | 218 | Learn more [here](https://aws.amazon.com/fargate) 219 |
220 | 221 | #### AWS Storage 222 | 223 |
224 | Explain what is AWS S3?
225 | 226 | S3 stands for Simple Storage Service (the "3 S"). 227 | S3 is an object storage service which is fast, scalable and durable. S3 enables customers to upload, download or store any file or object that is up to 5 TB in size. 228 | 229 | More on S3 [here](https://aws.amazon.com/s3) 230 | </b></details>
231 | 232 |
233 | What is a bucket?
234 | 235 | An S3 bucket is a resource which is similar to folders in a file system and allows storing objects, which consist of data. 236 |
237 | 238 |
239 | True or False? A bucket name must be globally unique
240 | 241 | True 242 |
243 | 244 |
245 | Explain folders and objects in regards to buckets
246 | 247 | * Folder - any sub folder in an s3 bucket 248 | * Object - The files which are stored in a bucket 249 |
250 | 251 |
252 | Explain the following: 253 | 254 | * Object Lifecycles 255 | * Object Sharing 256 | * Object Versioning
257 | 258 | * Object Lifecycles - Transfer objects between storage classes based on defined rules of time periods 259 | * Object Sharing - Share objects via a URL link 260 | * Object Versioning - Manage multiple versions of an object 261 |
262 | 263 |
264 | Explain Object Durability and Object Availability
265 | 266 | Object Durability: The percent over a one-year time period that a file will not be lost 267 | Object Availability: The percent over a one-year time period that a file will be accessible 268 |
269 | 270 |
271 | What is a storage class? What storage classes are there?
272 | 273 | Each object has a storage class assigned to, affecting its availability and durability. This also has effect on costs. 274 | Storage classes offered today: 275 | * Standard: 276 | * Used for general, all-purpose storage (mostly storage that needs to be accessed frequently) 277 | * The most expensive storage class 278 | * 11x9% durability 279 | * 2x9% availability 280 | * Default storage class 281 | 282 | * Standard-IA (Infrequent Access) 283 | * Long lived, infrequently accessed data but must be available the moment it's being accessed 284 | * 11x9% durability 285 | * 99.90% availability 286 | 287 | * One Zone-IA (Infrequent Access): 288 | * Long-lived, infrequently accessed, non-critical data 289 | * Less expensive than Standard and Standard-IA storage classes 290 | * 2x9% durability 291 | * 99.50% availability 292 | 293 | * Intelligent-Tiering: 294 | * Long-lived data with changing or unknown access patterns. Basically, In this class the data automatically moves to the class most suitable for you based on usage patterns 295 | * Price depends on the used class 296 | * 11x9% durability 297 | * 99.90% availability 298 | 299 | * Glacier: Archive data with retrieval time ranging from minutes to hours 300 | * Glacier Deep Archive: Archive data that rarely, if ever, needs to be accessed with retrieval times in hours 301 | * Both Glacier and Glacier Deep Archive are: 302 | * The most cheap storage classes 303 | * have 9x9% durability 304 | 305 | More on storage classes [here](https://aws.amazon.com/s3/storage-classes) 306 | 307 |
308 | 309 |
310 | A customer would like to move data which is rarely accessed from standard storage class to the cheapest class there is. Which storage class should be used? 311 | 312 | * One Zone-IA 313 | * Glacier Deep Archive 314 | * Intelligent-Tiering
315 | 316 | Glacier Deep Archive 317 |
318 | 319 |
320 | What Glacier retrieval options are available for the user?
321 | 322 | Expedited, Standard and Bulk 323 |
324 | 325 |
326 | True or False? Each AWS account can store up to 500 PetaByte of data. Any additional storage will cost double
327 | 328 | False. Unlimited capacity. 329 |
330 | 331 |
332 | Explain what is Storage Gateway
333 | 334 | "AWS Storage Gateway is a hybrid cloud storage service that gives you on-premises access to virtually unlimited cloud storage". 335 | More on Storage Gateway [here](https://aws.amazon.com/storagegateway) 336 |
337 | 338 |
339 | Explain the following Storage Gateway deployments types 340 | 341 | * File Gateway 342 | * Volume Gateway 343 | * Tape Gateway
344 | 345 | Explained in detail [here](https://aws.amazon.com/storagegateway/faqs) 346 |
347 | 348 |
349 | What is the difference between stored volumes and cached volumes?
350 | 351 | Stored Volumes - Data is located at customer's data center and periodically backed up to AWS 352 | Cached Volumes - Data is stored in AWS cloud and cached at customer's data center for quick access 353 |
354 | 355 |
356 | What is "Amazon S3 Transfer Acceleration"?
357 | 358 | AWS definition: "Amazon S3 Transfer Acceleration enables fast, easy, and secure transfers of files over long distances between your client and an S3 bucket" 359 | 360 | Learn more [here](https://docs.aws.amazon.com/AmazonS3/latest/dev/transfer-acceleration.html) 361 |
362 | 363 |
364 | What is Amazon EFS?
365 | 366 | Amazon definition: "Amazon Elastic File System (Amazon EFS) provides a simple, scalable, fully managed elastic NFS file system for use with AWS Cloud services and on-premises resources." 367 | 368 | Learn more [here](https://aws.amazon.com/efs) 369 |
370 | 371 |
372 | What is AWS Snowmobile?
373 | 374 | "AWS Snowmobile is an Exabyte-scale data transfer service used to move extremely large amounts of data to AWS." 375 | 376 | Learn more [here](https://aws.amazon.com/snowmobile) 377 |
378 | 379 | #### AWS IAM 380 | 381 |
382 | What is IAM? What are some of its features?
383 | 384 | Full explanation is [here](https://aws.amazon.com/iam) 385 | In short: it's used for managing users, groups, access policies & roles 386 |
387 | 388 |
389 | True or False? IAM configuration is defined globally and not per region
390 | 391 | True 392 |
393 | 394 |
395 | Given an example of IAM best practices?
396 | 397 | * Set up MFA 398 | * Delete root account access keys 399 | * Create IAM users instead of using root for daily management 400 |
401 | 402 |
403 | What are Roles?
404 | 405 | A way for allowing a service of AWS to use another service of AWS. You assign roles to AWS resources. 406 | For example, you can make use of a role which allows the EC2 service to access S3 buckets (read and write). 407 |
408 | 409 |
410 | What are Policies?
411 | 412 | Policies documents used to give permissions as to what a user, group or role are able to do. Their format is JSON. 413 |
414 | 415 |
416 | A user is unable to access an s3 bucket. What might be the problem?
417 | 418 | There can be several reasons for that. One of them is lack of a policy. To solve that, the admin has to attach a policy to the user that allows them to access the S3 bucket. 419 |
420 | 421 |
422 | What should you use to: 423 | 424 | * Grant access between two services/resources? 425 | * Grant user access to resources/services?
426 | 427 | * Role 428 | * Policy 429 |
430 | 431 |
432 | What permissions does a new user have?
433 | 434 | Only a login access. 435 |
436 | 437 | ##### AWS ELB 438 | 439 |
440 | What is ELB (Elastic Load Balancing)?
441 | 442 | AWS definition: "Elastic Load Balancing automatically distributes incoming application traffic across multiple targets, such as Amazon EC2 instances, containers, IP addresses, and Lambda functions." 443 | 444 | More on ELB [here](https://aws.amazon.com/elasticloadbalancing) 445 |
446 | 447 |
448 | What is auto scaling?
449 | 450 | AWS definition: "AWS Auto Scaling monitors your applications and automatically adjusts capacity to maintain steady, predictable performance at the lowest possible cost" 451 | 452 | Read more about auto scaling [here](https://aws.amazon.com/autoscaling) 453 |
454 | 455 |
456 | True or False? Auto Scaling is about adding resources (such as instances) and not about removing resources
457 | 458 | False. Auto scaling adjusts capacity and this can mean removing some resources based on usage and performances. 459 |
460 | 461 |
462 | What types of load balancers are supported in EC2 and what are they used for?
463 | 464 | * Application LB - layer 7 traffic 465 | * Network LB - ultra-high performances or static IP address 466 | * Classic LB - low costs, good for test or dev environments 467 |
468 | 469 | #### AWS DNS 470 | 471 |
472 | What is Route 53?
473 | 474 | "Amazon Route 53 is a highly available and scalable cloud Domain Name System (DNS) web service" 475 | Some of Route 53 features: 476 | * Register domain 477 | * DNS service - domain name translations 478 | * Health checks - verify your app is available 479 | 480 | More on Route 53 [here](https://aws.amazon.com/route53) 481 |
482 | 483 | #### AWS CloudFront 484 | 485 |
486 | Explain what is CloudFront
487 | 488 | AWS definition: "Amazon CloudFront is a fast content delivery network (CDN) service that securely delivers data, videos, applications, and APIs to customers globally with low latency, high transfer speeds, all within a developer-friendly environment." 489 | 490 | More on CloudFront [here](https://aws.amazon.com/cloudfront) 491 |
492 | 493 |
494 | Explain the following 495 | 496 | * Origin 497 | * Edge location 498 | * Distribution
499 |
500 | 501 | #### AWS Monitoring & Logging 502 | 503 |
504 | What is AWS CloudWatch?
505 | 506 | AWS definition: "Amazon CloudWatch is a monitoring and observability service..." 507 | 508 | More on CloudWatch [here](https://aws.amazon.com/cloudwatch) 509 |
510 | 511 |
512 | What is AWS CloudTrail?
513 | 514 | AWS definition: "AWS CloudTrail is a service that enables governance, compliance, operational auditing, and risk auditing of your AWS account." 515 | 516 | Read more on CloudTrail [here](https://aws.amazon.com/cloudtrail) 517 |
518 | 519 |
520 | What is Simple Notification Service (SNS)?
521 | 522 | AWS definition: "a highly available, durable, secure, fully managed pub/sub messaging service that enables you to decouple microservices, distributed systems, and serverless applications." 523 | 524 | Read more about it [here](https://aws.amazon.com/sns) 525 |
526 | 527 |
528 | Explain the following in regards to SNS: 529 | 530 | * Topics 531 | * Subscribers 532 | * Publishers
533 | 534 | * Topics - used for grouping multiple endpoints 535 | * Subscribers - the endpoints where topics send messages to 536 | * Publishers - the provider of the message (event, person, ...) 537 |
538 | 539 | #### AWS Security 540 | 541 |
542 | What is the shared responsibility model? What AWS is responsible for and what the user is responsible for based on the shared responsibility model?
543 | 544 | The shared responsibility model defines what the customer is responsible for and what AWS is responsible for. 545 | 546 | More on the shared responsibility model [here](https://aws.amazon.com/compliance/shared-responsibility-model) 547 |
548 | 549 |
550 | True or False? Based on the shared responsibility model, Amazon is responsible for physical CPUs and security groups on instances
551 | 552 | False. It is responsible for the hardware in its sites, but not for security groups, which are created and managed by the users. 553 |
554 | 555 |
556 | Explain "Shared Controls" in regards to the shared responsibility model
557 | 558 | AWS definition: "apply to both the infrastructure layer and customer layers, but in completely separate contexts or perspectives. In a shared control, AWS provides the requirements for the infrastructure and the customer must provide their own control implementation within their use of AWS services" 559 | 560 | Learn more about it [here](https://aws.amazon.com/compliance/shared-responsibility-model) 561 |
562 | 563 |
564 | What is the AWS compliance program?
565 |
566 | 567 |
568 | What is AWS Artifact?
569 | 570 | AWS definition: "AWS Artifact is your go-to, central resource for compliance-related information that matters to you. It provides on-demand access to AWS’ security and compliance reports and select online agreements." 571 | 572 | Read more about it [here](https://aws.amazon.com/artifact) 573 |
574 | 575 |
576 | What is AWS Inspector?
577 | 578 | AWS definition: "Amazon Inspector is an automated security assessment service that helps improve the security and compliance of applications deployed on AWS. Amazon Inspector automatically assesses applications for exposure, vulnerabilities, and deviations from best practices."" 579 | 580 | Learn more [here](https://aws.amazon.com/inspector) 581 |
582 | 583 |
584 | What is AWS GuardDuty?
585 |
586 | 587 |
588 | What is AWS Shield?
589 | 590 | AWS definition: "AWS Shield is a managed Distributed Denial of Service (DDoS) protection service that safeguards applications running on AWS." 591 |
592 | 593 |
594 | What is AWS WAF? Give an example of how it can be used and describe what resources or services you can use it with
595 |
596 | 597 |
598 | What AWS VPN is used for?
599 |
600 | 601 |
602 | What is the difference between Site-to-Site VPN and Client VPN?
603 |
604 | 605 |
606 | What is AWS CloudHSM?
607 | 608 | Amazon definition: "AWS CloudHSM is a cloud-based hardware security module (HSM) that enables you to easily generate and use your own encryption keys on the AWS Cloud." 609 | 610 | Learn more [here](https://aws.amazon.com/cloudhsm) 611 |
612 | 613 |
614 | True or False? AWS Inspector can perform both network and host assessments
615 | 616 | True 617 |
618 | 619 |
620 | What is AWS Acceptable Use Policy?
621 | 622 | It describes prohibited uses of the web services offered by AWS. 623 | More on AWS Acceptable Use Policy [here](https://aws.amazon.com/aup) 624 |
625 | 626 |
627 | What is AWS Key Management Service (KMS)?
628 | 629 | AWS definition: "KMS makes it easy for you to create and manage cryptographic keys and control their use across a wide range of AWS services and in your applications." 630 | More on KMS [here](https://aws.amazon.com/kms) 631 |
632 | 633 |
634 | True or False? A user is not allowed to perform penetration testing on any of the AWS services
635 | 636 | False. On some services, like EC2, CloudFront and RDS, penetration testing is allowed. 637 |
638 | 639 |
640 | True or False? DDoS attack is an example of allowed penetration testing activity
641 | 642 | False. 643 |
644 | 645 |
646 | True or False? AWS Access Key is a type of MFA device used for AWS resources protection
647 | 648 | False. Security key is an example of an MFA device. 649 |
650 | 651 |
652 | What is Amazon Cognito?
653 | 654 | Amazon definition: "Amazon Cognito handles user authentication and authorization for your web and mobile apps." 655 | 656 | Learn more [here](https://docs.aws.amazon.com/cognito/index.html) 657 |
658 | 659 |
660 | What is AWS ACM?
661 | 662 | Amazon definition: "AWS Certificate Manager is a service that lets you easily provision, manage, and deploy public and private Secure Sockets Layer/Transport Layer Security (SSL/TLS) certificates for use with AWS services and your internal connected resources." 663 | 664 | Learn more [here](https://aws.amazon.com/certificate-manager) 665 |
666 | 667 | #### AWS Databases 668 | 669 |
670 | What is AWS RDS?
671 |
672 | 673 |
674 | What is AWS DynamoDB?
675 |
676 | 677 |
678 | Explain "Point-in-Time Recovery" feature in DynamoDB
679 | 680 | Amazon definition: "You can create on-demand backups of your Amazon DynamoDB tables, or you can enable continuous backups using point-in-time recovery. For more information about on-demand backups, see On-Demand Backup and Restore for DynamoDB." 681 | 682 | Learn more [here](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/PointInTimeRecovery.html) 683 |
684 | 685 |
686 | Explain "Global Tables" in DynamoDB
687 | 688 | Amazon definition: "A global table is a collection of one or more replica tables, all owned by a single AWS account." 689 | 690 | Learn more [here](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/V2globaltables_HowItWorks.html) 691 |
692 | 693 |
694 | What is DynamoDB Accelerator?
695 | 696 | Amazon definition: "Amazon DynamoDB Accelerator (DAX) is a fully managed, highly available, in-memory cache for DynamoDB that delivers up to a 10x performance improvement – from milliseconds to microseconds..." 697 | 698 | Learn more [here](https://aws.amazon.com/dynamodb/dax) 699 |
700 | 701 |
702 | What is AWS Redshift and how is it different than RDS?
703 | 704 | Redshift is a cloud data warehouse designed for analytics (OLAP) on large datasets, while RDS is a managed relational database service aimed at transactional (OLTP) workloads. 705 |
706 | 707 |
708 | What is AWS ElastiCache? For what cases is it used?
709 | 710 | Amazon Elasticache is a fully managed Redis or Memcached in-memory data store. 711 | It's great for use cases like two-tier web applications where the most frequently accesses data is stored in ElastiCache so response time is optimal. 712 |
713 | 714 |
715 | What is Amazon Aurora
716 | 717 | A MySQL & Postgresql based relational database. Also, the default database proposed for the user when using RDS for creating a database. 718 | Great for use cases like two-tier web applications that has a MySQL or Postgresql database layer and you need automated backups for your application. 719 |
720 | 721 |
722 | What is Amazon DocumentDB?
723 | 724 | Amazon definition: "Amazon DocumentDB (with MongoDB compatibility) is a fast, scalable, highly available, and fully managed document database service that supports MongoDB workloads. As a document database, Amazon DocumentDB makes it easy to store, query, and index JSON data." 725 | 726 | Learn more [here](https://aws.amazon.com/documentdb) 727 |
728 | 729 |
730 | What is "AWS Database Migration Service" used for?
731 |
732 | 733 |
734 | What type of storage is used by Amazon RDS?
735 | 736 | EBS 737 |
738 | 739 |
740 | Explain Amazon RDS Read Replicas
741 | 742 | AWS definition: "Amazon RDS Read Replicas provide enhanced performance and durability for RDS database (DB) instances. They make it easy to elastically scale out beyond the capacity constraints of a single DB instance for read-heavy database workloads." 743 | Read more about [here](https://aws.amazon.com/rds/features/read-replicas) 744 |
745 | 746 | #### AWS Serverless Compute 747 | 748 |
749 | Explain what is AWS Lambda
750 | 751 | AWS definition: "AWS Lambda lets you run code without provisioning or managing servers. You pay only for the compute time you consume." 752 | 753 | Read more on it [here](https://aws.amazon.com/lambda) 754 |
755 | 756 |
757 | True or False? In AWS Lambda, you are charged as long as a function exists, regardless of whether it's running or not
758 | 759 | False. Charges are being made when the code is executed. 760 |
761 | 762 |
763 | Which of the following set of languages Lambda supports? 764 | 765 | * R, Swift, Rust, Kotlin 766 | * Python, Ruby, Go 767 | * Python, Ruby, PHP
768 | 769 | * Python, Ruby, Go 770 |
771 | 772 | #### Identify the service or tool 773 | 774 |
775 | What would you use for automating code/software deployments?
776 | 777 | AWS CodeDeploy 778 |
779 | 780 |
781 | What would you use for easily creating similar AWS environments/resources for different customers?
782 | 783 | CloudFormation 784 |
785 | 786 |
787 | Which service would you use for building a website or web application?
788 | 789 | Lightsail 790 |
791 | 792 |
793 | Which tool would you use for choosing between Reserved instances or On-Demand instances?
794 | 795 | Cost Explorer 796 |
797 | 798 |
799 | What would you use to check how many unassociated Elastic IP address you have?
800 | 801 | Trusted Advisor 802 |
803 | 804 |
805 | What service allows you to transfer large amounts (Petabytes) of data in and out of the AWS cloud?
806 | 807 | AWS Snowball 808 |
809 | 810 |
811 | What provides a virtual network dedicated to your AWS account?
812 | 813 | VPC 814 |
815 | 816 |
817 | What would you use to have automated backups for an application that has a MySQL database layer?
818 | 819 | Amazon Aurora 820 |
821 | 822 |
823 | What would you use to migrate on-premise database to AWS?
824 | 825 | AWS Database Migration Service (DMS) 826 |
827 | 828 |
829 | What would you use to check why certain EC2 instances were terminated?
830 | 831 | AWS CloudTrail 832 |
833 | 834 |
835 | What would you use for SQL database?
836 | 837 | AWS RDS 838 |
839 | 840 |
841 | What would you use for NoSQL database?
842 | 843 | AWS DynamoDB 844 |
845 | 846 |
847 | What would you use for running SQL queries interactively on S3?
848 | 849 | AWS Athena 850 |
851 | 852 |
853 | What would you use for adding image and video analysis to your application?
854 | 855 | AWS Rekognition 856 |
857 | 858 |
859 | Which service would you use for debugging and improving performances issues with your applications?
860 | 861 | AWS X-Ray 862 |
863 | 864 |
865 | Which service is used for sending notifications?
866 | 867 | SNS 868 |
869 | 870 |
871 | Which service would you use for monitoring malicious activity and unauthorized behavior in regards to AWS accounts and workloads?
872 | 873 | Amazon GuardDuty 874 |
875 | 876 |
877 | Which service would you use for centrally manage billing, control access, compliance, and security across multiple AWS accounts?
878 | 879 | AWS Organizations 880 |
881 | 882 |
883 | Which service would you use for web application protection?
884 | 885 | AWS WAF 886 |
887 | 888 |
889 | You would like to monitor some of your resources in the different services. Which service would you use for that?
890 | 891 | CloudWatch 892 |
893 | 894 |
895 | Which service would you use for performing security assessment?
896 | 897 | AWS Inspector 898 |
899 | 900 |
901 | Which service would you use for creating DNS record?
902 | 903 | Route 53 904 |
905 | 906 |
907 | What would you use if you need a fully managed document database?
908 | 909 | Amazon DocumentDB 910 |
911 | 912 |
913 | Which service would you use to add access control (or sign-up, sign-in forms) to your web/mobile apps?
914 | 915 | AWS Cognito 916 |
917 | 918 |
919 | Which service would you use if you need messaging queue?
920 | 921 | Simple Queue Service (SQS) 922 |
923 | 924 |
925 | Which service would you use if you need managed DDOS protection?
926 | 927 | AWS Shield 928 |
929 | 930 |
931 | Which service would you use if you need to store frequently used data for low-latency access?
932 | 933 | ElastiCache 934 |
935 | 936 |
937 | What would you use to transfer files over long distances between a client and an S3 bucket?
938 | 939 | Amazon S3 Transfer Acceleration 940 |
941 | 942 | #### AWS Billing & Support 943 | 944 |
945 | What is AWS Organizations?
946 | 947 | AWS definition: "AWS Organizations helps you centrally govern your environment as you grow and scale your workloads on AWS." 948 | More on Organizations [here](https://aws.amazon.com/organizations) 949 |
950 | 951 |
952 | Explain AWS pricing model
953 | 954 | It mainly works on "pay-as-you-go", meaning you pay only for what you are using and when you are using it. 955 | In S3 you pay for 1. How much data you are storing 2. Making requests (PUT, POST, ...) 956 | In EC2 it's based on the purchasing option (on-demand, spot, ...), instance type, AMI type and the region used. 957 | 958 | More on AWS pricing model [here](https://aws.amazon.com/pricing) 959 |
960 | 961 |
962 | How one should estimate AWS costs when for example comparing to on-premise solutions?
963 | 964 | * TCO calculator 965 | * AWS simple calculator 966 | * Cost Explorer 967 |
968 | 969 |
970 | What basic support in AWS includes?
971 | 972 | * 24x7 customer service 973 | * Trusted Advisor 974 | * AWS Personal Health Dashboard 975 |
976 | 977 |
978 | How are EC2 instances billed?
979 |
980 | 981 |
982 | What AWS Pricing Calculator is used for?
983 |
984 | 985 |
986 | What is Amazon Connect?
987 | 988 | Amazon definition: "Amazon Connect is an easy to use omnichannel cloud contact center that helps companies provide superior customer service at a lower cost." 989 | 990 | Learn more [here](https://aws.amazon.com/connect) 991 |
992 | 993 |
994 | What are "APN Consulting Partners"?
995 | 996 | Amazon definition: "APN Consulting Partners are professional services firms that help customers of all types and sizes design, architect, build, migrate, and manage their workloads and applications on AWS, accelerating their journey to the cloud." 997 | 998 | Learn more [here](https://aws.amazon.com/partners/consulting) 999 |
1000 | 1001 |
1002 | Which of the following are AWS accounts types (and are sorted by order)? 1003 | 1004 | * Basic, Developer, Business, Enterprise 1005 | * Newbie, Intermediate, Pro, Enterprise 1006 | * Developer, Basic, Business, Enterprise 1007 | * Beginner, Pro, Intermediate Enterprise
1008 | 1009 | * Basic, Developer, Business, Enterprise 1010 |
1011 | 1012 |
1013 | True or False? Region is a factor when it comes to EC2 costs/pricing
1014 | 1015 | True. You pay differently based on the chosen region. 1016 |
1017 | 1018 |
1019 | What is "AWS Infrastructure Event Management"?
1020 | 1021 | AWS Definition: "AWS Infrastructure Event Management is a structured program available to Enterprise Support customers (and Business Support customers for an additional fee) that helps you plan for large-scale events such as product or application launches, infrastructure migrations, and marketing events." 1022 |
1023 | 1024 | #### AWS Automation 1025 | 1026 |
1027 | What is AWS CodeDeploy?
1028 | 1029 | Amazon definition: "AWS CodeDeploy is a fully managed deployment service that automates software deployments to a variety of compute services such as Amazon EC2, AWS Fargate, AWS Lambda, and your on-premises servers." 1030 | 1031 | Learn more [here](https://aws.amazon.com/codedeploy) 1032 |
1033 | 1034 |
1035 | Explain what is CloudFormation
1036 |
1037 | 1038 | #### AWS Misc 1039 | 1040 |
1041 | What is AWS Lightsail?
1042 | 1043 | AWS definition: "Lightsail is an easy-to-use cloud platform that offers you everything needed to build an application or website, plus a cost-effective, monthly plan." 1044 |
1045 | 1046 |
1047 | What is AWS Rekognition?
1048 | 1049 | AWS definition: "Amazon Rekognition makes it easy to add image and video analysis to your applications using proven, highly scalable, deep learning technology that requires no machine learning expertise to use." 1050 | 1051 | Learn more [here](https://aws.amazon.com/rekognition) 1052 |
1053 | 1054 |
1055 | What AWS Resource Groups used for?
1056 | 1057 | Amazon definition: "You can use resource groups to organize your AWS resources. Resource groups make it easier to manage and automate tasks on large numbers of resources at one time. " 1058 | 1059 | Learn more [here](https://docs.aws.amazon.com/ARG/latest/userguide/welcome.html) 1060 |
1061 | 1062 |
1063 | What is AWS Global Accelerator?
1064 | 1065 | Amazon definition: "AWS Global Accelerator is a service that improves the availability and performance of your applications with local or global users..." 1066 | 1067 | Learn more [here](https://aws.amazon.com/global-accelerator) 1068 |
1069 | 1070 |
1071 | What is AWS Config?
1072 | 1073 | Amazon definition: "AWS Config is a service that enables you to assess, audit, and evaluate the configurations of your AWS resources." 1074 | 1075 | Learn more [here](https://aws.amazon.com/config) 1076 |
1077 | 1078 |
1079 | What is AWS X-Ray?
1080 | 1081 | AWS definition: "AWS X-Ray helps developers analyze and debug production, distributed applications, such as those built using a microservices architecture." 1082 | Learn more [here](https://aws.amazon.com/xray) 1083 |
1084 | 1085 |
1086 | What is AWS OpsWorks?
1087 | 1088 | Amazon definition: "AWS OpsWorks is a configuration management service that provides managed instances of Chef and Puppet." 1089 | 1090 | Learn more about it [here](https://aws.amazon.com/opsworks) 1091 |
1092 | 1093 |
1094 | What is AWS Service Catalog?
1095 | 1096 | Amazon definition: "AWS Service Catalog allows organizations to create and manage catalogs of IT services that are approved for use on AWS." 1097 | 1098 | Learn more [here](https://aws.amazon.com/servicecatalog) 1099 |
1100 | 1101 |
1102 | What is AWS CAF?
1103 | 1104 | Amazon definition: "AWS Professional Services created the AWS Cloud Adoption Framework (AWS CAF) to help organizations design and travel an accelerated path to successful cloud adoption. " 1105 | 1106 | Learn more [here](https://aws.amazon.com/professional-services/CAF) 1107 |
1108 | 1109 |
1110 | What is AWS Cloud9?
1111 | 1112 | AWS definition: "AWS Cloud9 is a cloud-based integrated development environment (IDE) that lets you write, run, and debug your code with just a browser" 1113 |
1114 | 1115 |
1116 | What is AWS Application Discovery Service?
1117 | 1118 | Amazon definition: "AWS Application Discovery Service helps enterprise customers plan migration projects by gathering information about their on-premises data centers." 1119 | 1120 | Learn more [here](https://aws.amazon.com/application-discovery) 1121 |
1122 | 1123 |
1124 | What is the Trusted Advisor?
1125 |
1126 | 1127 |
1128 | What is the AWS well-architected framework and what pillars it's based on?
1129 | 1130 | AWS definition: "The Well-Architected Framework has been developed to help cloud architects build secure, high-performing, resilient, and efficient infrastructure for their applications. Based on five pillars — operational excellence, security, reliability, performance efficiency, and cost optimization" 1131 | 1132 | Learn more [here](https://aws.amazon.com/architecture/well-architected) 1133 |
1134 | 1135 |
1136 | What AWS services are serverless (or have the option to be serverless)?
1137 | 1138 | AWS Lambda 1139 | AWS Athena 1140 |
1141 | 1142 |
1143 | What is AWS EMR?
1144 | 1145 | AWS definition: "big data platform for processing vast amounts of data using open source tools such as Apache Spark, Apache Hive, Apache HBase, Apache Flink, Apache Hudi, and Presto." 1146 | 1147 | Learn more [here](https://aws.amazon.com/emr) 1148 |
1149 | 1150 |
1151 | What is AWS Athena?
1152 | 1153 | "Amazon Athena is an interactive query service that makes it easy to analyze data in Amazon S3 using standard SQL." 1154 | 1155 | Learn more about AWS Athena [here](https://aws.amazon.com/athena) 1156 |
1157 | 1158 |
1159 | What is Amazon Cloud Directory?
1160 | 1161 | Amazon definition: "Amazon Cloud Directory is a highly available multi-tenant directory-based store in AWS. These directories scale automatically to hundreds of millions of objects as needed for applications." 1162 | 1163 | Learn more [here](https://docs.aws.amazon.com/clouddirectory/latest/developerguide/what_is_cloud_directory.html) 1164 |
1165 | 1166 |
1167 | What is AWS Elastic Beanstalk?
1168 | 1169 | AWS definition: "AWS Elastic Beanstalk is an easy-to-use service for deploying and scaling web applications and services...You can simply upload your code and Elastic Beanstalk automatically handles the deployment" 1170 | 1171 | Learn more about it [here](https://aws.amazon.com/elasticbeanstalk) 1172 |
1173 | 1174 |
1175 | What is AWS SWF?
1176 | 1177 | Amazon definition: "Amazon SWF helps developers build, run, and scale background jobs that have parallel or sequential steps. You can think of Amazon SWF as a fully-managed state tracker and task coordinator in the Cloud." 1178 | 1179 | Learn more on Amazon Simple Workflow Service [here](https://aws.amazon.com/swf) 1180 |
1181 | 1182 |
1183 | What is Simple Queue Service (SQS)?
1184 | 1185 | AWS definition: "Amazon Simple Queue Service (SQS) is a fully managed message queuing service that enables you to decouple and scale microservices, distributed systems, and serverless applications". 1186 | 1187 | Learn more about it [here](https://aws.amazon.com/sqs) 1188 |
1189 | 1190 | #### AWS Disaster Recovery 1191 | 1192 |
1193 | In regards to disaster recovery, what is RTO and RPO?
1194 | 1195 | RTO - The maximum acceptable length of time that your application can be offline. 1196 | 1197 | RPO - The maximum acceptable length of time during which data might be lost from your application due to an incident. 1198 |
1199 | 1200 |
1201 | What types of disaster recovery techniques AWS supports?
1202 | 1203 | * The Cold Method - Periodically backups and sending the backups off-site
1204 | * Pilot Light - Data is mirrored to an environment which is always running 1205 | * Warm Standby - Running scaled down version of production environment 1206 | * Multi-site - Duplicated environment that is always running 1207 |
1208 | 1209 |
1210 | Which disaster recovery option has the highest downtime and which has the lowest?
1211 | 1212 | Lowest - Multi-site 1213 | Highest - The cold method 1214 |
1215 | 1216 | ### Final Note 1217 | 1218 | Good luck! You can do it :) 1219 | -------------------------------------------------------------------------------- /coding/python/binary_search.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import random 4 | 5 | 6 | def binary_search(arr, lb, ub, target): 7 | """ 8 | A Binary Search Example which has O(log n) time complexity. 9 | """ 10 | if lb <= ub: 11 | mid = (lb + ub) // 2  # parentheses required: // binds tighter than +, 'ub + lb // 2' can index past ub 12 | if arr[mid] == target: 13 | return mid 14 | elif arr[mid] < target: 15 | return binary_search(arr, mid + 1, ub, target) 16 | else: 17 | return binary_search(arr, lb, mid - 1, target) 18 | else: 19 | return -1 20 | 21 | 22 | if __name__ == '__main__': 23 | rand_num_li = sorted([random.randint(1, 50) for _ in range(10)]) 24 | target = random.randint(1, 50) 25 | print("List: {}\nTarget: {}\nIndex: {}".format( 26 | rand_num_li, target, 27 | binary_search(rand_num_li, 0, len(rand_num_li) - 1, target))) 28 | -------------------------------------------------------------------------------- /common-qa.md: -------------------------------------------------------------------------------- 1 | ## Q&A 2 | 3 | The questions I've been asked at least once. 4 | 5 | ### What is the purpose of this repository? 6 | 7 | Learning 8 | 9 | ### My goal is to prepare for a DevOps interview. How to do that? 10 | 11 | I've added a couple of suggestions [here](prepare_for_interview.md)<br>
12 | Overall, this repository should help you learn some concepts but don't assume at any point that your interview will consist of questions included in this repository. 13 | 14 | ### How do I become a better DevOps Engineer? 15 | 16 | That's a great question.
17 | I don't have a definitive answer for this question, but try the following: 18 | 19 | * Practice - doing DevOps tasks/work should be the primary way to become a DevOps engineer (obvisouly) 20 | * Read - Read blogs, books, ... anything that can enrich you about DevOps 21 | * Participate - there are great DevOps communities. I especially like [Reddit DevOps](https://www.reddit.com/r/devops). Visiting there, I learn quite a lot on different topics. 22 | * Share - This is one of the reasons I created this project. Primary goal was to help others but a secondary goal quickly became to learn more. By asking questions, you actually learn better a certain topic. Try it out, take a certain subject and try to come up with questions you would ask someone to test his/her skills. 23 | 24 | ### Why most of the questions don't have answers? 25 | 26 | Because we need more contributors ;) 27 | 28 | ### Where can I find answers to some of the questions in this repository? 29 | 30 | 1. Search for them using search engines, documentation pages, ... this is part of being a DevOps engineer 31 | 2. Use the communities - many people will be happy to help and answer your questions 32 | 3. Ask us. If you want, you can contact me or even open an issue that is only a question, that's totally fine :) 33 | 34 | ### Where the questions and answers are coming from? 35 | 36 | Well, from everywhere - past experience, colleagues, contributors, ... but please note we do not allow copying interview questions from interview questions sites to here. There are people who worked hard on adding those to their sites and we respect that. 37 | 38 | ### What are the top DevOps skills required for being a DevOps Engineer? 39 | 40 | It's a hard question and the reason is that if you'll ask 20 different people, you'll probably get at least 10 different answers but here is what I believe is common today: 41 | 42 | * OS - DevOps require you good understanding of operating system concepts. 
The level required is mainly depends on the company although in my opinion it should be the same level. You should understand how the operating system works, how to troubleshoot and debug issues, etc. 43 | * Programming is part of DevOps. The level again depends on the company. Some will require you to know basic level of scripting while others deep understanding of common algorithms, data structure, design patterns etc. 44 | * Cloud and Containers - while not 100% must in all companies/positions, this skill is on the rise every year and many (if not most) of the positions/companies require this skill. This specifically means: AWS/Azure/GCP, Docker/Podman, Kubernetes, ... 45 | * CI/CD - Be able to to answer questions like "Why do we need CI/CD?" and "What ways and models are there to perform CI/CD?". Eventually, practice assembling such processes and workflow, using whatever tools you feel comfortable with. 46 | 47 | ### I feel like there are some questions that shouldn't be included in this project 48 | 49 | Is that a question? :) 50 | If you don't like some of the questions or think that some questions should be removed you can open an issue or submit a PR and we can discuss it there. We don't have rules against deleting questions (for now :P) 51 | 52 | ### Can I copy the questions from here to my site? 53 | 54 | You can but: 55 | 56 | * Not without attribution. Many people worked hard on adding these questions and they deserve a proper credit for their work 57 | * Not if you plan to make money out of it. Directly or indirectly (e.g. ADS) as this is a free content and we would like it to stay this way :) 58 | 59 | Same goes for copying questions from different sources to this repository. We saw it happened already with a couple of pull requests and we rejected them. We will not merge pull requests with copied questions and answers from other sources. 60 | 61 | ### Can I add questions and/or answers to this project? 
62 | 63 | I'll simply imagine you didn't ask that on an open source project... :) 64 | 65 | ### Why can't I add installation questions? 66 | 67 | In most cases, I prefer questions added to this repository will have certain educational value for the user. Either regarding a certain concept or even a very general question, but one that will make you research on a certain topic and will make you more familiar with it. 68 | I see little to none value in what is known as "Installation Questions". Let's say I ask you "how to install Jenkins?". Should I conclude from your answer that you are familiar with Jenkins? Is there a value in knowing how to install Jenkins? In my opinion, no. 69 | 70 | ### Where can I practice coding? 71 | 72 | Personally, I really like the following sites 73 | 74 | * [HackerRank](https://www.hackerrank.com) 75 | * [LeetCode](https://leetcode.com) 76 | * [Exercism](https://exercism.io) 77 | 78 | ### How to learn more DevOps? 79 | 80 | I listed some roadmaps in [devops-resources](https://github.com/bregman-arie/devops-resources) 81 | 82 | ### Why some questions repeat themselves? 83 | 84 | If you see two identical questions, that's a bug.
85 | If you see two similar questions, that's a feature :D (= it's intentional) 86 | 87 | For example: 88 | 89 | 1. What is horizontal scaling? 90 | 2. The act of adding additional instances to the pool to handle scaling is called ________ scaling 91 | 92 | You are right, both ask about horizontal scaling but it's done from a different angle in every question and in addition, I do believe repetition helps you to learn something in a way where you are not fixed on the way it's asked, rather you understand the concept itself. 93 | -------------------------------------------------------------------------------- /credits.md: -------------------------------------------------------------------------------- 1 | ## Credits 2 | 3 | Jenkins logo created by Ksenia Nenasheva and published through jenkins.io is licensed under cc by-sa 3.0<br>
4 | Git Logo by Jason Long is licensed under the Creative Commons Attribution 3.0 Unported License
5 | Terraform logo created by Hashicorp®
6 | Docker logo created by Docker®
7 | The Python logo is a trademark of the Python Software Foundation®
8 | Puppet logo created by Puppet®
9 | Bash logo created by Prospect One
10 | OpenStack logo created by and a trademark of The OpenStack Foundation®
11 | Linux, Kubernetes and Prometheus logos are trademarks of The Linux Foundation®
12 | Mongo logo is a trademark of Mongo®
13 | Distributed logo by Flatart
14 | Challenge icon by Elizabeth Arostegui in Technology Mix 15 | "Question you ask" (man raising hand) and "Database" icons by [Webalys](https://www.iconfinder.com/webalys) 16 | Testing logo by [Flatart](https://www.iconfinder.com/Flatart)
17 | Google Cloud Platform Logo created by Google®<br>
18 | VirtualBox Logo created by dAKirby309, under the Creative Commons Attribution-Noncommercial 4.0 License. 19 | Certificates logo by Flatart
20 | Storage icon by Dinosoftlab
21 | -------------------------------------------------------------------------------- /exercises/ansible/my_first_playbook.md: -------------------------------------------------------------------------------- 1 | ## Ansible - My First Playbook 2 | 3 | 1. Write a playbook that will: 4 | a. Install the package zlib 5 | b. Create the file `/tmp/some_file` 6 | 2. Run the playbook on a remote host 7 | -------------------------------------------------------------------------------- /exercises/ansible/my_first_task.md: -------------------------------------------------------------------------------- 1 | ## Ansible - My First Task 2 | 3 | 1. Write a task to create the directory ‘/tmp/new_directory’ 4 | -------------------------------------------------------------------------------- /exercises/ansible/solutions/my_first_playbook.md: -------------------------------------------------------------------------------- 1 | ## My first playbook - Solution 2 | 3 | 1. `vi first_playbook.yml` 4 | 5 | ``` 6 | - name: Install zlib and create a file 7 | hosts: some_remote_host 8 | tasks: 9 | - name: Install zlib 10 | package: 11 | name: zlib 12 | state: present 13 | become: yes 14 | - name: Create the file /tmp/some_file 15 | path: '/tmp/some_file' 16 | state: touch 17 | ``` 18 | 19 | 2. 
First, edit the inventory file: `vi /etc/ansible/hosts` 20 | 21 | ``` 22 | [some_remote_host] 23 | some.remoted.host.com 24 | ``` 25 | 26 | Run the playbook 27 | 28 | `ansible-playbook first_playbook.yml` 29 | -------------------------------------------------------------------------------- /exercises/ansible/solutions/my_first_task.md: -------------------------------------------------------------------------------- 1 | ## My First Task - Solution 2 | 3 | ``` 4 | - name: Create a new directory 5 | file: 6 | path: "/tmp/new_directory" 7 | state: directory 8 | ``` 9 | -------------------------------------------------------------------------------- /exercises/ansible_minikube_docker.md: -------------------------------------------------------------------------------- 1 | ## Ansible, Minikube and Docker 2 | 3 | * Write a simple program in any language you want that outputs "I'm on %HOSTNAME%" (HOSTNAME should be the actual host name on which the app is running) 4 | * Write a Dockerfile which will run your app 5 | * Create the YAML files required for deploying the pods 6 | * Write and run an Ansible playbook which will install Docker, Minikube and kubectl and then create a deployment in minikube with your app running. 7 | -------------------------------------------------------------------------------- /exercises/aws/hello_function.md: -------------------------------------------------------------------------------- 1 | ## Hello Function 2 | 3 | Create a basic AWS Lambda function that when given a name, will return "Hello " 4 | -------------------------------------------------------------------------------- /exercises/aws/solutions/hello_function.md: -------------------------------------------------------------------------------- 1 | ## Hello Function - Solution 2 | 3 | ### Exercise 4 | 5 | Create a basic AWS Lambda function that when given a name, will return "Hello " 6 | 7 | ### Solution 8 | 9 | #### Define a function 10 | 11 | 1. 
Go to Lambda console panel and click on `Create function` 12 | 1. Give the function a name like `BasicFunction` 13 | 2. Select `Python3` runtime 14 | 3. Now to handle function's permissions, we can attach IAM role to our function either by setting a role or creating a new role. I selected "Create a new role from AWS policy templates" 15 | 4. In "Policy Templates" select "Simple Microservice Permissions" 16 | 17 | 1. Next, you should see a text editor where you will insert a code similar to the following 18 | 19 | #### Function's code 20 | ``` 21 | import json 22 | 23 | 24 | def lambda_handler(event, context): 25 | firstName = event['name'] 26 | return 'Hello ' + firstName 27 | ``` 28 | 2. Click on "Create Function" 29 | 30 | #### Define a test 31 | 32 | 1. Now let's test the function. Click on "Test". 33 | 2. Select "Create new test event" 34 | 3. Set the "Event name" to whatever you'd like. For example "TestEvent" 35 | 4. Provide keys to test 36 | 37 | ``` 38 | { 39 | "name": 'Spyro' 40 | } 41 | ``` 42 | 5. Click on "Create" 43 | 44 | #### Test the function 45 | 46 | 1. Choose the test event you've create (`TestEvent`) 47 | 2. Click on the `Test` button 48 | 3. You should see something similar to `Execution result: succeeded` 49 | 4. If you'll go to AWS CloudWatch, you should see a related log stream 50 | -------------------------------------------------------------------------------- /exercises/aws/solutions/url_function.md: -------------------------------------------------------------------------------- 1 | ## URL Function 2 | 3 | Create a basic AWS Lambda function that will be triggered when you enter a URL in the browser 4 | 5 | ### Solution 6 | 7 | #### Define a function 8 | 9 | 1. Go to Lambda console panel and click on `Create function` 10 | 1. Give the function a name like `urlFunction` 11 | 2. Select `Python3` runtime 12 | 3. Now to handle function's permissions, we can attach IAM role to our function either by setting a role or creating a new role. 
I selected "Create a new role from AWS policy templates" 13 | 4. In "Policy Templates" select "Simple Microservice Permissions" 14 | 15 | 1. Next, you should see a text editor where you will insert a code similar to the following 16 | 17 | #### Function's code 18 | ``` 19 | import json 20 | 21 | 22 | def lambda_handler(event, context): 23 | firstName = event['name'] 24 | return 'Hello ' + firstName 25 | ``` 26 | 2. Click on "Create Function" 27 | 28 | #### Define a test 29 | 30 | 1. Now let's test the function. Click on "Test". 31 | 2. Select "Create new test event" 32 | 3. Set the "Event name" to whatever you'd like. For example "TestEvent" 33 | 4. Provide keys to test 34 | 35 | ``` 36 | { 37 | "name": 'Spyro' 38 | } 39 | ``` 40 | 5. Click on "Create" 41 | 42 | #### Test the function 43 | 44 | 1. Choose the test event you've create (`TestEvent`) 45 | 2. Click on the `Test` button 46 | 3. You should see something similar to `Execution result: succeeded` 47 | 4. If you'll go to AWS CloudWatch, you should see a related log stream 48 | 49 | #### Define a trigger 50 | 51 | We'll define a trigger in order to trigger the function when inserting the URL in the browser 52 | 53 | 1. Go to "API Gateway console" and click on "New API Option" 54 | 2. Insert the API name, description and click on "Create" 55 | 3. Click on Action -> Create Resource 56 | 4. Insert resource name and path (e.g. the path can be /hello) and click on "Create Resource" 57 | 5. Select the resource we've created and click on "Create Method" 58 | 6. For "integration type" choose "Lambda Function" and insert the lambda function name we've given to the function we previously created. Make sure to also use the same region 59 | 7. Confirm settings and any required permissions 60 | 8. Now click again on the resource and modify "Body Mapping Templates" so the template includes this: 61 | 62 | ``` 63 | { "name": "$input.params('name')" } 64 | ``` 65 | 9. 
Finally save and click on Actions -> Deploy API 66 | 67 | #### Running the function 68 | 69 | 1. In the API Gateway console, in stages menu, select the API we've created and click on the GET option 70 | 2. You'll see an invoke URL you can click on. You might have to modify it to include the input so it looks similar to this: `.../hello?name=mario` 71 | 3. You should see in your browser `Hello Mario` 72 | -------------------------------------------------------------------------------- /exercises/aws/url_function.md: -------------------------------------------------------------------------------- 1 | ## URL Function 2 | 3 | Create a basic AWS Lambda function that will be triggered when you enter a URL in the browser 4 | -------------------------------------------------------------------------------- /exercises/cloud_slack_bot.md: -------------------------------------------------------------------------------- 1 | ## Cloud Slack Bot 2 | 3 | Create a slack bot to manage cloud instances. You can choose whatever cloud provider you want (e.g. Openstack, AWS, GCP, Azure) 4 | You should provide: 5 | 6 | * Instructions on how to use it 7 | * Source code of the slack bot 8 | * A running slack bot account or a deployment script so we can test it 9 | 10 | The bot should be able to support: 11 | 12 | * Creating new instances 13 | * Removing existing instances 14 | * Starting an instance 15 | * Stopping an instance 16 | * Displaying the status of an instance 17 | * List all available instances 18 | 19 | The bot should also be able to show help message. 20 | -------------------------------------------------------------------------------- /exercises/devops/ci_for_open_source_project.md: -------------------------------------------------------------------------------- 1 | ## CI for Open Source Project 2 | 3 | 1. Choose an open source project from Github and fork it 4 | 2. Create a CI pipeline/workflow for the project you forked 5 | 3. 
The CI pipeline/workflow will include anything that is relevant to the project you forked. For example: 6 | * If it's a Python project, you will run PEP8 7 | * If the project has unit tests directory, you will run these unit tests as part of the CI 8 | 4. In a separate file, describe what is running as part of the CI and why you chose to include it. You can also describe any thoughts, dilemmas, challenge you had 9 | 10 | ### Bonus 11 | 12 | Containerize the app of the project you forked using any containerization technology you would like. 13 | -------------------------------------------------------------------------------- /exercises/devops/deploy_to_kubernetes.md: -------------------------------------------------------------------------------- 1 | ## Deploy to Kubernetes 2 | 3 | * Write a pipeline that will deploy an "hello world" web app to Kubernete 4 | * The CI/CD system (where the pipeline resides) and the Kubernetes cluster should be on separate systems 5 | * The web app should be accessible remotely and only with HTTPS 6 | -------------------------------------------------------------------------------- /exercises/devops/solutions/deploy_to_kubernetes/Jenkinsfile: -------------------------------------------------------------------------------- 1 | pipeline { 2 | 3 | agent any 4 | 5 | stages { 6 | 7 | stage('Checkout Source') { 8 | steps { 9 | git url:'https://github.com//.git', 10 | // credentialsId: 'creds_github', 11 | branch:'master' 12 | } 13 | } 14 | 15 | stage("Build image") { 16 | steps { 17 | script { 18 | myapp = docker.build("/helloworld:${env.BUILD_ID}") 19 | } 20 | } 21 | } 22 | 23 | stage("Push image") { 24 | steps { 25 | script { 26 | docker.withRegistry('https://registry.hub.docker.com', 'dockerhub') { 27 | myapp.push("latest") 28 | myapp.push("${env.BUILD_ID}") 29 | } 30 | } 31 | } 32 | } 33 | 34 | 35 | stage('Deploy App') { 36 | steps { 37 | script { 38 | sh 'ansible-playbook deploy.yml' 39 | } 40 | } 41 | } 42 | 43 | } 44 | 45 | } 46 | 
-------------------------------------------------------------------------------- /exercises/devops/solutions/deploy_to_kubernetes/README.md: -------------------------------------------------------------------------------- 1 | ## Deploy to Kubernetes 2 | 3 | Note: this exercise can be solved in various ways. The solution described here is just one possible way. 4 | 5 | 1. Install Jenkins on one system (follow up the standard Jenkins installation procedure) 6 | 2. Deploy Kubernetes on a remote host (minikube can be an easy way to achieve it) 7 | 3. Create a simple web app or [page](html) 8 | 9 | 4. Create Kubernetes [resoruces](helloworld.yml) - Deployment, Service and Ingress (for HTTPS access) 10 | 5. Create an [Ansible inventory](inventory) and insert the address of the Kubernetes cluster 11 | 6. Write [Ansible playbook](deploy.yml) to deploy the Kubernetes resources and also generate 12 | 7. Create a [pipeline](Jenkinsfile) 13 | 14 | 8. Run the pipeline :) 15 | 9. Try to access the web app remotely 16 | -------------------------------------------------------------------------------- /exercises/devops/solutions/deploy_to_kubernetes/deploy.yml: -------------------------------------------------------------------------------- 1 | - name: Apply Kubernetes YAMLs 2 | hosts: kubernetes 3 | tasks: 4 | - name: Ensure SSL related directories exist 5 | file: 6 | path: "{{ item }}" 7 | state: directory 8 | loop: 9 | - "/etc/ssl/crt" 10 | - "/etc/ssl/csr" 11 | - "/etc/ssl/private" 12 | 13 | - name: Generate an OpenSSL private key. 
14 | openssl_privatekey: 15 | path: /etc/ssl/private/privkey.pem 16 | 17 | - name: generate openssl certficate signing requests 18 | openssl_csr: 19 | path: /etc/ssl/csr/hello-world.app.csr 20 | privatekey_path: /etc/ssl/private/privkey.pem 21 | common_name: hello-world.app 22 | 23 | - name: Generate a Self Signed OpenSSL certificate 24 | openssl_certificate: 25 | path: /etc/ssl/crt/hello-world.app.crt 26 | privatekey_path: /etc/ssl/private/privkey.pem 27 | csr_path: /etc/ssl/csr/hello-world.app.csr 28 | provider: selfsigned 29 | 30 | - name: Create k8s secret 31 | command: "kubectl create secret tls tls-secret --cert=/etc/ssl/crt/hello-world.app.crt --key=/etc/ssl/private/privkey.pem" 32 | register: result 33 | failed_when: 34 | - result.rc == 2 35 | 36 | - name: Deploy web app 37 | k8s: 38 | state: present 39 | definition: "{{ lookup('file', './helloworld.yml') }}" 40 | kubeconfig: '/home/abregman/.kube/config' 41 | namespace: 'default' 42 | wait: true 43 | -------------------------------------------------------------------------------- /exercises/devops/solutions/deploy_to_kubernetes/helloworld.yml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: apps/v1 3 | kind: Deployment 4 | metadata: 5 | name: hello-blue-whale 6 | spec: 7 | replicas: 3 8 | selector: 9 | matchLabels: 10 | app: hello-world-app 11 | version: blue 12 | template: 13 | metadata: 14 | name: hello-blue-whale-pod 15 | labels: 16 | app: hello-world-app 17 | version: blue 18 | spec: 19 | containers: 20 | - name: hello-whale-container 21 | image: abregman2/helloworld:latest 22 | imagePullPolicy: Always 23 | ports: 24 | - containerPort: 80 25 | - containerPort: 443 26 | --- 27 | apiVersion: v1 28 | kind: Service 29 | metadata: 30 | name: hello-world 31 | labels: 32 | app: hello-world-app 33 | spec: 34 | ports: 35 | - port: 80 36 | targetPort: 80 37 | protocol: TCP 38 | name: http 39 | selector: 40 | app: hello-world-app 41 | --- 42 | apiVersion: 
networking.k8s.io/v1 43 | kind: Ingress 44 | metadata: 45 | name: example-ingress 46 | annotations: 47 | cert-manager.io/cluster-issuer: selfsigned-issuer 48 | nginx.ingress.kubernetes.io/rewrite-target: / 49 | kubernetes.io/ingress.class: nginx 50 | spec: 51 | tls: 52 | - hosts: 53 | - hello-world.app 54 | secretName: shhh 55 | rules: 56 | - host: hello-world.app 57 | http: 58 | paths: 59 | - path: / 60 | pathType: Prefix 61 | backend: 62 | service: 63 | name: hello-world 64 | port: 65 | number: 80 66 | -------------------------------------------------------------------------------- /exercises/devops/solutions/deploy_to_kubernetes/html/css/normalize.css: -------------------------------------------------------------------------------- 1 | /*! normalize.css v3.0.2 | MIT License | git.io/normalize */ 2 | 3 | /** 4 | * 1. Set default font family to sans-serif. 5 | * 2. Prevent iOS text size adjust after orientation change, without disabling 6 | * user zoom. 7 | */ 8 | 9 | html { 10 | font-family: sans-serif; /* 1 */ 11 | -ms-text-size-adjust: 100%; /* 2 */ 12 | -webkit-text-size-adjust: 100%; /* 2 */ 13 | } 14 | 15 | /** 16 | * Remove default margin. 17 | */ 18 | 19 | body { 20 | margin: 0; 21 | } 22 | 23 | /* HTML5 display definitions 24 | ========================================================================== */ 25 | 26 | /** 27 | * Correct `block` display not defined for any HTML5 element in IE 8/9. 28 | * Correct `block` display not defined for `details` or `summary` in IE 10/11 29 | * and Firefox. 30 | * Correct `block` display not defined for `main` in IE 11. 31 | */ 32 | 33 | article, 34 | aside, 35 | details, 36 | figcaption, 37 | figure, 38 | footer, 39 | header, 40 | hgroup, 41 | main, 42 | menu, 43 | nav, 44 | section, 45 | summary { 46 | display: block; 47 | } 48 | 49 | /** 50 | * 1. Correct `inline-block` display not defined in IE 8/9. 51 | * 2. Normalize vertical alignment of `progress` in Chrome, Firefox, and Opera. 
52 | */ 53 | 54 | audio, 55 | canvas, 56 | progress, 57 | video { 58 | display: inline-block; /* 1 */ 59 | vertical-align: baseline; /* 2 */ 60 | } 61 | 62 | /** 63 | * Prevent modern browsers from displaying `audio` without controls. 64 | * Remove excess height in iOS 5 devices. 65 | */ 66 | 67 | audio:not([controls]) { 68 | display: none; 69 | height: 0; 70 | } 71 | 72 | /** 73 | * Address `[hidden]` styling not present in IE 8/9/10. 74 | * Hide the `template` element in IE 8/9/11, Safari, and Firefox < 22. 75 | */ 76 | 77 | [hidden], 78 | template { 79 | display: none; 80 | } 81 | 82 | /* Links 83 | ========================================================================== */ 84 | 85 | /** 86 | * Remove the gray background color from active links in IE 10. 87 | */ 88 | 89 | a { 90 | background-color: transparent; 91 | } 92 | 93 | /** 94 | * Improve readability when focused and also mouse hovered in all browsers. 95 | */ 96 | 97 | a:active, 98 | a:hover { 99 | outline: 0; 100 | } 101 | 102 | /* Text-level semantics 103 | ========================================================================== */ 104 | 105 | /** 106 | * Address styling not present in IE 8/9/10/11, Safari, and Chrome. 107 | */ 108 | 109 | abbr[title] { 110 | border-bottom: 1px dotted; 111 | } 112 | 113 | /** 114 | * Address style set to `bolder` in Firefox 4+, Safari, and Chrome. 115 | */ 116 | 117 | b, 118 | strong { 119 | font-weight: bold; 120 | } 121 | 122 | /** 123 | * Address styling not present in Safari and Chrome. 124 | */ 125 | 126 | dfn { 127 | font-style: italic; 128 | } 129 | 130 | /** 131 | * Address variable `h1` font-size and margin within `section` and `article` 132 | * contexts in Firefox 4+, Safari, and Chrome. 133 | */ 134 | 135 | h1 { 136 | font-size: 2em; 137 | margin: 0.67em 0; 138 | } 139 | 140 | /** 141 | * Address styling not present in IE 8/9. 
142 | */ 143 | 144 | mark { 145 | background: #ff0; 146 | color: #000; 147 | } 148 | 149 | /** 150 | * Address inconsistent and variable font size in all browsers. 151 | */ 152 | 153 | small { 154 | font-size: 80%; 155 | } 156 | 157 | /** 158 | * Prevent `sub` and `sup` affecting `line-height` in all browsers. 159 | */ 160 | 161 | sub, 162 | sup { 163 | font-size: 75%; 164 | line-height: 0; 165 | position: relative; 166 | vertical-align: baseline; 167 | } 168 | 169 | sup { 170 | top: -0.5em; 171 | } 172 | 173 | sub { 174 | bottom: -0.25em; 175 | } 176 | 177 | /* Embedded content 178 | ========================================================================== */ 179 | 180 | /** 181 | * Remove border when inside `a` element in IE 8/9/10. 182 | */ 183 | 184 | img { 185 | border: 0; 186 | } 187 | 188 | /** 189 | * Correct overflow not hidden in IE 9/10/11. 190 | */ 191 | 192 | svg:not(:root) { 193 | overflow: hidden; 194 | } 195 | 196 | /* Grouping content 197 | ========================================================================== */ 198 | 199 | /** 200 | * Address margin not present in IE 8/9 and Safari. 201 | */ 202 | 203 | figure { 204 | margin: 1em 40px; 205 | } 206 | 207 | /** 208 | * Address differences between Firefox and other browsers. 209 | */ 210 | 211 | hr { 212 | -moz-box-sizing: content-box; 213 | box-sizing: content-box; 214 | height: 0; 215 | } 216 | 217 | /** 218 | * Contain overflow in all browsers. 219 | */ 220 | 221 | pre { 222 | overflow: auto; 223 | } 224 | 225 | /** 226 | * Address odd `em`-unit font size rendering in all browsers. 227 | */ 228 | 229 | code, 230 | kbd, 231 | pre, 232 | samp { 233 | font-family: monospace, monospace; 234 | font-size: 1em; 235 | } 236 | 237 | /* Forms 238 | ========================================================================== */ 239 | 240 | /** 241 | * Known limitation: by default, Chrome and Safari on OS X allow very limited 242 | * styling of `select`, unless a `border` property is set. 
243 | */ 244 | 245 | /** 246 | * 1. Correct color not being inherited. 247 | * Known issue: affects color of disabled elements. 248 | * 2. Correct font properties not being inherited. 249 | * 3. Address margins set differently in Firefox 4+, Safari, and Chrome. 250 | */ 251 | 252 | button, 253 | input, 254 | optgroup, 255 | select, 256 | textarea { 257 | color: inherit; /* 1 */ 258 | font: inherit; /* 2 */ 259 | margin: 0; /* 3 */ 260 | } 261 | 262 | /** 263 | * Address `overflow` set to `hidden` in IE 8/9/10/11. 264 | */ 265 | 266 | button { 267 | overflow: visible; 268 | } 269 | 270 | /** 271 | * Address inconsistent `text-transform` inheritance for `button` and `select`. 272 | * All other form control elements do not inherit `text-transform` values. 273 | * Correct `button` style inheritance in Firefox, IE 8/9/10/11, and Opera. 274 | * Correct `select` style inheritance in Firefox. 275 | */ 276 | 277 | button, 278 | select { 279 | text-transform: none; 280 | } 281 | 282 | /** 283 | * 1. Avoid the WebKit bug in Android 4.0.* where (2) destroys native `audio` 284 | * and `video` controls. 285 | * 2. Correct inability to style clickable `input` types in iOS. 286 | * 3. Improve usability and consistency of cursor style between image-type 287 | * `input` and others. 288 | */ 289 | 290 | button, 291 | html input[type="button"], /* 1 */ 292 | input[type="reset"], 293 | input[type="submit"] { 294 | -webkit-appearance: button; /* 2 */ 295 | cursor: pointer; /* 3 */ 296 | } 297 | 298 | /** 299 | * Re-set default cursor for disabled elements. 300 | */ 301 | 302 | button[disabled], 303 | html input[disabled] { 304 | cursor: default; 305 | } 306 | 307 | /** 308 | * Remove inner padding and border in Firefox 4+. 309 | */ 310 | 311 | button::-moz-focus-inner, 312 | input::-moz-focus-inner { 313 | border: 0; 314 | padding: 0; 315 | } 316 | 317 | /** 318 | * Address Firefox 4+ setting `line-height` on `input` using `!important` in 319 | * the UA stylesheet. 
320 | */ 321 | 322 | input { 323 | line-height: normal; 324 | } 325 | 326 | /** 327 | * It's recommended that you don't attempt to style these elements. 328 | * Firefox's implementation doesn't respect box-sizing, padding, or width. 329 | * 330 | * 1. Address box sizing set to `content-box` in IE 8/9/10. 331 | * 2. Remove excess padding in IE 8/9/10. 332 | */ 333 | 334 | input[type="checkbox"], 335 | input[type="radio"] { 336 | box-sizing: border-box; /* 1 */ 337 | padding: 0; /* 2 */ 338 | } 339 | 340 | /** 341 | * Fix the cursor style for Chrome's increment/decrement buttons. For certain 342 | * `font-size` values of the `input`, it causes the cursor style of the 343 | * decrement button to change from `default` to `text`. 344 | */ 345 | 346 | input[type="number"]::-webkit-inner-spin-button, 347 | input[type="number"]::-webkit-outer-spin-button { 348 | height: auto; 349 | } 350 | 351 | /** 352 | * 1. Address `appearance` set to `searchfield` in Safari and Chrome. 353 | * 2. Address `box-sizing` set to `border-box` in Safari and Chrome 354 | * (include `-moz` to future-proof). 355 | */ 356 | 357 | input[type="search"] { 358 | -webkit-appearance: textfield; /* 1 */ 359 | -moz-box-sizing: content-box; 360 | -webkit-box-sizing: content-box; /* 2 */ 361 | box-sizing: content-box; 362 | } 363 | 364 | /** 365 | * Remove inner padding and search cancel button in Safari and Chrome on OS X. 366 | * Safari (but not Chrome) clips the cancel button when the search input has 367 | * padding (and `textfield` appearance). 368 | */ 369 | 370 | input[type="search"]::-webkit-search-cancel-button, 371 | input[type="search"]::-webkit-search-decoration { 372 | -webkit-appearance: none; 373 | } 374 | 375 | /** 376 | * Define consistent border, margin, and padding. 377 | */ 378 | 379 | fieldset { 380 | border: 1px solid #c0c0c0; 381 | margin: 0 2px; 382 | padding: 0.35em 0.625em 0.75em; 383 | } 384 | 385 | /** 386 | * 1. Correct `color` not being inherited in IE 8/9/10/11. 387 | * 2. 
Remove padding so people aren't caught out if they zero out fieldsets. 388 | */ 389 | 390 | legend { 391 | border: 0; /* 1 */ 392 | padding: 0; /* 2 */ 393 | } 394 | 395 | /** 396 | * Remove default vertical scrollbar in IE 8/9/10/11. 397 | */ 398 | 399 | textarea { 400 | overflow: auto; 401 | } 402 | 403 | /** 404 | * Don't inherit the `font-weight` (applied by a rule above). 405 | * NOTE: the default cannot safely be changed in Chrome and Safari on OS X. 406 | */ 407 | 408 | optgroup { 409 | font-weight: bold; 410 | } 411 | 412 | /* Tables 413 | ========================================================================== */ 414 | 415 | /** 416 | * Remove most spacing between table cells. 417 | */ 418 | 419 | table { 420 | border-collapse: collapse; 421 | border-spacing: 0; 422 | } 423 | 424 | td, 425 | th { 426 | padding: 0; 427 | } -------------------------------------------------------------------------------- /exercises/devops/solutions/deploy_to_kubernetes/html/css/skeleton.css: -------------------------------------------------------------------------------- 1 | /* 2 | * Skeleton V2.0.4 3 | * Copyright 2014, Dave Gamache 4 | * www.getskeleton.com 5 | * Free to use under the MIT license. 
6 | * http://www.opensource.org/licenses/mit-license.php 7 | * 12/29/2014 8 | */ 9 | 10 | 11 | /* Table of contents 12 | –––––––––––––––––––––––––––––––––––––––––––––––––– 13 | - Grid 14 | - Base Styles 15 | - Typography 16 | - Links 17 | - Buttons 18 | - Forms 19 | - Lists 20 | - Code 21 | - Tables 22 | - Spacing 23 | - Utilities 24 | - Clearing 25 | - Media Queries 26 | */ 27 | 28 | 29 | /* Grid 30 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 31 | .container { 32 | position: relative; 33 | width: 100%; 34 | max-width: 960px; 35 | margin: 0 auto; 36 | padding: 0 20px; 37 | box-sizing: border-box; } 38 | .column, 39 | .columns { 40 | width: 100%; 41 | float: left; 42 | box-sizing: border-box; } 43 | 44 | /* For devices larger than 400px */ 45 | @media (min-width: 400px) { 46 | .container { 47 | width: 85%; 48 | padding: 0; } 49 | } 50 | 51 | /* For devices larger than 550px */ 52 | @media (min-width: 550px) { 53 | .container { 54 | width: 80%; } 55 | .column, 56 | .columns { 57 | margin-left: 4%; } 58 | .column:first-child, 59 | .columns:first-child { 60 | margin-left: 0; } 61 | 62 | .one.column, 63 | .one.columns { width: 4.66666666667%; } 64 | .two.columns { width: 13.3333333333%; } 65 | .three.columns { width: 22%; } 66 | .four.columns { width: 30.6666666667%; } 67 | .five.columns { width: 39.3333333333%; } 68 | .six.columns { width: 48%; } 69 | .seven.columns { width: 56.6666666667%; } 70 | .eight.columns { width: 65.3333333333%; } 71 | .nine.columns { width: 74.0%; } 72 | .ten.columns { width: 82.6666666667%; } 73 | .eleven.columns { width: 91.3333333333%; } 74 | .twelve.columns { width: 100%; margin-left: 0; } 75 | 76 | .one-third.column { width: 30.6666666667%; } 77 | .two-thirds.column { width: 65.3333333333%; } 78 | 79 | .one-half.column { width: 48%; } 80 | 81 | /* Offsets */ 82 | .offset-by-one.column, 83 | .offset-by-one.columns { margin-left: 8.66666666667%; } 84 | .offset-by-two.column, 85 | .offset-by-two.columns { margin-left: 
17.3333333333%; } 86 | .offset-by-three.column, 87 | .offset-by-three.columns { margin-left: 26%; } 88 | .offset-by-four.column, 89 | .offset-by-four.columns { margin-left: 34.6666666667%; } 90 | .offset-by-five.column, 91 | .offset-by-five.columns { margin-left: 43.3333333333%; } 92 | .offset-by-six.column, 93 | .offset-by-six.columns { margin-left: 52%; } 94 | .offset-by-seven.column, 95 | .offset-by-seven.columns { margin-left: 60.6666666667%; } 96 | .offset-by-eight.column, 97 | .offset-by-eight.columns { margin-left: 69.3333333333%; } 98 | .offset-by-nine.column, 99 | .offset-by-nine.columns { margin-left: 78.0%; } 100 | .offset-by-ten.column, 101 | .offset-by-ten.columns { margin-left: 86.6666666667%; } 102 | .offset-by-eleven.column, 103 | .offset-by-eleven.columns { margin-left: 95.3333333333%; } 104 | 105 | .offset-by-one-third.column, 106 | .offset-by-one-third.columns { margin-left: 34.6666666667%; } 107 | .offset-by-two-thirds.column, 108 | .offset-by-two-thirds.columns { margin-left: 69.3333333333%; } 109 | 110 | .offset-by-one-half.column, 111 | .offset-by-one-half.columns { margin-left: 52%; } 112 | 113 | } 114 | 115 | 116 | /* Base Styles 117 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 118 | /* NOTE 119 | html is set to 62.5% so that all the REM measurements throughout Skeleton 120 | are based on 10px sizing. 
So basically 1.5rem = 15px :) */ 121 | html { 122 | font-size: 62.5%; } 123 | body { 124 | font-size: 1.5em; /* currently ems cause chrome bug misinterpreting rems on body element */ 125 | line-height: 1.6; 126 | font-weight: 400; 127 | font-family: "Raleway", "HelveticaNeue", "Helvetica Neue", Helvetica, Arial, sans-serif; 128 | color: #222; } 129 | 130 | 131 | /* Typography 132 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 133 | h1, h2, h3, h4, h5, h6 { 134 | margin-top: 0; 135 | margin-bottom: 2rem; 136 | font-weight: 300; } 137 | h1 { font-size: 4.0rem; line-height: 1.2; letter-spacing: -.1rem;} 138 | h2 { font-size: 3.6rem; line-height: 1.25; letter-spacing: -.1rem; } 139 | h3 { font-size: 3.0rem; line-height: 1.3; letter-spacing: -.1rem; } 140 | h4 { font-size: 2.4rem; line-height: 1.35; letter-spacing: -.08rem; } 141 | h5 { font-size: 1.8rem; line-height: 1.5; letter-spacing: -.05rem; } 142 | h6 { font-size: 1.5rem; line-height: 1.6; letter-spacing: 0; } 143 | 144 | /* Larger than phablet */ 145 | @media (min-width: 550px) { 146 | h1 { font-size: 5.0rem; } 147 | h2 { font-size: 4.2rem; } 148 | h3 { font-size: 3.6rem; } 149 | h4 { font-size: 3.0rem; } 150 | h5 { font-size: 2.4rem; } 151 | h6 { font-size: 1.5rem; } 152 | } 153 | 154 | p { 155 | margin-top: 0; } 156 | 157 | 158 | /* Links 159 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 160 | a { 161 | color: #1EAEDB; } 162 | a:hover { 163 | color: #0FA0CE; } 164 | 165 | 166 | /* Buttons 167 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 168 | .button, 169 | button, 170 | input[type="submit"], 171 | input[type="reset"], 172 | input[type="button"] { 173 | display: inline-block; 174 | height: 38px; 175 | padding: 0 30px; 176 | color: #555; 177 | text-align: center; 178 | font-size: 11px; 179 | font-weight: 600; 180 | line-height: 38px; 181 | letter-spacing: .1rem; 182 | text-transform: uppercase; 183 | text-decoration: none; 184 | white-space: nowrap; 185 | background-color: 
transparent; 186 | border-radius: 4px; 187 | border: 1px solid #bbb; 188 | cursor: pointer; 189 | box-sizing: border-box; } 190 | .button:hover, 191 | button:hover, 192 | input[type="submit"]:hover, 193 | input[type="reset"]:hover, 194 | input[type="button"]:hover, 195 | .button:focus, 196 | button:focus, 197 | input[type="submit"]:focus, 198 | input[type="reset"]:focus, 199 | input[type="button"]:focus { 200 | color: #333; 201 | border-color: #888; 202 | outline: 0; } 203 | .button.button-primary, 204 | button.button-primary, 205 | input[type="submit"].button-primary, 206 | input[type="reset"].button-primary, 207 | input[type="button"].button-primary { 208 | color: #FFF; 209 | background-color: #33C3F0; 210 | border-color: #33C3F0; } 211 | .button.button-primary:hover, 212 | button.button-primary:hover, 213 | input[type="submit"].button-primary:hover, 214 | input[type="reset"].button-primary:hover, 215 | input[type="button"].button-primary:hover, 216 | .button.button-primary:focus, 217 | button.button-primary:focus, 218 | input[type="submit"].button-primary:focus, 219 | input[type="reset"].button-primary:focus, 220 | input[type="button"].button-primary:focus { 221 | color: #FFF; 222 | background-color: #1EAEDB; 223 | border-color: #1EAEDB; } 224 | 225 | 226 | /* Forms 227 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 228 | input[type="email"], 229 | input[type="number"], 230 | input[type="search"], 231 | input[type="text"], 232 | input[type="tel"], 233 | input[type="url"], 234 | input[type="password"], 235 | textarea, 236 | select { 237 | height: 38px; 238 | padding: 6px 10px; /* The 6px vertically centers text on FF, ignored by Webkit */ 239 | background-color: #fff; 240 | border: 1px solid #D1D1D1; 241 | border-radius: 4px; 242 | box-shadow: none; 243 | box-sizing: border-box; } 244 | /* Removes awkward default styles on some inputs for iOS */ 245 | input[type="email"], 246 | input[type="number"], 247 | input[type="search"], 248 | input[type="text"], 
249 | input[type="tel"], 250 | input[type="url"], 251 | input[type="password"], 252 | textarea { 253 | -webkit-appearance: none; 254 | -moz-appearance: none; 255 | appearance: none; } 256 | textarea { 257 | min-height: 65px; 258 | padding-top: 6px; 259 | padding-bottom: 6px; } 260 | input[type="email"]:focus, 261 | input[type="number"]:focus, 262 | input[type="search"]:focus, 263 | input[type="text"]:focus, 264 | input[type="tel"]:focus, 265 | input[type="url"]:focus, 266 | input[type="password"]:focus, 267 | textarea:focus, 268 | select:focus { 269 | border: 1px solid #33C3F0; 270 | outline: 0; } 271 | label, 272 | legend { 273 | display: block; 274 | margin-bottom: .5rem; 275 | font-weight: 600; } 276 | fieldset { 277 | padding: 0; 278 | border-width: 0; } 279 | input[type="checkbox"], 280 | input[type="radio"] { 281 | display: inline; } 282 | label > .label-body { 283 | display: inline-block; 284 | margin-left: .5rem; 285 | font-weight: normal; } 286 | 287 | 288 | /* Lists 289 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 290 | ul { 291 | list-style: circle inside; } 292 | ol { 293 | list-style: decimal inside; } 294 | ol, ul { 295 | padding-left: 0; 296 | margin-top: 0; } 297 | ul ul, 298 | ul ol, 299 | ol ol, 300 | ol ul { 301 | margin: 1.5rem 0 1.5rem 3rem; 302 | font-size: 90%; } 303 | li { 304 | margin-bottom: 1rem; } 305 | 306 | 307 | /* Code 308 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 309 | code { 310 | padding: .2rem .5rem; 311 | margin: 0 .2rem; 312 | font-size: 90%; 313 | white-space: nowrap; 314 | background: #F1F1F1; 315 | border: 1px solid #E1E1E1; 316 | border-radius: 4px; } 317 | pre > code { 318 | display: block; 319 | padding: 1rem 1.5rem; 320 | white-space: pre; } 321 | 322 | 323 | /* Tables 324 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 325 | th, 326 | td { 327 | padding: 12px 15px; 328 | text-align: left; 329 | border-bottom: 1px solid #E1E1E1; } 330 | th:first-child, 331 | td:first-child { 332 | 
padding-left: 0; } 333 | th:last-child, 334 | td:last-child { 335 | padding-right: 0; } 336 | 337 | 338 | /* Spacing 339 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 340 | button, 341 | .button { 342 | margin-bottom: 1rem; } 343 | input, 344 | textarea, 345 | select, 346 | fieldset { 347 | margin-bottom: 1.5rem; } 348 | pre, 349 | blockquote, 350 | dl, 351 | figure, 352 | table, 353 | p, 354 | ul, 355 | ol, 356 | form { 357 | margin-bottom: 2.5rem; } 358 | 359 | 360 | /* Utilities 361 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 362 | .u-full-width { 363 | width: 100%; 364 | box-sizing: border-box; } 365 | .u-max-full-width { 366 | max-width: 100%; 367 | box-sizing: border-box; } 368 | .u-pull-right { 369 | float: right; } 370 | .u-pull-left { 371 | float: left; } 372 | 373 | 374 | /* Misc 375 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 376 | hr { 377 | margin-top: 3rem; 378 | margin-bottom: 3.5rem; 379 | border-width: 0; 380 | border-top: 1px solid #E1E1E1; } 381 | 382 | 383 | /* Clearing 384 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 385 | 386 | /* Self Clearing Goodness */ 387 | .container:after, 388 | .row:after, 389 | .u-cf { 390 | content: ""; 391 | display: table; 392 | clear: both; } 393 | 394 | 395 | /* Media Queries 396 | –––––––––––––––––––––––––––––––––––––––––––––––––– */ 397 | /* 398 | Note: The best way to structure the use of media queries is to create the queries 399 | near the relevant code. For example, if you wanted to change the styles for buttons 400 | on small devices, paste the mobile query code up in the buttons section and style it 401 | there. 
402 | */ 403 | 404 | 405 | /* Larger than mobile */ 406 | @media (min-width: 400px) {} 407 | 408 | /* Larger than phablet (also point when grid becomes active) */ 409 | @media (min-width: 550px) {} 410 | 411 | /* Larger than tablet */ 412 | @media (min-width: 750px) {} 413 | 414 | /* Larger than desktop */ 415 | @media (min-width: 1000px) {} 416 | 417 | /* Larger than Desktop HD */ 418 | @media (min-width: 1200px) {} 419 | -------------------------------------------------------------------------------- /exercises/devops/solutions/deploy_to_kubernetes/html/images/favicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/exercises/devops/solutions/deploy_to_kubernetes/html/images/favicon.png -------------------------------------------------------------------------------- /exercises/devops/solutions/deploy_to_kubernetes/html/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 7 | 8 | Hello World :) 9 | 10 | 11 | 12 | 14 | 15 | 16 | 18 | 19 | 20 | 22 | 23 | 24 | 25 | 27 | 28 | 29 | 30 | 31 | 32 | 34 |
35 |
36 |
37 |

Hello World :)

38 |
39 |
40 |
41 | 42 | 44 | 45 | 46 | -------------------------------------------------------------------------------- /exercises/devops/solutions/deploy_to_kubernetes/inventory: -------------------------------------------------------------------------------- 1 | [kubernetes] 2 | x.x.x.x 3 | -------------------------------------------------------------------------------- /exercises/eflk.md: -------------------------------------------------------------------------------- 1 | ## ELK + Filebeat 2 | 3 | Set up the following using any log you would like: 4 | 5 | * Run the following: elasticsearch, logstash, kibana and filebeat (each running in its own container) 6 | * Make filebeat transfer a log to logstash for process 7 | * Once logstash is done, index with elasticsearch 8 | * Finally, make sure data is available in Kibana 9 | -------------------------------------------------------------------------------- /exercises/flask_container_ci/README.md: -------------------------------------------------------------------------------- 1 | Your mission, should you choose to accept it, involves fixing the app in this directory, containerize it and set up a CI for it. 2 | Please read carefully all the instructions. 3 | 4 | If any of the following steps is not working, it is expected from you to fix them 5 | 6 | ## Installation 7 | 8 | 1. Create a virtual environment with `python3 -m venv challenge_venv` 9 | 2. Activate it with `source challenge_venv/bin/activate` 10 | 3. Install the requirements in this directory `pip install -r requirements.txt` 11 | 12 | ## Run the app 13 | 14 | 1. Move to `challenges/flask_container_ci` directory, if you are not already there 15 | 1. Run `export FLASK_APP=app/main.py` 16 | 1. To run the app execute `flask run`. If it doesn't work, fix it 17 | 3. Access `http://127.0.0.1:5000`. 
You should see the following: 18 | 19 | ``` 20 | { 21 | "resources_uris": { 22 | "user": "/users/\", 23 | "users": "/users" 24 | }, 25 | "current_uri": "/" 26 | } 27 | ``` 28 | 29 | 4. You should be able to access any of the resources and get the following data: 30 | 31 | * /users - all users data 32 | * /users/ - data on the specific chosen user 33 | 34 | 5. When accessing /users, the data returned should not include the id of the user, only its name and description. Also, the data should be ordered by usernames. 35 | 36 | ## Containers 37 | 38 | Using Docker or Podman, containerize the flask app so users can run the following two commands: 39 | 40 | ``` 41 | docker build -t app:latest /path/to/Dockerfile 42 | docker run -d -p 5000:5000 app 43 | ``` 44 | 45 | 1. You can use any image base you would like 46 | 2. Containerize only what you need for running the application, nothing else. 47 | 48 | ## CI 49 | 50 | Great, now that we have a working app and also can run it in a container, let's set up a CI for it so it won't break again in the future 51 | In current directory you have a file called tests.py which includes the tests for the app. What is required from you, is: 52 | 53 | 1. The CI should run the app tests. You are free to choose whatever CI system or service you prefer. Use `python tests.py` for running the tests. 54 | 2. There should be some kind of test for the Dockerfile you wrote 55 | 3. Add additional unit test (or another level of tests) for testing the app 56 | 57 | ### Guidelines 58 | 59 | * Except the app functionality, you can change whatever you want - structure, tooling, libraries, ... 
If possible add `notes.md` file which explains reasons, logic, thoughts and anything else you would like to share 60 | * The CI part should include the source code for the pipeline definition 61 | -------------------------------------------------------------------------------- /exercises/flask_container_ci/app/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding=utf-8 3 | -------------------------------------------------------------------------------- /exercises/flask_container_ci/app/config.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding=utf-8 3 | 4 | import os 5 | 6 | basedir = os.path.abspath(os.path.dirname(__file__)) 7 | 8 | SECRET_KEY = 'shhh' 9 | CSRF_ENABLED = True 10 | 11 | SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db') 12 | -------------------------------------------------------------------------------- /exercises/flask_container_ci/app/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding=utf-8 3 | 4 | from flask import Flask 5 | from flask import make_response 6 | 7 | import json 8 | from werkzeug.exceptions import NotFound 9 | 10 | 11 | app = Flask(__name__) 12 | 13 | with open("./users.json", "r") as f: 14 | users = json.load(f) 15 | 16 | 17 | @app.route("/", methods=['GET']) 18 | def index(): 19 | return pretty_json({ 20 | "resources": { 21 | "users": "/users", 22 | "user": "/users/", 23 | }, 24 | "current_uri": "/" 25 | }) 26 | 27 | 28 | @app.route("/users", methods=['GET']) 29 | def all_users(): 30 | return pretty_json(users) 31 | 32 | 33 | @app.route("/users/", methods=['GET']) 34 | def user_data(username): 35 | if username not in users: 36 | raise NotFound 37 | 38 | return pretty_json(users[username]) 39 | 40 | 41 | @app.route("/users//something", methods=['GET']) 42 | def 
user_something(username): 43 | raise NotImplementedError() 44 | 45 | 46 | def pretty_json(arg): 47 | response = make_response(json.dumps(arg, sort_keys=True, indent=4)) 48 | response.headers['Content-type'] = "application/json" 49 | return response 50 | 51 | 52 | def create_test_app(): 53 | app = Flask(__name__) 54 | return app 55 | 56 | 57 | if __name__ == "__main__": 58 | app.run(port=5000) 59 | -------------------------------------------------------------------------------- /exercises/flask_container_ci/app/tests.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding=utf-8 3 | 4 | import os 5 | import unittest 6 | 7 | from config import basedir 8 | from app import app 9 | from app import db 10 | 11 | 12 | class TestCase(unittest.TestCase): 13 | 14 | def setUp(self): 15 | app.config['TESTING'] = True 16 | app.config['WTF_CSRF_ENABLED'] = False 17 | app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.join( 18 | basedir, 'test.db') 19 | self.app = app.test_client() 20 | db.create_all() 21 | 22 | def tearDown(self): 23 | db.session.remove() 24 | db.drop_all() 25 | 26 | 27 | if __name__ == '__main__': 28 | unittest.main() 29 | -------------------------------------------------------------------------------- /exercises/flask_container_ci/requirements.txt: -------------------------------------------------------------------------------- 1 | flask 2 | -------------------------------------------------------------------------------- /exercises/flask_container_ci/tests.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding=utf-8 3 | 4 | import unittest 5 | 6 | from app import main 7 | 8 | 9 | class TestCase(unittest.TestCase): 10 | 11 | def setUp(self): 12 | self.app = main.app.test_client() 13 | 14 | def test_main_page(self): 15 | response = self.app.get('/', follow_redirects=True) 16 | self.assertEqual(response.status_code, 
200) 17 | 18 | def test_users_page(self): 19 | response = self.app.get('/users', follow_redirects=True) 20 | self.assertEqual(response.status_code, 200) 21 | 22 | 23 | if __name__ == '__main__': 24 | unittest.main() 25 | -------------------------------------------------------------------------------- /exercises/flask_container_ci/users.json: -------------------------------------------------------------------------------- 1 | { 2 | "geralt" : { 3 | "id": "whitewolf", 4 | "name": "Geralt of Rivia", 5 | "description": "Traveling monster slayer for hire" 6 | }, 7 | "lara_croft" : { 8 | "id": "m31a3n6sion", 9 | "name": "Lara Croft", 10 | "description": "Highly intelligent and athletic English archaeologist" 11 | }, 12 | "mario" : { 13 | "id": "smb3igiul", 14 | "name": "Mario", 15 | "description": "Italian plumber who really likes mushrooms" 16 | }, 17 | "gordon_freeman" : { 18 | "id": "nohalflife3", 19 | "name": "Gordon Freeman", 20 | "description": "Physicist with great shooting skills" 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /exercises/flask_container_ci2/README.md: -------------------------------------------------------------------------------- 1 | Your mission, should you choose to accept it, involves developing an app, containerize it and set up a CI for it. 2 | Please read carefully all the instructions. 3 | 4 | If any of the following steps is not working, it is expected from you to fix them 5 | 6 | ## Installation 7 | 8 | 1. Create a virtual environment with `python3 -m venv challenge_venv` 9 | 2. Activate it with `source challenge_venv/bin/activate` 10 | 3. Install the requirements in this directory `pip install -r requirements.txt` 11 | 12 | ## Run the app 13 | 14 | 1. Move to `challenges/flask_container_ci` directory, if you are not already there 15 | 1. Run `export FLASK_APP=app/main.py` 16 | 1. To run the app execute `flask run`. If it doesn't works, fix it 17 | 3. Access `http://127.0.0.1:5000`. 
You should see the following 18 | 19 | ``` 20 | { 21 | "current_uri": "/", 22 | "example": "/matrix/'123n456n789'", 23 | "resources": { 24 | "column": "/columns//", 25 | "matrix": "/matrix/", 26 | "row": "/rows//" 27 | } 28 | } 29 | ``` 30 | 31 | 4. You should be able to access any of the resources and get the following data: 32 | 33 | * /matrix/\ 34 | 35 | for example, for /matrix/123n456n789 the user will get: 36 | 37 | 1 2 3 38 | 4 5 6 39 | 7 8 9 40 | 41 | * /matrix/\/\ 42 | 43 | for example, for /matrix/123n456n789/2 the user will get: 44 | 45 | 2 46 | 5 47 | 8 48 | 49 | * /matrix/\/\ 50 | 51 | for example, for /matrix/123n456n789/1 the user will get: 52 | 53 | 1 2 3 54 | 55 | ## Containers 56 | 57 | Using Docker or Podman, containerize the flask app so users can run the following two commands: 58 | 59 | ``` 60 | docker build -t app:latest /path/to/Dockerfile 61 | docker run -d -p 5000:5000 app 62 | ``` 63 | 64 | 1. You can use any image base you would like 65 | 2. Containerize only what you need for running the application, nothing else. 66 | 67 | ## CI 68 | 69 | Great, now that we have a working app and also can run it in a container, let's set up a CI for it so it won't break again in the future 70 | In current directory you have a file called tests.py which includes the tests for the app. What is required from you, is: 71 | 72 | 1. Write a CI pipeline that will run the app tests. You are free to choose whatever CI system or service you prefer. Use `python tests.py` for running the tests. 73 | 2. There should be some kind of test for the Dockerfile you wrote 74 | 3. Add additional unit test (or any other level of tests) for testing the app 75 | 76 | ### Guidelines 77 | 78 | * Except the app functionality, you can change whatever you want - structure, tooling, libraries, ... 
If possible, add `notes.md` file which explains reasons, logic, thoughts and anything else you would like to share 79 | * The CI part should include the source code for the pipeline definition 80 | -------------------------------------------------------------------------------- /exercises/flask_container_ci2/app/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding=utf-8 3 | -------------------------------------------------------------------------------- /exercises/flask_container_ci2/app/config.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding=utf-8 3 | 4 | import os 5 | 6 | basedir = os.path.abspath(os.path.dirname(__file__)) 7 | 8 | SECRET_KEY = 'shhh' 9 | CSRF_ENABLED = True 10 | 11 | SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db') 12 | -------------------------------------------------------------------------------- /exercises/flask_container_ci2/app/main.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding=utf-8 3 | 4 | from flask import Flask 5 | from flask import make_response 6 | 7 | import json 8 | 9 | app = Flask(__name__) 10 | 11 | 12 | @app.routee("/", methods=['GET']) 13 | def index(): 14 | return pretty_json({ 15 | "resources": { 16 | "matrix": "/matrix/", 17 | "column": "/columns//", 18 | "row": "/rows//", 19 | }, 20 | "current_uri": "/", 21 | "example": "/matrix/'123n456n789'", 22 | }) 23 | 24 | 25 | @app.route("/matrix/", methods=['GET']) 26 | def matrix(matrix): 27 | # TODO: return matrix, each row in a new line 28 | pass 29 | 30 | 31 | @app.route("/matrix//", methods=['GET']) 32 | def column(matrix, column_number): 33 | # TODO: return column based on given column number 34 | pass 35 | 36 | 37 | @app.route("/matrix//", methods=['GET']) 38 | def row(matrix, row_number): 39 | # TODO: return row based on 
given row number 40 | pass 41 | 42 | 43 | def pretty_json(arg): 44 | response = make_response(json.dumps(arg, sort_keys=True, indent=4)) 45 | response.headers['Content-type'] = "application/json" 46 | return response 47 | 48 | 49 | if __name__ == "__main__": 50 | app.run(port=5000) 51 | -------------------------------------------------------------------------------- /exercises/flask_container_ci2/app/tests.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding=utf-8 3 | 4 | import os 5 | import unittest 6 | 7 | from config import basedir 8 | from app import app 9 | from app import db 10 | 11 | 12 | class TestCase(unittest.TestCase): 13 | 14 | def setUp(self): 15 | app.config['TESTING'] = True 16 | app.config['WTF_CSRF_ENABLED'] = False 17 | app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.join( 18 | basedir, 'test.db') 19 | self.app = app.test_client() 20 | db.create_all() 21 | 22 | def tearDown(self): 23 | db.session.remove() 24 | db.drop_all() 25 | 26 | 27 | if __name__ == '__main__': 28 | unittest.main() 29 | -------------------------------------------------------------------------------- /exercises/flask_container_ci2/requirements.txt: -------------------------------------------------------------------------------- 1 | flask 2 | -------------------------------------------------------------------------------- /exercises/flask_container_ci2/tests.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding=utf-8 3 | 4 | import unittest 5 | 6 | from app import main 7 | 8 | 9 | class TestCase(unittest.TestCase): 10 | 11 | def setUp(self): 12 | self.app = main.app.test_client() 13 | 14 | def test_main_page(self): 15 | response = self.app.get('/', follow_redirects=True) 16 | self.assertEqual(response.status_code, 200) 17 | 18 | def test_matrix(self): 19 | response = self.app.get('/matrix/123n459,789', follow_redirects=True) 
20 | # Change when the matrix route is fixed and returning the actual matrix 21 | self.assertEqual(response.status_code, 500) 22 | 23 | 24 | if __name__ == '__main__': 25 | unittest.main() 26 | -------------------------------------------------------------------------------- /exercises/git/branch_01.md: -------------------------------------------------------------------------------- 1 | ## Git Commit 01 2 | 3 | ### Objective 4 | 5 | Learn how to work with Git Branches 6 | 7 | ### Instructions 8 | 9 | 1. Pick up a Git repository (or create a new one) with at least one commit 10 | 2. Create a new branch called "dev" 11 | 3. Modify one of the files in the repository 12 | 4. Create a new commit 13 | 5. Verify the commit you created is only in "dev" branch 14 | 15 | ### After you complete the exercise 16 | 17 | Answer the following: 18 | 19 | 1. Why branches are useful? Give an example of one real-world scenario for using branches 20 | -------------------------------------------------------------------------------- /exercises/git/commit_01.md: -------------------------------------------------------------------------------- 1 | ## Git Commit 01 2 | 3 | ### Objective 4 | 5 | Learn how to commit changes in Git repositories 6 | 7 | ### Instructions 8 | 9 | 1. Create a new directory 10 | 2. Make it a git repository 11 | 3. Create a new file called `file` with the content "hello commit" 12 | 4. Commit your new file 13 | 5. Run a git command to verify your commit was recorded 14 | 15 | ### After you complete the exercise 16 | 17 | Answer the following: 18 | 19 | * What are the benefits of commits? 20 | * Is there another way to verify a commit was created? 
21 | -------------------------------------------------------------------------------- /exercises/git/solutions/branch_01_solution.md: -------------------------------------------------------------------------------- 1 | ## Branch 01 - Solution 2 | 3 | ``` 4 | cd some_repository 5 | echo "master branch" > file1 6 | git add file1 7 | git commit -a -m "added file1" 8 | git checkout -b dev 9 | echo "dev branch" > file2 10 | git add file2 11 | git commit -a -m "added file2" 12 | ``` 13 | 14 | Verify: 15 | 16 | ``` 17 | git log (you should see two commits) 18 | git checkout master 19 | git log (you should see one commit) 20 | ``` 21 | -------------------------------------------------------------------------------- /exercises/git/solutions/commit_01_solution.md: -------------------------------------------------------------------------------- 1 | ## Git Commit 01 - Solution 2 | 3 | ``` 4 | mkdir my_repo && cd my_repo 5 | git init 6 | echo "hello_commit" > file 7 | git add file 8 | git commit -a -m "It's my first commit. Exciting!" 9 | git log 10 | ``` 11 | -------------------------------------------------------------------------------- /exercises/git/solutions/squashing_commits.md: -------------------------------------------------------------------------------- 1 | ## Git - Squashing Commits - Solution 2 | 3 | 4 | 1. In a git repository, create a new file with the content "Mario" and commit the change 5 | 6 | ``` 7 | git add new_file 8 | echo "Mario" -> new_file 9 | git commit -a -m "New file" 10 | ``` 11 | 12 | 2. Make change to the content of the file you just created so the content is "Mario & Luigi" and create another commit 13 | 14 | ``` 15 | echo "Mario & Luigi" > new_file 16 | git commit -a -m "Added Luigi" 17 | ``` 18 | 19 | 3. Verify you have two separate commits - `git log` 20 | 21 | 4. 
Squash the two commits you've created into one commit 22 | 23 | ``` 24 | git rebase -i HEAD~2 25 | ``` 26 | 27 | You should see something similar to: 28 | 29 | ``` 30 | pick 5412076 New file 31 | pick 4016808 Added Luigi 32 | ``` 33 | 34 | Change `pick` to `squash` 35 | 36 | 37 | ``` 38 | pick 5412076 New file 39 | squash 4016808 Added Luigi 40 | ``` 41 | 42 | Save it and provide a commit message for the squashed commit 43 | 44 | ### After you complete the exercise 45 | 46 | Answer the following: 47 | 48 | * What is the reason for squashing commits? - history becomes cleaner and it's easier to track changes without commit like "removed a character" for example. 49 | * Is it possible to squash more than 2 commits? - yes 50 | -------------------------------------------------------------------------------- /exercises/git/squashing_commits.md: -------------------------------------------------------------------------------- 1 | ## Git - Squashing Commits 2 | 3 | ### Objective 4 | 5 | Learn how to squash commits 6 | 7 | ### Instructions 8 | 9 | 1. In a git repository, create a new file with the content "Mario" and create a new commit 10 | 2. Make change to the content of the file you just created so the content is "Mario & Luigi" and create another commit 11 | 3. Verify you have two separate commits 12 | 4. Squash the latest two commits into one commit 13 | 14 | ### After you complete the exercise 15 | 16 | Answer the following: 17 | 18 | * What is the reason for squashing commits? 19 | * Is it possible to squash more than 2 commits? 20 | -------------------------------------------------------------------------------- /exercises/jenkins/jobs_101.md: -------------------------------------------------------------------------------- 1 | ## Jobs 101 2 | 3 | 1. Create a new job/pipeline 4 | 2. 
Make sure every time the job is triggered it prints the current date 5 | -------------------------------------------------------------------------------- /exercises/jenkins/remove_builds.md: -------------------------------------------------------------------------------- 1 | ### Jenkins - Remove Jobs 2 | 3 | #### Objective 4 | 5 | Learn how to write a Jenkins script that interacts with builds by removing builds older than X days. 6 | 7 | #### Instructions 8 | 9 | 1. Pick up (or create) a job which has builds older than X days 10 | 2. Write a script to remove only the builds that are older than X days 11 | 12 | #### Hints 13 | 14 | X can be anything. For example, remove builds that are older than 3 days. Just make sure that you don't simply remove all the builds (since that's different from the objective). 15 | -------------------------------------------------------------------------------- /exercises/jenkins/remove_jobs.md: -------------------------------------------------------------------------------- 1 | ### Jenkins - Remove Jobs 2 | 3 | #### Objective 4 | 5 | Learn how to write a Jenkins script to remove Jenkins jobs 6 | 7 | #### Instructions 8 | 9 | 1. Create three jobs called: test-job, test2-job and prod-job 10 | 2. 
Write a script to remove all the jobs that include the string "test" 11 | -------------------------------------------------------------------------------- /exercises/jenkins/solutions/remove_builds_solution.groovy: -------------------------------------------------------------------------------- 1 | def removeOldBuilds(buildDirectory, days = 14) { 2 | 3 | def wp = new File("${buildDirectory}") 4 | def currentTime = new Date() 5 | def backTime = currentTime - days 6 | 7 | wp.list().each { fileName -> 8 | folder = new File("${buildDirectory}/${fileName}") 9 | if (folder.isDirectory()) { 10 | def timeStamp = new Date(folder.lastModified()) 11 | if (timeStamp.before(backTime)) { 12 | folder.delete() 13 | } 14 | } 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /exercises/jenkins/solutions/remove_jobs_solution.groovy: -------------------------------------------------------------------------------- 1 | def jobs = Jenkins.instance.items.findAll { job -> job.name =~ /test/ } 2 | 3 | jobs.each { job -> 4 | println job.name 5 | //job.delete() 6 | } 7 | -------------------------------------------------------------------------------- /exercises/jenkins_pipelines.md: -------------------------------------------------------------------------------- 1 | ## Jenkins Pipelines 2 | 3 | Write/Create the following Jenkins pipelines: 4 | 5 | * A pipeline which will run unit tests upon git push to a certain repository 6 | * A pipeline which will do the following: 7 | 8 | * Provision an instance (can also be a container) 9 | * Configure the instance as Apache web server 10 | * Deploy a web application on the provisioned instance 11 | -------------------------------------------------------------------------------- /exercises/jenkins_scripts.md: -------------------------------------------------------------------------------- 1 | ## Jenkins Scripts 2 | 3 | Write the following scripts: 4 | 5 | * Remove all the jobs which include the
string "REMOVE_ME" in their name 6 | * Remove builds older than 14 days 7 | 8 | ### Answer 9 | 10 | * [Remove jobs which include specific string](jenkins/scripts/jobs_with_string.groovy) 11 | * [Remove builds older than 14 days](jenkins/scripts/old_builds.groovy) 12 | -------------------------------------------------------------------------------- /exercises/kubernetes/killing_containers.md: -------------------------------------------------------------------------------- 1 | ## "Killing" Containers 2 | 3 | 1. Run Pod with a web service (e.g. httpd) 4 | 2. Verify the web service is running with the `ps` command 5 | 3. Check how many restarts the pod has performed 6 | 4. Kill the web service process 7 | 5. Check how many restarts the pod has performed 8 | 6. Verify again the web service is running 9 | 10 | ## After you complete the exercise 11 | 12 | * Why did the "RESTARTS" count raised? 13 | -------------------------------------------------------------------------------- /exercises/kubernetes/pods_01.md: -------------------------------------------------------------------------------- 1 | ## Pods 01 2 | 3 | #### Objective 4 | 5 | Learn how to create pods 6 | 7 | #### Instructions 8 | 9 | 1. Choose a container image (e.g. redis, nginx, mongo, etc.) 10 | 2. Create a pod (in the default namespace) using the image you chose 11 | 3. Verify the pod is running 12 | -------------------------------------------------------------------------------- /exercises/kubernetes/replicaset_01.md: -------------------------------------------------------------------------------- 1 | ## ReplicaSet 101 2 | 3 | #### Objective 4 | 5 | Learn how to create and view ReplicaSets 6 | 7 | #### Instructions 8 | 9 | 1. Create a ReplicaSet with 2 replicas. The app can be anything. 10 | 2. Verify a ReplicaSet was created and there are 2 replicas 11 | 3. Delete one of the Pods the ReplicaSet has created 12 | 4. If you'll list all the Pods now, what will you see? 13 | 5. 
Remove the ReplicaSet you've created 14 | 6. Verify you've deleted the ReplicaSet 15 | -------------------------------------------------------------------------------- /exercises/kubernetes/replicaset_02.md: -------------------------------------------------------------------------------- 1 | ## ReplicaSet 102 2 | 3 | #### Objective 4 | 5 | Learn how to operate ReplicaSets 6 | 7 | #### Instructions 8 | 9 | 1. Create a ReplicaSet with 2 replicas. The app can be anything. 10 | 2. Verify a ReplicaSet was created and there are 2 replicas 11 | 3. Remove the ReplicaSet but NOT the pods it created 12 | 4. Verify you've deleted the ReplicaSet but the Pods are still running 13 | -------------------------------------------------------------------------------- /exercises/kubernetes/replicaset_03.md: -------------------------------------------------------------------------------- 1 | ## ReplicaSet 103 2 | 3 | #### Objective 4 | 5 | Learn how labels are used by ReplicaSets 6 | 7 | #### Instructions 8 | 9 | 1. Create a ReplicaSet with 2 replicas. Make sure the label used for the selector and in the Pods is "type=web" 10 | 2. Verify a ReplicaSet was created and there are 2 replicas 11 | 3. List the Pods running 12 | 4. Remove the label (type=web) from one of the Pods created by the ReplicaSet 13 | 5. List the Pods running. Are there more Pods running after removing the label? Why? 14 | 6. Verify the ReplicaSet indeed created a new Pod 15 | -------------------------------------------------------------------------------- /exercises/kubernetes/services_01.md: -------------------------------------------------------------------------------- 1 | ## Services 01 2 | 3 | #### Objective 4 | 5 | Learn how to create services 6 | 7 | #### Instructions 8 | 9 | 1. Create a pod running nginx 10 | 2. Create a service for the pod you've just created 11 | 3.
Verify the app is reachable 12 | -------------------------------------------------------------------------------- /exercises/kubernetes/solutions/killing_containers.md: -------------------------------------------------------------------------------- 1 | ## "Killing" Containers - Solution 2 | 3 | 1. Run Pod with a web service (e.g. httpd) - `kubectl run web --image registry.redhat.io/rhscl/httpd-24-rhel7` 4 | 2. Verify the web service is running with the `ps` command - `kubectl exec web -- ps` 5 | 3. Check how many restarts the pod has performed - `kubectl get po web` 6 | 4. Kill the web service process -`kubectl exec web -- kill 1` 7 | 5. Check how many restarts the pod has performed - `kubectl get po web` 8 | 6. Verify again the web service is running - `kubectl exec web -- ps` 9 | 10 | ## After you complete the exercise 11 | 12 | * Why did the "RESTARTS" count raised? - `because we killed the process and Kubernetes identified the container isn't running proprely so it performed restart to the Pod` 13 | -------------------------------------------------------------------------------- /exercises/kubernetes/solutions/pods_01_solution.md: -------------------------------------------------------------------------------- 1 | ## Pods 01 - Solution 2 | 3 | ``` 4 | kubectl run nginx --image=nginx --restart=Never 5 | kubectl get pods 6 | ``` 7 | -------------------------------------------------------------------------------- /exercises/kubernetes/solutions/replicaset_01_solution.md: -------------------------------------------------------------------------------- 1 | ## ReplicaSet 01 - Solution 2 | 3 | 1. Create a ReplicaSet with 2 replicas. The app can be anything. 4 | 5 | ``` 6 | cat >> rs.yaml < 43 | ``` 44 | 45 | 4. If you'll list all the Pods now, what will you see? 46 | 47 | ``` 48 | The same number of Pods. Since we defined 2 replicas, the ReplicaSet will make sure to create another Pod that will replace the one you've deleted. 49 | ``` 50 | 51 | 5. 
Remove the ReplicaSet you've created 52 | 53 | ``` 54 | kubectl delete -f rs.yaml 55 | ``` 56 | 57 | 6. Verify you've deleted the ReplicaSet 58 | 59 | ``` 60 | kubectl get rs 61 | # OR a more specific way: kubectl get -f rs.yaml 62 | ``` 63 | -------------------------------------------------------------------------------- /exercises/kubernetes/solutions/replicaset_02_solution.md: -------------------------------------------------------------------------------- 1 | ## ReplicaSet 02 - Solution 2 | 3 | 1. Create a ReplicaSet with 2 replicas. The app can be anything. 4 | 5 | ``` 6 | cat >> rs.yaml <> rs.yaml < running_pods.txt 43 | ``` 44 | 45 | 4. Remove the label (type=web) from one of the Pods created by the ReplicaSet 46 | 47 | ``` 48 | kubectl label pod type- 49 | ``` 50 | 51 | 5. List the Pods running. Are there more Pods running after removing the label? Why? 52 | 53 | ``` 54 | Yes, there is an additional Pod running because once the label, used as a matching selector, was removed, the Pod became independant meaning, it's not controlled by the ReplicaSet anymore and the ReplicaSet was missing replicas based on its definition so, it created a new Pod. 55 | ``` 56 | 57 | 6. 
Verify the ReplicaSet indeed created a new Pod 58 | 59 | ``` 60 | kubectl describe rs web 61 | ``` 62 | -------------------------------------------------------------------------------- /exercises/kubernetes/solutions/services_01_solution.md: -------------------------------------------------------------------------------- 1 | ## Services 01 - Solution 2 | 3 | ``` 4 | kubectl run nginx --image=nginx --restart=Never --port=80 --labels="app=dev-nginx" 5 | 6 | cat << EOF > nginx-service.yaml 7 | apiVersion: v1 8 | kind: Service 9 | metadata: 10 | name: nginx-service 11 | spec: 12 | selector: 13 | app: dev-nginx 14 | ports: 15 | - protocol: TCP 16 | port: 80 17 | targetPort: 9372 18 | EOF 19 | ``` 20 | -------------------------------------------------------------------------------- /exercises/misc/elk_kibana_aws.md: -------------------------------------------------------------------------------- 1 | # Elasticsearch, Kibana and AWS 2 | 3 | Your task is to build an elasticsearch cluster along with Kibana dashboard on one of the following clouds: 4 | 5 | * AWS 6 | * OpenStack 7 | * Azure 8 | * GCP 9 | 10 | You have to describe in details (preferably with some drawings) how you are going to set it up. 11 | Please describe in detail: 12 | 13 | - How you scale it up or down 14 | - How you quickly (less 20 minutes) provision the cluster 15 | - How you apply security policy for access control 16 | - How you transfer the logs from the app to ELK 17 | - How you deal with multi apps running in different regions 18 | 19 | # Solution 20 | 21 | This one out of many possible solutions. This solution is relying heavily on AWS. 22 | 23 | * Create a VPC with subnet so we can place Elasticsearch node(s) in internal environment only. 24 | If required, we will also setup NAT for public access. 25 | 26 | * Create an IAM role for the access to the cluster. Also, create a separate role for admin access. 
27 | 28 | * To provision the solution quickly, we will use the elasticsearch service directly from AWS for production deployment. 29 | This way we also cover multiple AZs. As for authentication, we either use Amazon cognito or the organization LDAP server. 30 | 31 | * To transfer data, we will have to install logstash agent on the instances. The agent will be responsible 32 | for pushing the data to elasticsearch. 33 | 34 | * For monitoring we will use: 35 | 36 | * Cloud watch to monitor cluster resource utilization 37 | * Cloud metrics dashboard 38 | 39 | * If access required from multiple regions we will transfer all the data to S3 which will allow us to view the data 40 | from different regions and consolidate it in one dashboard 41 | -------------------------------------------------------------------------------- /exercises/openshift/projects_101.md: -------------------------------------------------------------------------------- 1 | ## OpenShift - Projects 101 2 | 3 | In a newly deployed cluster (preferably) perform and answer the following instructions and questions, using CLI only 4 | 5 | 1. Login to the OpenShift cluster 6 | 2. List all the projects 7 | 3. Create a new project called 'neverland' 8 | 4. Check the overview status of the current project 9 | -------------------------------------------------------------------------------- /exercises/openshift/solutions/projects_101.md: -------------------------------------------------------------------------------- 1 | ## Projects 101 - Solution 2 | 3 | 1. Login to the OpenShift cluster -> `oc login -u YOUR_USER -p YOUR_PASSWORD_OR_TOKEN` 4 | 2. List all the projects -> `oc get projects`(The output should be empty in a newly created cluster) 5 | 3. Create a new project called 'neverland' -> `oc new-project neverland` 6 | 4. 
Check the overview status of the current project -> `oc status` 7 | -------------------------------------------------------------------------------- /exercises/os/fork_101.md: -------------------------------------------------------------------------------- 1 | ## Fork 101 2 | 3 | Answer the questions given the following program (without running it): 4 | 5 | ``` 6 | #include <stdio.h> 7 | #include <unistd.h> 8 | int main() 9 | { 10 | fork(); 11 | printf("\nyay\n"); 12 | return 0; 13 | } 14 | ``` 15 | 16 | 1. How many times will the word "yay" be printed? 17 | 2. How many processes will be created? 18 | -------------------------------------------------------------------------------- /exercises/os/fork_102.md: -------------------------------------------------------------------------------- 1 | ## Fork 102 2 | 3 | Answer the questions given the following program (without running it): 4 | 5 | ``` 6 | #include <stdio.h> 7 | #include <unistd.h> 8 | 9 | int main() 10 | { 11 | fork(); 12 | fork(); 13 | printf("\nyay\n"); 14 | return 0; 15 | } 16 | ``` 17 | 18 | 1. How many times will the word "yay" be printed? 19 | 2. How many processes will be created? 20 | -------------------------------------------------------------------------------- /exercises/os/solutions/fork_101_solution.md: -------------------------------------------------------------------------------- 1 | ## Fork 101 - Solution 2 | 3 | 1. 2 4 | 2. 2 5 | -------------------------------------------------------------------------------- /exercises/os/solutions/fork_102_solution.md: -------------------------------------------------------------------------------- 1 | ## Fork 102 - Solution 2 | 3 | 1. 4 4 | 2.
4 5 | -------------------------------------------------------------------------------- /exercises/pipeline_deploy_image_to_k8.md: -------------------------------------------------------------------------------- 1 | ## Build & Publish Docker Images to Kubernetes Cluster 2 | 3 | Write a pipeline, on any CI/CD system you prefer, that will build am image out of a given Dockerfile and will publish that image to running Kubernetes cluster. 4 | -------------------------------------------------------------------------------- /exercises/programming/grep_berfore_and_after.md: -------------------------------------------------------------------------------- 1 | Implement the following grep command in Python (numbers can be different): `grep error -A 2 -B 2 some_file` 2 | -------------------------------------------------------------------------------- /exercises/programming/web_scraper.md: -------------------------------------------------------------------------------- 1 | ## Web Scraper 2 | 3 | 1. Pick a web site to scrape 4 | 2. Using any language you would like, write a web scraper to save some data from the site you chose 5 | 3. Save the results to a database (doesn't matter which database, just pick one) 6 | 7 | 8 | * Note: if you don't know which site to pick up have a look [here](http://toscrape.com) 9 | -------------------------------------------------------------------------------- /exercises/python/advanced_data_types.md: -------------------------------------------------------------------------------- 1 | ## (Advanced) Identify the data type 2 | 3 | For each of the following, identify what is the data type of the result variable 4 | 5 | 1. a = {'a', 'b', 'c'} 6 | 2. b = {'1': '2'} 7 | 4. c = ([1, 2, 3]) 8 | 4. d = (1, 2, 3) 9 | 4. 
e = True+True 10 | -------------------------------------------------------------------------------- /exercises/python/compress_string.md: -------------------------------------------------------------------------------- 1 | ## Compress String 2 | 3 | 1. Write a function that gets a string and compresses it 4 | - 'aaaabbccc' -> 'a4b2c3' 5 | - 'abbbc' -> 'a1b3c1' 6 | 2. Write a function that decompresses a given string 7 | - 'a4b2c3' -> 'aaaabbccc' 8 | - 'a1b3c1' -> 'abbbc' 9 | -------------------------------------------------------------------------------- /exercises/python/data_types.md: -------------------------------------------------------------------------------- 1 | ## Data Types 2 | 3 | For each of the following, identify what is the data type of the result variable 4 | 5 | 1. a = [1, 2, 3, 4, 5] 6 | 2. b = "Hello, is it me you looking for?" 7 | 3. e = 100 8 | 4. f = '100' 9 | 5. i = 0.100 10 | 6. i = True 11 | 12 | Bonus question: how to find out in Python what is the data type of certain variable? 13 | -------------------------------------------------------------------------------- /exercises/python/reverse_string.md: -------------------------------------------------------------------------------- 1 | ## Reverse a String 2 | 3 | Write a code that reverses a string 4 | -------------------------------------------------------------------------------- /exercises/python/solutions/advanced_data_types_solution.md: -------------------------------------------------------------------------------- 1 | ## (Advanced) Identify the data type 2 | 3 | For each of the following, identify what is the data type of the result variable 4 | 5 | 1. a = {'a', 'b', 'c'} -> set 6 | 2. b = {'1': '2'} -> dict 7 | 4. c = ([1, 2, 3]) -> list 8 | 4. d = (1, 2, 3) -> tuple 9 | 4. 
e = True+True -> int 10 | -------------------------------------------------------------------------------- /exercises/python/solutions/data_types_solution.md: -------------------------------------------------------------------------------- 1 | ## Data Types - Solution 2 | 3 | 1. a = [1, 2, 3, 4, 5] -> list 4 | 2. b = "Hello, is it me you looking for?" -> string 5 | 3. e = 100 -> int 6 | 4. f = '100' -> string 7 | 5. i = 0.100 -> float 8 | 6. i = True -> bool 9 | 10 | ### Bonus question - Answer 11 | 12 | `type(...)` 13 | -------------------------------------------------------------------------------- /exercises/python/solutions/reverse_string.md: -------------------------------------------------------------------------------- 1 | ## Reverse a String - Solution 2 | 3 | ``` 4 | my_string[::-1] 5 | ``` 6 | 7 | A more visual way is:
8 | Careful: this is very slow 9 | 10 | ``` 11 | def reverse_string(string): 12 | temp = "" 13 | for char in string: 14 | temp = char + temp 15 | return temp 16 | ``` 17 | -------------------------------------------------------------------------------- /exercises/sql/improve_query.md: -------------------------------------------------------------------------------- 1 | ## Comparisons vs. Functions 2 | 3 | 1. Improve the following query 4 | 5 | ``` 6 | SELECT count(*) 7 | FROM shawarma_purchases 8 | WHERE 9 | YEAR(purchased_at) == '2017' 10 | ``` 11 | -------------------------------------------------------------------------------- /exercises/sql/solutions/improve_query.md: -------------------------------------------------------------------------------- 1 | ## Comparisons vs. Functions - Solution 2 | 3 | ``` 4 | SELECT count(*) 5 | FROM shawarma_purchases 6 | WHERE 7 | purchased_at >= '2017-01-01' AND 8 | purchased_at <= '2017-12-31' 9 | ``` 10 | -------------------------------------------------------------------------------- /exercises/write_dockerfile_run_container.md: -------------------------------------------------------------------------------- 1 | # Write a Dockerfile and run a container 2 | 3 | Your task is as follows: 4 | 5 | 1. Create a Docker image: 6 | * Use centos or ubuntu as the base image 7 | * Install apache web server 8 | * Deploy any web application you want 9 | * Add https support (using HAProxy as reverse-proxy) 10 | 2. Once you wrote the Dockerfile and created an image, run the container and test the application. Describe how you tested it and provide the output 11 | 3. Describe one or more weaknesses of your Dockerfile. Is it ready to be used in production?
12 | -------------------------------------------------------------------------------- /images/Go.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/Go.png -------------------------------------------------------------------------------- /images/HR.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/HR.png -------------------------------------------------------------------------------- /images/ansible.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/ansible.png -------------------------------------------------------------------------------- /images/aws.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/aws.png -------------------------------------------------------------------------------- /images/azure.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/azure.png -------------------------------------------------------------------------------- /images/bash.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/bash.png 
-------------------------------------------------------------------------------- /images/big-data.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/big-data.png -------------------------------------------------------------------------------- /images/certificates.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/certificates.png -------------------------------------------------------------------------------- /images/cloud.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/cloud.png -------------------------------------------------------------------------------- /images/containers.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/containers.png -------------------------------------------------------------------------------- /images/databases.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/databases.png -------------------------------------------------------------------------------- /images/design.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/design.png -------------------------------------------------------------------------------- /images/design/cdn-no-downtime.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/design/cdn-no-downtime.png -------------------------------------------------------------------------------- /images/design/input-process-output.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/design/input-process-output.png -------------------------------------------------------------------------------- /images/design/producers_consumers_fix.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/design/producers_consumers_fix.png -------------------------------------------------------------------------------- /images/design/producers_consumers_issue.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/design/producers_consumers_issue.png -------------------------------------------------------------------------------- /images/devops.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/devops.png 
-------------------------------------------------------------------------------- /images/devops_exercises.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/devops_exercises.png -------------------------------------------------------------------------------- /images/devops_resources.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/devops_resources.png -------------------------------------------------------------------------------- /images/distributed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/distributed.png -------------------------------------------------------------------------------- /images/distributed/distributed_design_lb.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/distributed/distributed_design_lb.png -------------------------------------------------------------------------------- /images/distributed/distributed_design_standby.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/distributed/distributed_design_standby.png -------------------------------------------------------------------------------- /images/dns.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/dns.png -------------------------------------------------------------------------------- /images/elastic.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/elastic.png -------------------------------------------------------------------------------- /images/exercises.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/exercises.png -------------------------------------------------------------------------------- /images/general.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/general.png -------------------------------------------------------------------------------- /images/git.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/git.png -------------------------------------------------------------------------------- /images/googlecloud.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/googlecloud.png -------------------------------------------------------------------------------- /images/hardware.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/hardware.png -------------------------------------------------------------------------------- /images/how_they_devops.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/how_they_devops.png -------------------------------------------------------------------------------- /images/infraverse.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/infraverse.png -------------------------------------------------------------------------------- /images/jenkins.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/jenkins.png -------------------------------------------------------------------------------- /images/jenkins/jenkins-to-kibana.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/jenkins/jenkins-to-kibana.png -------------------------------------------------------------------------------- /images/kubernetes.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/kubernetes.png -------------------------------------------------------------------------------- 
/images/kubernetes/kubernetes_components.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/kubernetes/kubernetes_components.png -------------------------------------------------------------------------------- /images/kubernetes/kubernetes_components_solution.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/kubernetes/kubernetes_components_solution.png -------------------------------------------------------------------------------- /images/linux.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/linux.png -------------------------------------------------------------------------------- /images/linux_master.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/linux_master.jpeg -------------------------------------------------------------------------------- /images/mongo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/mongo.png -------------------------------------------------------------------------------- /images/monitoring.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/monitoring.png -------------------------------------------------------------------------------- /images/network.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/network.png -------------------------------------------------------------------------------- /images/openshift.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/openshift.png -------------------------------------------------------------------------------- /images/openstack.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/openstack.png -------------------------------------------------------------------------------- /images/os.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/os.png -------------------------------------------------------------------------------- /images/programming.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/programming.png -------------------------------------------------------------------------------- /images/prometheus.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/prometheus.png -------------------------------------------------------------------------------- /images/puppet.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/puppet.png -------------------------------------------------------------------------------- /images/python.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/python.png -------------------------------------------------------------------------------- /images/regex.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/regex.png -------------------------------------------------------------------------------- /images/security.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/security.png -------------------------------------------------------------------------------- /images/sql.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/sql.png -------------------------------------------------------------------------------- /images/storage.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/storage.png -------------------------------------------------------------------------------- /images/system_design_notebook.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/system_design_notebook.png -------------------------------------------------------------------------------- /images/terraform.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/terraform.png -------------------------------------------------------------------------------- /images/testing.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/testing.png -------------------------------------------------------------------------------- /images/virtualization.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/virtualization.png -------------------------------------------------------------------------------- /images/you.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/skhelge/https-github.com-bregman-arie-devops-exercises/a1a1f366992a7fd981878926e25fbf37f7ef1f82/images/you.png 
-------------------------------------------------------------------------------- /prepare_for_interview.md: -------------------------------------------------------------------------------- 1 | ## How to prepare for DevOps/SRE/Production Engineer interviews? 2 | 3 | Note: the following is opinionated. 4 | 5 | ### Skills you should have 6 | 7 | #### Linux 8 | 9 | Every DevOps Engineer should have a deep understanding of at least one operating system and if you have the option to choose then I would say it should definitely be Linux as I believe it's a requirement of at least 90% of the DevOps jobs postings out there. 10 | 11 | Usually, the followup question is "How extensive should my knowledge be?" Out of all the DevOps skills, I would say this, along with coding, should be your strongest skills. Be familiar with OS processes, debugging tools, filesystem, networking, ... know your operating system, understand how it works, how to manage issues, etc. 12 | 13 | Not long ago, I've created a list of Linux resources right [here](https://dev.to/abregman/collection-of-linux-resources-3nhk). There are some good sites there that you can use for learning more about Linux. 14 | 15 | #### Programming 16 | 17 | My personal belief is that any DevOps engineer should know programming, at least to some degree. Having this skill you can automate manual processes, improve some of the open source tools you are using today or build new tools & projects to provide a solution to existing problems. Knowing how to code = a lot of power. 18 | 19 | When it comes to interviews you'll notice that the level of knowledge very much depends on the company or position you are interviewing for. Some will require you just to be able to write simple scripts while others will deep dive into complex algorithms and data structures. 20 | 21 | The best way to practice this skill is by doing some actual coding - scripts, online challenges, CLI tools, web applications, ... 
just code :) 22 | 23 | Also, the following is probably clear to most people but let's still clarify it: when given the chance to choose any language for answering coding tasks/questions, choose the one you have experience with! Some candidates prefer to choose the language they think the company is using and this is a huge mistake since giving the right answer is always better than a wrong answer, no matter which language you have used :) 24 | 25 | I recommend the following sites for practicing coding: 26 | 27 | * [HackerRank](https://www.hackerrank.com) 28 | * [LeetCode](https://leetcode.com) 29 | * [Exercism](https://exercism.io) 30 | 31 | Starting your own project is also a good idea. More on that later on. 32 | 33 | #### Architecture and Design 34 | 35 | This is also an important aspect of DevOps. You should be able to describe how to design different systems, workflows, and architectures. Also, the scale is an important aspect of that. A design which might work for a dozen of hosts or X amount of data, will not necessarily work well with bigger scale. 36 | 37 | Some ideas for you to explore: 38 | 39 | * How to design and implement a CI pipeline (or pipelines) for verifying PRs, run multiple different types of tests, package the project and deploy it somewhere 40 | * How to design and implement secured ELK architecture which will get logs from 10,000 apps and will display the data eventually to the user 41 | * Microservices designs are also quite popular these days 42 | 43 | I recommend going over the following GitHub projects as they are really deep-diving into System Design: 44 | 45 | * https://github.com/donnemartin/system-design-primer 46 | 47 | #### Tools 48 | 49 | Some interviews will focus on specific tools or technologies. Which tools? this is mainly based on a combination of what you mentioned in your C.V & those that are mentioned in the job posting and used in the company. 
Here are some questions I believe anyone should know to answer regarding the tools he/she is familiar with: 50 | 51 | * What the tool does? What it allows us to achieve that we couldn't do without it? 52 | * What its advantages over other tools in the same area, with the same purpose? Why you specifically using it? 53 | * How it works? 54 | * How to use it? 55 | 56 | Let's deep dive into practical preparation steps 57 | 58 | ### Scenarios || Challenges || Tasks 59 | 60 | This is a very common way to interview today for DevOps roles. The candidate is given a task which represents a common task of DevOps Engineers or a piece of common knowledge and the candidate has several hours or days to accomplish the task.
61 | 62 | This is a great way to prepare for interviews and I recommend to try it out before actually interviewing. How? Take requirements from job posts and convert them into scenarios. Let's see an example: 63 | 64 | "Knowledge in CI/CD" -> Scenario: create a CI/CD pipeline for a project. 65 | 66 | At this point, some people ask: "but what project?" and the answer is: what about GitHub? it has only 9125912851285192 projects...and a free way to set up CI to any of them (also a great way to learn how to collaborate with others :) ) 67 | 68 | Let's convert another scenario: 69 | 70 | "Experience with provisioning servers" -> Scenario: provision a server (to make it more interesting: create a web server). 71 | 72 | And the last example: 73 | 74 | "Experience with scripting" -> Scenario: write a script. Don't waste too much time thinking "what script should I write?". Simply automate something you are doing manually or even implement your own version of common small utils. 75 | 76 | ### Start your own DevOps project 77 | 78 | Starting a DevOps project is a good idea because: 79 | 80 | * It will make you practice coding 81 | * It will be something you can add to your resume and talk about with the interviewer 82 | * Depends on size and complexity, it can teach you something about design in general 83 | * Depends on adoption, it can teach you about managing Open Source projects 84 | 85 | Same here, don't overthink what your project should be about. Just go and build something :) 86 | 87 | ### Sample interview questions 88 | 89 | Make a sample list of interview questions on various topics/areas like technical, company, role, ... and try to answer them. 90 | See if you can manage to answer them in a fluent, detailed way. 91 | 92 | Better yet, ask a good friend/colleague to challenge you with some questions. 
Your self-awareness might be an obstacle in objective self-review of your knowledge :) 93 | 94 | ### Networking 95 | 96 | For those who attend technical meetups and conferences, it can be a great opportunity to chat with people from other companies on their interviewing process. But don't start with it, it can be quite awkward. Say at least hello first... (: 97 | 98 | Doing so can give you a lot of information on what to expect from an interview at some companies or how to better prepare. 99 | 100 | ### Know your resume 101 | 102 | It may sound trivial but the idea here is simple: be ready to answer any question regarding any line you included in your resume. 103 | Sometimes candidates surprised when they are asked on a skill or line which seems to be not related to the position but the simple truth is: if you mentioned something on your resume, it's only fair to ask you about it. 104 | 105 | 106 | ### Know the company 107 | 108 | Be familiar with the company you are interviewing at. Some ideas: 109 | 110 | * What the company does? 111 | * What products it has? 112 | * Why its products are unique (or better than other products)? This can also be a good question for you to ask 113 | 114 | ### Books 115 | 116 | From my experience, this is not done by many candidates but it's one of the best ways to deep dive into topics like operating system, virtualization, scale, distributed systems, etc. 117 | 118 | In most cases, you will do fine without reading books but for the AAA interviews (hardest level) you'll want to read some books and overall if you inspire to be better DevOps Engineer, books (also articles, blog posts) is a great way :) 119 | 120 | ### Consider starting in non-DevOps position 121 | 122 | While not a preparation step, you should know that landing DevOps as a first position can be challenging. No, it's not impossible but still, since DevOps covers many different practices, tools, ... 
it can be quite challenging and also overwhelming for someone to try and achieve it as a first position.
123 | A possible path to becoming a DevOps engineer is to start with actually a different (but related) position and switch from there after 1-2 years or more. 124 | 125 | Some ideas: 126 | 127 | * System Administrator - This is perfect because every DevOps Engineer should have a solid understanding of the OS and sysadmins know their OS :) 128 | * Software Developer/Engineer - A DevOps should have coding skills and this position will provide more than the required knowledge in most cases 129 | * QA Engineer - This is a more tricky one because IMHO there are less overlapping areas/skills with DevOps Engineer. Sure, DevOps engineers should have some knowledge about testing but usually, it seems their solid skills/background is mainly composed out of system internals and coding skills. 130 | 131 | ### What to expect from a DevOps interview? 132 | 133 | DevOps interviews can be very different. Some will include design questions, some will focus on coding, others will include short technical questions and you might even have an interview where the interviewer only goes over your resume and discussing your past experience. 134 | 135 | There are a couple of things you can do about it so it will be a less overwhelming experience: 136 | 137 | 1. You can and probably should ask the HR (in some cases even the team lead) how the interview process looks like. Some will be kind enough to even tell you how to prepare. 138 | 2. Usually, the job posting gives more than a hint on where the focus will be and what you should focus on in your preparations so read it carefully. 139 | 3. There are plenty of sites that have notes or a summary of the interview process in different companies, especially big enterprises. 
140 | 141 | ### Don't forget to be an interviewer as well 142 | 143 | Some people tend to look at interviews as a one-way road of "Determining whether a candidate is qualified" but in reality, a candidate should also determine whether 144 | the company he/she is interviewing at, is the right place for him/her. 145 | 146 | * Do I care about team size? More specifically, do I care about being a one-man show or being part of a bigger team? 147 | * Do I care about work-life balance? 148 | * Do I care about personal growth and how it's practically done? 149 | * Do I care about knowing what are my responsibilities as part of the role? 150 | 151 | If you do, you should also play the interviewer role :) 152 | 153 | ### One Last Thing 154 | 155 | [Good luck](https://youtu.be/AFUrG1-BAt4?t=59) :) 156 | -------------------------------------------------------------------------------- /scripts/count_questions.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # We dont care about non alphanumerics filenames so we just ls | grep to shorten the script. 4 | 5 | echo $(( $(grep \ -c README.md) + $(grep -i Solution README.md | grep \.md -c) )) 6 | -------------------------------------------------------------------------------- /scripts/question_utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | Question utils functions 3 | """ 4 | 5 | import pathlib 6 | from random import choice 7 | from typing import List 8 | 9 | p = pathlib.Path(__file__).parent.parent.joinpath('README.md') 10 | 11 | 12 | def get_file_list(): 13 | with open(p, 'rb') as f: 14 | file_list = [line.rstrip() for line in f.readlines()] 15 | return file_list 16 | 17 | 18 | def get_question_list(file_list: List[bytes]) -> list: 19 | 20 | questions_list = [] 21 | temp = [] 22 | after_summary_tag = False 23 | 24 | for line in file_list: 25 | if line.startswith(b'
'): 26 | temp.append(line) 27 | after_summary_tag = True 28 | 29 | elif after_summary_tag and line != b'' and b'
' not in line: 30 | temp.append(line) 31 | 32 | elif after_summary_tag and b'' in line: 33 | temp.append(line) 34 | after_summary_tag = False 35 | 36 | questions_list.append(temp) 37 | temp = [] 38 | 39 | return questions_list 40 | 41 | 42 | def get_answered_questions(question_list: List[List[bytes]]) -> list: 43 | """Dont let the type hint confuse you, problem of not using classes. 44 | 45 | It takes the result of get_question_list(file_list) 46 | 47 | Returns a list of questions that are answered. 48 | """ 49 | 50 | t = [] 51 | 52 | for q in question_list: 53 | 54 | index = 0 55 | 56 | for i in q: 57 | if b'' in i: 58 | index = q.index(i) 59 | 60 | if q[index+1: len(q) - 1]: 61 | t.append(q) 62 | 63 | return t 64 | 65 | 66 | def get_challenges_count() -> int: 67 | challenges_path = pathlib.Path(__file__).parent.parent.joinpath('exercises').glob('*.md') 68 | return len(list(challenges_path)) 69 | 70 | 71 | # WIP WAITING FEEDBACK 72 | def get_random_question(question_list: List[List[bytes]], with_answer=False): 73 | if with_answer: 74 | return choice(get_answered_questions(question_list)) 75 | return choice(question_list) 76 | 77 | 78 | """Use this question_list. Unless you have already opened/worked/need the file, then don't or 79 | you will end up doing the same thing twice. 80 | 81 | eg: 82 | 83 | #my_dir/main.py 84 | 85 | from scripts import question_utils 86 | 87 | print(question_utils.get_answered_questions(question_utils.question_list) 88 | 89 | >> 123 90 | 91 | """ 92 | 93 | question_list = get_question_list(get_file_list()) 94 | -------------------------------------------------------------------------------- /scripts/run_ci.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # These are the same steps we are running in Travis CI 3 | 4 | python tests/syntax_lint.py 5 | flake8 --max-line-length=100 . 
&& echo "PEP8 Passed" 6 | -------------------------------------------------------------------------------- /scripts/update_question_number.py: -------------------------------------------------------------------------------- 1 | """ 2 | Meant to be used like this: 3 | 4 | python scripts/update_question_number.py 5 | 6 | """ 7 | import pathlib 8 | from scripts.question_utils import get_question_list, get_challenges_count 9 | 10 | LINE_FLAG = b":bar_chart:" 11 | 12 | p = pathlib.Path(__file__).parent.parent.joinpath('README.md') 13 | 14 | 15 | with open(p, 'rb') as f: 16 | file = f.readlines() 17 | 18 | 19 | file_list = [line.rstrip() for line in file] 20 | 21 | question_list = get_question_list(file_list) 22 | question_count = len(question_list) 23 | total_count = question_count + get_challenges_count() 24 | print(question_count) 25 | print(get_challenges_count()) 26 | print(total_count) 27 | for line in file: 28 | if LINE_FLAG in line: 29 | file[file.index(line)] = b':bar_chart:  There are currently **%s** questions\r\n' %\ 30 | str(total_count).encode() 31 | break 32 | 33 | with open(p, 'wb') as f: 34 | f.writelines(file) 35 | -------------------------------------------------------------------------------- /tests/scripts_question_utils_unittest.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from pathlib import Path 3 | from typing import List 4 | from scripts.question_utils import get_answered_questions, get_question_list 5 | 6 | 7 | def open_test_case_file(n: int) -> List[bytes]: 8 | tests_path = Path(__file__).parent.joinpath() 9 | 10 | with open(f'{tests_path}/testcases/testcase{n}.md', 'rb') as f: 11 | file_list = [line.rstrip() for line in f.readlines()] 12 | return file_list 13 | 14 | 15 | class QuestionCount(unittest.TestCase): 16 | 17 | def test_case_1(self): 18 | raw_list = open_test_case_file(1) 19 | question_list = get_question_list(raw_list) 20 | answers = 
get_answered_questions(question_list) 21 | 22 | self.assertEqual(len(question_list), 11) 23 | self.assertEqual(len(answers), 3) 24 | 25 | def test_case_2(self): 26 | raw_list = open_test_case_file(2) 27 | question_list = get_question_list(raw_list) 28 | answers = get_answered_questions(question_list) 29 | 30 | self.assertEqual(len(question_list), 16) 31 | self.assertEqual(len(answers), 11) 32 | -------------------------------------------------------------------------------- /tests/syntax_checker_unittest.py: -------------------------------------------------------------------------------- 1 | """ 2 | WIP 3 | 4 | Yes, we do write tests for our tests. 5 | """ 6 | from pathlib import Path 7 | from typing import List 8 | from unittest import TestCase 9 | from tests import syntax_lint 10 | 11 | 12 | def open_test_case_file(n: int) -> List[bytes]: 13 | tests_path = Path(__file__).parent.joinpath() 14 | 15 | with open(f'{tests_path}/testcases/testcase{n}.md', 'rb') as f: 16 | file_list = [line.rstrip() for line in f.readlines()] 17 | return file_list 18 | 19 | 20 | test_case_1 = open_test_case_file(1) 21 | test_case_2 = open_test_case_file(2) 22 | test_case_3 = open_test_case_file(3) 23 | 24 | 25 | class TestSyntax(TestCase): 26 | 27 | def test_details_count_case1(self): 28 | self.assertTrue(syntax_lint.count_details(test_case_1)) 29 | 30 | def test_details_count_case2(self): 31 | self.assertTrue(syntax_lint.count_details(test_case_2)) 32 | 33 | def test_details_errors_1(self): 34 | syntax_lint.check_details_tag(test_case_1) 35 | self.assertFalse(syntax_lint.errors) 36 | 37 | def test_details_errors_2(self): 38 | syntax_lint.check_details_tag(test_case_2) 39 | self.assertFalse(syntax_lint.errors) 40 | # 41 | # def test_details_error_exist_1(self): 42 | # syntax_checker.check_details_tag(test_case_3) 43 | # print(syntax_checker.errors) 44 | # self.assertEqual(len(syntax_checker.errors), 3) 45 | -------------------------------------------------------------------------------- 
/tests/syntax_lint.py: -------------------------------------------------------------------------------- 1 | """ 2 | Testing suite for https://github.com/bregman-arie/devops-interview-questions 3 | written by surister 4 | 5 | Even though both check_details_tag and check_summary_tags are practically the 6 | same, due to readability and functionality it was decided to be split like 7 | that. 8 | 9 | Usage: 10 | $ python tests/syntax_lint.py 11 | 12 | """ 13 | 14 | import pathlib 15 | 16 | p = pathlib.Path(__file__).parent.parent.joinpath('README.md') 17 | 18 | with open(p, 'rb') as f: 19 | file_list = [line.rstrip() for line in f.readlines()] 20 | 21 | errors = [] 22 | 23 | 24 | def count_details(file_list): 25 | """ 26 | Counts the total amount of
and
27 | 28 | Used for debugging purpose, not meant to be used in actual tests 29 | """ 30 | details_final_count = 0 31 | details_count = 0 32 | 33 | for line_number, line in enumerate(file_list): 34 | if b'
' in line: 35 | details_count += 1 36 | if b'
' in line: 37 | details_final_count += 1 38 | 39 | return details_count == details_final_count 40 | 41 | 42 | def count_summary(file_list): 43 | """ 44 | Counts the total amount of
and
45 | 46 | Used for debugging purpose, not meant to be used in actual tests 47 | """ 48 | details_final_count = 0 49 | details_count = 0 50 | 51 | for line_number, line in enumerate(file_list): 52 | if b'' in line: 53 | details_count += 1 54 | if b'' in line: 55 | details_final_count += 1 56 | 57 | return details_count == details_final_count 58 | 59 | 60 | def check_details_tag(file_list): 61 | """ 62 | Check whether the structure: 63 |
64 | ... 65 |
66 | 67 | Is correctly followed, if not generates an error. 68 | 69 | """ 70 | 71 | after_detail = False 72 | error = False 73 | err_message = '' 74 | for line_number, line in enumerate(file_list): 75 | if b'
' in line and b'
' in line: 76 | pass 77 | else: 78 | if b'
' in line and after_detail: 79 | err_message = f'Missing closing detail tag round line {line_number - 1}' 80 | error = True 81 | if b'
' in line and not after_detail: 82 | err_message = f'Missing opening detail tag round line {line_number - 1}' 83 | error = True 84 | 85 | if b'
' in line: 86 | after_detail = True 87 | 88 | if b'
' in line and after_detail: 89 | after_detail = False 90 | 91 | if error: 92 | errors.append(err_message) 93 | 94 | error = False 95 | 96 | 97 | def check_summary_tag(file_list): 98 | """ 99 | Check whether the structure: 100 | 101 | ... 102 | 103 | 104 | Is correctly followed, if not generates an error. 105 | 106 | """ 107 | 108 | after_summary = False 109 | error = False 110 | err_message = '' 111 | for line_number, line in enumerate(file_list): 112 | if b'' in line and b'' in line: 113 | pass 114 | else: 115 | if b'' in line and after_summary: 116 | err_message = f'Missing closing summary tag around line {line_number}' 117 | error = True 118 | if b'' in line and not after_summary: 119 | err_message = f'Missing opening summary tag around line {line_number}' 120 | error = True 121 | 122 | if b'' in line: 123 | after_summary = True 124 | 125 | if b'' in line and after_summary: 126 | after_summary = False 127 | 128 | if error: 129 | errors.append(err_message) 130 | 131 | error = False 132 | 133 | 134 | if __name__ == '__main__': 135 | check_details_tag(file_list) 136 | check_summary_tag(file_list) 137 | if errors: 138 | for error in errors: 139 | print(error) 140 | exit(1) 141 | 142 | print("Tests passed successfully.") 143 | -------------------------------------------------------------------------------- /tests/testcases/testcase1.md: -------------------------------------------------------------------------------- 1 |
2 | What is Docker? What are you using it for?
3 |
4 | 5 |
6 | How containers are different from VMs?
7 | 8 | The primary difference between containers and VMs is that containers allow you to virtualize 9 | multiple workloads on the operating system while in the case of VMs the hardware is being virtualized to 10 | run multiple machines each with its own OS. 11 |
12 | 13 |
14 | In which scenarios would you use containers and in which you would prefer to use VMs?
15 | 16 | You should choose VMs when: 17 | * you need run an application which requires all the resources and functionalities of an OS 18 | * you need full isolation and security 19 | 20 | You should choose containers when: 21 | * you need a lightweight solution 22 | * Running multiple versions or instances of a single application 23 |
24 | 25 |
26 | Explain Docker architecture
27 |
28 | 29 |
30 | Describe in detail what happens when you run `docker run hello-world`?
31 | 32 | Docker CLI passes your request to Docker daemon. 33 | Docker daemon downloads the image from Docker Hub 34 | Docker daemon creates a new container by using the image it downloaded 35 | Docker daemon redirects output from container to Docker CLI which redirects it to the standard output 36 |
37 | 38 |
39 | How do you run a container?
40 |
41 | 42 |
43 | What does `docker commit` do? When will you use it?
44 |
45 | 46 |
47 | How would you transfer data from one container into another?
48 |
49 | 50 |
51 | What happens to the data of the container when a container exits?
52 |
53 | 54 |
55 | Explain what each of the following commands do: 56 | 57 | * docker run 58 | * docker rm 59 | * docker ps 60 | * docker pull 61 | * docker build 62 | * docker commit
63 |
64 | 65 |
66 | How do you remove old, non-running containers?
67 |
68 | -------------------------------------------------------------------------------- /tests/testcases/testcase2.md: -------------------------------------------------------------------------------- 1 |
2 | Explain the following code: 3 | 4 | :(){ :|:& };: 5 | 6 |
7 |
8 | 9 |
10 | Can you give an example to some Bash best practices?
11 |
12 | 13 |
14 | What is the ternary operator? How do you use it in bash?
15 | 16 | A short way of using if/else. An example: 17 | 18 | [[ $a = 1 ]] && b="yes, equal" || b="nope" 19 |
20 | 21 |
22 | What does the following code do and when would you use it? 23 | 24 | diff <(ls /tmp) <(ls /var/tmp) 25 | 26 |
27 | It is called 'process substitution'. It provides a way to pass the output of a command to another command when using a pipe (`|`) is not possible. It can be used when a command does not support STDIN or you need the output of multiple commands. 28 | https://superuser.com/a/1060002/167769 29 |
30 | 31 | 32 | ## SQL 33 | 34 | 35 | #### :baby: Beginner 36 | 37 |
38 | What does SQL stand for?
39 | 40 | Structured Query Language 41 | 42 |
43 | 44 |
45 | How is SQL different from NoSQL?
46 | 47 | The main difference is that SQL databases are structured (data is stored in the form of 48 | tables with rows and columns - like an excel spreadsheet table) while NoSQL is 49 | unstructured, and the data storage can vary depending on how the NoSQL DB is set up, such 50 | as key-value pair, document-oriented, etc. 51 |
52 | 53 |
54 | What does it mean when a database is ACID compliant?
55 | 56 | ACID stands for Atomicity, Consistency, Isolation, Durability. In order to be ACID compliant, the database must meet each of the four criteria 57 | 58 | **Atomicity** - When a change occurs to the database, it should either succeed or fail as a whole. 59 | 60 | For example, if you were to update a table, the update should completely execute. If it only partially executes, the 61 | update is considered failed as a whole, and will not go through - the DB will revert back to its original 62 | state before the update occurred. It should also be mentioned that Atomicity ensures that each 63 | transaction is completed as its own standalone "unit" - if any part fails, the whole statement fails. 64 | 65 | **Consistency** - any change made to the database should bring it from one valid state into the next. 66 | 67 | For example, if you make a change to the DB, it shouldn't corrupt it. Consistency is upheld by checks and constraints that 68 | are pre-defined in the DB. For example, if you tried to change a value from a string to an int when the column 69 | should be of datatype string, a consistent DB would not allow this transaction to go through, and the action would 70 | not be executed 71 | 72 | **Isolation** - this ensures that a database will never be seen "mid-update" - as multiple transactions are running at 73 | the same time, it should still leave the DB in the same state as if the transactions were being run sequentially. 74 | 75 | For example, let's say that 20 other people were making changes to the database at the same time. At the 76 | time you executed your query, 15 of the 20 changes had gone through, but 5 were still in progress. You should 77 | only see the 15 changes that had completed - you wouldn't see the database mid-update as the change goes through. 78 | 79 | **Durability** - Once a change is committed, it will remain committed regardless of what happens 80 | (power failure, system crash, etc.). 
This means that all completed transactions 81 | must be recorded in non-volatile memory. 82 | 83 | Note that SQL is by nature ACID compliant. Certain NoSQL DB's can be ACID compliant depending on 84 | how they operate, but as a general rule of thumb, NoSQL DB's are not considered ACID compliant 85 |
86 | 87 |
88 | When is it best to use SQL? NoSQL?
89 | 90 | SQL - Best used when data integrity is crucial. SQL is typically implemented with many 91 | businesses and areas within the finance field due to its ACID compliance. 92 | 93 | NoSQL - Great if you need to scale things quickly. NoSQL was designed with web applications 94 | in mind, so it works great if you need to quickly spread the same information around to 95 | multiple servers 96 | 97 | Additionally, since NoSQL does not adhere to the strict table with columns and rows structure 98 | that Relational Databases require, you can store different data types together. 99 |
100 | 101 |
102 | What is a Cartesian Product?
103 | 104 | A Cartesian product is when all rows from the first table are joined to all rows in the second 105 | table. This can be done implicitly by not defining a key to join, or explicitly by 106 | calling a CROSS JOIN on two tables, such as below: 107 | 108 | Select * from customers **CROSS JOIN** orders; 109 | 110 | Note that a Cartesian product can also be a bad thing - when performing a join 111 | on two tables in which both do not have unique keys, this could cause the returned information 112 | to be incorrect. 113 |
114 | 115 | ##### SQL Specific Questions 116 | 117 | For these questions, we will be using the Customers and Orders tables shown below: 118 | 119 | **Customers** 120 | 121 | Customer_ID | Customer_Name | Items_in_cart | Cash_spent_to_Date 122 | ------------ | ------------- | ------------- | ------------- 123 | 100204 | John Smith | 0 | 20.00 124 | 100205 | Jane Smith | 3 | 40.00 125 | 100206 | Bobby Frank | 1 | 100.20 126 | 127 | **ORDERS** 128 | 129 | Customer_ID | Order_ID | Item | Price | Date_sold 130 | ------------ | ------------- | ------------- | ------------- | ------------- 131 | 100206 | A123 | Rubber Ducky | 2.20 | 2019-09-18 132 | 100206 | A123 | Bubble Bath | 8.00 | 2019-09-18 133 | 100206 | Q987 | 80-Pack TP | 90.00 | 2019-09-20 134 | 100205 | Z001 | Cat Food - Tuna Fish | 10.00 | 2019-08-05 135 | 100205 | Z001 | Cat Food - Chicken | 10.00 | 2019-08-05 136 | 100205 | Z001 | Cat Food - Beef | 10.00 | 2019-08-05 137 | 100205 | Z001 | Cat Food - Kitty quesadilla | 10.00 | 2019-08-05 138 | 100204 | X202 | Coffee | 20.00 | 2019-04-29 139 | 140 |
141 | How would I select all fields from this table?
142 | 143 | Select *
144 | From Customers; 145 |
146 | 147 |
148 | How many items are in John's cart?
149 | 150 | Select Items_in_cart
151 | From Customers
152 | Where Customer_Name = 'John Smith'; 153 |
154 | 155 |
156 | What is the sum of all the cash spent across all customers?
157 | 158 | Select SUM(Cash_spent_to_Date) as SUM_CASH
159 | From Customers; 160 |
161 | 162 |
163 | Tell me about your last big project/task you worked on
164 |
165 | 166 |
167 | What was the most challenging part of the project you worked on?
168 |
169 | 170 |
171 | Why do you want to work here?
172 |
173 | 174 |
175 | How did you hear about us?
176 | 177 | Tell them how you heard about them :D 178 | Relax, there is no wrong or right answer here...I think. 179 |
-------------------------------------------------------------------------------- /tests/testcases/testcase3.md: -------------------------------------------------------------------------------- 1 | 2 | You have a colleague you don't get along with. Tell us some strategies for creating a good work relationship with them anyway.
3 | 4 | Bad answer: I don't. 5 | Better answer: Every person has strengths and weaknesses. This is true also for colleagues I don't have good work relationship with and this is what helps me to create good work relationship with them. If I am able to highlight or recognize their strengths I'm able to focus mainly on that when communicating with them. 6 | 7 | 8 |
9 | What do you love about your work?
10 | 11 | You know the best, but some ideas if you find it hard to express yourself: 12 | 13 | * Diversity 14 | * Complexity 15 | * Challenging 16 | * Communication with several different teams 17 |
18 | 19 |
20 | What are your responsibilities in your current position?
21 | 22 | You know the best :) 23 |
24 | 25 | 26 | Why should we hire you for the role?
27 | 28 | You can use and elaborate on one or all of the following: 29 | 30 | * Passion 31 | * Motivation 32 | * Autodidact 33 | * Creativity (be able to support it with some actual examples) 34 | 35 | 36 | ## Questions you CAN ask 37 | 38 | A list of questions you as a candidate can ask the interviewer during or after the interview. 39 | These are only a suggestion, use them carefully. Not every interviewer will be able to answer these (or happy to) which should be perhaps a red flag warning for your regarding working in such place but that's really up to you. 40 | 41 |
42 | What do you like about working here?
43 |
44 | 45 |
46 | How does the company promote personal growth?
47 | 48 | 49 |
50 | What is the current level of technical debt you are dealing with?
51 | 52 | Be careful when asking this question - all companies, regardless of size, have some level of tech debt. 53 | Phrase the question in the light that all companies have to deal with this, but you want to see the current 54 | pain points they are dealing with
55 | 56 | This is a great way to figure out how managers deal with unplanned work, and how good they are at 57 | setting expectations with projects. 58 |
--------------------------------------------------------------------------------