├── selenium ├── __init__.py ├── src │ ├── __init__.py │ ├── requirements.txt │ ├── install-browser.sh │ ├── Dockerfile │ ├── chrome-deps.txt │ └── app.py ├── tests │ ├── __init__.py │ └── unit │ │ ├── __init__.py │ │ └── test_handler.py ├── samconfig.toml ├── template.yaml ├── events │ └── event.json ├── .gitignore └── README.md ├── README.md ├── LICENSE ├── .devcontainer ├── devcontainer.json ├── Dockerfile └── library-scripts │ ├── docker-in-docker-debian.sh │ └── common-debian.sh └── .gitignore /selenium/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /selenium/src/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /selenium/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /selenium/tests/unit/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /selenium/src/requirements.txt: -------------------------------------------------------------------------------- 1 | selenium 2 | requests 3 | pandas -------------------------------------------------------------------------------- /selenium/samconfig.toml: -------------------------------------------------------------------------------- 1 | version = 0.1 2 | [default] 3 | [default.deploy] 4 | [default.deploy.parameters] 5 | stack_name = "selenium" 6 | s3_bucket = "aws-sam-cli-managed-default-samclisourcebucket-10ou7y86c11sr" 7 | s3_prefix = "selenium" 8 | region = "us-east-1" 9 | capabilities = "CAPABILITY_IAM" 10 | image_repositories = ["SeleniumFunction=268674271179.dkr.ecr.us-east-1.amazonaws.com/selenium8e00596a/seleniumfunction930ef00frepo"] 11 | -------------------------------------------------------------------------------- /selenium/src/install-browser.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | 4 | echo "Downloading Chromium..." 5 | curl "https://www.googleapis.com/download/storage/v1/b/chromium-browser-snapshots/o/Linux_x64%2F$CHROMIUM_VERSION%2Fchrome-linux.zip?generation=1652397748160413&alt=media" > /tmp/chromium.zip 6 | 7 | unzip /tmp/chromium.zip -d /tmp/ 8 | mv /tmp/chrome-linux/ /opt/chrome 9 | 10 | curl "https://www.googleapis.com/download/storage/v1/b/chromium-browser-snapshots/o/Linux_x64%2F$CHROMIUM_VERSION%2Fchromedriver_linux64.zip?generation=1652397753719852&alt=media" > /tmp/chromedriver_linux64.zip 11 | unzip /tmp/chromedriver_linux64.zip -d /tmp/ 12 | mv /tmp/chromedriver_linux64/chromedriver /opt/chromedriver 13 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Relevant [blog post](https://cloudbytes.dev/articles/run-selenium-in-aws-lambda-for-ui-testing) with detailed explanation. 2 | ------ 3 | 4 | # Using the GitHub repository directly 5 | 6 | You need AWS SAM CLI installed and AWS credentials configured. 7 | 8 | Open your terminal and run the following command to clone the [repository](https://github.com/rehanhaider/selenium-in-aws-lambda). 9 | 10 | ```git 11 | git clone https://github.com/rehanhaider/selenium-in-aws-lambda.git 12 | ``` 13 | 14 | Navigate to the app directory. 15 | 16 | ```bash 17 | cd selenium-in-aws-lambda/selenium 18 | ``` 19 | 20 | Build the app. 21 | 22 | ```bash 23 | sam build 24 | ``` 25 | 26 | Test the app locally. 27 | 28 | ```bash 29 | sam local invoke 30 | ``` 31 | 32 | Deploy the app to AWS. 33 | 34 | ```bash 35 | sam deploy --guided 36 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Rehan Haider 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | // For format details, see https://aka.ms/devcontainer.json. For config options, see the README at: 2 | // https://github.com/microsoft/vscode-dev-containers/tree/v0.191.0/containers/docker-in-docker 3 | { 4 | "name": "Docker in Docker", 5 | "dockerFile": "Dockerfile", 6 | "runArgs": [ 7 | "--init", 8 | "--privileged" 9 | ], 10 | "mounts": [ 11 | "source=dind-var-lib-docker,target=/var/lib/docker,type=volume" 12 | ], 13 | "overrideCommand": false, 14 | // Set *default* container specific settings.json values on container create. 15 | "settings": {}, 16 | // Add the IDs of extensions you want installed when the container is created. 17 | "extensions": [ 18 | "ms-azuretools.vscode-docker", 19 | "GitHub.copilot" 20 | ], 21 | // Use 'forwardPorts' to make a list of ports inside the container available locally. 22 | // "forwardPorts": [], 23 | // Use 'postCreateCommand' to run commands after the container is created. 24 | // "postCreateCommand": "docker --version", 25 | // Comment out connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root. 26 | "remoteUser": "vscode" 27 | } 28 | -------------------------------------------------------------------------------- /selenium/src/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM public.ecr.aws/lambda/python:3.9 as stage 2 | 3 | # Hack to install chromium dependencies 4 | RUN yum install -y -q sudo unzip 5 | 6 | # Find the version of latest stable build of chromium from below 7 | # https://omahaproxy.appspot.com/ 8 | # Then follow the instructions here in below URL to download old builds of Chrome/Chromium that are stable 9 | # Current stable version of Chromium 10 | ENV CHROMIUM_VERSION=1002910 11 | # RUN yum install -y https://dl.google.com/linux/direct/google-chrome-stable_current_x86_64.rpm 12 | 13 | # Install Chromium 14 | COPY install-browser.sh /tmp/ 15 | RUN /usr/bin/bash /tmp/install-browser.sh 16 | 17 | FROM public.ecr.aws/lambda/python:3.9 as base 18 | 19 | COPY chrome-deps.txt /tmp/ 20 | RUN yum install -y $(cat /tmp/chrome-deps.txt) 21 | 22 | # Install Python dependencies for function 23 | COPY requirements.txt /tmp/ 24 | RUN python3 -m pip install --upgrade pip -q 25 | RUN python3 -m pip install -r /tmp/requirements.txt -q 26 | 27 | 28 | COPY --from=stage /opt/chrome /opt/chrome 29 | COPY --from=stage /opt/chromedriver /opt/chromedriver 30 | COPY app.py ${LAMBDA_TASK_ROOT} 31 | 32 | CMD [ "app.handler" ] 33 | -------------------------------------------------------------------------------- /selenium/src/chrome-deps.txt: -------------------------------------------------------------------------------- 1 | acl adwaita-cursor-theme adwaita-icon-theme alsa-lib at-spi2-atk at-spi2-core 2 | atk avahi-libs cairo cairo-gobject colord-libs cryptsetup-libs cups-libs dbus 3 | dbus-libs dconf desktop-file-utils device-mapper device-mapper-libs elfutils-default-yama-scope 4 | elfutils-libs emacs-filesystem fribidi gdk-pixbuf2 glib-networking gnutls graphite2 5 | gsettings-desktop-schemas gtk-update-icon-cache gtk3 harfbuzz hicolor-icon-theme hwdata jasper-libs 6 | jbigkit-libs json-glib kmod kmod-libs lcms2 libX11 libX11-common libXau libXcomposite libXcursor libXdamage 7 | libXext libXfixes libXft libXi libXinerama libXrandr libXrender libXtst libXxf86vm libdrm libepoxy 8 | liberation-fonts liberation-fonts-common liberation-mono-fonts liberation-narrow-fonts liberation-sans-fonts 9 | liberation-serif-fonts libfdisk libglvnd libglvnd-egl libglvnd-glx libgusb libidn libjpeg-turbo libmodman 10 | libpciaccess libproxy libsemanage libsmartcols libsoup libthai libtiff libusbx libutempter libwayland-client 11 | libwayland-cursor libwayland-egl libwayland-server libxcb libxkbcommon libxshmfence lz4 mesa-libEGL mesa-libGL 12 | mesa-libgbm mesa-libglapi nettle pango pixman qrencode-libs rest shadow-utils systemd systemd-libs trousers ustr 13 | util-linux vulkan vulkan-filesystem wget which xdg-utils xkeyboard-config -------------------------------------------------------------------------------- /selenium/src/app.py: -------------------------------------------------------------------------------- 1 | ## Run selenium and chrome driver to scrape data from cloudbytes.dev 2 | import time 3 | import json 4 | import os.path 5 | import os 6 | from selenium import webdriver 7 | from selenium.webdriver.chrome.service import Service 8 | from selenium.webdriver.common.by import By 9 | from selenium.webdriver.chrome.options import Options 10 | 11 | 12 | 13 | def handler(event=None, context=None): 14 | chrome_options = webdriver.ChromeOptions() 15 | chrome_options.binary_location = "/opt/chrome/chrome" 16 | chrome_options.add_argument("--headless") 17 | chrome_options.add_argument("--no-sandbox") 18 | chrome_options.add_argument("--disable-dev-shm-usage") 19 | chrome_options.add_argument("--disable-gpu") 20 | chrome_options.add_argument("--disable-dev-tools") 21 | chrome_options.add_argument("--no-zygote") 22 | chrome_options.add_argument("--single-process") 23 | chrome_options.add_argument("window-size=2560x1440") 24 | chrome_options.add_argument("--user-data-dir=/tmp/chrome-user-data") 25 | chrome_options.add_argument("--remote-debugging-port=9222") 26 | #chrome_options.add_argument("--data-path=/tmp/chrome-user-data") 27 | #chrome_options.add_argument("--disk-cache-dir=/tmp/chrome-user-data") 28 | chrome = webdriver.Chrome("/opt/chromedriver", options=chrome_options) 29 | chrome.get("https://cloudbytes.dev/") 30 | description = chrome.find_element(By.NAME, "description").get_attribute("content") 31 | print(description) 32 | return { 33 | "statusCode": 200, 34 | "body": json.dumps( 35 | { 36 | "message": description, 37 | } 38 | ), 39 | } 40 | -------------------------------------------------------------------------------- /selenium/template.yaml: -------------------------------------------------------------------------------- 1 | AWSTemplateFormatVersion: '2010-09-09' 2 | Transform: AWS::Serverless-2016-10-31 3 | Description: > 4 | python3.9 5 | 6 | Sample SAM Template for selenium 7 | 8 | # More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst 9 | Globals: 10 | Function: 11 | Timeout: 120 12 | MemorySize: 2048 13 | 14 | Resources: 15 | SeleniumFunction: 16 | Type: AWS::Serverless::Function # More info about Function Resource: https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md#awsserverlessfunction 17 | Properties: 18 | PackageType: Image 19 | Architectures: 20 | - x86_64 21 | Events: 22 | Selenium: 23 | Type: Api # More info about API Event Source: https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md#api 24 | Properties: 25 | Path: /selenium 26 | Method: get 27 | Metadata: 28 | Dockerfile: Dockerfile 29 | DockerContext: ./src 30 | DockerTag: python3.9-v1 31 | 32 | Outputs: 33 | # ServerlessRestApi is an implicit API created out of Events key under Serverless::Function 34 | # Find out more about other implicit resources you can reference within SAM 35 | # https://github.com/awslabs/serverless-application-model/blob/master/docs/internals/generated_resources.rst#api 36 | SeleniumApi: 37 | Description: "API Gateway endpoint URL for Prod stage for Seleniumc function" 38 | Value: !Sub "https://${ServerlessRestApi}.execute-api.${AWS::Region}.amazonaws.com/Prod/selenium/" 39 | SeleniumFunction: 40 | Description: "Selenium Lambda Function ARN" 41 | Value: !GetAtt SeleniumFunction.Arn 42 | SeleniumFunctionIamRole: 43 | Description: "Implicit IAM Role created for Selenium function" 44 | Value: !GetAtt SeleniumFunctionRole.Arn 45 | -------------------------------------------------------------------------------- /selenium/events/event.json: -------------------------------------------------------------------------------- 1 | { 2 | "body": "{\"message\": \"hello world\"}", 3 | "resource": "/hello", 4 | "path": "/hello", 5 | "httpMethod": "GET", 6 | "isBase64Encoded": false, 7 | "queryStringParameters": { 8 | "foo": "bar" 9 | }, 10 | "pathParameters": { 11 | "proxy": "/path/to/resource" 12 | }, 13 | "stageVariables": { 14 | "baz": "qux" 15 | }, 16 | "headers": { 17 | "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", 18 | "Accept-Encoding": "gzip, deflate, sdch", 19 | "Accept-Language": "en-US,en;q=0.8", 20 | "Cache-Control": "max-age=0", 21 | "CloudFront-Forwarded-Proto": "https", 22 | "CloudFront-Is-Desktop-Viewer": "true", 23 | "CloudFront-Is-Mobile-Viewer": "false", 24 | "CloudFront-Is-SmartTV-Viewer": "false", 25 | "CloudFront-Is-Tablet-Viewer": "false", 26 | "CloudFront-Viewer-Country": "US", 27 | "Host": "1234567890.execute-api.us-east-1.amazonaws.com", 28 | "Upgrade-Insecure-Requests": "1", 29 | "User-Agent": "Custom User Agent String", 30 | "Via": "1.1 08f323deadbeefa7af34d5feb414ce27.cloudfront.net (CloudFront)", 31 | "X-Amz-Cf-Id": "cDehVQoZnx43VYQb9j2-nvCh-9z396Uhbp027Y2JvkCPNLmGJHqlaA==", 32 | "X-Forwarded-For": "127.0.0.1, 127.0.0.2", 33 | "X-Forwarded-Port": "443", 34 | "X-Forwarded-Proto": "https" 35 | }, 36 | "requestContext": { 37 | "accountId": "123456789012", 38 | "resourceId": "123456", 39 | "stage": "prod", 40 | "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef", 41 | "requestTime": "09/Apr/2015:12:34:56 +0000", 42 | "requestTimeEpoch": 1428582896000, 43 | "identity": { 44 | "cognitoIdentityPoolId": null, 45 | "accountId": null, 46 | "cognitoIdentityId": null, 47 | "caller": null, 48 | "accessKey": null, 49 | "sourceIp": "127.0.0.1", 50 | "cognitoAuthenticationType": null, 51 | "cognitoAuthenticationProvider": null, 52 | "userArn": null, 53 | "userAgent": "Custom User Agent String", 54 | "user": null 55 | }, 56 | "path": "/prod/hello", 57 | "resourcePath": "/hello", 58 | "httpMethod": "POST", 59 | "apiId": "1234567890", 60 | "protocol": "HTTP/1.1" 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /.devcontainer/Dockerfile: -------------------------------------------------------------------------------- 1 | # [Choice] Python version: 3, 3.9, 3.8, 3.7, 3.6 2 | ARG VARIANT="3.8" 3 | FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT} 4 | 5 | # Avoid warnings by switching to noninteractive 6 | ENV DEBIAN_FRONTEND=noninteractive 7 | 8 | # [Option] Install Node.js 9 | ARG INSTALL_NODE="true" 10 | ARG NODE_VERSION="lts/*" 11 | RUN if [ "${INSTALL_NODE}" = "true" ]; then su vscode -c "umask 0002 && . /usr/local/share/nvm/nvm.sh && nvm install ${NODE_VERSION} 2>&1"; fi 12 | 13 | 14 | # [Option] Install zsh 15 | ARG INSTALL_ZSH="true" 16 | # [Option] Upgrade OS packages to their latest versions 17 | ARG UPGRADE_PACKAGES="false" 18 | # [Option] Enable non-root Docker access in container 19 | ARG ENABLE_NONROOT_DOCKER="true" 20 | # [Option] Use the OSS Moby Engine instead of the licensed Docker Engine 21 | ARG USE_MOBY="true" 22 | 23 | # Install needed packages and setup non-root user. Use a separate RUN statement to add your 24 | # own dependencies. A user of "automatic" attempts to reuse an user ID if one already exists. 25 | ARG USERNAME=automatic 26 | ARG USER_UID=1000 27 | ARG USER_GID=$USER_UID 28 | COPY library-scripts/*.sh /tmp/library-scripts/ 29 | RUN apt-get update \ 30 | && /bin/bash /tmp/library-scripts/common-debian.sh "${INSTALL_ZSH}" "${USERNAME}" "${USER_UID}" "${USER_GID}" "${UPGRADE_PACKAGES}" "true" "true" \ 31 | # Use Docker script from script library to set things up 32 | && /bin/bash /tmp/library-scripts/docker-in-docker-debian.sh "${ENABLE_NONROOT_DOCKER}" "${USERNAME}" "${USE_MOBY}" \ 33 | # Clean up 34 | && apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/* /tmp/library-scripts/ 35 | 36 | VOLUME [ "/var/lib/docker" ] 37 | 38 | # Install AWS CLI 39 | RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" \ 40 | && unzip awscliv2.zip && ./aws/install 41 | 42 | # Install AWS SAM-CDK Beta 43 | RUN wget "https://github.com/aws/aws-sam-cli/releases/latest/download/aws-sam-cli-linux-x86_64.zip" -O "awssambeta.zip" \ 44 | && unzip awssambeta.zip -d sam && ./sam/install 45 | 46 | # [Optional] Uncomment this line to install global node packages. 47 | RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g aws-cdk" 2>&1 48 | 49 | # Setting the ENTRYPOINT to docker-init.sh will start up the Docker Engine 50 | # inside the container "overrideCommand": false is set in devcontainer.json. 51 | # The script will also execute CMD if you need to alter startup behaviors. 52 | ENTRYPOINT [ "/usr/local/share/docker-init.sh" ] 53 | CMD [ "sleep", "infinity" ] -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | 131 | 132 | # Stopping devcontainer changes 133 | # Local devcontainer contains mounted volumes which is too complicated to explain for multiple OSs. 134 | .devcontainer/devcontainer.json 135 | .aws-sam/* -------------------------------------------------------------------------------- /selenium/tests/unit/test_handler.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | import pytest 4 | 5 | from hello_world import app 6 | 7 | 8 | @pytest.fixture() 9 | def apigw_event(): 10 | """ Generates API GW Event""" 11 | 12 | return { 13 | "body": '{ "test": "body"}', 14 | "resource": "/{proxy+}", 15 | "requestContext": { 16 | "resourceId": "123456", 17 | "apiId": "1234567890", 18 | "resourcePath": "/{proxy+}", 19 | "httpMethod": "POST", 20 | "requestId": "c6af9ac6-7b61-11e6-9a41-93e8deadbeef", 21 | "accountId": "123456789012", 22 | "identity": { 23 | "apiKey": "", 24 | "userArn": "", 25 | "cognitoAuthenticationType": "", 26 | "caller": "", 27 | "userAgent": "Custom User Agent String", 28 | "user": "", 29 | "cognitoIdentityPoolId": "", 30 | "cognitoIdentityId": "", 31 | "cognitoAuthenticationProvider": "", 32 | "sourceIp": "127.0.0.1", 33 | "accountId": "", 34 | }, 35 | "stage": "prod", 36 | }, 37 | "queryStringParameters": {"foo": "bar"}, 38 | "headers": { 39 | "Via": "1.1 08f323deadbeefa7af34d5feb414ce27.cloudfront.net (CloudFront)", 40 | "Accept-Language": "en-US,en;q=0.8", 41 | "CloudFront-Is-Desktop-Viewer": "true", 42 | "CloudFront-Is-SmartTV-Viewer": "false", 43 | "CloudFront-Is-Mobile-Viewer": "false", 44 | "X-Forwarded-For": "127.0.0.1, 127.0.0.2", 45 | "CloudFront-Viewer-Country": "US", 46 | "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", 47 | "Upgrade-Insecure-Requests": "1", 48 | "X-Forwarded-Port": "443", 49 | "Host": "1234567890.execute-api.us-east-1.amazonaws.com", 50 | "X-Forwarded-Proto": "https", 51 | "X-Amz-Cf-Id": "aaaaaaaaaae3VYQb9jd-nvCd-de396Uhbp027Y2JvkCPNLmGJHqlaA==", 52 | "CloudFront-Is-Tablet-Viewer": "false", 53 | "Cache-Control": "max-age=0", 54 | "User-Agent": "Custom User Agent String", 55 | "CloudFront-Forwarded-Proto": "https", 56 | "Accept-Encoding": "gzip, deflate, sdch", 57 | }, 58 | "pathParameters": {"proxy": "/examplepath"}, 59 | "httpMethod": "POST", 60 | "stageVariables": {"baz": "qux"}, 61 | "path": "/examplepath", 62 | } 63 | 64 | 65 | def test_lambda_handler(apigw_event, mocker): 66 | 67 | ret = app.lambda_handler(apigw_event, "") 68 | data = json.loads(ret["body"]) 69 | 70 | assert ret["statusCode"] == 200 71 | assert "message" in ret["body"] 72 | assert data["message"] == "hello world" 73 | -------------------------------------------------------------------------------- /selenium/.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Created by https://www.gitignore.io/api/osx,linux,python,windows,pycharm,visualstudiocode 3 | 4 | ### Linux ### 5 | *~ 6 | 7 | # temporary files which can be created if a process still has a handle open of a deleted file 8 | .fuse_hidden* 9 | 10 | # KDE directory preferences 11 | .directory 12 | 13 | # Linux trash folder which might appear on any partition or disk 14 | .Trash-* 15 | 16 | # .nfs files are created when an open file is removed but is still being accessed 17 | .nfs* 18 | 19 | ### OSX ### 20 | *.DS_Store 21 | .AppleDouble 22 | .LSOverride 23 | 24 | # Icon must end with two \r 25 | Icon 26 | 27 | # Thumbnails 28 | ._* 29 | 30 | # Files that might appear in the root of a volume 31 | .DocumentRevisions-V100 32 | .fseventsd 33 | .Spotlight-V100 34 | .TemporaryItems 35 | .Trashes 36 | .VolumeIcon.icns 37 | .com.apple.timemachine.donotpresent 38 | 39 | # Directories potentially created on remote AFP share 40 | .AppleDB 41 | .AppleDesktop 42 | Network Trash Folder 43 | Temporary Items 44 | .apdisk 45 | 46 | ### PyCharm ### 47 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm 48 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 49 | 50 | # User-specific stuff: 51 | .idea/**/workspace.xml 52 | .idea/**/tasks.xml 53 | .idea/dictionaries 54 | 55 | # Sensitive or high-churn files: 56 | .idea/**/dataSources/ 57 | .idea/**/dataSources.ids 58 | .idea/**/dataSources.xml 59 | .idea/**/dataSources.local.xml 60 | .idea/**/sqlDataSources.xml 61 | .idea/**/dynamic.xml 62 | .idea/**/uiDesigner.xml 63 | 64 | # Gradle: 65 | .idea/**/gradle.xml 66 | .idea/**/libraries 67 | 68 | # CMake 69 | cmake-build-debug/ 70 | 71 | # Mongo Explorer plugin: 72 | .idea/**/mongoSettings.xml 73 | 74 | ## File-based project format: 75 | *.iws 76 | 77 | ## Plugin-specific files: 78 | 79 | # IntelliJ 80 | /out/ 81 | 82 | # mpeltonen/sbt-idea plugin 83 | .idea_modules/ 84 | 85 | # JIRA plugin 86 | atlassian-ide-plugin.xml 87 | 88 | # Cursive Clojure plugin 89 | .idea/replstate.xml 90 | 91 | # Ruby plugin and RubyMine 92 | /.rakeTasks 93 | 94 | # Crashlytics plugin (for Android Studio and IntelliJ) 95 | com_crashlytics_export_strings.xml 96 | crashlytics.properties 97 | crashlytics-build.properties 98 | fabric.properties 99 | 100 | ### PyCharm Patch ### 101 | # Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 102 | 103 | # *.iml 104 | # modules.xml 105 | # .idea/misc.xml 106 | # *.ipr 107 | 108 | # Sonarlint plugin 109 | .idea/sonarlint 110 | 111 | ### Python ### 112 | # Byte-compiled / optimized / DLL files 113 | __pycache__/ 114 | *.py[cod] 115 | *$py.class 116 | 117 | # C extensions 118 | *.so 119 | 120 | # Distribution / packaging 121 | .Python 122 | build/ 123 | develop-eggs/ 124 | dist/ 125 | downloads/ 126 | eggs/ 127 | .eggs/ 128 | lib/ 129 | lib64/ 130 | parts/ 131 | sdist/ 132 | var/ 133 | wheels/ 134 | *.egg-info/ 135 | .installed.cfg 136 | *.egg 137 | 138 | # PyInstaller 139 | # Usually these files are written by a python script from a template 140 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 141 | *.manifest 142 | *.spec 143 | 144 | # Installer logs 145 | pip-log.txt 146 | pip-delete-this-directory.txt 147 | 148 | # Unit test / coverage reports 149 | htmlcov/ 150 | .tox/ 151 | .coverage 152 | .coverage.* 153 | .cache 154 | .pytest_cache/ 155 | nosetests.xml 156 | coverage.xml 157 | *.cover 158 | .hypothesis/ 159 | 160 | # Translations 161 | *.mo 162 | *.pot 163 | 164 | # Flask stuff: 165 | instance/ 166 | .webassets-cache 167 | 168 | # Scrapy stuff: 169 | .scrapy 170 | 171 | # Sphinx documentation 172 | docs/_build/ 173 | 174 | # PyBuilder 175 | target/ 176 | 177 | # Jupyter Notebook 178 | .ipynb_checkpoints 179 | 180 | # pyenv 181 | .python-version 182 | 183 | # celery beat schedule file 184 | celerybeat-schedule.* 185 | 186 | # SageMath parsed files 187 | *.sage.py 188 | 189 | # Environments 190 | .env 191 | .venv 192 | env/ 193 | venv/ 194 | ENV/ 195 | env.bak/ 196 | venv.bak/ 197 | 198 | # Spyder project settings 199 | .spyderproject 200 | .spyproject 201 | 202 | # Rope project settings 203 | .ropeproject 204 | 205 | # mkdocs documentation 206 | /site 207 | 208 | # mypy 209 | .mypy_cache/ 210 | 211 | ### VisualStudioCode ### 212 | .vscode/* 213 | !.vscode/settings.json 214 | !.vscode/tasks.json 215 | !.vscode/launch.json 216 | !.vscode/extensions.json 217 | .history 218 | 219 | ### Windows ### 220 | # Windows thumbnail cache files 221 | Thumbs.db 222 | ehthumbs.db 223 | ehthumbs_vista.db 224 | 225 | # Folder config file 226 | Desktop.ini 227 | 228 | # Recycle Bin used on file shares 229 | $RECYCLE.BIN/ 230 | 231 | # Windows Installer files 232 | *.cab 233 | *.msi 234 | *.msm 235 | *.msp 236 | 237 | # Windows shortcuts 238 | *.lnk 239 | 240 | # Build folder 241 | 242 | */build/* 243 | 244 | # End of https://www.gitignore.io/api/osx,linux,python,windows,pycharm,visualstudiocode -------------------------------------------------------------------------------- /selenium/README.md: -------------------------------------------------------------------------------- 1 | # selenium 2 | 3 | This project contains source code and supporting files for a serverless application that you can deploy with the SAM CLI. It includes the following files and folders. 4 | 5 | - hello_world - Code for the application's Lambda function and Project Dockerfile. 6 | - events - Invocation events that you can use to invoke the function. 7 | - tests - Unit tests for the application code. 8 | - template.yaml - A template that defines the application's AWS resources. 9 | 10 | The application uses several AWS resources, including Lambda functions and an API Gateway API. These resources are defined in the `template.yaml` file in this project. You can update the template to add AWS resources through the same deployment process that updates your application code. 11 | 12 | ## Deploy the sample application 13 | 14 | The Serverless Application Model Command Line Interface (SAM CLI) is an extension of the AWS CLI that adds functionality for building and testing Lambda applications. It uses Docker to run your functions in an Amazon Linux environment that matches Lambda. It can also emulate your application's build environment and API. 15 | 16 | To use the SAM CLI, you need the following tools. 17 | 18 | * SAM CLI - [Install the SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html) 19 | * Docker - [Install Docker community edition](https://hub.docker.com/search/?type=edition&offering=community) 20 | 21 | You may need the following for local testing. 22 | * [Python 3 installed](https://www.python.org/downloads/) 23 | 24 | To build and deploy your application for the first time, run the following in your shell: 25 | 26 | ```bash 27 | sam build 28 | sam deploy --guided 29 | ``` 30 | 31 | The first command will build a docker image from a Dockerfile and then copy the source of your application inside the Docker image. The second command will package and deploy your application to AWS, with a series of prompts: 32 | 33 | * **Stack Name**: The name of the stack to deploy to CloudFormation. This should be unique to your account and region, and a good starting point would be something matching your project name. 34 | * **AWS Region**: The AWS region you want to deploy your app to. 35 | * **Confirm changes before deploy**: If set to yes, any change sets will be shown to you before execution for manual review. If set to no, the AWS SAM CLI will automatically deploy application changes. 36 | * **Allow SAM CLI IAM role creation**: Many AWS SAM templates, including this example, create AWS IAM roles required for the AWS Lambda function(s) included to access AWS services. By default, these are scoped down to minimum required permissions. To deploy an AWS CloudFormation stack which creates or modifies IAM roles, the `CAPABILITY_IAM` value for `capabilities` must be provided. If permission isn't provided through this prompt, to deploy this example you must explicitly pass `--capabilities CAPABILITY_IAM` to the `sam deploy` command. 37 | * **Save arguments to samconfig.toml**: If set to yes, your choices will be saved to a configuration file inside the project, so that in the future you can just re-run `sam deploy` without parameters to deploy changes to your application. 38 | 39 | You can find your API Gateway Endpoint URL in the output values displayed after deployment. 40 | 41 | ## Use the SAM CLI to build and test locally 42 | 43 | Build your application with the `sam build` command. 44 | 45 | ```bash 46 | selenium$ sam build 47 | ``` 48 | 49 | The SAM CLI builds a docker image from a Dockerfile and then installs dependencies defined in `hello_world/requirements.txt` inside the docker image. The processed template file is saved in the `.aws-sam/build` folder. 50 | 51 | Test a single function by invoking it directly with a test event. An event is a JSON document that represents the input that the function receives from the event source. Test events are included in the `events` folder in this project. 52 | 53 | Run functions locally and invoke them with the `sam local invoke` command. 54 | 55 | ```bash 56 | selenium$ sam local invoke HelloWorldFunction --event events/event.json 57 | ``` 58 | 59 | The SAM CLI can also emulate your application's API. Use the `sam local start-api` to run the API locally on port 3000. 60 | 61 | ```bash 62 | selenium$ sam local start-api 63 | selenium$ curl http://localhost:3000/ 64 | ``` 65 | 66 | The SAM CLI reads the application template to determine the API's routes and the functions that they invoke. The `Events` property on each function's definition includes the route and method for each path. 67 | 68 | ```yaml 69 | Events: 70 | HelloWorld: 71 | Type: Api 72 | Properties: 73 | Path: /hello 74 | Method: get 75 | ``` 76 | 77 | ## Add a resource to your application 78 | The application template uses AWS Serverless Application Model (AWS SAM) to define application resources. AWS SAM is an extension of AWS CloudFormation with a simpler syntax for configuring common serverless application resources such as functions, triggers, and APIs. For resources not included in [the SAM specification](https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md), you can use standard [AWS CloudFormation](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-template-resource-type-ref.html) resource types. 79 | 80 | ## Fetch, tail, and filter Lambda function logs 81 | 82 | To simplify troubleshooting, SAM CLI has a command called `sam logs`. `sam logs` lets you fetch logs generated by your deployed Lambda function from the command line. In addition to printing the logs on the terminal, this command has several nifty features to help you quickly find the bug. 83 | 84 | `NOTE`: This command works for all AWS Lambda functions; not just the ones you deploy using SAM. 85 | 86 | ```bash 87 | selenium$ sam logs -n HelloWorldFunction --stack-name selenium --tail 88 | ``` 89 | 90 | You can find more information and examples about filtering Lambda function logs in the [SAM CLI Documentation](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-logging.html). 91 | 92 | ## Unit tests 93 | 94 | Tests are defined in the `tests` folder in this project. Use PIP to install the [pytest](https://docs.pytest.org/en/latest/) and run unit tests from your local machine. 95 | 96 | ```bash 97 | selenium$ pip install pytest pytest-mock --user 98 | selenium$ python -m pytest tests/ -v 99 | ``` 100 | 101 | ## Cleanup 102 | 103 | To delete the sample application that you created, use the AWS CLI. Assuming you used your project name for the stack name, you can run the following: 104 | 105 | ```bash 106 | aws cloudformation delete-stack --stack-name selenium 107 | ``` 108 | 109 | ## Resources 110 | 111 | See the [AWS SAM developer guide](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/what-is-sam.html) for an introduction to SAM specification, the SAM CLI, and serverless application concepts. 112 | 113 | Next, you can use AWS Serverless Application Repository to deploy ready to use Apps that go beyond hello world samples and learn how authors developed their applications: [AWS Serverless Application Repository main page](https://aws.amazon.com/serverless/serverlessrepo/) 114 | -------------------------------------------------------------------------------- /.devcontainer/library-scripts/docker-in-docker-debian.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | #------------------------------------------------------------------------------------------------------------- 3 | # Copyright (c) Microsoft Corporation. All rights reserved. 4 | # Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 5 | #------------------------------------------------------------------------------------------------------------- 6 | # 7 | # Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/docker-in-docker.md 8 | # Maintainer: The VS Code and Codespaces Teams 9 | # 10 | # Syntax: ./docker-in-docker-debian.sh [enable non-root docker access flag] [non-root user] [use moby] 11 | 12 | ENABLE_NONROOT_DOCKER=${1:-"true"} 13 | USERNAME=${2:-"automatic"} 14 | USE_MOBY=${3:-"true"} 15 | MICROSOFT_GPG_KEYS_URI="https://packages.microsoft.com/keys/microsoft.asc" 16 | 17 | set -e 18 | 19 | if [ "$(id -u)" -ne 0 ]; then 20 | echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.' 21 | exit 1 22 | fi 23 | 24 | # Determine the appropriate non-root user 25 | if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then 26 | USERNAME="" 27 | POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)") 28 | for CURRENT_USER in ${POSSIBLE_USERS[@]}; do 29 | if id -u ${CURRENT_USER} > /dev/null 2>&1; then 30 | USERNAME=${CURRENT_USER} 31 | break 32 | fi 33 | done 34 | if [ "${USERNAME}" = "" ]; then 35 | USERNAME=root 36 | fi 37 | elif [ "${USERNAME}" = "none" ] || ! id -u ${USERNAME} > /dev/null 2>&1; then 38 | USERNAME=root 39 | fi 40 | 41 | # Get central common setting 42 | get_common_setting() { 43 | if [ "${common_settings_file_loaded}" != "true" ]; then 44 | curl -sfL "https://aka.ms/vscode-dev-containers/script-library/settings.env" 2>/dev/null -o /tmp/vsdc-settings.env || echo "Could not download settings file. Skipping." 45 | common_settings_file_loaded=true 46 | fi 47 | if [ -f "/tmp/vsdc-settings.env" ]; then 48 | local multi_line="" 49 | if [ "$2" = "true" ]; then multi_line="-z"; fi 50 | local result="$(grep ${multi_line} -oP "$1=\"?\K[^\"]+" /tmp/vsdc-settings.env | tr -d '\0')" 51 | if [ ! -z "${result}" ]; then declare -g $1="${result}"; fi 52 | fi 53 | echo "$1=${!1}" 54 | } 55 | 56 | # Function to run apt-get if needed 57 | apt_get_update_if_needed() 58 | { 59 | if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then 60 | echo "Running apt-get update..." 61 | apt-get update 62 | else 63 | echo "Skipping apt-get update." 64 | fi 65 | } 66 | 67 | # Checks if packages are installed and installs them if not 68 | check_packages() { 69 | if ! dpkg -s "$@" > /dev/null 2>&1; then 70 | apt_get_update_if_needed 71 | apt-get -y install --no-install-recommends "$@" 72 | fi 73 | } 74 | 75 | # Ensure apt is in non-interactive to avoid prompts 76 | export DEBIAN_FRONTEND=noninteractive 77 | 78 | # Install dependencies 79 | check_packages apt-transport-https curl ca-certificates lxc pigz iptables gnupg2 80 | 81 | # Swap to legacy iptables for compatibility 82 | if type iptables-legacy > /dev/null 2>&1; then 83 | update-alternatives --set iptables /usr/sbin/iptables-legacy 84 | update-alternatives --set ip6tables /usr/sbin/ip6tables-legacy 85 | fi 86 | 87 | # Install Docker / Moby CLI if not already installed 88 | if type docker > /dev/null 2>&1 && type dockerd > /dev/null 2>&1; then 89 | echo "Docker / Moby CLI and Engine already installed." 90 | else 91 | # Source /etc/os-release to get OS info 92 | . /etc/os-release 93 | if [ "${USE_MOBY}" = "true" ]; then 94 | # Import key safely (new 'signed-by' method rather than deprecated apt-key approach) and install 95 | get_common_setting MICROSOFT_GPG_KEYS_URI 96 | curl -sSL ${MICROSOFT_GPG_KEYS_URI} | gpg --dearmor > /usr/share/keyrings/microsoft-archive-keyring.gpg 97 | echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/microsoft-archive-keyring.gpg] https://packages.microsoft.com/repos/microsoft-${ID}-${VERSION_CODENAME}-prod ${VERSION_CODENAME} main" > /etc/apt/sources.list.d/microsoft.list 98 | apt-get update 99 | apt-get -y install --no-install-recommends moby-cli moby-buildx moby-compose moby-engine 100 | else 101 | # Import key safely (new 'signed-by' method rather than deprecated apt-key approach) and install 102 | curl -fsSL https://download.docker.com/linux/${ID}/gpg | gpg --dearmor > /usr/share/keyrings/docker-archive-keyring.gpg 103 | echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/${ID} ${VERSION_CODENAME} stable" > /etc/apt/sources.list.d/docker.list 104 | apt-get update 105 | apt-get -y install --no-install-recommends docker-ce-cli docker-ce 106 | fi 107 | fi 108 | 109 | echo "Finished installing docker / moby" 110 | 111 | # Install Docker Compose if not already installed and is on a supported architecture 112 | if type docker-compose > /dev/null 2>&1; then 113 | echo "Docker Compose already installed." 114 | else 115 | TARGET_COMPOSE_ARCH="$(uname -m)" 116 | if [ "${TARGET_COMPOSE_ARCH}" = "amd64" ]; then 117 | TARGET_COMPOSE_ARCH="x86_64" 118 | fi 119 | if [ "${TARGET_COMPOSE_ARCH}" != "x86_64" ]; then 120 | # Use pip to get a version that runns on this architecture 121 | if ! dpkg -s python3-minimal python3-pip libffi-dev python3-venv pipx > /dev/null 2>&1; then 122 | apt_get_update_if_needed 123 | apt-get -y install python3-minimal python3-pip libffi-dev python3-venv pipx 124 | fi 125 | export PIPX_HOME=/usr/local/pipx 126 | mkdir -p ${PIPX_HOME} 127 | export PIPX_BIN_DIR=/usr/local/bin 128 | export PIP_CACHE_DIR=/tmp/pip-tmp/cache 129 | pipx install --system-site-packages --pip-args '--no-cache-dir --force-reinstall' docker-compose 130 | rm -rf /tmp/pip-tmp 131 | else 132 | LATEST_COMPOSE_VERSION=$(basename "$(curl -fsSL -o /dev/null -w "%{url_effective}" https://github.com/docker/compose/releases/latest)") 133 | curl -fsSL "https://github.com/docker/compose/releases/download/${LATEST_COMPOSE_VERSION}/docker-compose-$(uname -s)-${TARGET_COMPOSE_ARCH}" -o /usr/local/bin/docker-compose 134 | chmod +x /usr/local/bin/docker-compose 135 | fi 136 | fi 137 | 138 | # If init file already exists, exit 139 | if [ -f "/usr/local/share/docker-init.sh" ]; then 140 | echo "/usr/local/share/docker-init.sh already exists, so exiting." 141 | exit 0 142 | fi 143 | echo "docker-init doesnt exist..." 144 | 145 | # Add user to the docker group 146 | if [ "${ENABLE_NONROOT_DOCKER}" = "true" ]; then 147 | if ! getent group docker > /dev/null 2>&1; then 148 | groupadd docker 149 | fi 150 | usermod -aG docker ${USERNAME} 151 | fi 152 | 153 | tee /usr/local/share/docker-init.sh > /dev/null \ 154 | << 'EOF' 155 | #!/usr/bin/env bash 156 | #------------------------------------------------------------------------------------------------------------- 157 | # Copyright (c) Microsoft Corporation. All rights reserved. 158 | # Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 159 | #------------------------------------------------------------------------------------------------------------- 160 | 161 | sudoIf() 162 | { 163 | if [ "$(id -u)" -ne 0 ]; then 164 | sudo "$@" 165 | else 166 | "$@" 167 | fi 168 | } 169 | 170 | # explicitly remove dockerd and containerd PID file to ensure that it can start properly if it was stopped uncleanly 171 | # ie: docker kill 172 | sudoIf find /run /var/run -iname 'docker*.pid' -delete || : 173 | sudoIf find /run /var/run -iname 'container*.pid' -delete || : 174 | 175 | set -e 176 | 177 | ## Dind wrapper script from docker team 178 | # Maintained: https://github.com/moby/moby/blob/master/hack/dind 179 | 180 | export container=docker 181 | 182 | if [ -d /sys/kernel/security ] && ! sudoIf mountpoint -q /sys/kernel/security; then 183 | sudoIf mount -t securityfs none /sys/kernel/security || { 184 | echo >&2 'Could not mount /sys/kernel/security.' 185 | echo >&2 'AppArmor detection and --privileged mode might break.' 186 | } 187 | fi 188 | 189 | # Mount /tmp (conditionally) 190 | if ! sudoIf mountpoint -q /tmp; then 191 | sudoIf mount -t tmpfs none /tmp 192 | fi 193 | 194 | # cgroup v2: enable nesting 195 | if [ -f /sys/fs/cgroup/cgroup.controllers ]; then 196 | # move the init process (PID 1) from the root group to the /init group, 197 | # otherwise writing subtree_control fails with EBUSY. 198 | sudoIf mkdir -p /sys/fs/cgroup/init 199 | sudoIf echo 1 > /sys/fs/cgroup/init/cgroup.procs 200 | # enable controllers 201 | sudoIf sed -e 's/ / +/g' -e 's/^/+/' < /sys/fs/cgroup/cgroup.controllers \ 202 | > /sys/fs/cgroup/cgroup.subtree_control 203 | fi 204 | ## Dind wrapper over. 205 | 206 | # Handle DNS 207 | set +e 208 | cat /etc/resolv.conf | grep -i 'internal.cloudapp.net' 209 | if [ $? -eq 0 ] 210 | then 211 | echo "Setting dockerd Azure DNS." 212 | CUSTOMDNS="--dns 168.63.129.16" 213 | else 214 | echo "Not setting dockerd DNS manually." 215 | CUSTOMDNS="" 216 | fi 217 | set -e 218 | 219 | # Start docker/moby engine 220 | ( sudoIf dockerd $CUSTOMDNS > /tmp/dockerd.log 2>&1 ) & 221 | 222 | set +e 223 | 224 | # Execute whatever commands were passed in (if any). This allows us 225 | # to set this script to ENTRYPOINT while still executing the default CMD. 226 | exec "$@" 227 | EOF 228 | 229 | chmod +x /usr/local/share/docker-init.sh 230 | chown ${USERNAME}:root /usr/local/share/docker-init.sh 231 | -------------------------------------------------------------------------------- /.devcontainer/library-scripts/common-debian.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | #------------------------------------------------------------------------------------------------------------- 3 | # Copyright (c) Microsoft Corporation. All rights reserved. 4 | # Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information. 5 | #------------------------------------------------------------------------------------------------------------- 6 | # 7 | # Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/common.md 8 | # Maintainer: The VS Code and Codespaces Teams 9 | # 10 | # Syntax: ./common-debian.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! flag] [Add non-free packages] 11 | 12 | set -e 13 | 14 | INSTALL_ZSH=${1:-"true"} 15 | USERNAME=${2:-"automatic"} 16 | USER_UID=${3:-"automatic"} 17 | USER_GID=${4:-"automatic"} 18 | UPGRADE_PACKAGES=${5:-"true"} 19 | INSTALL_OH_MYS=${6:-"true"} 20 | ADD_NON_FREE_PACKAGES=${7:-"false"} 21 | SCRIPT_DIR="$(cd $(dirname "${BASH_SOURCE[0]}") && pwd)" 22 | MARKER_FILE="/usr/local/etc/vscode-dev-containers/common" 23 | 24 | 25 | if [ "$(id -u)" -ne 0 ]; then 26 | echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.' 27 | exit 1 28 | fi 29 | 30 | # Ensure that login shells get the correct path if the user updated the PATH using ENV. 31 | rm -f /etc/profile.d/00-restore-env.sh 32 | echo "export PATH=${PATH//$(sh -lc 'echo $PATH')/\$PATH}" > /etc/profile.d/00-restore-env.sh 33 | chmod +x /etc/profile.d/00-restore-env.sh 34 | 35 | # If in automatic mode, determine if a user already exists, if not use vscode 36 | if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then 37 | USERNAME="" 38 | POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)") 39 | for CURRENT_USER in ${POSSIBLE_USERS[@]}; do 40 | if id -u ${CURRENT_USER} > /dev/null 2>&1; then 41 | USERNAME=${CURRENT_USER} 42 | break 43 | fi 44 | done 45 | if [ "${USERNAME}" = "" ]; then 46 | USERNAME=vscode 47 | fi 48 | elif [ "${USERNAME}" = "none" ]; then 49 | USERNAME=root 50 | USER_UID=0 51 | USER_GID=0 52 | fi 53 | 54 | # Load markers to see which steps have already run 55 | if [ -f "${MARKER_FILE}" ]; then 56 | echo "Marker file found:" 57 | cat "${MARKER_FILE}" 58 | source "${MARKER_FILE}" 59 | fi 60 | 61 | # Ensure apt is in non-interactive to avoid prompts 62 | export DEBIAN_FRONTEND=noninteractive 63 | 64 | # Function to call apt-get if needed 65 | apt_get_update_if_needed() 66 | { 67 | if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then 68 | echo "Running apt-get update..." 69 | apt-get update 70 | else 71 | echo "Skipping apt-get update." 72 | fi 73 | } 74 | 75 | # Run install apt-utils to avoid debconf warning then verify presence of other common developer tools and dependencies 76 | if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then 77 | 78 | package_list="apt-utils \ 79 | openssh-client \ 80 | gnupg2 \ 81 | iproute2 \ 82 | procps \ 83 | lsof \ 84 | htop \ 85 | net-tools \ 86 | psmisc \ 87 | curl \ 88 | wget \ 89 | rsync \ 90 | ca-certificates \ 91 | unzip \ 92 | zip \ 93 | nano \ 94 | vim-tiny \ 95 | less \ 96 | jq \ 97 | lsb-release \ 98 | apt-transport-https \ 99 | dialog \ 100 | libc6 \ 101 | libgcc1 \ 102 | libkrb5-3 \ 103 | libgssapi-krb5-2 \ 104 | libicu[0-9][0-9] \ 105 | liblttng-ust0 \ 106 | libstdc++6 \ 107 | zlib1g \ 108 | locales \ 109 | sudo \ 110 | ncdu \ 111 | man-db \ 112 | strace \ 113 | manpages \ 114 | manpages-dev \ 115 | init-system-helpers" 116 | 117 | # Needed for adding manpages-posix and manpages-posix-dev which are non-free packages in Debian 118 | if [ "${ADD_NON_FREE_PACKAGES}" = "true" ]; then 119 | # Bring in variables from /etc/os-release like VERSION_CODENAME 120 | . /etc/os-release 121 | sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list 122 | sed -i -E "s/deb-src http:\/\/(deb|httredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list 123 | sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list 124 | sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list 125 | sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list 126 | sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list 127 | sed -i "s/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list 128 | sed -i "s/deb-src http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list 129 | echo "Running apt-get update..." 130 | apt-get update 131 | package_list="${package_list} manpages-posix manpages-posix-dev" 132 | else 133 | apt_get_update_if_needed 134 | fi 135 | 136 | # Install libssl1.1 if available 137 | if [[ ! -z $(apt-cache --names-only search ^libssl1.1$) ]]; then 138 | package_list="${package_list} libssl1.1" 139 | fi 140 | 141 | # Install appropriate version of libssl1.0.x if available 142 | libssl_package=$(dpkg-query -f '${db:Status-Abbrev}\t${binary:Package}\n' -W 'libssl1\.0\.?' 2>&1 || echo '') 143 | if [ "$(echo "$LIlibssl_packageBSSL" | grep -o 'libssl1\.0\.[0-9]:' | uniq | sort | wc -l)" -eq 0 ]; then 144 | if [[ ! -z $(apt-cache --names-only search ^libssl1.0.2$) ]]; then 145 | # Debian 9 146 | package_list="${package_list} libssl1.0.2" 147 | elif [[ ! -z $(apt-cache --names-only search ^libssl1.0.0$) ]]; then 148 | # Ubuntu 18.04, 16.04, earlier 149 | package_list="${package_list} libssl1.0.0" 150 | fi 151 | fi 152 | 153 | echo "Packages to verify are installed: ${package_list}" 154 | apt-get -y install --no-install-recommends ${package_list} 2> >( grep -v 'debconf: delaying package configuration, since apt-utils is not installed' >&2 ) 155 | 156 | # Install git if not already installed (may be more recent than distro version) 157 | if ! type git > /dev/null 2>&1; then 158 | apt-get -y install --no-install-recommends git 159 | fi 160 | 161 | PACKAGES_ALREADY_INSTALLED="true" 162 | fi 163 | 164 | # Get to latest versions of all packages 165 | if [ "${UPGRADE_PACKAGES}" = "true" ]; then 166 | apt_get_update_if_needed 167 | apt-get -y upgrade --no-install-recommends 168 | apt-get autoremove -y 169 | fi 170 | 171 | # Ensure at least the en_US.UTF-8 UTF-8 locale is available. 172 | # Common need for both applications and things like the agnoster ZSH theme. 173 | if [ "${LOCALE_ALREADY_SET}" != "true" ] && ! grep -o -E '^\s*en_US.UTF-8\s+UTF-8' /etc/locale.gen > /dev/null; then 174 | echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen 175 | locale-gen 176 | LOCALE_ALREADY_SET="true" 177 | fi 178 | 179 | # Create or update a non-root user to match UID/GID. 180 | if id -u ${USERNAME} > /dev/null 2>&1; then 181 | # User exists, update if needed 182 | if [ "${USER_GID}" != "automatic" ] && [ "$USER_GID" != "$(id -G $USERNAME)" ]; then 183 | groupmod --gid $USER_GID $USERNAME 184 | usermod --gid $USER_GID $USERNAME 185 | fi 186 | if [ "${USER_UID}" != "automatic" ] && [ "$USER_UID" != "$(id -u $USERNAME)" ]; then 187 | usermod --uid $USER_UID $USERNAME 188 | fi 189 | else 190 | # Create user 191 | if [ "${USER_GID}" = "automatic" ]; then 192 | groupadd $USERNAME 193 | else 194 | groupadd --gid $USER_GID $USERNAME 195 | fi 196 | if [ "${USER_UID}" = "automatic" ]; then 197 | useradd -s /bin/bash --gid $USERNAME -m $USERNAME 198 | else 199 | useradd -s /bin/bash --uid $USER_UID --gid $USERNAME -m $USERNAME 200 | fi 201 | fi 202 | 203 | # Add add sudo support for non-root user 204 | if [ "${USERNAME}" != "root" ] && [ "${EXISTING_NON_ROOT_USER}" != "${USERNAME}" ]; then 205 | echo $USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME 206 | chmod 0440 /etc/sudoers.d/$USERNAME 207 | EXISTING_NON_ROOT_USER="${USERNAME}" 208 | fi 209 | 210 | # ** Shell customization section ** 211 | if [ "${USERNAME}" = "root" ]; then 212 | user_rc_path="/root" 213 | else 214 | user_rc_path="/home/${USERNAME}" 215 | fi 216 | 217 | # Restore user .bashrc defaults from skeleton file if it doesn't exist or is empty 218 | if [ ! -f "${user_rc_path}/.bashrc" ] || [ ! -s "${user_rc_path}/.bashrc" ] ; then 219 | cp /etc/skel/.bashrc "${user_rc_path}/.bashrc" 220 | fi 221 | 222 | # Restore user .profile defaults from skeleton file if it doesn't exist or is empty 223 | if [ ! -f "${user_rc_path}/.profile" ] || [ ! -s "${user_rc_path}/.profile" ] ; then 224 | cp /etc/skel/.profile "${user_rc_path}/.profile" 225 | fi 226 | 227 | # .bashrc/.zshrc snippet 228 | rc_snippet="$(cat << 'EOF' 229 | 230 | if [ -z "${USER}" ]; then export USER=$(whoami); fi 231 | if [[ "${PATH}" != *"$HOME/.local/bin"* ]]; then export PATH="${PATH}:$HOME/.local/bin"; fi 232 | 233 | # Display optional first run image specific notice if configured and terminal is interactive 234 | if [ -t 1 ] && [[ "${TERM_PROGRAM}" = "vscode" || "${TERM_PROGRAM}" = "codespaces" ]] && [ ! -f "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed" ]; then 235 | if [ -f "/usr/local/etc/vscode-dev-containers/first-run-notice.txt" ]; then 236 | cat "/usr/local/etc/vscode-dev-containers/first-run-notice.txt" 237 | elif [ -f "/workspaces/.codespaces/shared/first-run-notice.txt" ]; then 238 | cat "/workspaces/.codespaces/shared/first-run-notice.txt" 239 | fi 240 | mkdir -p "$HOME/.config/vscode-dev-containers" 241 | # Mark first run notice as displayed after 10s to avoid problems with fast terminal refreshes hiding it 242 | ((sleep 10s; touch "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed") &) 243 | fi 244 | 245 | # Set the default git editor if not already set 246 | if [ -z "$(git config --get core.editor)" ] && [ -z "${GIT_EDITOR}" ]; then 247 | if [ "${TERM_PROGRAM}" = "vscode" ]; then 248 | if [[ -n $(command -v code-insiders) && -z $(command -v code) ]]; then 249 | export GIT_EDITOR="code-insiders --wait" 250 | else 251 | export GIT_EDITOR="code --wait" 252 | fi 253 | fi 254 | fi 255 | 256 | EOF 257 | )" 258 | 259 | # code shim, it fallbacks to code-insiders if code is not available 260 | cat << 'EOF' > /usr/local/bin/code 261 | #!/bin/sh 262 | 263 | get_in_path_except_current() { 264 | which -a "$1" | grep -A1 "$0" | grep -v "$0" 265 | } 266 | 267 | code="$(get_in_path_except_current code)" 268 | 269 | if [ -n "$code" ]; then 270 | exec "$code" "$@" 271 | elif [ "$(command -v code-insiders)" ]; then 272 | exec code-insiders "$@" 273 | else 274 | echo "code or code-insiders is not installed" >&2 275 | exit 127 276 | fi 277 | EOF 278 | chmod +x /usr/local/bin/code 279 | 280 | # systemctl shim - tells people to use 'service' if systemd is not running 281 | cat << 'EOF' > /usr/local/bin/systemctl 282 | #!/bin/sh 283 | set -e 284 | if [ -d "/run/systemd/system" ]; then 285 | exec /bin/systemctl/systemctl "$@" 286 | else 287 | echo '\n"systemd" is not running in this container due to its overhead.\nUse the "service" command to start services intead. e.g.: \n\nservice --status-all' 288 | fi 289 | EOF 290 | chmod +x /usr/local/bin/systemctl 291 | 292 | # Codespaces bash and OMZ themes - partly inspired by https://github.com/ohmyzsh/ohmyzsh/blob/master/themes/robbyrussell.zsh-theme 293 | codespaces_bash="$(cat \ 294 | <<'EOF' 295 | 296 | # Codespaces bash prompt theme 297 | __bash_prompt() { 298 | local userpart='`export XIT=$? \ 299 | && [ ! -z "${GITHUB_USER}" ] && echo -n "\[\033[0;32m\]@${GITHUB_USER} " || echo -n "\[\033[0;32m\]\u " \ 300 | && [ "$XIT" -ne "0" ] && echo -n "\[\033[1;31m\]➜" || echo -n "\[\033[0m\]➜"`' 301 | local gitbranch='`\ 302 | export BRANCH=$(git symbolic-ref --short HEAD 2>/dev/null || git rev-parse --short HEAD 2>/dev/null); \ 303 | if [ "${BRANCH}" != "" ]; then \ 304 | echo -n "\[\033[0;36m\](\[\033[1;31m\]${BRANCH}" \ 305 | && if git ls-files --error-unmatch -m --directory --no-empty-directory -o --exclude-standard ":/*" > /dev/null 2>&1; then \ 306 | echo -n " \[\033[1;33m\]✗"; \ 307 | fi \ 308 | && echo -n "\[\033[0;36m\]) "; \ 309 | fi`' 310 | local lightblue='\[\033[1;34m\]' 311 | local removecolor='\[\033[0m\]' 312 | PS1="${userpart} ${lightblue}\w ${gitbranch}${removecolor}\$ " 313 | unset -f __bash_prompt 314 | } 315 | __bash_prompt 316 | 317 | EOF 318 | )" 319 | 320 | codespaces_zsh="$(cat \ 321 | <<'EOF' 322 | # Codespaces zsh prompt theme 323 | __zsh_prompt() { 324 | local prompt_username 325 | if [ ! -z "${GITHUB_USER}" ]; then 326 | prompt_username="@${GITHUB_USER}" 327 | else 328 | prompt_username="%n" 329 | fi 330 | PROMPT="%{$fg[green]%}${prompt_username} %(?:%{$reset_color%}➜ :%{$fg_bold[red]%}➜ )" # User/exit code arrow 331 | PROMPT+='%{$fg_bold[blue]%}%(5~|%-1~/…/%3~|%4~)%{$reset_color%} ' # cwd 332 | PROMPT+='$(git_prompt_info)%{$fg[white]%}$ %{$reset_color%}' # Git status 333 | unset -f __zsh_prompt 334 | } 335 | ZSH_THEME_GIT_PROMPT_PREFIX="%{$fg_bold[cyan]%}(%{$fg_bold[red]%}" 336 | ZSH_THEME_GIT_PROMPT_SUFFIX="%{$reset_color%} " 337 | ZSH_THEME_GIT_PROMPT_DIRTY=" %{$fg_bold[yellow]%}✗%{$fg_bold[cyan]%})" 338 | ZSH_THEME_GIT_PROMPT_CLEAN="%{$fg_bold[cyan]%})" 339 | __zsh_prompt 340 | 341 | EOF 342 | )" 343 | 344 | # Add notice that Oh My Bash! has been removed from images and how to provide information on how to install manually 345 | omb_readme="$(cat \ 346 | <<'EOF' 347 | "Oh My Bash!" has been removed from this image in favor of a simple shell prompt. If you 348 | still wish to use it, remove "~/.oh-my-bash" and install it from: https://github.com/ohmybash/oh-my-bash 349 | You may also want to consider "Bash-it" as an alternative: https://github.com/bash-it/bash-it 350 | See here for infomation on adding it to your image or dotfiles: https://aka.ms/codespaces/omb-remove 351 | EOF 352 | )" 353 | omb_stub="$(cat \ 354 | <<'EOF' 355 | #!/usr/bin/env bash 356 | if [ -t 1 ]; then 357 | cat $HOME/.oh-my-bash/README.md 358 | fi 359 | EOF 360 | )" 361 | 362 | # Add RC snippet and custom bash prompt 363 | if [ "${RC_SNIPPET_ALREADY_ADDED}" != "true" ]; then 364 | echo "${rc_snippet}" >> /etc/bash.bashrc 365 | echo "${codespaces_bash}" >> "${user_rc_path}/.bashrc" 366 | echo 'export PROMPT_DIRTRIM=4' >> "${user_rc_path}/.bashrc" 367 | if [ "${USERNAME}" != "root" ]; then 368 | echo "${codespaces_bash}" >> "/root/.bashrc" 369 | echo 'export PROMPT_DIRTRIM=4' >> "/root/.bashrc" 370 | fi 371 | chown ${USERNAME}:${USERNAME} "${user_rc_path}/.bashrc" 372 | RC_SNIPPET_ALREADY_ADDED="true" 373 | fi 374 | 375 | # Add stub for Oh My Bash! 376 | if [ ! -d "${user_rc_path}/.oh-my-bash}" ] && [ "${INSTALL_OH_MYS}" = "true" ]; then 377 | mkdir -p "${user_rc_path}/.oh-my-bash" "/root/.oh-my-bash" 378 | echo "${omb_readme}" >> "${user_rc_path}/.oh-my-bash/README.md" 379 | echo "${omb_stub}" >> "${user_rc_path}/.oh-my-bash/oh-my-bash.sh" 380 | chmod +x "${user_rc_path}/.oh-my-bash/oh-my-bash.sh" 381 | if [ "${USERNAME}" != "root" ]; then 382 | echo "${omb_readme}" >> "/root/.oh-my-bash/README.md" 383 | echo "${omb_stub}" >> "/root/.oh-my-bash/oh-my-bash.sh" 384 | chmod +x "/root/.oh-my-bash/oh-my-bash.sh" 385 | fi 386 | chown -R "${USERNAME}:${USERNAME}" "${user_rc_path}/.oh-my-bash" 387 | fi 388 | 389 | # Optionally install and configure zsh and Oh My Zsh! 390 | if [ "${INSTALL_ZSH}" = "true" ]; then 391 | if ! type zsh > /dev/null 2>&1; then 392 | apt_get_update_if_needed 393 | apt-get install -y zsh 394 | fi 395 | if [ "${ZSH_ALREADY_INSTALLED}" != "true" ]; then 396 | echo "${rc_snippet}" >> /etc/zsh/zshrc 397 | ZSH_ALREADY_INSTALLED="true" 398 | fi 399 | 400 | # Adapted, simplified inline Oh My Zsh! install steps that adds, defaults to a codespaces theme. 401 | # See https://github.com/ohmyzsh/ohmyzsh/blob/master/tools/install.sh for official script. 402 | oh_my_install_dir="${user_rc_path}/.oh-my-zsh" 403 | if [ ! -d "${oh_my_install_dir}" ] && [ "${INSTALL_OH_MYS}" = "true" ]; then 404 | template_path="${oh_my_install_dir}/templates/zshrc.zsh-template" 405 | user_rc_file="${user_rc_path}/.zshrc" 406 | umask g-w,o-w 407 | mkdir -p ${oh_my_install_dir} 408 | git clone --depth=1 \ 409 | -c core.eol=lf \ 410 | -c core.autocrlf=false \ 411 | -c fsck.zeroPaddedFilemode=ignore \ 412 | -c fetch.fsck.zeroPaddedFilemode=ignore \ 413 | -c receive.fsck.zeroPaddedFilemode=ignore \ 414 | "https://github.com/ohmyzsh/ohmyzsh" "${oh_my_install_dir}" 2>&1 415 | echo -e "$(cat "${template_path}")\nDISABLE_AUTO_UPDATE=true\nDISABLE_UPDATE_PROMPT=true" > ${user_rc_file} 416 | sed -i -e 's/ZSH_THEME=.*/ZSH_THEME="codespaces"/g' ${user_rc_file} 417 | 418 | mkdir -p ${oh_my_install_dir}/custom/themes 419 | echo "${codespaces_zsh}" > "${oh_my_install_dir}/custom/themes/codespaces.zsh-theme" 420 | # Shrink git while still enabling updates 421 | cd "${oh_my_install_dir}" 422 | git repack -a -d -f --depth=1 --window=1 423 | # Copy to non-root user if one is specified 424 | if [ "${USERNAME}" != "root" ]; then 425 | cp -rf "${user_rc_file}" "${oh_my_install_dir}" /root 426 | chown -R ${USERNAME}:${USERNAME} "${user_rc_path}" 427 | fi 428 | fi 429 | fi 430 | 431 | # Persist image metadata info, script if meta.env found in same directory 432 | meta_info_script="$(cat << 'EOF' 433 | #!/bin/sh 434 | . /usr/local/etc/vscode-dev-containers/meta.env 435 | 436 | # Minimal output 437 | if [ "$1" = "version" ] || [ "$1" = "image-version" ]; then 438 | echo "${VERSION}" 439 | exit 0 440 | elif [ "$1" = "release" ]; then 441 | echo "${GIT_REPOSITORY_RELEASE}" 442 | exit 0 443 | elif [ "$1" = "content" ] || [ "$1" = "content-url" ] || [ "$1" = "contents" ] || [ "$1" = "contents-url" ]; then 444 | echo "${CONTENTS_URL}" 445 | exit 0 446 | fi 447 | 448 | #Full output 449 | echo 450 | echo "Development container image information" 451 | echo 452 | if [ ! -z "${VERSION}" ]; then echo "- Image version: ${VERSION}"; fi 453 | if [ ! -z "${DEFINITION_ID}" ]; then echo "- Definition ID: ${DEFINITION_ID}"; fi 454 | if [ ! -z "${VARIANT}" ]; then echo "- Variant: ${VARIANT}"; fi 455 | if [ ! -z "${GIT_REPOSITORY}" ]; then echo "- Source code repository: ${GIT_REPOSITORY}"; fi 456 | if [ ! -z "${GIT_REPOSITORY_RELEASE}" ]; then echo "- Source code release/branch: ${GIT_REPOSITORY_RELEASE}"; fi 457 | if [ ! -z "${BUILD_TIMESTAMP}" ]; then echo "- Timestamp: ${BUILD_TIMESTAMP}"; fi 458 | if [ ! -z "${CONTENTS_URL}" ]; then echo && echo "More info: ${CONTENTS_URL}"; fi 459 | echo 460 | EOF 461 | )" 462 | if [ -f "${SCRIPT_DIR}/meta.env" ]; then 463 | mkdir -p /usr/local/etc/vscode-dev-containers/ 464 | cp -f "${SCRIPT_DIR}/meta.env" /usr/local/etc/vscode-dev-containers/meta.env 465 | echo "${meta_info_script}" > /usr/local/bin/devcontainer-info 466 | chmod +x /usr/local/bin/devcontainer-info 467 | fi 468 | 469 | # Write marker file 470 | mkdir -p "$(dirname "${MARKER_FILE}")" 471 | echo -e "\ 472 | PACKAGES_ALREADY_INSTALLED=${PACKAGES_ALREADY_INSTALLED}\n\ 473 | LOCALE_ALREADY_SET=${LOCALE_ALREADY_SET}\n\ 474 | EXISTING_NON_ROOT_USER=${EXISTING_NON_ROOT_USER}\n\ 475 | RC_SNIPPET_ALREADY_ADDED=${RC_SNIPPET_ALREADY_ADDED}\n\ 476 | ZSH_ALREADY_INSTALLED=${ZSH_ALREADY_INSTALLED}" > "${MARKER_FILE}" 477 | 478 | echo "Done!" 479 | --------------------------------------------------------------------------------