├── perf_dashboard ├── __init__.py ├── bq_utils.py ├── posts_stats.py └── python_clientlibs_download.py ├── python-interpreter-builder ├── .dockerignore ├── .gitignore ├── README.md ├── DEBIAN │ └── control.in ├── Dockerfile.in └── scripts │ ├── package-python.sh │ ├── build-python-3.4.sh │ ├── build-python-3.5.sh │ ├── build-python-3.6.sh │ └── build-python-3.7.sh ├── runtime-image ├── .gitignore ├── resources │ ├── requirements-virtualenv.txt │ ├── requirements.txt │ └── apt-packages.txt ├── scripts │ └── install-apt-packages.sh └── Dockerfile.in ├── tests ├── benchmark │ ├── .gitignore │ ├── Dockerfile.in │ ├── benchmark_between_releases.sh │ └── generate_csv.py ├── integration │ ├── .gitignore │ ├── app.yaml │ ├── requirements.txt │ ├── Dockerfile.in │ └── server.py ├── eventlet │ ├── .gitignore │ ├── README.md │ ├── requirements.txt │ └── Dockerfile.in ├── google-cloud-python │ ├── .gitignore │ ├── Dockerfile.in │ └── run_unit_tests.sh ├── deploy_check │ ├── requirements.txt │ ├── app.yaml │ └── main.py ├── python2-libraries │ ├── Dockerfile │ ├── python2-libraries.yaml │ └── requirements.txt ├── python3-libraries │ ├── Dockerfile │ ├── python3-libraries.yaml │ └── requirements.txt ├── license-test │ └── license-test.yaml ├── virtualenv │ ├── virtualenv_default.yaml │ ├── virtualenv_python27.yaml │ ├── virtualenv_python34.yaml │ ├── virtualenv_python35.yaml │ ├── virtualenv_python36.yaml │ └── virtualenv_python37.yaml └── no-virtualenv │ └── no-virtualenv.yaml ├── scripts ├── data │ ├── Dockerfile.install_app │ ├── Dockerfile.preamble.template │ ├── Dockerfile.entrypoint.template │ ├── dockerignore.python_compat │ ├── Dockerfile.requirements_txt │ ├── Dockerfile.python_compat │ ├── Dockerfile.virtualenv.template │ └── dockerignore ├── testdata │ ├── hello_world │ │ ├── requirements.txt │ │ ├── app.yaml │ │ ├── main_test.py │ │ └── main.py │ ├── cloudbuild_err_rc1.yaml │ ├── cloudbuild_err_not_found.yaml │ ├── hello_world_compat_golden │ │ ├── .dockerignore │ │ └── Dockerfile │ ├── cloudbuild_difficult_cleanup.yaml │ ├── cloudbuild_ok.yaml │ ├── hello_world_compat │ │ ├── app.yaml │ │ └── main.py │ ├── hello_world_golden │ │ ├── Dockerfile │ │ └── .dockerignore │ ├── cloudbuild_user_substitutions.yaml │ ├── cloudbuild_builtin_substitutions.yaml │ └── cloudbuild_ok.yaml_golden.sh ├── requirements-test.txt ├── release.sh ├── deploy_check.sh ├── integration-test.sh ├── validation_utils_test.py ├── validation_utils.py ├── gen_dockerfile_test.py ├── gen_dockerfile.py ├── local_cloudbuild.py └── local_cloudbuild_test.py ├── .gitignore ├── .coveragerc ├── .travis.yml ├── CODEOWNERS ├── cloudbuild_benchmark.yaml ├── cloudbuild_client_test.yaml ├── cloudbuild.yaml ├── cloudbuild_interpreters.yaml ├── CONTRIBUTING.md ├── nox.py ├── README.md ├── cloudbuild_test.yaml ├── RELEASING.md └── LICENSE /perf_dashboard/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /python-interpreter-builder/.dockerignore: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /runtime-image/.gitignore: -------------------------------------------------------------------------------- 1 | Dockerfile 2 | -------------------------------------------------------------------------------- /tests/benchmark/.gitignore: 
-------------------------------------------------------------------------------- 1 | Dockerfile 2 | -------------------------------------------------------------------------------- /tests/integration/.gitignore: -------------------------------------------------------------------------------- 1 | Dockerfile 2 | -------------------------------------------------------------------------------- /python-interpreter-builder/.gitignore: -------------------------------------------------------------------------------- 1 | Dockerfile 2 | -------------------------------------------------------------------------------- /scripts/data/Dockerfile.install_app: -------------------------------------------------------------------------------- 1 | ADD . /app/ 2 | -------------------------------------------------------------------------------- /tests/eventlet/.gitignore: -------------------------------------------------------------------------------- 1 | Dockerfile 2 | main.py 3 | -------------------------------------------------------------------------------- /tests/google-cloud-python/.gitignore: -------------------------------------------------------------------------------- 1 | Dockerfile 2 | -------------------------------------------------------------------------------- /scripts/data/Dockerfile.preamble.template: -------------------------------------------------------------------------------- 1 | FROM {base_image} 2 | -------------------------------------------------------------------------------- /tests/integration/app.yaml: -------------------------------------------------------------------------------- 1 | runtime: custom 2 | env: flex 3 | -------------------------------------------------------------------------------- /scripts/data/Dockerfile.entrypoint.template: -------------------------------------------------------------------------------- 1 | CMD {entrypoint} 2 | -------------------------------------------------------------------------------- /tests/deploy_check/requirements.txt: -------------------------------------------------------------------------------- 1 | Flask==1.0.2 2 | gunicorn==19.9.0 3 | -------------------------------------------------------------------------------- /runtime-image/resources/requirements-virtualenv.txt: -------------------------------------------------------------------------------- 1 | virtualenv==20.0.31 2 | -------------------------------------------------------------------------------- /scripts/testdata/hello_world/requirements.txt: -------------------------------------------------------------------------------- 1 | Flask==1.0.2 2 | gunicorn==19.9.0 3 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | .cache 3 | .coverage 4 | .nox 5 | /*_local.sh 6 | __pycache__ 7 | -------------------------------------------------------------------------------- /tests/eventlet/README.md: -------------------------------------------------------------------------------- 1 | # Test the Python base image against the 'eventlet' library 2 | -------------------------------------------------------------------------------- /runtime-image/resources/requirements.txt: -------------------------------------------------------------------------------- 1 | pip 2 | setuptools==40.2.0 3 | wheel==0.31.1 4 | -------------------------------------------------------------------------------- /scripts/data/dockerignore.python_compat: 
-------------------------------------------------------------------------------- 1 | .dockerignore 2 | Dockerfile 3 | .git 4 | .hg 5 | .svn 6 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | branch = True 3 | 4 | [report] 5 | exclude_lines = 6 | pragma: no cover 7 | -------------------------------------------------------------------------------- /scripts/requirements-test.txt: -------------------------------------------------------------------------------- 1 | flask==1.0.2 2 | pytest==3.7.3 3 | pytest-cov==2.5.1 4 | pyyaml==3.13 5 | -------------------------------------------------------------------------------- /tests/deploy_check/app.yaml: -------------------------------------------------------------------------------- 1 | runtime: python 2 | env: flex 3 | entrypoint: gunicorn -b :$PORT main:app 4 | -------------------------------------------------------------------------------- /scripts/data/Dockerfile.requirements_txt: -------------------------------------------------------------------------------- 1 | ADD requirements.txt /app/ 2 | RUN pip install -r requirements.txt 3 | -------------------------------------------------------------------------------- /scripts/testdata/cloudbuild_err_rc1.yaml: -------------------------------------------------------------------------------- 1 | steps: 2 | - name: debian 3 | args: ['/bin/sh', '-c', 'exit 1'] 4 | -------------------------------------------------------------------------------- /scripts/testdata/cloudbuild_err_not_found.yaml: -------------------------------------------------------------------------------- 1 | steps: 2 | - name: debian 3 | args: ['/expected file not found'] 4 | -------------------------------------------------------------------------------- /scripts/testdata/hello_world_compat_golden/.dockerignore: -------------------------------------------------------------------------------- 1 | .dockerignore 2 | Dockerfile 3 | .git 4 | .hg 5 | .svn 6 | -------------------------------------------------------------------------------- /tests/python2-libraries/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG intermediate_image 2 | FROM $intermediate_image 3 | COPY requirements.txt /requirements.txt 4 | -------------------------------------------------------------------------------- /tests/python3-libraries/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG intermediate_image 2 | FROM $intermediate_image 3 | COPY requirements.txt /requirements.txt 4 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: required 2 | services: 3 | - docker 4 | script: 5 | - make build 6 | - make tests/virtualenv 7 | - make tests/no-virtualenv 8 | -------------------------------------------------------------------------------- /CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Code owners file. 2 | # This file controls who is tagged for review for any given pull request. 
3 | 4 | * @jinglundong @donmccasland 5 | -------------------------------------------------------------------------------- /scripts/testdata/cloudbuild_difficult_cleanup.yaml: -------------------------------------------------------------------------------- 1 | steps: 2 | - name: debian 3 | args: ['/bin/sh', '-c', 'mkdir root; umask 0000; touch root/deny_all.txt'] 4 | -------------------------------------------------------------------------------- /scripts/testdata/hello_world/app.yaml: -------------------------------------------------------------------------------- 1 | runtime: python 2 | env: flex 3 | entrypoint: gunicorn -b :$PORT main:app 4 | 5 | runtime_config: 6 | python_version: 3 7 | -------------------------------------------------------------------------------- /scripts/data/Dockerfile.python_compat: -------------------------------------------------------------------------------- 1 | FROM gcr.io/google_appengine/python-compat-multicore 2 | ADD . /app/ 3 | RUN if [ -s requirements.txt ]; then pip install -r requirements.txt; fi 4 | -------------------------------------------------------------------------------- /scripts/testdata/hello_world_compat_golden/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM gcr.io/google_appengine/python-compat-multicore 2 | ADD . /app/ 3 | RUN if [ -s requirements.txt ]; then pip install -r requirements.txt; fi 4 | -------------------------------------------------------------------------------- /tests/license-test/license-test.yaml: -------------------------------------------------------------------------------- 1 | schemaVersion: "1.0.0" 2 | 3 | # See https://github.com/GoogleCloudPlatform/container-structure-test/blob/master/README.md 4 | licenseTests: 5 | - debian: true 6 | files: [] 7 | -------------------------------------------------------------------------------- /tests/eventlet/requirements.txt: -------------------------------------------------------------------------------- 1 | click==6.7 2 | enum-compat==0.0.2 3 | eventlet==0.24.1 4 | Flask==2.2.5 5 | greenlet==0.4.14 6 | gunicorn==19.9.0 7 | itsdangerous==0.24 8 | Jinja2==2.10 9 | MarkupSafe==1.1.1 10 | Werkzeug==2.2.3 11 | -------------------------------------------------------------------------------- /tests/integration/requirements.txt: -------------------------------------------------------------------------------- 1 | Flask==2.2.5 2 | google-cloud-error-reporting==0.32.1 3 | google-cloud-logging==1.12.1 4 | google-cloud-monitoring==0.33.0 5 | gunicorn==19.9.0 6 | requests==2.31.0 7 | retrying==1.3.3 8 | six==1.12.0 9 | protobuf>=3.6.0 10 | -------------------------------------------------------------------------------- /scripts/testdata/cloudbuild_ok.yaml: -------------------------------------------------------------------------------- 1 | steps: 2 | - name: debian 3 | args: ['/bin/sh', '-c', 'printenv MESSAGE'] 4 | env: ['MESSAGE=Hello World!'] 5 | - name: debian 6 | args: ['/bin/sh', '-c', 'printenv MESSAGE'] 7 | env: ['MESSAGE=Goodbye\n And Farewell!', 'UNUSED=unused'] 8 | -------------------------------------------------------------------------------- /cloudbuild_benchmark.yaml: -------------------------------------------------------------------------------- 1 | timeout: 3600s 2 | steps: 3 | - name: gcr.io/cloud-builders/docker:latest 4 | args: ['build', '--tag=${_DOCKER_NAMESPACE}/python/tests/benchmark:${_TAG}', 5 | '--no-cache', '/workspace/tests/benchmark/'] 6 | images: [ 7 | # Intentionally empty 8 | ] 9 | 
-------------------------------------------------------------------------------- /scripts/testdata/hello_world_compat/app.yaml: -------------------------------------------------------------------------------- 1 | service: default 2 | runtime: python-compat 3 | env: flex 4 | 5 | api_version: 1 6 | threadsafe: true 7 | 8 | beta_settings: 9 | enable_app_engine_apis: true # Needed for compat apps. 10 | 11 | handlers: 12 | - url: .* 13 | script: main.app 14 | -------------------------------------------------------------------------------- /scripts/data/Dockerfile.virtualenv.template: -------------------------------------------------------------------------------- 1 | LABEL python_version=python{python_version} 2 | RUN virtualenv --no-download /env -p python{python_version} 3 | 4 | # Set virtualenv environment variables. This is equivalent to running 5 | # source /env/bin/activate 6 | ENV VIRTUAL_ENV /env 7 | ENV PATH /env/bin:$PATH 8 | -------------------------------------------------------------------------------- /runtime-image/scripts/install-apt-packages.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | apt-get -q update 6 | 7 | xargs -a <(awk '/^\s*[^#]/' '/resources/apt-packages.txt') -r -- \ 8 | apt-get install --no-install-recommends -yq 9 | 10 | apt-get upgrade -yq 11 | 12 | # Remove unneeded files. 13 | apt-get clean 14 | rm /var/lib/apt/lists/*_* 15 | -------------------------------------------------------------------------------- /tests/python2-libraries/python2-libraries.yaml: -------------------------------------------------------------------------------- 1 | schemaVersion: "1.0.0" 2 | 3 | globalEnvVars: 4 | - key: "VIRTUAL_ENV" 5 | value: "/env" 6 | - key: "PATH" 7 | value: "/env/bin:$PATH" 8 | 9 | commandTests: 10 | - name: "requirements" 11 | setup: [["virtualenv", "-p", "python", "/env"]] 12 | command: ["pip", "install", "-r", "/requirements.txt"] 13 | exitCode: 0 14 | -------------------------------------------------------------------------------- /scripts/release.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | export KOKORO_GITHUB_DIR=${KOKORO_ROOT}/src/github 4 | source ${KOKORO_GFILE_DIR}/kokoro/common.sh 5 | 6 | cd ${KOKORO_GITHUB_DIR}/python-runtime 7 | 8 | if [ -z "${TAG:+set}" ]; then 9 | export TAG=$(date +%Y-%m-%d-%H%M%S) 10 | fi 11 | 12 | ./build.sh $BUILD_FLAGS 13 | 14 | METADATA=$(pwd)/METADATA 15 | cd ${KOKORO_GFILE_DIR}/kokoro 16 | python note.py python -m ${METADATA} -t ${TAG} 17 | -------------------------------------------------------------------------------- /cloudbuild_client_test.yaml: -------------------------------------------------------------------------------- 1 | timeout: 3600s 2 | steps: 3 | - # Build image to run google client library unit tests 4 | name: gcr.io/cloud-builders/docker:latest 5 | args: ['build', '--tag=${_DOCKER_NAMESPACE}/python/tests/google-cloud-python:${_TAG}', 6 | '--no-cache', '/workspace/tests/google-cloud-python/'] 7 | - # Run google client library unit tests 8 | name: ${_DOCKER_NAMESPACE}/python/tests/google-cloud-python:${_TAG} 9 | images: [] 10 | -------------------------------------------------------------------------------- /scripts/testdata/hello_world_compat/main.py: -------------------------------------------------------------------------------- 1 | """The hello world flex app!""" 2 | 3 | import webapp2 4 | 5 | 6 | class HelloHandler(webapp2.RequestHandler): 7 | 8 | 
def get(self): 9 | msg = 'Hello GAE Flex (env: flex) Compat-Runtime App\n' 10 | self.response.headers['Content-Type'] = 'text/plain' 11 | self.response.out.write(msg) 12 | 13 | app = webapp2.WSGIApplication([('/', HelloHandler)], 14 | debug=True) 15 | -------------------------------------------------------------------------------- /scripts/testdata/hello_world_golden/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM gcr.io/google-appengine/python 2 | LABEL python_version=python3.6 3 | RUN virtualenv --no-download /env -p python3.6 4 | 5 | # Set virtualenv environment variables. This is equivalent to running 6 | # source /env/bin/activate 7 | ENV VIRTUAL_ENV /env 8 | ENV PATH /env/bin:$PATH 9 | ADD requirements.txt /app/ 10 | RUN pip install -r requirements.txt 11 | ADD . /app/ 12 | CMD exec gunicorn -b :$PORT main:app 13 | -------------------------------------------------------------------------------- /scripts/testdata/cloudbuild_user_substitutions.yaml: -------------------------------------------------------------------------------- 1 | steps: 2 | - name: debian 3 | args: ['/bin/sh', '-c', 'echo "$_FOO"'] 4 | - name: debian 5 | args: ['/usr/bin/test', 'this is foo value', '=', '$_FOO'] 6 | - name: debian 7 | args: ['/usr/bin/test', 'this is foo value', '=', '${_FOO}'] 8 | - name: debian 9 | args: ['/bin/sh', '-c', 'test "this is foo value" = "$_FOO"'] 10 | - name: debian 11 | args: ['/bin/sh', '-c', 'test "this is foo value" = "${_FOO}"'] 12 | -------------------------------------------------------------------------------- /scripts/testdata/cloudbuild_builtin_substitutions.yaml: -------------------------------------------------------------------------------- 1 | steps: 2 | - name: debian 3 | args: ['/bin/sh', '-c', 'echo "$PROJECT_ID"'] 4 | - name: debian 5 | args: ['/usr/bin/test', 'dummy-project-id', '=', '$PROJECT_ID'] 6 | - name: debian 7 | args: ['/usr/bin/test', 'dummy-project-id', '=', '${PROJECT_ID}'] 8 | - name: debian 9 | args: ['/bin/sh', '-c', 'test dummy-project-id = "$PROJECT_ID"'] 10 | - name: debian 11 | args: ['/bin/sh', '-c', 'test dummy-project-id = "${PROJECT_ID}"'] 12 | -------------------------------------------------------------------------------- /tests/google-cloud-python/Dockerfile.in: -------------------------------------------------------------------------------- 1 | FROM ${STAGING_IMAGE} 2 | 3 | # Get the source. 4 | RUN git clone --depth 1 https://github.com/GoogleCloudPlatform/google-cloud-python.git 5 | WORKDIR google-cloud-python 6 | 7 | # Upgrade setuptools 8 | RUN pip install --upgrade setuptools 9 | 10 | # Install nox 11 | RUN pip install --upgrade nox-automation 12 | 13 | # Run unit tests for all supported Python versions 14 | ADD run_unit_tests.sh /run_unit_tests.sh 15 | ENTRYPOINT ["/run_unit_tests.sh"] 16 | -------------------------------------------------------------------------------- /tests/eventlet/Dockerfile.in: -------------------------------------------------------------------------------- 1 | FROM ${STAGING_IMAGE} 2 | LABEL python_version=python3.6 3 | RUN virtualenv --no-download /env -p python3.6 4 | 5 | # Set virtualenv environment variables. This is equivalent to running 6 | # source /env/bin/activate 7 | ENV VIRTUAL_ENV /env 8 | ENV PATH /env/bin:$PATH 9 | ADD requirements.txt /app/ 10 | RUN pip install -r requirements.txt 11 | ADD . 
/app/ 12 | RUN gunicorn -k eventlet -b :$PORT --daemon main:app ; \ 13 | wget --retry-connrefused --tries=5 http://localhost:$PORT/ 14 | -------------------------------------------------------------------------------- /tests/python3-libraries/python3-libraries.yaml: -------------------------------------------------------------------------------- 1 | schemaVersion: "1.0.0" 2 | 3 | globalEnvVars: 4 | - key: "VIRTUAL_ENV" 5 | value: "/env" 6 | - key: "PATH" 7 | value: "/env/bin:$PATH" 8 | 9 | commandTests: 10 | - name: "requirements 3.5" 11 | setup: [["virtualenv", "-p", "/opt/python3.5/bin/python3.5", "/env"]] 12 | command: ["pip", "install", "-r", "/requirements.txt"] 13 | exitCode: 0 14 | 15 | - name: "requirements 3.6" 16 | setup: [["virtualenv", "-p", "/opt/python3.6/bin/python3.6", "/env"]] 17 | command: ["pip", "install", "-r", "/requirements.txt"] 18 | exitCode: 0 19 | -------------------------------------------------------------------------------- /cloudbuild.yaml: -------------------------------------------------------------------------------- 1 | timeout: 10800s 2 | steps: 3 | - # Build base runtime image 4 | name: gcr.io/cloud-builders/docker:latest 5 | args: ['build', '--tag=${_DOCKER_NAMESPACE}/python:${_TAG}', 6 | '--no-cache', '/workspace/runtime-image/'] 7 | id: runtime 8 | - # Build runtime builder image 9 | name: gcr.io/cloud-builders/docker:latest 10 | args: ['build', '--tag=${_BUILDER_DOCKER_NAMESPACE}/python/gen-dockerfile:${_TAG}', 11 | '--no-cache', '/workspace/builder/gen-dockerfile/'] 12 | id: gen-dockerfile 13 | waitFor: ['runtime'] 14 | images: [ 15 | '${_DOCKER_NAMESPACE}/python:${_TAG}', 16 | '${_BUILDER_DOCKER_NAMESPACE}/python/gen-dockerfile:${_TAG}', 17 | ] 18 | -------------------------------------------------------------------------------- /scripts/data/dockerignore: -------------------------------------------------------------------------------- 1 | # Copyright 2015 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | 15 | .dockerignore 16 | Dockerfile 17 | .git 18 | .hg 19 | .svn 20 | -------------------------------------------------------------------------------- /runtime-image/resources/apt-packages.txt: -------------------------------------------------------------------------------- 1 | # utilities 2 | git 3 | mercurial 4 | pkg-config 5 | wget 6 | # debian-provided interpreters 7 | python2.7 8 | python2.7-dev 9 | # Dependencies for third-party Python packages 10 | # with C-extensions 11 | build-essential 12 | libcurl4-openssl-dev 13 | libffi-dev 14 | libjpeg-dev 15 | libmysqlclient-dev 16 | libpng12-dev 17 | libpq-dev 18 | libssl-dev 19 | libxml2-dev 20 | libxslt1-dev 21 | swig 22 | zlib1g-dev 23 | # Needed by scipy/numpy 24 | gfortran 25 | libatlas-dev 26 | libblas-dev 27 | libfreetype6-dev 28 | liblapack-dev 29 | libquadmath0 30 | # Needed by pylibmc 31 | libmemcached-dev 32 | libsasl2-2 33 | libsasl2-dev 34 | libsasl2-modules 35 | sasl2-bin 36 | # Needed by eventlet 37 | netbase 38 | -------------------------------------------------------------------------------- /scripts/testdata/hello_world_golden/.dockerignore: -------------------------------------------------------------------------------- 1 | # Copyright 2015 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | .dockerignore 16 | Dockerfile 17 | .git 18 | .hg 19 | .svn 20 | -------------------------------------------------------------------------------- /python-interpreter-builder/README.md: -------------------------------------------------------------------------------- 1 | # Python Interpreter Builder 2 | 3 | This is a Docker-based Python interpreter builder. It builds Python interpreters 4 | using a Debian-based Docker image. These interpreters are suitable to be moved 5 | to another Debian-based Docker image. This avoids needing to install build 6 | dependencies in the final container. 7 | 8 | 9 | ## Building 10 | 11 | Use: 12 | 13 | docker build --tag=google/python/interpreter-builder . 14 | 15 | The interpreters will be stored in the image at `/interpreters.tar.gz`. This is 16 | suitable to be extracted from this image and added directly to another Docker 17 | image via: 18 | 19 | ADD interpreters.tar.gz / 20 | 21 | Docker will automatically un-tar the interpreters into `/opt`. 22 | -------------------------------------------------------------------------------- /python-interpreter-builder/DEBIAN/control.in: -------------------------------------------------------------------------------- 1 | Package: ${DEB_PACKAGE_NAME} 2 | Version: ${DEB_PACKAGE_VERSION} 3 | Section: python 4 | Priority: optional 5 | Architecture: amd64 6 | Maintainer: Douglas Greiman 7 | Description: Interactive high-level object-oriented language (version ${SHORT_VERSION}) 8 | Python is a high-level, interactive, object-oriented language.
Its ${SHORT_VERSION} version 9 | includes an extensive class library with lots of goodies for 10 | network programming, system administration, sounds and graphics. 11 | Depends: libbz2-1.0, 12 | libc6, 13 | libdb5.3, 14 | libexpat1, 15 | libffi6, 16 | liblzma5, 17 | libmpdec2, 18 | libncursesw5, 19 | libreadline6, 20 | libsqlite3-0, 21 | libssl1.0.0, 22 | libtinfo5, 23 | mime-support, 24 | zlib1g 25 | Homepage: https://www.python.org 26 | -------------------------------------------------------------------------------- /tests/integration/Dockerfile.in: -------------------------------------------------------------------------------- 1 | # Copyright 2016 Google Inc. All rights reserved. 2 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | FROM ${STAGING_IMAGE} 16 | 17 | COPY . /app 18 | WORKDIR /app 19 | 20 | RUN pip install -r requirements.txt 21 | 22 | ENTRYPOINT ["gunicorn", "-b", ":8080", "server:app"] 23 | -------------------------------------------------------------------------------- /tests/google-cloud-python/run_unit_tests.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -eu 3 | 4 | cd /app/google-cloud-python 5 | 6 | exit_code=0 7 | failed_files= 8 | for noxfile in */nox.py; do 9 | if [ "${noxfile}" = "dlp/nox.py" ]; then 10 | echo "**** Skipping ${noxfile} ****" 11 | continue 12 | fi 13 | echo "**** Starting tests in ${noxfile} ****" 14 | nox \ 15 | -f "${noxfile}" \ 16 | -e \ 17 | "unit(py='2.7')" \ 18 | "unit(py='3.4')" \ 19 | "unit(py='3.5')" \ 20 | "unit(py='3.6')" \ 21 | || { 22 | echo "**** FAILED tests in ${noxfile} ****" 23 | exit_code=1 24 | failed_files="${failed_files} ${noxfile}" 25 | } 26 | echo "**** Finished tests in ${noxfile} ****" 27 | done 28 | 29 | if [ "${exit_code}" -eq 0 ]; then 30 | echo "**** All tests passed ****" 31 | else 32 | echo "**** There were test failures:${failed_files} ****" 33 | fi 34 | exit "${exit_code}" 35 | -------------------------------------------------------------------------------- /scripts/testdata/hello_world/main_test.py: -------------------------------------------------------------------------------- 1 | # Copyright 2015 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | 15 | import main 16 | 17 | 18 | def test_index(): 19 | main.app.testing = True 20 | client = main.app.test_client() 21 | 22 | r = client.get('/') 23 | assert r.status_code == 200 24 | assert 'Hello World' in r.data.decode('utf-8') 25 | -------------------------------------------------------------------------------- /scripts/deploy_check.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | export KOKORO_GITHUB_DIR=${KOKORO_ROOT}/src/github 6 | source ${KOKORO_GFILE_DIR}/kokoro/common.sh 7 | 8 | cd ${KOKORO_GITHUB_DIR}/${SAMPLE_APP_DIRECTORY} 9 | if [ -n "${RUNTIME_SPEC}" -a -f app.yaml.in ]; then 10 | sed "s|\${RUNTIME_SPEC}|${RUNTIME_SPEC}|" app.yaml.in > app.yaml 11 | fi 12 | 13 | cd ${KOKORO_GFILE_DIR}/appengine/integration_tests 14 | 15 | sudo -E /usr/local/bin/pip install --upgrade -r requirements.txt 16 | 17 | if [ -f ${KOKORO_GITHUB_DIR}/${SAMPLE_APP_DIRECTORY}/requirements.txt ] 18 | then 19 | sudo -E /usr/local/bin/pip install --upgrade -r ${KOKORO_GITHUB_DIR}/${SAMPLE_APP_DIRECTORY}/requirements.txt 20 | fi 21 | 22 | export DEPLOY_LATENCY_PROJECT='cloud-deploy-latency' 23 | 24 | skip_flag="" 25 | 26 | if [ "${SKIP_CUSTOM_LOGGING_TESTS}" = "true" -o "${SKIP_BUILDERS}" = "true" ]; then 27 | skip_flag="$skip_flag --skip-builders" 28 | fi 29 | 30 | if [ "${SKIP_XRT}" = "true" ]; then 31 | skip_flag="$skip_flag --skip-xrt" 32 | fi 33 | 34 | python deploy_check.py -d ${KOKORO_GITHUB_DIR}/${SAMPLE_APP_DIRECTORY} -l ${LANGUAGE} ${skip_flag} 35 | -------------------------------------------------------------------------------- /python-interpreter-builder/Dockerfile.in: -------------------------------------------------------------------------------- 1 | # The Google App Engine base image is debian (jessie) with ca-certificates 2 | # installed. 3 | FROM ${OS_BASE_IMAGE} 4 | 5 | # Install Python build dependencies (based on Debian Build-Depends) 6 | RUN apt-get update && apt-get install -yq \ 7 | autoconf \ 8 | blt-dev \ 9 | bzip2 \ 10 | debhelper \ 11 | dpkg-dev \ 12 | gcc \ 13 | gettext-base \ 14 | libbluetooth-dev \ 15 | libbz2-dev \ 16 | libdb-dev \ 17 | libexpat1-dev \ 18 | libffi-dev \ 19 | libgdbm-dev \ 20 | libgpm2 \ 21 | liblzma-dev \ 22 | libmpdec-dev \ 23 | libncursesw5-dev \ 24 | libreadline-dev \ 25 | libsqlite3-dev \ 26 | libssl-dev \ 27 | locales \ 28 | lsb-release \ 29 | mime-support \ 30 | net-tools \ 31 | netbase \ 32 | python \ 33 | python3 \ 34 | sharutils \ 35 | time \ 36 | tk-dev \ 37 | wget \ 38 | xauth \ 39 | xvfb \ 40 | zlib1g-dev \ 41 | && rm -rf /var/lib/apt/lists/* 42 | 43 | # Setup locale. This prevents Python 3 IO encoding issues. 
44 | ENV LANG C.UTF-8 45 | 46 | # Add build scripts 47 | ADD scripts /scripts 48 | ADD DEBIAN /DEBIAN 49 | -------------------------------------------------------------------------------- /tests/benchmark/Dockerfile.in: -------------------------------------------------------------------------------- 1 | FROM ${STAGING_IMAGE} 2 | 3 | # Install performance 4 | RUN pip install performance 5 | 6 | # Create virtual environment 7 | RUN pip install --upgrade virtualenv 8 | 9 | # Required for Python 3.4, see 10 | # https://bugs.launchpad.net/ubuntu/+source/python3.4/+bug/1290847 11 | RUN apt-get update && apt-get install -y --force-yes python3-pip python3-venv 12 | 13 | RUN mkdir /result 14 | 15 | # Run the benchmark and compare the performance, add the 16 | # --debug-single-value flag to let the benchmark run in fastest mode 17 | RUN pyperformance run --debug-single-value --python=python2.7 -o /result/py2.7.json 18 | RUN pyperformance run --debug-single-value --python=python3.4 -o /result/py3.4.json 19 | RUN pyperformance run --debug-single-value --python=python3.5 -o /result/py3.5.json 20 | RUN if [ -e "/opt/python3.6/bin/python3.6" ]; then pyperformance run --debug-single-value --python=python3.6 -o /result/py3.6.json; fi 21 | 22 | RUN pyperformance compare /result/py2.7.json /result/py3.4.json --output_style table 23 | RUN pyperformance compare /result/py3.4.json /result/py3.5.json --output_style table 24 | RUN if [ -e "/result/py3.6.json" ]; then pyperformance compare /result/py3.5.json /result/py3.6.json --output_style table; fi -------------------------------------------------------------------------------- /cloudbuild_interpreters.yaml: -------------------------------------------------------------------------------- 1 | timeout: 10800s 2 | steps: 3 | - # Compile Python interpreters from source. This step happens first, then 4 | # the next three in parallel. 
5 | name: gcr.io/cloud-builders/docker:latest 6 | args: ['build', '--tag=interpreter-builder', 7 | '--no-cache', '/workspace/python-interpreter-builder/'] 8 | id: interpreter-builder 9 | - name: interpreter-builder 10 | args: ['/scripts/build-python-3.4.sh'] 11 | id: build-3.4 12 | waitFor: ['interpreter-builder'] 13 | - name: interpreter-builder 14 | args: ['/scripts/build-python-3.5.sh'] 15 | id: build-3.5 16 | waitFor: ['interpreter-builder'] 17 | - name: interpreter-builder 18 | args: ['/scripts/build-python-3.6.sh'] 19 | id: build-3.6 20 | waitFor: ['interpreter-builder'] 21 | - name: interpreter-builder 22 | args: ['/scripts/build-python-3.7.sh'] 23 | id: build-3.7 24 | waitFor: ['interpreter-builder'] 25 | 26 | # Upload them to the build-id location 27 | - name: gcr.io/cloud-builders/gsutil:latest 28 | args: ['cp', '/workspace/runtime-image/*.tar.gz', 'gs://python-interpreters/$BUILD_ID/'] 29 | waitFor: ['build-3.4', 'build-3.5', 'build-3.6', 'build-3.7'] 30 | 31 | # "Tag" this as latest 32 | - name: gcr.io/cloud-builders/gsutil:latest 33 | args: ['cp', '-r', 'gs://python-interpreters/$BUILD_ID/*', 'gs://python-interpreters/latest/'] 34 | -------------------------------------------------------------------------------- /tests/virtualenv/virtualenv_default.yaml: -------------------------------------------------------------------------------- 1 | schemaVersion: "1.0.0" 2 | 3 | globalEnvVars: 4 | - key: "VIRTUAL_ENV" 5 | value: "/env" 6 | - key: "PATH" 7 | value: "/env/bin:$PATH" 8 | 9 | commandTests: 10 | - name: "virtualenv python installation" 11 | setup: [["virtualenv", "/env"]] 12 | command: ["which", "python"] 13 | expectedOutput: ["/env/bin/python\n"] 14 | 15 | - name: "virtualenv python version" 16 | setup: [["virtualenv", "/env"]] 17 | command: ["python", "--version"] 18 | # we check stderr instead of stdout for Python versions < 3.4 19 | # https://bugs.python.org/issue18338 20 | expectedError: ["Python 2.7.(9|12)\n"] 21 | 22 | - name: "virtualenv pip installation" 23 | setup: [["virtualenv", "/env"]] 24 | command: ["which", "pip"] 25 | expectedOutput: ["/env/bin/pip\n"] 26 | 27 | - name: "virtualenv gunicorn installation" 28 | setup: [["virtualenv", "/env"], 29 | ["pip", "install", "gunicorn"]] 30 | command: ["which", "gunicorn"] 31 | expectedOutput: ["/env/bin/gunicorn"] 32 | 33 | - name: "virtualenv flask installation" 34 | setup: [["virtualenv", "/env"], 35 | ["pip", "install", "flask"]] 36 | command: ["python", "-c", "import flask; print(flask.__file__)"] 37 | expectedOutput: ["/env/lib/python2.7/site-packages/flask/__init__.pyc"] 38 | -------------------------------------------------------------------------------- /tests/deploy_check/main.py: -------------------------------------------------------------------------------- 1 | # Copyright 2015 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License.
14 | 15 | # [START app] 16 | import logging 17 | 18 | from flask import Flask 19 | 20 | 21 | app = Flask(__name__) 22 | 23 | 24 | @app.route('/') 25 | def hello(): 26 | """Return a friendly HTTP greeting.""" 27 | return 'Hello World!' 28 | 29 | 30 | @app.errorhandler(500) 31 | def server_error(e): 32 | logging.exception('An error occurred during a request.') 33 | return """ 34 | An internal error occurred:
{}
35 | See logs for full stacktrace. 36 | """.format(e), 500 37 | 38 | 39 | if __name__ == '__main__': 40 | # This is used when running locally. Gunicorn is used to run the 41 | # application on Google App Engine. See entrypoint in app.yaml. 42 | app.run(host='127.0.0.1', port=8080, debug=True) 43 | # [END app] 44 | -------------------------------------------------------------------------------- /scripts/testdata/hello_world/main.py: -------------------------------------------------------------------------------- 1 | # Copyright 2015 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # [START app] 16 | import logging 17 | 18 | from flask import Flask 19 | 20 | 21 | app = Flask(__name__) 22 | 23 | 24 | @app.route('/') 25 | def hello(): 26 | """Return a friendly HTTP greeting.""" 27 | return 'Hello World!' 28 | 29 | 30 | @app.errorhandler(500) 31 | def server_error(e): 32 | logging.exception('An error occurred during a request.') 33 | return """ 34 | An internal error occurred:
{}
35 | See logs for full stacktrace. 36 | """.format(e), 500 37 | 38 | 39 | if __name__ == '__main__': 40 | # This is used when running locally. Gunicorn is used to run the 41 | # application on Google App Engine. See entrypoint in app.yaml. 42 | app.run(host='127.0.0.1', port=8080, debug=True) 43 | # [END app] 44 | -------------------------------------------------------------------------------- /perf_dashboard/bq_utils.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """Common util methods for processing data in BigQuery.""" 16 | 17 | import uuid 18 | 19 | from google.cloud import bigquery 20 | 21 | 22 | def insert_rows(project, dataset_name, table_name, rows): 23 | """Insert rows to bigquery table.""" 24 | client = bigquery.Client(project=project) 25 | dataset_ref = client.dataset(dataset_name) 26 | table_ref = dataset_ref.table(table_name) 27 | table = client.get_table(table_ref) 28 | client.create_rows(table, rows) 29 | 30 | def execute_query(query): 31 | """Execute query and return the query results.""" 32 | client = bigquery.Client() 33 | query_job = client.query((query)) 34 | 35 | # Start the query job and wait it to complete 36 | return [row.values() for row in query_job.result()] 37 | -------------------------------------------------------------------------------- /scripts/testdata/cloudbuild_ok.yaml_golden.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # This is a generated file. Do not edit. 3 | 4 | set -euo pipefail 5 | 6 | SOURCE_DIR=. 7 | 8 | # Setup staging directory 9 | HOST_WORKSPACE=$(mktemp -d -t local_cloudbuild_XXXXXXXXXX) 10 | function cleanup { 11 | if [ "${HOST_WORKSPACE}" != '/' -a -d "${HOST_WORKSPACE}" ]; then 12 | # Expect a single error message about /workspace busy 13 | docker run --volume /var/run/docker.sock:/var/run/docker.sock --volume /root/.docker:/root/.docker --volume ${HOST_WORKSPACE}:/workspace --workdir /workspace gcr.io/google-appengine/debian8 rm -rf /workspace 2>/dev/null || true 14 | # Do not expect error messages here. Display but ignore. 15 | rmdir "${HOST_WORKSPACE}" || true 16 | fi 17 | } 18 | trap cleanup EXIT 19 | 20 | # Copy source to staging directory 21 | echo "Copying source to staging directory ${HOST_WORKSPACE}" 22 | rsync -avzq --exclude=.git "${SOURCE_DIR}" "${HOST_WORKSPACE}" 23 | 24 | # Build commands 25 | docker run --volume /var/run/docker.sock:/var/run/docker.sock --volume /root/.docker:/root/.docker --volume ${HOST_WORKSPACE}:/workspace --workdir /workspace --env 'MESSAGE=Hello World!' debian /bin/sh -c 'printenv MESSAGE' 26 | 27 | docker run --volume /var/run/docker.sock:/var/run/docker.sock --volume /root/.docker:/root/.docker --volume ${HOST_WORKSPACE}:/workspace --workdir /workspace --env 'MESSAGE=Goodbye\n And Farewell!' 
--env UNUSED=unused debian /bin/sh -c 'printenv MESSAGE' 28 | 29 | 30 | # End of build commands 31 | echo "Build completed successfully" 32 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # How to become a contributor and submit your own code 2 | 3 | ## Contributor License Agreements 4 | 5 | We'd love to accept your sample apps and patches! Before we can take them, we 6 | have to jump a couple of legal hurdles. 7 | 8 | Please fill out either the individual or corporate Contributor License 9 | Agreement (CLA). 10 | 11 | * If you are an individual writing original source code and you're sure you 12 | own the intellectual property, then you'll need to sign an [individual CLA] 13 | (https://developers.google.com/open-source/cla/individual). 14 | * If you work for a company that wants to allow you to contribute your work, 15 | then you'll need to sign a [corporate CLA] 16 | (https://developers.google.com/open-source/cla/corporate). 17 | 18 | Follow either of the two links above to access the appropriate CLA and 19 | instructions for how to sign and return it. Once we receive it, we'll 20 | be able to accept your pull requests. 21 | 22 | ## Contributing A Patch 23 | 24 | 1. Submit an issue describing your proposed change to the repo in question. 25 | 1. The repo owner will respond to your issue promptly. 26 | 1. If your proposed change is accepted, and you haven't already done so, sign a 27 | Contributor License Agreement (see details above). 28 | 1. Fork the desired repo, develop and test your code changes. 29 | 1. Ensure that your code adheres to the existing style in the sample to which 30 | you are contributing. Refer to the 31 | [Google Cloud Platform Samples Style Guide] 32 | (https://github.com/GoogleCloudPlatform/Template/wiki/style.html) for the 33 | recommended coding standards for this organization. 34 | 1. Ensure that your code has an appropriate set of unit tests which all pass. 35 | 1. Submit a pull request. 
36 | -------------------------------------------------------------------------------- /tests/virtualenv/virtualenv_python27.yaml: -------------------------------------------------------------------------------- 1 | schemaVersion: "1.0.0" 2 | 3 | globalEnvVars: 4 | - key: "VIRTUAL_ENV" 5 | value: "/env" 6 | - key: "PATH" 7 | value: "/env/bin:$PATH" 8 | 9 | commandTests: 10 | - name: "virtualenv27 python installation" 11 | setup: [["virtualenv", "-p", "python", "/env"]] 12 | command: ["which", "python"] 13 | expectedOutput: ["/env/bin/python\n"] 14 | 15 | - name: "virtualenv27 python2 installation" 16 | setup: [["virtualenv", "-p", "python", "/env"]] 17 | command: ["which", "python2"] 18 | expectedOutput: ["/env/bin/python2\n"] 19 | 20 | - name: "virtualenv27 python2.7 installation" 21 | setup: [["virtualenv", "-p", "python", "/env"]] 22 | command: ["which", "python2.7"] 23 | expectedOutput: ["/env/bin/python2.7\n"] 24 | 25 | - name: "virtualenv27 python version" 26 | setup: [["virtualenv", "-p", "python", "/env"]] 27 | command: ["python", "--version"] 28 | # we check stderr instead of stdout for Python versions < 3.4 29 | # https://bugs.python.org/issue18338 30 | expectedError: ["Python 2.7.(9|12)\n"] 31 | 32 | - name: "virtualenv27 pip installation" 33 | setup: [["virtualenv", "-p", "python", "/env"]] 34 | command: ["which", "pip"] 35 | expectedOutput: ["/env/bin/pip\n"] 36 | 37 | - name: "virtualenv27 gunicorn installation" 38 | setup: [["virtualenv", "-p", "python", "/env"], 39 | ["pip", "install", "gunicorn"]] 40 | command: ["which", "gunicorn"] 41 | expectedOutput: ["/env/bin/gunicorn"] 42 | 43 | - name: "virtualenv27 flask installation" 44 | setup: [["virtualenv", "-p", "python", "/env"], 45 | ["pip", "install", "flask"]] 46 | command: ["python", "-c", "import flask; print(flask.__file__)"] 47 | expectedOutput: ["/env/lib/python2.7/site-packages/flask/__init__.pyc"] 48 | -------------------------------------------------------------------------------- /scripts/integration-test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | export KOKORO_GITHUB_DIR=${KOKORO_ROOT}/src/github 6 | source ${KOKORO_GFILE_DIR}/kokoro/common.sh 7 | 8 | export GOOGLE_CLOUD_PROJECT=gcp-runtimes 9 | 10 | sudo -E /usr/local/bin/pip install --upgrade -r ${KOKORO_GFILE_DIR}/appengine/integration_tests/requirements.txt 11 | 12 | if [ -f ${KOKORO_GITHUB_DIR}/${SAMPLE_APP_DIRECTORY}/requirements.txt ] 13 | then 14 | sudo -E /usr/local/bin/pip install --upgrade -r ${KOKORO_GITHUB_DIR}/${SAMPLE_APP_DIRECTORY}/requirements.txt 15 | fi 16 | 17 | export GOPATH=${KOKORO_GITHUB_DIR}/${SAMPLE_APP_DIRECTORY} 18 | 19 | flags="" 20 | 21 | if [ -n "${STAGING_IMAGE}" ]; then 22 | flags="$flags -i ${STAGING_IMAGE}" 23 | fi 24 | 25 | if [ "${SKIP_STANDARD_LOGGING_TESTS}" = "true" ]; then 26 | flags="$flags --skip-standard-logging-tests" 27 | fi 28 | 29 | if [ "${SKIP_CUSTOM_LOGGING_TESTS}" = "true" ]; then 30 | flags="$flags --skip-custom-logging-tests" 31 | fi 32 | 33 | if [ "${SKIP_MONITORING_TESTS}" = "true" ]; then 34 | flags="$flags --skip-monitoring-tests" 35 | fi 36 | 37 | if [ "${SKIP_EXCEPTION_TESTS}" = "true" ]; then 38 | flags="$flags --skip-exception-tests" 39 | fi 40 | 41 | if [ "${SKIP_CUSTOM_TESTS}" = "true" ]; then 42 | flags="$flags --skip-custom-tests" 43 | fi 44 | 45 | if [ -n "${URL}" ]; then 46 | flags="$flags --url ${URL}" 47 | fi 48 | 49 | if [ -n "${BUILDER}" ]; then 50 | flags="$flags --builder ${BUILDER}" 51 | gcloud config set
app/use_runtime_builders True 52 | gcloud config set app/runtime_builders_root file://${KOKORO_GITHUB_DIR}/${SAMPLE_APP_DIRECTORY} 53 | fi 54 | 55 | if [ -n "${YAML}" ]; then 56 | flags="$flags --yaml ${KOKORO_GITHUB_DIR}/${YAML}" 57 | fi 58 | 59 | 60 | chmod a+x ${KOKORO_GFILE_DIR}/appengine/integration_tests/testsuite/driver.py 61 | ${KOKORO_GFILE_DIR}/appengine/integration_tests/testsuite/driver.py -d ${KOKORO_GITHUB_DIR}/${SAMPLE_APP_DIRECTORY} ${flags} 62 | -------------------------------------------------------------------------------- /python-interpreter-builder/scripts/package-python.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euo pipefail 4 | set -x 5 | 6 | function usage { 7 | echo "Usage: $0 long_version tag 8 | Create .deb package file for a Python interpreter with 9 | long_version: (x.y.z) Interpreter version 10 | tag: version suffix unique to this build 11 | " >&2 12 | exit 1 13 | } 14 | # Process command line 15 | if [ -z "${1:+set}" -o -z "${2:+set}" ]; then 16 | usage 17 | fi 18 | LONG_VERSION=$1 19 | BUILD_TAG=$2 20 | SHORT_VERSION=${1%.*} 21 | 22 | # Compute version specs 23 | DEB_PACKAGE_NAME=gcp-python${SHORT_VERSION} 24 | # Can't have - (hyphen) in debian revision as per 25 | # https://www.debian.org/doc/debian-policy/ch-controlfields.html#s-f-Version 26 | DEBIAN_REVISION=${BUILD_TAG//-/.} 27 | DEB_PACKAGE_VERSION=${LONG_VERSION}-${DEBIAN_REVISION} 28 | 29 | PACKAGE_DIR=/opt/packages 30 | # E.g. gcp-python3.6_3.6.2-1gcp~2017.07.25.110644_amd64.deb 31 | DEB_FILENAME=${DEB_PACKAGE_NAME}_${DEB_PACKAGE_VERSION}_amd64.deb 32 | 33 | # Create directory for intermediate files 34 | SCRATCH_DIR=$(mktemp --directory) 35 | cd "${SCRATCH_DIR}" 36 | 37 | # Synthesize Debian control file. Note that the "Depends:" is 38 | # currently Debian8-specific, and lacks version specifiers present in 39 | # the standard Debian Python packages. 40 | export DEB_PACKAGE_NAME DEB_PACKAGE_VERSION SHORT_VERSION 41 | envsubst </DEBIAN/control.in >control \ 42 | '${DEB_PACKAGE_NAME} ${DEB_PACKAGE_VERSION} ${SHORT_VERSION}' 43 | 44 | # Generate components of .deb archive 45 | tar czf control.tar.gz control 46 | tar czf data.tar.gz "/opt/python${SHORT_VERSION}" 47 | echo "2.0" >debian-binary 48 | 49 | # Generate final .deb.
50 | mkdir -p "${PACKAGE_DIR}" 51 | ar rcD "${PACKAGE_DIR}/${DEB_FILENAME}" \ 52 | debian-binary control.tar.gz data.tar.gz 53 | rm debian-binary control.tar.gz data.tar.gz 54 | 55 | # Validate .deb 56 | dpkg --install --dry-run "${PACKAGE_DIR}/${DEB_FILENAME}" 57 | 58 | # Add to list 59 | echo "${DEB_FILENAME}" >> "${PACKAGE_DIR}/packages.txt" 60 | -------------------------------------------------------------------------------- /tests/virtualenv/virtualenv_python34.yaml: -------------------------------------------------------------------------------- 1 | schemaVersion: "1.0.0" 2 | 3 | globalEnvVars: 4 | - key: "VIRTUAL_ENV" 5 | value: "/env" 6 | - key: "PATH" 7 | value: "/env/bin:$PATH" 8 | 9 | commandTests: 10 | - name: "virtualenv34 python installation" 11 | setup: [["virtualenv", "-p", "python3.4", "/env"]] 12 | command: ["which", "python"] 13 | expectedOutput: ["/env/bin/python\n"] 14 | 15 | - name: "virtualenv34 python3 installation" 16 | setup: [["virtualenv", "-p", "python3.4", "/env"]] 17 | command: ["which", "python3"] 18 | expectedOutput: ["/env/bin/python3\n"] 19 | 20 | - name: "virtualenv34 python3.4 installation" 21 | setup: [["virtualenv", "-p", "python3.4", "/env"]] 22 | command: ["which", "python3.4"] 23 | expectedOutput: ["/env/bin/python3.4\n"] 24 | 25 | - name: "virtualenv34 python version" 26 | setup: [["virtualenv", "-p", "python3.4", "/env"]] 27 | command: ["python", "--version"] 28 | expectedOutput: ["Python 3.4.8\n"] 29 | 30 | - name: "virtualenv34 pip installation" 31 | setup: [["virtualenv", "-p", "python3.4", "/env"]] 32 | command: ["which", "pip"] 33 | expectedOutput: ["/env/bin/pip\n"] 34 | 35 | - name: "virtualenv34 pip3 installation" 36 | setup: [["virtualenv", "-p", "python3.4", "/env"]] 37 | command: ["which", "pip3"] 38 | expectedOutput: ["/env/bin/pip3\n"] 39 | 40 | - name: "virtualenv34 gunicorn installation" 41 | setup: [["virtualenv", "-p", "python3.4", "/env"], 42 | ["pip", "install", "gunicorn"]] 43 | command: ["which", "gunicorn"] 44 | expectedOutput: ["/env/bin/gunicorn"] 45 | 46 | - name: "virtualenv34 flask installation" 47 | setup: [["virtualenv", "-p", "python3.4", "/env"], 48 | ["pip", "install", "flask"]] 49 | command: ["python", "-c", "import flask; print(flask.__file__)"] 50 | expectedOutput: ["/env/lib/python3.4/site-packages/flask/__init__.py"] 51 | 52 | - name: "virtualenv34 test.support availability" 53 | setup: [["virtualenv", "-p", "python3.4", "/env"]] 54 | command: ["python", "-c", "\"from test import pystone, regrtest, support\""] 55 | -------------------------------------------------------------------------------- /tests/virtualenv/virtualenv_python35.yaml: -------------------------------------------------------------------------------- 1 | schemaVersion: "1.0.0" 2 | 3 | globalEnvVars: 4 | - key: "VIRTUAL_ENV" 5 | value: "/env" 6 | - key: "PATH" 7 | value: "/env/bin:$PATH" 8 | 9 | commandTests: 10 | - name: "virtualenv35 python installation" 11 | setup: [["virtualenv", "-p", "python3.5", "/env"]] 12 | command: ["which", "python"] 13 | expectedOutput: ["/env/bin/python\n"] 14 | 15 | - name: "virtualenv35 python3 installation" 16 | setup: [["virtualenv", "-p", "python3.5", "/env"]] 17 | command: ["which", "python3"] 18 | expectedOutput: ["/env/bin/python3\n"] 19 | 20 | - name: "virtualenv35 python3.5 installation" 21 | setup: [["virtualenv", "-p", "python3.5", "/env"]] 22 | command: ["which", "python3.5"] 23 | expectedOutput: ["/env/bin/python3.5\n"] 24 | 25 | - name: "virtualenv35 python version" 26 | setup: [["virtualenv", "-p", 
"python3.5", "/env"]] 27 | command: ["python", "--version"] 28 | expectedOutput: ["Python 3.5.9\n"] 29 | 30 | - name: "virtualenv35 pip installation" 31 | setup: [["virtualenv", "-p", "python3.5", "/env"]] 32 | command: ["which", "pip"] 33 | expectedOutput: ["/env/bin/pip\n"] 34 | 35 | - name: "virtualenv35 pip3 installation" 36 | setup: [["virtualenv", "-p", "python3.5", "/env"]] 37 | command: ["which", "pip3"] 38 | expectedOutput: ["/env/bin/pip3\n"] 39 | 40 | - name: "virtualenv35 gunicorn installation" 41 | setup: [["virtualenv", "-p", "python3.5", "/env"], 42 | ["pip", "install", "gunicorn"]] 43 | command: ["which", "gunicorn"] 44 | expectedOutput: ["/env/bin/gunicorn"] 45 | 46 | - name: "virtualenv35 flask installation" 47 | setup: [["virtualenv", "-p", "python3.5", "/env"], 48 | ["pip", "install", "flask"]] 49 | command: ["python", "-c", "import flask; print(flask.__file__)"] 50 | expectedOutput: ["/env/lib/python3.5/site-packages/flask/__init__.py"] 51 | 52 | - name: "virtualenv35 test.support availability" 53 | setup: [["virtualenv", "-p", "python3.5", "/env"]] 54 | command: ["python", "-c", "\"from test import pystone, regrtest, support\""] 55 | -------------------------------------------------------------------------------- /tests/virtualenv/virtualenv_python36.yaml: -------------------------------------------------------------------------------- 1 | schemaVersion: "1.0.0" 2 | 3 | globalEnvVars: 4 | - key: "VIRTUAL_ENV" 5 | value: "/env" 6 | - key: "PATH" 7 | value: "/env/bin:$PATH" 8 | 9 | commandTests: 10 | - name: "virtualenv36 python installation" 11 | setup: [["virtualenv", "-p", "python3.6", "/env"]] 12 | command: ["which", "python"] 13 | expectedOutput: ["/env/bin/python\n"] 14 | 15 | - name: "virtualenv36 python3 installation" 16 | setup: [["virtualenv", "-p", "python3.6", "/env"]] 17 | command: ["which", "python3"] 18 | expectedOutput: ["/env/bin/python3\n"] 19 | 20 | - name: "virtualenv36 python3.6 installation" 21 | setup: [["virtualenv", "-p", "python3.6", "/env"]] 22 | command: ["which", "python3.6"] 23 | expectedOutput: ["/env/bin/python3.6\n"] 24 | 25 | - name: "virtualenv36 python version" 26 | setup: [["virtualenv", "-p", "python3.6", "/env"]] 27 | command: ["python", "--version"] 28 | expectedOutput: ["Python 3.6.10\n"] 29 | 30 | - name: "virtualenv36 pip installation" 31 | setup: [["virtualenv", "-p", "python3.6", "/env"]] 32 | command: ["which", "pip"] 33 | expectedOutput: ["/env/bin/pip\n"] 34 | 35 | - name: "virtualenv36 pip3 installation" 36 | setup: [["virtualenv", "-p", "python3.6", "/env"]] 37 | command: ["which", "pip3"] 38 | expectedOutput: ["/env/bin/pip3\n"] 39 | 40 | - name: "virtualenv36 gunicorn installation" 41 | setup: [["virtualenv", "-p", "python3.6", "/env"], 42 | ["pip", "install", "gunicorn"]] 43 | command: ["which", "gunicorn"] 44 | expectedOutput: ["/env/bin/gunicorn"] 45 | 46 | - name: "virtualenv36 flask installation" 47 | setup: [["virtualenv", "-p", "python3.6", "/env"], 48 | ["pip", "install", "flask"]] 49 | command: ["python", "-c", "import flask; print(flask.__file__)"] 50 | expectedOutput: ["/env/lib/python3.6/site-packages/flask/__init__.py"] 51 | 52 | - name: "virtualenv36 test.support availability" 53 | setup: [["virtualenv", "-p", "python3.6", "/env"]] 54 | command: ["python", "-c", "\"from test import pystone, regrtest, support\""] 55 | -------------------------------------------------------------------------------- /tests/virtualenv/virtualenv_python37.yaml: 
-------------------------------------------------------------------------------- 1 | schemaVersion: "1.0.0" 2 | 3 | globalEnvVars: 4 | - key: "VIRTUAL_ENV" 5 | value: "/env" 6 | - key: "PATH" 7 | value: "/env/bin:$PATH" 8 | 9 | commandTests: 10 | - name: "virtualenv37 python installation" 11 | setup: [["virtualenv", "-p", "python3.7", "/env"]] 12 | command: ["which", "python"] 13 | expectedOutput: ["/env/bin/python\n"] 14 | 15 | - name: "virtualenv37 python3 installation" 16 | setup: [["virtualenv", "-p", "python3.7", "/env"]] 17 | command: ["which", "python3"] 18 | expectedOutput: ["/env/bin/python3\n"] 19 | 20 | - name: "virtualenv37 python3.7 installation" 21 | setup: [["virtualenv", "-p", "python3.7", "/env"]] 22 | command: ["which", "python3.7"] 23 | expectedOutput: ["/env/bin/python3.7\n"] 24 | 25 | - name: "virtualenv37 python version" 26 | setup: [["virtualenv", "-p", "python3.7", "/env"]] 27 | command: ["python", "--version"] 28 | expectedOutput: ["Python 3.7.9\n"] 29 | 30 | - name: "virtualenv37 pip installation" 31 | setup: [["virtualenv", "-p", "python3.7", "/env"]] 32 | command: ["which", "pip"] 33 | expectedOutput: ["/env/bin/pip\n"] 34 | 35 | - name: "virtualenv37 pip3 installation" 36 | setup: [["virtualenv", "-p", "python3.7", "/env"]] 37 | command: ["which", "pip3"] 38 | expectedOutput: ["/env/bin/pip3\n"] 39 | 40 | - name: "virtualenv37 gunicorn installation" 41 | setup: [["virtualenv", "-p", "python3.7", "/env"], 42 | ["pip", "install", "gunicorn"]] 43 | command: ["which", "gunicorn"] 44 | expectedOutput: ["/env/bin/gunicorn"] 45 | 46 | - name: "virtualenv37 flask installation" 47 | setup: [["virtualenv", "-p", "python3.7", "/env"], 48 | ["pip", "install", "flask"]] 49 | command: ["python", "-c", "import flask; print(flask.__file__)"] 50 | expectedOutput: ["/env/lib/python3.7/site-packages/flask/__init__.py"] 51 | 52 | - name: "virtualenv37 test.support availability" 53 | setup: [["virtualenv", "-p", "python3.7", "/env"]] 54 | command: ["python", "-c", "\"from test import pystone, regrtest, support\""] 55 | -------------------------------------------------------------------------------- /tests/no-virtualenv/no-virtualenv.yaml: -------------------------------------------------------------------------------- 1 | schemaVersion: "1.0.0" 2 | commandTests: 3 | - name: "default python installation" 4 | command: ["which", "python"] 5 | expectedOutput: ["/usr/bin/python\n"] 6 | 7 | - name: "default pip installation" 8 | command: ["which", "pip"] 9 | expectedOutput: ["/usr/local/bin/pip\n"] 10 | 11 | - name: "default pip python version" 12 | command: ["pip", "-V"] 13 | expectedOutput: ["pip .* from .*python 2[.]7"] 14 | 15 | - name: "default virtualenv installation" 16 | command: ["which", "virtualenv"] 17 | expectedOutput: ["/usr/local/bin/virtualenv\n"] 18 | 19 | - name: "default python2.7 installation" 20 | command: ["which", "python2.7"] 21 | expectedOutput: ["/usr/bin/python2.7\n"] 22 | 23 | - name: "default python3.4 installation" 24 | command: ["which", "python3.4"] 25 | expectedOutput: ["/opt/python3.4/bin/python3.4\n"] 26 | 27 | - name: "default python3.5 installation" 28 | command: ["which", "python3.5"] 29 | expectedOutput: ["/opt/python3.5/bin/python3.5\n"] 30 | 31 | - name: "default python3.6 installation" 32 | command: ["which", "python3.6"] 33 | expectedOutput: ["/opt/python3.6/bin/python3.6\n"] 34 | 35 | - name: "default gunicorn installation" 36 | setup: [["pip", "install", "gunicorn"]] 37 | command: ["which", "gunicorn"] 38 | expectedOutput: 
["/usr/local/bin/gunicorn\n"] 39 | 40 | - # Regression test for issue187 41 | name: "default python3 installation" 42 | command: ["which", "python3"] 43 | expectedOutput: ["/usr/local/bin/python3\n"] 44 | - name: "default python3 version" 45 | command: ["python3", "--version"] 46 | expectedOutput: ["Python 3.7.9\n"] 47 | - name: "default pip3 installation" 48 | command: ["which", "pip3"] 49 | expectedOutput: ["/usr/local/bin/pip3\n"] 50 | 51 | - name: "default flask installation" 52 | # Checks that 'pip' and 'python' are using the same Python version 53 | setup: [["pip", "install", "flask"]] 54 | command: ["python", "-c", "import flask; print(flask.__file__)"] 55 | expectedOutput: ["/usr/local/lib/python2.7/dist-packages/flask"] 56 | -------------------------------------------------------------------------------- /runtime-image/Dockerfile.in: -------------------------------------------------------------------------------- 1 | # The Google App Engine base image is debian (jessie) with ca-certificates 2 | # installed. 3 | # Source: https://github.com/GoogleCloudPlatform/debian-docker 4 | FROM ${OS_BASE_IMAGE} 5 | 6 | ADD resources /resources 7 | ADD scripts /scripts 8 | 9 | # Install Python, pip, and C dev libraries necessary to compile the most popular 10 | # Python libraries. 11 | RUN /scripts/install-apt-packages.sh 12 | RUN curl "https://bootstrap.pypa.io/pip/2.7/get-pip.py" -o "get-pip.py" && python ./get-pip.py && ln -s /usr/local/bin/pip /usr/bin/pip 13 | 14 | # Setup locale. This prevents Python 3 IO encoding issues. 15 | ENV LANG C.UTF-8 16 | # Make stdout/stderr unbuffered. This prevents delay between output and cloud 17 | # logging collection. 18 | ENV PYTHONUNBUFFERED 1 19 | 20 | RUN wget https://storage.googleapis.com/python-interpreters/latest/interpreter-3.4.tar.gz && \ 21 | wget https://storage.googleapis.com/python-interpreters/latest/interpreter-3.5.tar.gz && \ 22 | wget https://storage.googleapis.com/python-interpreters/latest/interpreter-3.6.tar.gz && \ 23 | wget https://storage.googleapis.com/python-interpreters/latest/interpreter-3.7.tar.gz && \ 24 | tar -xzf interpreter-3.4.tar.gz && \ 25 | tar -xzf interpreter-3.5.tar.gz && \ 26 | tar -xzf interpreter-3.6.tar.gz && \ 27 | tar -xzf interpreter-3.7.tar.gz && \ 28 | rm interpreter-*.tar.gz 29 | 30 | # Add Google-built interpreters to the path 31 | ENV PATH /opt/python3.7/bin:/opt/python3.6/bin:/opt/python3.5/bin:/opt/python3.4/bin:$PATH 32 | RUN update-alternatives --install /usr/local/bin/python3 python3 /opt/python3.7/bin/python3.7 50 && \ 33 | update-alternatives --install /usr/local/bin/pip3 pip3 /opt/python3.7/bin/pip3.7 50 34 | 35 | # Upgrade pip (debian package version tends to run a few version behind) and 36 | # install virtualenv system-wide. 
37 | RUN /usr/bin/pip install --upgrade -r /resources/requirements.txt && \ 38 | /opt/python3.4/bin/pip3.4 install --upgrade -r /resources/requirements.txt && \ 39 | rm -f /opt/python3.4/bin/pip /opt/python3.4/bin/pip3 && \ 40 | /opt/python3.5/bin/pip3.5 install --upgrade -r /resources/requirements.txt && \ 41 | rm -f /opt/python3.5/bin/pip /opt/python3.5/bin/pip3 && \ 42 | /opt/python3.6/bin/pip3.6 install --upgrade -r /resources/requirements.txt && \ 43 | rm -f /opt/python3.6/bin/pip /opt/python3.6/bin/pip3 && \ 44 | /opt/python3.7/bin/pip3.7 install --upgrade -r /resources/requirements.txt && \ 45 | rm -f /opt/python3.7/bin/pip /opt/python3.7/bin/pip3 && \ 46 | /usr/bin/pip install --upgrade -r /resources/requirements-virtualenv.txt 47 | 48 | # Setup the app working directory 49 | RUN ln -s /home/vmagent/app /app 50 | WORKDIR /app 51 | 52 | # Port 8080 is the port used by Google App Engine for serving HTTP traffic. 53 | EXPOSE 8080 54 | ENV PORT 8080 55 | 56 | # The user's Dockerfile must specify an entrypoint with ENTRYPOINT or CMD. 57 | CMD [] 58 | -------------------------------------------------------------------------------- /nox.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
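The PATH ordering and `update-alternatives` wiring above, together with the virtualenv and no-virtualenv structure tests earlier in this section, all come down to which binary a bare `python`, `python3`, or `pip` resolves to inside the image. Purely as an illustration (this snippet is not a file in the repository), the same lookups that the YAML `commandTests` encode can be spot-checked from a shell inside a built image with a few lines of Python:

```python
# Illustrative smoke check, intended to be run inside the built runtime image.
# It mirrors the `which ...` / `--version` assertions made by the
# container-structure-test YAML files above.
import shutil
import subprocess

BINARIES = ['python', 'python2.7', 'python3',
            'python3.4', 'python3.5', 'python3.6', 'python3.7',
            'pip', 'pip3', 'virtualenv']

for name in BINARIES:
    path = shutil.which(name)
    if path is None:
        print('{}: not on PATH'.format(name))
        continue
    # `--version` goes to stderr for Python 2, so merge the streams.
    result = subprocess.run([path, '--version'],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            universal_newlines=True)
    print('{}: {} ({})'.format(name, path, result.stdout.strip()))
```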
14 | 15 | import fnmatch 16 | import os 17 | 18 | import nox 19 | 20 | 21 | def _list_files(folder, pattern): 22 | """Lists all files below the given folder that match the pattern.""" 23 | for root, folders, files in os.walk(folder): 24 | for filename in files: 25 | if fnmatch.fnmatch(filename, pattern): 26 | yield os.path.join(root, filename) 27 | 28 | 29 | @nox.session 30 | def check_requirements(session): 31 | """Checks for out of date requirements and optionally updates them.""" 32 | session.install('gcp-devrel-py-tools') 33 | 34 | if 'update' in session.posargs: 35 | command = 'update-requirements' 36 | else: 37 | command = 'check-requirements' 38 | 39 | reqfiles = list(_list_files('.', 'requirements*.txt')) 40 | 41 | for reqfile in reqfiles: 42 | session.run('gcp-devrel-py-tools', command, reqfile) 43 | 44 | 45 | @nox.session 46 | def lint(session): 47 | session.interpreter = 'python3' # So it understands Python3 syntax 48 | session.install('flake8', 'flake8-import-order') 49 | session.run( 50 | 'flake8', 51 | '--import-order-style', 'google', 52 | '--application-import-names', 53 | 'gen_dockerfile,local_cloudbuild,validation_utils', 54 | 'scripts', 55 | 'nox.py', 56 | ) 57 | 58 | 59 | @nox.session 60 | @nox.parametrize('version', ['3.4', '3.5', '3.6', '3.7']) 61 | def tests(session, version): 62 | session.interpreter = 'python' + version 63 | session.install('-r', 'scripts/requirements-test.txt') 64 | session.run( 65 | 'py.test', 66 | '--ignore=scripts/testdata', 67 | '--cov=scripts', 68 | '--cov-append', 69 | '--cov-config=.coveragerc', 70 | '--cov-report=', # Report generated below 71 | 'scripts', 72 | env={'PYTHONPATH': ''} 73 | ) 74 | 75 | 76 | @nox.session 77 | def cover(session): 78 | """Run the final coverage report. 79 | 80 | This outputs the coverage report aggregating coverage from the unit 81 | test runs (not system test runs), and then erases coverage data. 82 | """ 83 | session.interpreter = 'python3.6' 84 | session.install('coverage', 'pytest-cov') 85 | session.run('coverage', 'report', '--show-missing', '--fail-under=97') 86 | session.run('coverage', 'erase') 87 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Google Cloud Platform - Python Runtime Docker Image 2 | 3 | This repository contains the source for the 4 | [`gcr.io/google-appengine/python`](https://gcr.io/google-appengine/python) 5 | [docker](https://docker.io) base image. This image can be used as the base image 6 | for running applications on 7 | [Google App Engine Flexible](https://cloud.google.com/appengine/docs/flexible/), 8 | [Google Kubernetes Engine](https://cloud.google.com/kubernetes-engine), or any 9 | other Docker host. 10 | 11 | This image is based on Ubuntu Xenial and contains packages required to build 12 | most of the popular Python libraries. For more information about this runtime, 13 | see the 14 | [documentation](https://cloud.google.com/appengine/docs/flexible/python/runtime). 
15 | 16 | ## App Engine 17 | 18 | When using App Engine Flexible, you can use the runtime without worrying about 19 | docker by specifying `runtime: python` in your `app.yaml`: 20 | 21 | ```yaml 22 | runtime: python 23 | env: flex 24 | entrypoint: gunicorn -b :$PORT main:app 25 | 26 | runtime_config: 27 | # You can also specify 2 for Python 2.7 28 | python_version: 3 29 | ``` 30 | 31 | If you have an existing App Engine application using this runtime and want to 32 | customize it, you can use the 33 | [`Cloud SDK`](https://cloud.google.com/sdk/gcloud/reference/preview/app/gen-config) 34 | to create a custom runtime: 35 | 36 | gcloud beta app gen-config --custom 37 | 38 | You can then modify the `Dockerfile` and `.dockerignore` as needed for your 39 | application. 40 | 41 | ## Kubernetes Engine & other Docker hosts 42 | 43 | For other docker hosts, you'll need to create a `Dockerfile` based on this image 44 | that copies your application code, installs dependencies, and declares a 45 | command or entrypoint. For example: 46 | 47 | FROM gcr.io/google-appengine/python 48 | 49 | # Create a virtualenv for dependencies. This isolates these packages from 50 | # system-level packages. 51 | # Use -p python3 or -p python3.7 to select python version. Default is version 2. 52 | RUN virtualenv /env 53 | 54 | # Setting these environment variables is the same as running 55 | # source /env/bin/activate. 56 | ENV VIRTUAL_ENV /env 57 | ENV PATH /env/bin:$PATH 58 | 59 | # Copy the application's requirements.txt and run pip to install all 60 | # dependencies into the virtualenv. 61 | ADD requirements.txt /app/requirements.txt 62 | RUN pip install -r /app/requirements.txt 63 | 64 | # Add the application source code. 65 | ADD . /app 66 | 67 | # Run a WSGI server to serve the application. gunicorn must be declared as 68 | # a dependency in requirements.txt. 69 | CMD gunicorn -b :$PORT main:app 70 | 71 | ## Building the image 72 | 73 | Google regularly builds and releases this image at 74 | [`gcr.io/google-appengine/python`](https://gcr.io/google-appengine/python). 75 | 76 | See [RELEASING.md](RELEASING.md) for more information. 77 | 78 | ## Contributing changes 79 | 80 | * See [CONTRIBUTING.md](CONTRIBUTING.md) 81 | 82 | ## Licensing 83 | 84 | * See [LICENSE](LICENSE) 85 | -------------------------------------------------------------------------------- /perf_dashboard/posts_stats.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
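Both the `app.yaml` sample and the Dockerfile sample in the README above end with `gunicorn -b :$PORT main:app`, which assumes a module named `main` that exposes a WSGI callable named `app`. A minimal, hypothetical `main.py` that satisfies that entrypoint (using Flask as the WSGI framework) could look like this; gunicorn itself must also be listed in `requirements.txt`, as the README notes:

```python
# Hypothetical main.py matching the `gunicorn -b :$PORT main:app` entrypoint
# used in the README examples; `app` is the WSGI callable gunicorn imports.
from flask import Flask

app = Flask(__name__)


@app.route('/')
def hello():
    return 'Hello, World!'


if __name__ == '__main__':
    # Used only for local debugging; in the container, gunicorn binds $PORT.
    app.run(host='127.0.0.1', port=8080, debug=True)
```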
14 | 15 | """A script to collect the number of StackOverflow posts related to 16 | Python and Google Cloud Platform.""" 17 | 18 | import datetime 19 | import os 20 | import sys 21 | import time 22 | import uuid 23 | 24 | from collections import Counter 25 | 26 | from google.cloud import bigquery 27 | 28 | import bq_utils 29 | 30 | GCLOUD_PROJECT_ENV = 'GCLOUD_PROJECT' 31 | DATASET_NAME = 'stackoverflow' 32 | TAG_COUNT_TABLE_NAME = 'tag_count_timestamp' 33 | UNANSWERED_POSTS_TABLE_NAME = 'unanswered_posts' 34 | 35 | 36 | def get_stackoverflow_tags_count(): 37 | """Get all the tags contains python and cloud key words""" 38 | query = """ 39 | SELECT 40 | SPLIT(tags, '|') tags 41 | FROM 42 | `bigquery-public-data.stackoverflow.posts_questions` 43 | WHERE 44 | tags LIKE '%python%' 45 | AND (tags LIKE '%google-cloud-platform%' OR tags LIKE '%gcp%') 46 | """ 47 | 48 | results = bq_utils.execute_query(query) 49 | 50 | rows = [row[0] for row in results] 51 | 52 | return rows 53 | 54 | 55 | def get_posts_list_unanswered(): 56 | # Get the list of posts that are unanswered 57 | query = """ 58 | SELECT 59 | id, title, tags 60 | FROM 61 | `bigquery-public-data.stackoverflow.posts_questions` 62 | WHERE 63 | tags LIKE '%python%' 64 | AND (tags LIKE '%google-cloud-platform%' OR tags LIKE '%gcp%') 65 | AND accepted_answer_id is NULL 66 | AND answer_count = 0; 67 | """ 68 | 69 | results = bq_utils.execute_query(query) 70 | 71 | # Add current timestamp to the rows 72 | date_time = datetime.datetime.now() 73 | rows = [(date_time,) + row for row in results] 74 | 75 | return rows 76 | 77 | 78 | def count_unique_tags(data): 79 | flattened_tag_list = [tag for tag_list in data for tag in tag_list] 80 | tag_count = Counter(flattened_tag_list) 81 | 82 | # Add current timestamp to the rows 83 | date_time = datetime.datetime.now() 84 | time_tag_count = [(date_time,) + item for item in tag_count.items()] 85 | 86 | return time_tag_count 87 | 88 | 89 | def main(): 90 | project = os.environ.get(GCLOUD_PROJECT_ENV) 91 | 92 | # Get the posts count for each tag 93 | rows = get_stackoverflow_tags_count() 94 | tag_count = count_unique_tags(rows) 95 | bq_utils.insert_rows( 96 | project, DATASET_NAME, TAG_COUNT_TABLE_NAME, tag_count) 97 | 98 | # Get the list of unanswered posts 99 | unanswered_posts = get_posts_list_unanswered() 100 | bq_utils.insert_rows( 101 | project, DATASET_NAME, UNANSWERED_POSTS_TABLE_NAME, unanswered_posts) 102 | 103 | 104 | if __name__ == '__main__': 105 | main() 106 | -------------------------------------------------------------------------------- /scripts/validation_utils_test.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Copyright 2017 Google Inc. All Rights Reserved. 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | 17 | """Unit test for validation_utils.py""" 18 | 19 | import argparse 20 | import re 21 | 22 | import pytest 23 | 24 | import validation_utils 25 | 26 | 27 | @pytest.mark.parametrize('container, field_name, field_type, expected', [ 28 | # Normal case, field present and correct type 29 | ({'present': 1}, 'present', int, 1), 30 | ({'present': '1'}, 'present', str, '1'), 31 | ({'present': [1]}, 'present', list, [1]), 32 | ({'present': {1: 2}}, 'present', dict, {1: 2}), 33 | # Missing field replaced by default 34 | ({}, 'missing', str, ''), 35 | # Valid conversions 36 | ({'str_to_int': '1'}, 'str_to_int', int, 1), 37 | ({'int_to_str': 1}, 'int_to_str', str, '1'), 38 | # None 39 | ({'None_to_int': None}, 'None_to_int', int, 0), 40 | ({'None_to_str': None}, 'None_to_str', str, ''), 41 | ]) 42 | def test_get_field_value_valid(container, field_name, field_type, expected): 43 | assert validation_utils.get_field_value( 44 | container, field_name, field_type) == expected 45 | 46 | 47 | @pytest.mark.parametrize('container, field_name, field_type', [ 48 | # Type conversion failures 49 | ({'bad_list_to_dict': [1]}, 'bad_list_to_dict', dict), 50 | ({'bad_list_to_str': [1]}, 'bad_list_to_str', str), 51 | ({'bad_dict_to_list': {1: 2}}, 'bad_dict_to_list', list), 52 | ({'bad_str_to_int': 'not_an_int'}, 'bad_str_to_int', int), 53 | ({'bad_str_to_list': 'abc'}, 'bad_str_to_list', list), 54 | ]) 55 | def test_get_field_value_invalid(container, field_name, field_type): 56 | with pytest.raises(ValueError): 57 | validation_utils.get_field_value(container, field_name, field_type) 58 | 59 | 60 | def test_validate_arg_regex(): 61 | assert validation_utils.validate_arg_regex( 62 | 'abc', re.compile('a[b]c')) == 'abc' 63 | with pytest.raises(argparse.ArgumentTypeError): 64 | validation_utils.validate_arg_regex('abc', re.compile('a[d]c')) 65 | 66 | 67 | @pytest.mark.parametrize('arg, expected', [ 68 | # Normal case, field present and correct type 69 | ('', {}), 70 | ('_A=1', {'_A': '1'}), 71 | ('_A=1,_B=2', {'_A': '1', '_B': '2'}), 72 | # Repeated key is ok 73 | ('_A=1,_A=2', {'_A': '2'}), 74 | # Extra = is ok 75 | ('_A=x=y=z,_B=2', {'_A': 'x=y=z', '_B': '2'}), 76 | # No value is ok 77 | ('_A=', {'_A': ''}), 78 | ]) 79 | def test_validate_arg_dicts_valid(arg, expected): 80 | assert validation_utils.validate_arg_dict(arg) == expected 81 | 82 | 83 | @pytest.mark.parametrize('arg', [ 84 | # No key 85 | ',_A', 86 | '_A,', 87 | # Invalid variable name 88 | '_Aa=1', 89 | '_aA=1', 90 | '0A=1', 91 | ]) 92 | def test_validate_arg_dicts_invalid(arg): 93 | with pytest.raises(argparse.ArgumentTypeError): 94 | validation_utils.validate_arg_dict(arg) 95 | 96 | 97 | if __name__ == '__main__': 98 | pytest.main([__file__]) 99 | -------------------------------------------------------------------------------- /tests/benchmark/benchmark_between_releases.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Build the benchmark image for release 1 from Dockerfile 4 | echo "Building image for release 1" 5 | export STAGING_IMAGE="${DOCKER_NAMESPACE}/python:${TAG1}" 6 | envsubst <"Dockerfile".in >"Dockerfile" '$STAGING_IMAGE' 7 | docker build --no-cache -t benchmark_1 . 8 | rm Dockerfile 9 | 10 | # Build the benchmark image for release 2 from Dockerfile 11 | echo "Building image for release 2" 12 | export STAGING_IMAGE="${DOCKER_NAMESPACE}/python:${TAG2}" 13 | envsubst <"Dockerfile".in >"Dockerfile" '$STAGING_IMAGE' 14 | docker build --no-cache -t benchmark_2 . 
15 | rm Dockerfile 16 | 17 | echo "Successfully built images" 18 | 19 | # Create folders to hold the files 20 | mkdir "$TAG1" 21 | mkdir "$TAG2" 22 | 23 | # Start the containers and copy the benchmark results for each python version from the containers to the host 24 | docker run -it --name benchmark_1 -h CONTAINER1 -v "${PWD}"/"$TAG1":/export benchmark_1 /bin/bash -c "cp /result/py*.json /export/" 25 | docker run -it --name benchmark_2 -h CONTAINER2 -v "${PWD}"/"$TAG2":/export benchmark_2 /bin/bash -c "cp /result/py*.json /export/" 26 | 27 | echo "Start benchmarking the python interpreter performance between the two releases" 28 | 29 | # Compare the performance of the interpreters between the two releases 30 | pyperformance compare "$TAG1"/py2.7.json "$TAG2"/py2.7.json --output_style table > py2.7_res 31 | pyperformance compare "$TAG1"/py3.4.json "$TAG2"/py3.4.json --output_style table > py3.4_res 32 | pyperformance compare "$TAG1"/py3.5.json "$TAG2"/py3.5.json --output_style table > py3.5_res 33 | 34 | # Check if the python3.6 benchmark result exists 35 | if [[ ( -e "$TAG1"/py3.6.json ) && ( -e "$TAG2"/py3.6.json ) ]]; then 36 | pyperformance compare "$TAG1"/py3.6.json "$TAG2"/py3.6.json --output_style table > py3.6_res; 37 | fi 38 | 39 | echo "Start extracting data and generating CSV files, then upload to Cloud Storage and insert into the BigQuery tables" 40 | 41 | # Extract memory usage and running time data from the performance result json, generating CSV files 42 | for path_to_file in $TAG1/*.json; do 43 | python generate_csv.py --filename $path_to_file --tag $TAG1 44 | done 45 | 46 | for path_to_file in $TAG2/*.json; do 47 | python generate_csv.py --filename $path_to_file --tag $TAG2 48 | done 49 | 50 | # Set the project that holds the Cloud Storage bucket and BigQuery tables 51 | gcloud config set project cloud-python-runtime-qa 52 | 53 | # Get the list of existing release data on Cloud Storage and skip if the current TAG1 or TAG2 already exists in the list 54 | gsutil ls gs://python-runtime-benchmark > existing_releases 55 | 56 | for container_tag in $TAG1 $TAG2; do 57 | if grep --fixed-strings --quiet "$container_tag" existing_releases; then 58 | echo "Performance data for $container_tag already exists, so skip processing it." 
59 | else 60 | # Upload the CSV files to Cloud Storage 61 | gsutil cp -r $container_tag gs://python-runtime-benchmark 62 | # Load the CSV files from Cloud Storage to Big Query table 63 | # Load the performance data of each function 64 | for path_to_file in $container_tag/py2.7.csv $container_tag/py3.4.csv $container_tag/py3.5.csv; do 65 | bq load benchmark.benchmark_functions gs://python-runtime-benchmark/"$path_to_file" container_tag:string,runtime_version:string,function_name:string,time_used:float,mem_usage:float 66 | done 67 | # Load the average performance data of each runtime version in a release 68 | bq load benchmark.benchmark_statistics gs://python-runtime-benchmark/"$container_tag"/averages.csv container_tag:string,runtime_version:string,ave_time_used:float,ave_mem_usage:float 69 | fi 70 | done 71 | 72 | echo "Completed" 73 | -------------------------------------------------------------------------------- /cloudbuild_test.yaml: -------------------------------------------------------------------------------- 1 | timeout: 3600s 2 | steps: 3 | - # Explicitly pull image into GCB so that later steps work 4 | name: '${_DOCKER_NAMESPACE}/python:${_TAG}' 5 | args: [ 6 | '/bin/true', 7 | ] 8 | id: runtime 9 | 10 | - # Validate structure of base runtime image 11 | name: gcr.io/gcp-runtimes/container-structure-test:v0.2.1 12 | args: [ 13 | '-test.v', 14 | '-image', '${_DOCKER_NAMESPACE}/python:${_TAG}', 15 | '/workspace/tests/virtualenv/virtualenv_default.yaml', 16 | ] 17 | waitFor: ['runtime'] 18 | - name: gcr.io/gcp-runtimes/container-structure-test:v0.2.1 19 | args: [ 20 | '-test.v', 21 | '-image', '${_DOCKER_NAMESPACE}/python:${_TAG}', 22 | '/workspace/tests/virtualenv/virtualenv_python27.yaml', 23 | ] 24 | waitFor: ['runtime'] 25 | - name: gcr.io/gcp-runtimes/container-structure-test:v0.2.1 26 | args: [ 27 | '-test.v', 28 | '-image', '${_DOCKER_NAMESPACE}/python:${_TAG}', 29 | '/workspace/tests/virtualenv/virtualenv_python34.yaml', 30 | ] 31 | waitFor: ['runtime'] 32 | - name: gcr.io/gcp-runtimes/container-structure-test:v0.2.1 33 | args: [ 34 | '-test.v', 35 | '-image', '${_DOCKER_NAMESPACE}/python:${_TAG}', 36 | '/workspace/tests/virtualenv/virtualenv_python35.yaml', 37 | ] 38 | waitFor: ['runtime'] 39 | - name: gcr.io/gcp-runtimes/container-structure-test:v0.2.1 40 | args: [ 41 | '-test.v', 42 | '-image', '${_DOCKER_NAMESPACE}/python:${_TAG}', 43 | '/workspace/tests/virtualenv/virtualenv_python36.yaml', 44 | ] 45 | waitFor: ['runtime'] 46 | - name: gcr.io/gcp-runtimes/container-structure-test:v0.2.1 47 | args: [ 48 | '-test.v', 49 | '-image', '${_DOCKER_NAMESPACE}/python:${_TAG}', 50 | '/workspace/tests/virtualenv/virtualenv_python37.yaml', 51 | ] 52 | waitFor: ['runtime'] 53 | - name: gcr.io/gcp-runtimes/container-structure-test:v0.2.1 54 | args: [ 55 | '-test.v', 56 | '-image', '${_DOCKER_NAMESPACE}/python:${_TAG}', 57 | '/workspace/tests/no-virtualenv/no-virtualenv.yaml', 58 | ] 59 | waitFor: ['runtime'] 60 | 61 | # Temporarily disabled because it fails on symbolic links in Ubuntu: 62 | # https://github.com/GoogleCloudPlatform/container-structure-test/issues/77 63 | #- # Check license compliance 64 | # name: gcr.io/gcp-runtimes/container-structure-test:v0.2.1 65 | # args: [ 66 | # '-test.v', 67 | # '-image', '${_DOCKER_NAMESPACE}/python:${_TAG}', 68 | # '/workspace/tests/license-test/license-test.yaml' 69 | # ] 70 | # waitFor: ['runtime'] 71 | 72 | - # Do third-party library compatibility tests for Python 2 73 | name: gcr.io/cloud-builders/docker:latest 74 | args: [ 75 | 'build', 
'-t', 'python2-libraries-intermediate', '--build-arg', 76 | 'intermediate_image=${_DOCKER_NAMESPACE}/python:${_TAG}', 77 | '/workspace/tests/python2-libraries' 78 | ] 79 | id: python2-libraries-intermediate 80 | waitFor: ['runtime'] 81 | - name: gcr.io/gcp-runtimes/container-structure-test:v0.2.1 82 | args: [ 83 | '-test.v', 84 | '-image', 'python2-libraries-intermediate', 85 | '/workspace/tests/python2-libraries/python2-libraries.yaml' 86 | ] 87 | waitFor: ['python2-libraries-intermediate'] 88 | 89 | - # Do third-party library compatibility tests for Python 3 90 | name: gcr.io/cloud-builders/docker:latest 91 | args: [ 92 | 'build', '-t', 'python3-libraries-intermediate', '--build-arg', 93 | 'intermediate_image=${_DOCKER_NAMESPACE}/python:${_TAG}', 94 | '/workspace/tests/python3-libraries' 95 | ] 96 | id: python3-libraries-intermediate 97 | waitFor: ['runtime'] 98 | - name: gcr.io/gcp-runtimes/container-structure-test:v0.2.1 99 | args: [ 100 | '-test.v', 101 | '-image', 'python3-libraries-intermediate', 102 | '/workspace/tests/python3-libraries/python3-libraries.yaml' 103 | ] 104 | waitFor: ['python3-libraries-intermediate'] 105 | 106 | - # Run other compatibility tests 107 | name: gcr.io/cloud-builders/docker:latest 108 | args: [ 109 | 'build', '--tag=${_DOCKER_NAMESPACE}/python/tests/eventlet:${_TAG}', 110 | '--no-cache', '/workspace/tests/eventlet/' 111 | ] 112 | waitFor: ['runtime'] 113 | 114 | images: [] 115 | -------------------------------------------------------------------------------- /tests/python3-libraries/requirements.txt: -------------------------------------------------------------------------------- 1 | alembic==1.0.0 2 | amqp==2.3.2 3 | amqplib==1.0.2 4 | ansible==7.0.0 5 | anyjson==0.3.3 6 | apache-libcloud==2.3.0 7 | argparse==1.4.0 8 | astroid==2.0.4 9 | awscli==1.16.1 10 | babel==2.6.0 11 | backports.ssl_match_hostname==3.5.0.1 12 | bcdoc==0.16.0 13 | beautifulsoup4==4.6.3 14 | billiard==3.5.0.4 15 | blessings==1.7 16 | blinker==1.4 17 | boto==2.49.0 18 | botocore==1.11.1 19 | bottle==0.12.13 20 | celery==4.2.1 21 | certifi==2023.7.22 22 | cffi==1.11.5 23 | chardet==3.0.4 24 | click==6.7 25 | cliff==2.13.0 26 | cmd2==0.9.4 27 | colorama==0.3.9 28 | configobj==5.0.6 29 | cov-core==1.15.0 30 | coverage==4.5.1 31 | coveralls==1.4.0 32 | crcmod==1.7 33 | cryptography==41.0.3 34 | cssselect==1.0.3 35 | cython==0.28.5 36 | decorator==4.3.0 37 | django-celery==3.2.2 38 | django-debug-toolbar==1.9.1 39 | django-extensions==2.1.1 40 | django==2.2.28 41 | django_compress==1.0.1 42 | djangorestframework==3.8.2 43 | docker-py==1.10.6 44 | docopt==0.6.2 45 | docutils==0.14 46 | ecdsa==0.13 47 | elasticsearch==6.3.1 48 | enum34==1.1.6 49 | eventlet==0.24.1 50 | extras==1.0.0 51 | fabric==2.3.1 52 | fixtures==3.0.0 53 | flake8==3.5.0 54 | flask==2.2.5 55 | funcsigs==1.0.2 56 | gevent==1.3.6 57 | google-api-python-client==1.7.4 58 | greenlet==0.4.14 59 | gunicorn==19.9.0 60 | hiredis==0.2.0 61 | honcho==1.0.1 62 | html5lib==1.0.1 63 | httplib2==0.11.3 64 | idna==2.7 65 | ipaddress==1.0.22 66 | ipython==6.5.0 67 | iso8601==0.1.12 68 | isodate==0.6.0 69 | itsdangerous==0.24 70 | jinja2==2.10 71 | jmespath==0.9.3 72 | jsonschema==2.6.0 73 | kombu==4.2.1 74 | linecache2==1.0.0 75 | logilab-common==1.4.2 76 | lxml==4.2.4 77 | mako==1.0.7 78 | manifestparser==1.1 79 | markdown==2.6.11 80 | markupsafe==1.1.1 81 | matplotlib==2.2.3 82 | mccabe==0.6.1 83 | meld3==1.0.2 84 | mock==2.0.0 85 | mozcrash==1.0 86 | mozdevice==1.0.1 87 | mozfile==1.2 88 | mozinfo==0.10 89 | mozlog==3.8 90 | 
moznetwork==0.27 91 | mozprocess==0.26 92 | msgpack-python==0.5.6 93 | ndg-httpsclient==0.5.1 94 | netaddr==0.7.19 95 | netifaces==0.10.7 96 | newrelic==4.2.0.100 97 | nose==1.3.7 98 | numpy==1.22.0 99 | oauth2==1.9.0.post1 100 | oauth2client==4.1.2 101 | oauthlib==2.1.0 102 | ordereddict==1.1 103 | oslo.config==6.4.0 104 | pandas==0.23.4 105 | paramiko==2.4.1 106 | passlib==1.7.1 107 | paste==2.0.3 108 | pastedeploy==1.5.2 109 | pastescript==2.0.2 110 | pbr==4.2.0 111 | pep8==1.7.1 112 | pexpect==4.6.0 113 | pika==0.12.0 114 | pillow==9.3.0 115 | pip==18.0 116 | prettytable==0.7.2 117 | protobuf==3.6.1 118 | psutil==5.4.7 119 | psycopg2==2.7.5 120 | py==1.5.4 121 | pyasn1-modules==0.2.2 122 | pyasn1==0.4.4 123 | pycparser==2.18 124 | pycrypto==2.6.1 125 | pyflakes==2.0.0 126 | pygments==2.15.0 127 | pyjwt==1.6.4 128 | pylibmc==1.5.2 129 | pylint==2.1.1 130 | pymongo==3.7.1 131 | pymysql==0.9.2 132 | pyopenssl==18.0.0 133 | pyparsing==2.2.0 134 | pyramid==1.9.2 135 | pystache==0.5.4 136 | pytest-cov==2.5.1 137 | pytest==3.7.3 138 | python-daemon==2.2.0 139 | python-dateutil==2.7.3 140 | python-gflags==3.1.2 141 | python-keystoneclient==3.17.0 142 | python-memcached==1.59 143 | python-mimeparse==1.6.0 144 | python-novaclient==11.0.0 145 | python-subunit==1.3.0 146 | python-swiftclient==3.6.0 147 | pytz==2018.5 148 | pyyaml==5.4 149 | pyzmq==17.1.2 150 | raven==6.9.0 151 | redis==2.10.6 152 | repoze.lru==0.7 153 | requests-oauthlib==1.0.0 154 | requests==2.31.0 155 | retrying==1.3.3 156 | rsa==3.4.2 157 | scipy==1.10.0 158 | selenium==3.14.0 159 | setuptools-git==1.2 160 | setuptools==40.2.0 161 | sh==1.12.14 162 | simplejson==3.16.0 163 | six==1.11.0 164 | snowballstemmer==1.2.1 165 | south==1.0.2 166 | sphinx==1.7.7 167 | sqlalchemy-migrate==0.11.0 168 | sqlalchemy==1.2.11 169 | sqlparse==0.4.4 170 | statsd==3.3.0 171 | stevedore==1.29.0 172 | testrepository==0.0.20 173 | testtools==2.3.0 174 | thrift==0.11.0 175 | tornado==6.3.3 176 | tox==3.2.1 177 | twisted==18.7.0 178 | ujson==1.35 179 | unidecode==1.0.22 180 | unittest2==1.1.0 181 | uritemplate==3.0.0 182 | urllib3==1.26.5 183 | uwsgi==2.0.22 184 | versiontools==1.9.1 185 | virtualenv==16.0.0 186 | waitress==2.1.2 187 | warlock==1.3.0 188 | webob==1.8.2 189 | websocket-client==0.51.0 190 | webtest==2.0.30 191 | werkzeug==2.2.3 192 | wheel==0.31.1 193 | xlrd==1.1.0 194 | zc.buildout==2.12.1 195 | zope.interface==4.5.0 196 | -------------------------------------------------------------------------------- /scripts/validation_utils.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Copyright 2017 Google Inc. All Rights Reserved. 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | """Utilities for schema and command line validation""" 18 | 19 | import argparse 20 | import re 21 | 22 | 23 | # For easier development, we allow redefining builtins like 24 | # --substitutions=PROJECT_ID=foo even though gcloud doesn't. 
25 | KEY_VALUE_REGEX = re.compile(r'^([A-Z_][A-Z0-9_]*)=(.*)$') 26 | 27 | 28 | def get_field_value(container, field_name, field_type): 29 | """Fetch a field from a container with typechecking and default values. 30 | 31 | The field value is coerced to the desired type. If the field is 32 | not present, an instance of `field_type` is constructed with no 33 | arguments and used as the default value. 34 | 35 | This function exists because yaml parsing can lead to surprising 36 | outputs, and the resulting errors are confusing. For example: 37 | entrypoint1: a string, but I can accidentally treat as an sequence 38 | entrypoint2: [a, list, but, I, might, think, its, a, string] 39 | version1: 3 # Parsed to int 40 | version2: 3.1 # Parsed to float 41 | version3: 3.1.1 # Parsed to str 42 | feature: off # Parsed to the boolean False 43 | 44 | Args: 45 | container (dict): Object decoded from yaml 46 | field_name (str): Field that should be present in `container` 47 | field_type (type): Expected type for field value 48 | 49 | Returns: 50 | Any: Fetched or default value of field 51 | 52 | Raises: 53 | ValueError: if field value cannot be converted to the desired type 54 | """ 55 | try: 56 | value = container[field_name] 57 | if value is None: 58 | return field_type() 59 | except (IndexError, KeyError): 60 | return field_type() 61 | 62 | msg = 'Expected "{}" field to be of type "{}", but found type "{}"' 63 | if not isinstance(value, field_type): 64 | # list('some string') is a successful type cast as far as Python 65 | # is concerned, but doesn't exactly produce the results we want. 66 | # We have a whitelist of conversions we will attempt. 67 | whitelist = ( 68 | (float, str), 69 | (int, str), 70 | (str, float), 71 | (str, int), 72 | (int, float), 73 | ) 74 | if (type(value), field_type) not in whitelist: 75 | raise ValueError(msg.format(field_name, field_type, type(value))) 76 | 77 | try: 78 | value = field_type(value) 79 | except ValueError as e: 80 | e.message = msg.format(field_name, field_type, type(value)) 81 | raise 82 | return value 83 | 84 | 85 | def validate_arg_regex(flag_value, flag_regex): 86 | """Check a named command line flag against a regular expression""" 87 | if not re.match(flag_regex, flag_value): 88 | raise argparse.ArgumentTypeError( 89 | 'Value "{}" does not match pattern "{}"'.format( 90 | flag_value, flag_regex.pattern)) 91 | return flag_value 92 | 93 | 94 | def validate_arg_dict(flag_value): 95 | """Parse a command line flag as a key=val,... dict""" 96 | if not flag_value: 97 | return {} 98 | entries = flag_value.split(',') 99 | pairs = [] 100 | for entry in entries: 101 | match = re.match(KEY_VALUE_REGEX, entry) 102 | if not match: 103 | raise argparse.ArgumentTypeError( 104 | 'Value "{}" should be a list like _KEY1=value1,_KEY2=value2"'. 105 | format(flag_value)) 106 | pairs.append((match.group(1), match.group(2))) 107 | return dict(pairs) 108 | -------------------------------------------------------------------------------- /tests/benchmark/generate_csv.py: -------------------------------------------------------------------------------- 1 | # Copyright 2016 Google Inc. All Rights Reserved. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
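`validation_utils.py` above revolves around two helpers: `get_field_value`, which fetches a possibly-missing YAML field and coerces it to an expected type (with a small whitelist of allowed conversions), and `validate_arg_dict`, which parses `_KEY=value,...` substitution flags into a dict. A short sketch, assuming the module is importable, mirrors the cases exercised in `validation_utils_test.py` earlier:

```python
# Assumes validation_utils.py (shown above) is on the import path.
import validation_utils

# A missing field falls back to an instance of the requested type.
assert validation_utils.get_field_value({}, 'entrypoint', str) == ''

# Whitelisted coercions succeed: a YAML `python_version: 3` arrives as an int
# and is converted to the string the caller expects.
assert validation_utils.get_field_value(
    {'python_version': 3}, 'python_version', str) == '3'

# Conversions outside the whitelist raise ValueError instead of silently
# casting (list('abc') would "work" in Python but is almost never intended).
try:
    validation_utils.get_field_value({'entrypoint': ['a', 'list']},
                                     'entrypoint', str)
except ValueError as err:
    print('rejected:', err)

# --substitutions-style flags become an ordinary dict.
print(validation_utils.validate_arg_dict('_TAG=2018-01-01,_NAMESPACE=gcr.io/x'))
# {'_TAG': '2018-01-01', '_NAMESPACE': 'gcr.io/x'}
```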
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | 16 | import argparse 17 | import csv 18 | import json 19 | import os 20 | import sys 21 | 22 | 23 | def generate_csv(filename, tag): 24 | """Extract function name, time used and memory usage from the metadata and write to the output CSV file. 25 | 26 | Args: 27 | filename (str): Filename of the performance json file to read 28 | tag (str): Tag of the docker container 29 | """ 30 | with open(filename) as input: 31 | data = json.load(input) 32 | benchmarks = data["benchmarks"] 33 | runtime_version = os.path.basename(filename).split(".json")[0] 34 | 35 | # Write data to CSV file 36 | with open("{}.csv".format(os.path.splitext(filename)[0]), "wb") as output: 37 | csv_writer = csv.writer(output, delimiter=',') 38 | for benchmark in benchmarks: 39 | try: 40 | # Get the function name 41 | func_name = benchmark["metadata"]["name"] 42 | # Get the time used for this function, convert to millisecond 43 | time_used = float(benchmark["runs"][0]["values"][0]) * 1000 44 | # Get the memory usage, convert to MB 45 | mem_usage = float(benchmark["metadata"]["mem_max_rss"]) / float(1<<20) 46 | line = [tag, runtime_version, func_name, time_used, mem_usage] 47 | # Write to CSV file 48 | csv_writer.writerow(line) 49 | except KeyError: 50 | # Skip the benchmark result if it does not contain the fields we want 51 | pass 52 | 53 | 54 | def get_averages(filename, tag): 55 | """Calculate the averages of time_used and memory_usage and append to CSV file. 56 | 57 | Args: 58 | filename (str): Filename of the performance json file to read 59 | tag (str): Tag of the docker container 60 | """ 61 | with open("{}.csv".format(os.path.splitext(filename)[0]), "rb") as input: 62 | lines = input.readlines() 63 | # Get the two columns of times_used and mem_usage 64 | rows_of_data = [map(float, line.split(',')[-2:]) for line in lines] 65 | # Calculate the sum of the two columns 66 | col_sums = map(sum, zip(*rows_of_data)) 67 | # Calculate the average of the two columns by using the sum divided by the total number of lines 68 | averages = [col_sum / len(lines) for col_sum in col_sums] 69 | 70 | # Get the runtime version from filename 71 | runtime_version = os.path.basename(filename).split(".json")[0] 72 | 73 | # Write the averages to CSV file in appending mode 74 | with open("{}/averages.csv".format(tag), "a+") as output: 75 | try: 76 | csv_writer = csv.writer(output, delimiter=',') 77 | csv_writer.writerow([tag, runtime_version] + averages) 78 | except IOError: 79 | print "Could not write averages to file." 80 | 81 | 82 | def parse_args(argv): 83 | """Parse and validate command line flags""" 84 | parser = argparse.ArgumentParser( 85 | description='Read the python performance json file and extract data to genarate CSV file.' 
86 | ) 87 | parser.add_argument( 88 | '--filename', 89 | help='Filename of the performance json file to read' 90 | ) 91 | parser.add_argument( 92 | '--tag', 93 | help='Tag of the docker container' 94 | ) 95 | args = parser.parse_args(argv[1:]) 96 | return args 97 | 98 | 99 | def main(): 100 | args = parse_args(sys.argv) 101 | generate_csv(args.filename, args.tag) 102 | get_averages(args.filename, args.tag) 103 | 104 | 105 | if __name__ == '__main__': 106 | main() 107 | -------------------------------------------------------------------------------- /tests/python2-libraries/requirements.txt: -------------------------------------------------------------------------------- 1 | alembic==1.0.0 2 | amqp==2.3.2 3 | amqplib==1.0.2 4 | ansible==7.0.0 5 | anyjson==0.3.3 6 | apache-libcloud==2.3.0 7 | argparse==1.4.0 8 | astroid==1.6.5 9 | awscli==1.16.1 10 | babel==2.6.0 11 | backports.ssl_match_hostname==3.5.0.1 12 | bcdoc==0.16.0 13 | beautifulsoup4==4.6.3 14 | beautifulsoup==3.2.1 15 | billiard==3.5.0.4 16 | blessings==1.7 17 | blinker==1.4 18 | boto==2.49.0 19 | botocore==1.11.1 20 | bottle==0.12.13 21 | carbon<1.1.1 22 | celery==4.2.1 23 | certifi==2023.7.22 24 | cffi==1.11.5 25 | chardet==3.0.4 26 | click==6.7 27 | cliff==2.13.0 28 | cmd2==0.8.9 29 | colorama==0.3.9 30 | configobj==5.0.6 31 | cov-core==1.15.0 32 | coverage==4.5.1 33 | coveralls==1.4.0 34 | crcmod==1.7 35 | cryptography==2.3.1 36 | cssselect==1.0.3 37 | cython==0.28.5 38 | decorator==4.3.0 39 | django-celery==3.2.2 40 | django-debug-toolbar==1.9.1 41 | django-extensions==2.1.1 42 | django<2.0 43 | django_compress==1.0.1 44 | djangorestframework==3.8.2 45 | docker-py==1.10.6 46 | docopt==0.6.2 47 | docutils==0.14 48 | ecdsa==0.13 49 | elasticsearch==6.3.1 50 | enum34==1.1.6 51 | eventlet==0.24.1 52 | extras==1.0.0 53 | fabric==2.3.1 54 | fixtures==3.0.0 55 | flake8==3.5.0 56 | flask==2.2.5 57 | funcsigs==1.0.2 58 | functools32==3.2.3.post2 59 | futures==3.2.0 60 | gevent==1.3.6 61 | google-api-python-client==1.7.4 62 | graphite-web==1.1.3 63 | greenlet==0.4.14 64 | gunicorn==19.9.0 65 | hiredis==0.2.0 66 | honcho==1.0.1 67 | html5lib==1.0.1 68 | httplib2==0.11.3 69 | idna==2.7 70 | ipaddress==1.0.22 71 | iso8601==0.1.12 72 | isodate==0.6.0 73 | itsdangerous==0.24 74 | jinja2==2.10 75 | jmespath==0.9.3 76 | jsonschema==2.6.0 77 | kombu==4.2.1 78 | linecache2==1.0.0 79 | logilab-common==1.4.2 80 | lxml==4.2.4 81 | m2crypto==0.30.1 82 | mako==1.0.7 83 | manifestparser==1.1 84 | markdown==2.6.11 85 | markupsafe==1.0 86 | matplotlib==2.2.3 87 | mccabe==0.6.1 88 | meld3==1.0.2 89 | mock==2.0.0 90 | mozcrash==1.0 91 | mozdevice==1.0.1 92 | mozfile==1.2 93 | mozinfo==0.10 94 | mozlog==3.8 95 | moznetwork==0.27 96 | mozprocess==0.26 97 | mozprofile==1.1.0 98 | mozrunner==7.0.1 99 | msgpack-python==0.5.6 100 | mysql-python==1.2.5 101 | ndg-httpsclient==0.5.1 102 | netaddr==0.7.19 103 | netifaces==0.10.7 104 | newrelic==4.2.0.100 105 | nose==1.3.7 106 | numpy==1.22.0 107 | oauth2==1.9.0.post1 108 | oauth2client==4.1.2 109 | oauthlib==2.1.0 110 | ordereddict==1.1 111 | oslo.config==6.4.0 112 | pandas==0.23.4 113 | paramiko==2.4.1 114 | passlib==1.7.1 115 | paste==2.0.3 116 | pastedeploy==1.5.2 117 | pastescript==2.0.2 118 | pbr==4.2.0 119 | pep8==1.7.1 120 | pexpect==4.6.0 121 | pika==0.12.0 122 | pillow==9.3.0 123 | pip==18.0 124 | prettytable==0.7.2 125 | protobuf==3.6.1 126 | psutil==5.4.7 127 | psycopg2==2.7.5 128 | py==1.5.4 129 | pyasn1-modules==0.2.2 130 | pyasn1==0.4.4 131 | pycparser==2.18 132 | pycrypto==2.6.1 133 | 
pycurl==7.43.0.2 134 | pyflakes==2.0.0 135 | pygments==2.2.0 136 | pyjwt==1.6.4 137 | pylibmc==1.5.2 138 | pylint==1.9.3 139 | pymongo==3.7.1 140 | pymysql==0.9.2 141 | pyopenssl==18.0.0 142 | pyparsing==2.2.0 143 | pyramid==1.9.2 144 | pystache==0.5.4 145 | pytest-cov==2.5.1 146 | pytest==3.7.3 147 | python-cjson==1.2.1 148 | python-daemon==2.2.0 149 | python-dateutil==2.7.3 150 | python-gflags==3.1.2 151 | python-keystoneclient==3.17.0 152 | python-memcached==1.59 153 | python-mimeparse==1.6.0 154 | python-novaclient==11.0.0 155 | python-subunit==1.3.0 156 | python-swiftclient==3.6.0 157 | pytz==2018.5 158 | pyyaml==5.4 159 | pyzmq==17.1.2 160 | raven==6.9.0 161 | redis==2.10.6 162 | repoze.lru==0.7 163 | requests-oauthlib==1.0.0 164 | requests==2.31.0 165 | retrying==1.3.3 166 | rsa==3.4.2 167 | scipy==1.10.0 168 | selenium==3.14.0 169 | setuptools-git==1.2 170 | setuptools==40.2.0 171 | sh==1.12.14 172 | simplejson==3.16.0 173 | six==1.11.0 174 | snowballstemmer==1.2.1 175 | south==1.0.2 176 | sphinx==1.7.7 177 | sqlalchemy-migrate==0.11.0 178 | sqlalchemy==1.2.11 179 | sqlparse==0.4.4 180 | statsd==3.3.0 181 | stevedore==1.29.0 182 | suds==0.4 183 | supervisor==3.3.4 184 | testrepository==0.0.20 185 | testtools==2.3.0 186 | thrift==0.11.0 187 | tornado==6.3.3 188 | tox==3.2.1 189 | twisted==18.7.0 190 | ujson==1.35 191 | unidecode==1.0.22 192 | unittest2==1.1.0 193 | uritemplate==3.0.0 194 | urllib3==1.26.5 195 | uwsgi==2.0.22 196 | versiontools==1.9.1 197 | virtualenv==16.0.0 198 | waitress==2.1.2 199 | warlock==1.3.0 200 | webob==1.8.2 201 | websocket-client==0.51.0 202 | webtest==2.0.30 203 | werkzeug==2.2.3 204 | wheel==0.31.1 205 | xlrd==1.1.0 206 | zc.buildout==2.12.1 207 | zope.interface==4.5.0 208 | -------------------------------------------------------------------------------- /python-interpreter-builder/scripts/build-python-3.4.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euo pipefail 4 | set -x 5 | 6 | # Get the source 7 | mkdir -p /opt/sources 8 | cd /opt/sources 9 | wget --no-verbose https://www.python.org/ftp/python/3.4.8/Python-3.4.8.tgz 10 | # SHA-256 generated via `shasum -a 256 [file]` 11 | shasum --check <.yaml) 89 | source_dir (str): Directory containing user's source code 90 | 91 | Returns: 92 | AppConfig: valid configuration 93 | """ 94 | # Examine app.yaml 95 | if not isinstance(raw_config, collections.abc.Mapping): 96 | raise ValueError( 97 | 'Expected {} contents to be a Mapping type, but found type "{}"'. 98 | format(config_file, type(raw_config))) 99 | 100 | # Short circuit for python compat. 101 | if validation_utils.get_field_value( 102 | raw_config, 'runtime', str) == 'python-compat': 103 | return AppConfig( 104 | base_image=None, 105 | dockerfile_python_version=None, 106 | entrypoint=None, 107 | has_requirements_txt=None, 108 | is_python_compat=True) 109 | 110 | entrypoint = validation_utils.get_field_value( 111 | raw_config, 'entrypoint', str) 112 | if not PRINTABLE_REGEX.match(entrypoint): 113 | raise ValueError( 114 | 'Invalid "entrypoint" value in app.yaml: {!r}'.format(entrypoint)) 115 | 116 | # Mangle entrypoint in the same way as the Cloud SDK 117 | # (googlecloudsdk/third_party/appengine/api/validation.py) 118 | # 119 | # We could handle both string ("shell form") and list ("exec 120 | # form") but it appears that gcloud only handles string form. 
121 | if entrypoint and not entrypoint.startswith('exec '): 122 | entrypoint = 'exec ' + entrypoint 123 | 124 | raw_runtime_config = validation_utils.get_field_value( 125 | raw_config, 'runtime_config', dict) 126 | python_version = validation_utils.get_field_value( 127 | raw_runtime_config, 'python_version', str) 128 | 129 | dockerfile_python_version = PYTHON_INTERPRETER_VERSION_MAP.get( 130 | python_version) 131 | if dockerfile_python_version is None: 132 | valid_versions = str(sorted(PYTHON_INTERPRETER_VERSION_MAP.keys())) 133 | raise ValueError( 134 | 'Invalid "python_version" field in "runtime_config" section ' 135 | 'of app.yaml: {!r}. Valid options are: {}'. 136 | format(python_version, valid_versions)) 137 | 138 | # Examine user's files 139 | has_requirements_txt = os.path.isfile( 140 | os.path.join(source_dir, 'requirements.txt')) 141 | 142 | return AppConfig( 143 | base_image=base_image, 144 | dockerfile_python_version=dockerfile_python_version, 145 | entrypoint=entrypoint, 146 | has_requirements_txt=has_requirements_txt, 147 | is_python_compat=False) 148 | 149 | 150 | def get_data(name): 151 | """Return the contents of the named data resource 152 | 153 | These templates are copied from the Google Cloud SDK at 154 | google-cloud-sdk/platform/ext-runtime/python/data 155 | and the two should be kept in sync. 156 | 157 | Args: 158 | name (str): Name of file, without directory 159 | 160 | Returns: 161 | str: Contents of data file 162 | """ 163 | filename = os.path.join(os.path.dirname(__file__), 'data', name) 164 | with io.open(filename, 'r', encoding='utf8') as template_file: 165 | return template_file.read() 166 | 167 | 168 | def generate_files(app_config): 169 | """Generate a Dockerfile and helper files for an application. 170 | 171 | Args: 172 | app_config (AppConfig): Validated configuration 173 | 174 | Returns: 175 | dict: Map of filename to desired file contents 176 | """ 177 | if app_config.has_requirements_txt: 178 | optional_requirements_txt = get_data('Dockerfile.requirements_txt') 179 | else: 180 | optional_requirements_txt = '' 181 | 182 | if app_config.entrypoint: 183 | optional_entrypoint = get_data( 184 | 'Dockerfile.entrypoint.template').format( 185 | entrypoint=app_config.entrypoint) 186 | else: 187 | optional_entrypoint = '' 188 | 189 | if app_config.is_python_compat: 190 | dockerfile = get_data('Dockerfile.python_compat') 191 | dockerignore = get_data('dockerignore.python_compat') 192 | else: 193 | dockerfile = ''.join([ 194 | get_data('Dockerfile.preamble.template').format( 195 | base_image=app_config.base_image), 196 | get_data('Dockerfile.virtualenv.template').format( 197 | python_version=app_config.dockerfile_python_version), 198 | optional_requirements_txt, 199 | get_data('Dockerfile.install_app'), 200 | optional_entrypoint, 201 | ]) 202 | dockerignore = get_data('dockerignore') 203 | 204 | return { 205 | 'Dockerfile': dockerfile, 206 | '.dockerignore': dockerignore, 207 | } 208 | 209 | 210 | def generate_dockerfile_command(base_image, config_file, source_dir): 211 | """Write a Dockerfile and helper files for an application. 212 | 213 | Args: 214 | base_image (str): Docker image name to build on top of 215 | config_file (str): Path to user's app.yaml (might be .yaml) 216 | source_dir (str): Directory container user's source code 217 | """ 218 | # Read yaml file. 
Does not currently support multiple services 219 | # with configuration filenames besides app.yaml 220 | with io.open(config_file, 'r', encoding='utf8') as yaml_config_file: 221 | raw_config = yaml.safe_load(yaml_config_file) 222 | 223 | # Determine complete configuration 224 | app_config = get_app_config(raw_config, base_image, config_file, 225 | source_dir) 226 | 227 | # Generate list of filenames and their textual contents 228 | files = generate_files(app_config) 229 | 230 | # Write files 231 | for filename, contents in files.items(): 232 | full_filename = os.path.join(source_dir, filename) 233 | with io.open(full_filename, 'w', encoding='utf8') as outfile: 234 | outfile.write(contents) 235 | 236 | 237 | def parse_args(argv): 238 | """Parse and validate command line flags""" 239 | parser = argparse.ArgumentParser() 240 | parser.add_argument( 241 | '--base-image', 242 | type=functools.partial( 243 | validation_utils.validate_arg_regex, flag_regex=IMAGE_REGEX), 244 | default='gcr.io/google-appengine/python:latest', 245 | help='Name of Docker image to use as base') 246 | # In some cases, gcloud sets an environment variable to indicate 247 | # the location of the application configuration file, rather than 248 | # using the --config flag. The order of precedence from highest 249 | # to lowest is: 250 | # 251 | # 1) --config flag 252 | # 2) $GAE_APPLICATION_YAML_PATH environment variable 253 | # 3) a file named "app.yaml" in the current working directory 254 | parser.add_argument( 255 | '--config', 256 | type=functools.partial( 257 | validation_utils.validate_arg_regex, flag_regex=PRINTABLE_REGEX), 258 | default=(os.environ.get(GAE_APPLICATION_YAML_PATH) or 'app.yaml'), 259 | help='Path to application configuration file' 260 | ) 261 | parser.add_argument( 262 | '--source-dir', 263 | type=functools.partial( 264 | validation_utils.validate_arg_regex, flag_regex=PRINTABLE_REGEX), 265 | default='.', 266 | help=('Application source and output directory')) 267 | args = parser.parse_args(argv[1:]) 268 | return args 269 | 270 | 271 | def main(): 272 | args = parse_args(sys.argv) 273 | generate_dockerfile_command(args.base_image, args.config, args.source_dir) 274 | 275 | 276 | if __name__ == '__main__': 277 | main() 278 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 
25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
-------------------------------------------------------------------------------- /scripts/local_cloudbuild.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Copyright 2017 Google Inc. All Rights Reserved. 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | """Emulate the Google Cloud Build locally. 18 | 19 | The input is a local cloudbuild.yaml file. This is translated into a 20 | series of commands for the locally installed Docker daemon. These 21 | commands are output as a shell script and optionally executed. 22 | 23 | The output images are not pushed to the Google Container Registry. 24 | Not all cloudbuild.yaml functionality is supported. In particular, 25 | substitutions are a simplified subset that doesn't include all the 26 | corner cases and error conditions. 27 | 28 | See https://cloud.google.com/container-builder/docs/api/build-steps 29 | for more information. 30 | """ 31 | 32 | import argparse 33 | import collections 34 | import collections.abc 35 | import functools 36 | import io 37 | import os 38 | import re 39 | import shlex 40 | import subprocess 41 | import sys 42 | 43 | import yaml 44 | 45 | import validation_utils 46 | 47 | 48 | # Exclude non-printable control characters (including newlines) 49 | PRINTABLE_REGEX = re.compile(r"""^[^\x00-\x1f]*$""") 50 | 51 | # Cloud Build substitutions 52 | # https://cloud.google.com/cloud-build/docs/api/build-requests#substitutions 53 | SUBSTITUTION_REGEX = re.compile(r"""(?x) 54 | [$] # Dollar sign 55 | ( 56 | [A-Z_][A-Z0-9_]* # Variable name, no curly brackets 57 | | 58 | {[A-Z_][A-Z0-9_]*} # Variable name, with curly brackets 59 | | 60 | [$] # $$, translated to a single literal $ 61 | ) 62 | """) 63 | 64 | # Default builtin substitutions 65 | DEFAULT_SUBSTITUTIONS = { 66 | 'BRANCH_NAME': '', 67 | 'BUILD_ID': 'abcdef12-3456-7890-abcd-ef0123456789', 68 | 'COMMIT_SHA': '', 69 | 'PROJECT_ID': 'dummy-project-id', 70 | 'REPO_NAME': '', 71 | 'REVISION_ID': '', 72 | 'TAG_NAME': '', 73 | } 74 | 75 | # Use this image for cleanup actions 76 | DEBIAN_IMAGE = 'gcr.io/google-appengine/debian8' 77 | 78 | # File template 79 | BUILD_SCRIPT_TEMPLATE = """\ 80 | #!/bin/bash 81 | # This is a generated file. Do not edit. 82 | 83 | set -euo pipefail 84 | 85 | SOURCE_DIR=. 86 | 87 | # Setup staging directory 88 | HOST_WORKSPACE=$(mktemp -d -t local_cloudbuild_XXXXXXXXXX) 89 | function cleanup {{ 90 | if [ "${{HOST_WORKSPACE}}" != '/' -a -d "${{HOST_WORKSPACE}}" ]; then 91 | # Expect a single error message about /workspace busy 92 | {cleanup_str} 2>/dev/null || true 93 | # Do not expect error messages here. Display but ignore. 
94 | rmdir "${{HOST_WORKSPACE}}" || true 95 | fi 96 | }} 97 | trap cleanup EXIT 98 | 99 | # Copy source to staging directory 100 | echo "Copying source to staging directory ${{HOST_WORKSPACE}}" 101 | rsync -avzq --exclude=.git "${{SOURCE_DIR}}" "${{HOST_WORKSPACE}}" 102 | 103 | # Build commands 104 | {docker_str} 105 | # End of build commands 106 | echo "Build completed successfully" 107 | """ 108 | 109 | 110 | # Validated cloudbuild recipe + flags 111 | CloudBuild = collections.namedtuple('CloudBuild', 112 | 'output_script run steps substitutions') 113 | 114 | # Single validated step in a cloudbuild recipe 115 | Step = collections.namedtuple('Step', 'args dir_ env name') 116 | 117 | 118 | def sub_and_quote(s, substitutions, substitutions_used): 119 | """Return a shell-escaped, variable substituted, version of the string s. 120 | 121 | Args: 122 | s (str): Any string 123 | subs (dict): Substitution map to apply 124 | subs_used (set): Updated with names from `subs.keys()` when those 125 | substitutions are encountered in `s` 126 | """ 127 | 128 | def sub(match): 129 | """Perform a single substitution.""" 130 | variable_name = match.group(1) 131 | if variable_name[0] == '{': 132 | # Strip curly brackets 133 | variable_name = variable_name[1:-1] 134 | if variable_name == '$': 135 | value = '$' 136 | elif variable_name not in substitutions: 137 | # Variables must be set 138 | raise ValueError( 139 | 'Variable "{}" used without being defined. Try adding ' 140 | 'it to the --substitutions flag'.format(variable_name)) 141 | else: 142 | value = substitutions.get(variable_name) 143 | substitutions_used.add(variable_name) 144 | return value 145 | 146 | substituted_s = re.sub(SUBSTITUTION_REGEX, sub, s) 147 | quoted_s = shlex.quote(substituted_s) 148 | return quoted_s 149 | 150 | 151 | def get_cloudbuild(raw_config, args): 152 | """Read and validate a cloudbuild recipe 153 | 154 | Args: 155 | raw_config (dict): deserialized cloudbuild.yaml 156 | args (argparse.Namespace): command line flags 157 | 158 | Returns: 159 | CloudBuild: valid configuration 160 | """ 161 | if not isinstance(raw_config, dict): 162 | raise ValueError( 163 | 'Expected {} contents to be of type "dict", but found type "{}"'. 164 | format(args.config, type(raw_config))) 165 | 166 | raw_steps = validation_utils.get_field_value(raw_config, 'steps', list) 167 | if not raw_steps: 168 | raise ValueError('No steps defined in {}'.format(args.config)) 169 | 170 | steps = [get_step(raw_step) for raw_step in raw_steps] 171 | return CloudBuild( 172 | output_script=args.output_script, 173 | run=args.run, 174 | steps=steps, 175 | substitutions=args.substitutions, 176 | ) 177 | 178 | 179 | def get_step(raw_step): 180 | """Read and validate a single cloudbuild step 181 | 182 | Args: 183 | raw_step (dict): deserialized step 184 | 185 | Returns: 186 | Step: valid build step 187 | """ 188 | if not isinstance(raw_step, dict): 189 | raise ValueError( 190 | 'Expected step to be of type "dict", but found type "{}"'. 
191 |             format(type(raw_step)))
192 |     raw_args = validation_utils.get_field_value(raw_step, 'args', list)
193 |     args = [validation_utils.get_field_value(raw_args, index, str)
194 |             for index in range(len(raw_args))]
195 |     dir_ = validation_utils.get_field_value(raw_step, 'dir', str)
196 |     raw_env = validation_utils.get_field_value(raw_step, 'env', list)
197 |     env = [validation_utils.get_field_value(raw_env, index, str)
198 |            for index in range(len(raw_env))]
199 |     name = validation_utils.get_field_value(raw_step, 'name', str)
200 |     return Step(
201 |         args=args,
202 |         dir_=dir_,
203 |         env=env,
204 |         name=name,
205 |     )
206 |
207 |
208 | def generate_command(step, substitutions, substitutions_used):
209 |     """Generate a single shell command to run for a single cloudbuild step
210 |
211 |     Args:
212 |         step (Step): Valid build step
213 |         substitutions (dict): Substitution map to apply
214 |         substitutions_used (set): Updated with names from `substitutions.keys()`
215 |             when those substitutions are encountered in an element of `step`
216 |
217 |     Returns:
218 |         [str]: A single shell command, expressed as a list of quoted tokens.
219 |     """
220 |     quoted_args = [sub_and_quote(arg, substitutions, substitutions_used)
221 |                    for arg in step.args]
222 |     quoted_env = []
223 |     for env in step.env:
224 |         quoted_env.extend(['--env', sub_and_quote(env, substitutions,
225 |                                                   substitutions_used)])
226 |     quoted_name = sub_and_quote(step.name, substitutions, substitutions_used)
227 |     workdir = '/workspace'
228 |     if step.dir_:
229 |         workdir = os.path.join(workdir, sub_and_quote(step.dir_, substitutions,
230 |                                                       substitutions_used))
231 |     process_args = [
232 |         'docker',
233 |         'run',
234 |         '--volume',
235 |         '/var/run/docker.sock:/var/run/docker.sock',
236 |         '--volume',
237 |         '/root/.docker:/root/.docker',
238 |         '--volume',
239 |         '${HOST_WORKSPACE}:/workspace',
240 |         '--workdir',
241 |         workdir,
242 |     ] + quoted_env + [quoted_name] + quoted_args
243 |     return process_args
244 |
245 |
246 | def generate_script(cloudbuild):
247 |     """Generate the contents of a shell script
248 |
249 |     Args:
250 |         cloudbuild (CloudBuild): Valid cloudbuild configuration
251 |
252 |     Returns:
253 |         (str): Contents of shell script
254 |     """
255 |     # This deletes everything in /workspace including hidden files,
256 |     # but not /workspace itself
257 |     cleanup_step = Step(
258 |         args=['rm', '-rf', '/workspace'],
259 |         dir_='',
260 |         env=[],
261 |         name=DEBIAN_IMAGE,
262 |     )
263 |     cleanup_command = generate_command(cleanup_step, {}, set())
264 |     subs_used = set()
265 |     docker_commands = [
266 |         generate_command(step, cloudbuild.substitutions, subs_used)
267 |         for step in cloudbuild.steps]
268 |
269 |     # Check that all user variables were referenced at least once
270 |     user_subs_unused = [name for name in cloudbuild.substitutions.keys()
271 |                         if name not in subs_used and name[0] == '_']
272 |     if user_subs_unused:
273 |         nice_list = '"' + '", "'.join(sorted(user_subs_unused)) + '"'
274 |         raise ValueError(
275 |             'User substitution variables {} were defined in the '
276 |             '--substitutions flag but never used in the cloudbuild file.'.
277 | format(nice_list)) 278 | 279 | cleanup_str = ' '.join(cleanup_command) 280 | docker_lines = [] 281 | for docker_command in docker_commands: 282 | line = ' '.join(docker_command) + '\n\n' 283 | docker_lines.append(line) 284 | docker_str = ''.join(docker_lines) 285 | 286 | s = BUILD_SCRIPT_TEMPLATE.format(cleanup_str=cleanup_str, 287 | docker_str=docker_str) 288 | return s 289 | 290 | 291 | def make_executable(path): 292 | """Set executable bit(s) on file""" 293 | # http://stackoverflow.com/questions/12791997 294 | mode = os.stat(path).st_mode 295 | mode |= (mode & 0o444) >> 2 # copy R bits to X 296 | os.chmod(path, mode) 297 | 298 | 299 | def write_script(cloudbuild, contents): 300 | """Write a shell script to a file.""" 301 | print('Writing build script to {}'.format(cloudbuild.output_script)) 302 | with io.open(cloudbuild.output_script, 'w', encoding='utf8') as outfile: 303 | outfile.write(contents) 304 | make_executable(cloudbuild.output_script) 305 | 306 | 307 | def local_cloudbuild(args): 308 | """Execute the steps of a cloudbuild.yaml locally 309 | 310 | Args: 311 | args: command line flags as per parse_args 312 | """ 313 | # Load and parse cloudbuild.yaml 314 | with io.open(args.config, 'r', encoding='utf8') as cloudbuild_file: 315 | raw_config = yaml.safe_load(cloudbuild_file) 316 | 317 | # Determine configuration 318 | cloudbuild = get_cloudbuild(raw_config, args) 319 | 320 | # Create shell script 321 | contents = generate_script(cloudbuild) 322 | write_script(cloudbuild, contents) 323 | 324 | # Run shell script 325 | if cloudbuild.run: 326 | print('Running {}'.format(cloudbuild.output_script)) 327 | args = [os.path.abspath(cloudbuild.output_script)] 328 | subprocess.check_call(args) 329 | 330 | 331 | def parse_args(argv): 332 | """Parse and validate command line flags""" 333 | parser = argparse.ArgumentParser( 334 | description='Process cloudbuild.yaml locally to build Docker images') 335 | parser.add_argument( 336 | '--config', 337 | type=functools.partial( 338 | validation_utils.validate_arg_regex, flag_regex=PRINTABLE_REGEX), 339 | default='cloudbuild.yaml', 340 | help='Path to cloudbuild.yaml file' 341 | ) 342 | parser.add_argument( 343 | '--output_script', 344 | type=functools.partial( 345 | validation_utils.validate_arg_regex, flag_regex=PRINTABLE_REGEX), 346 | help='Filename to write shell script to', 347 | ) 348 | parser.add_argument( 349 | '--no-run', 350 | action='store_false', 351 | help='Create shell script but don\'t execute it', 352 | dest='run', 353 | ) 354 | parser.add_argument( 355 | '--substitutions', 356 | type=validation_utils.validate_arg_dict, 357 | default={}, 358 | help='Parameters to be substituted in the build specification', 359 | ) 360 | args = parser.parse_args(argv[1:]) 361 | if not args.output_script: 362 | args.output_script = args.config + "_local.sh" 363 | return args 364 | 365 | 366 | def main(): 367 | args = parse_args(sys.argv) 368 | local_cloudbuild(args) 369 | 370 | 371 | if __name__ == '__main__': 372 | main() 373 | -------------------------------------------------------------------------------- /scripts/local_cloudbuild_test.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Copyright 2017 Google Inc. All Rights Reserved. 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 
7 | # You may obtain a copy of the License at
8 | #
9 | #     http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 |
17 | """Unit test for local_cloudbuild.py"""
18 |
19 | import argparse
20 | import contextlib
21 | import os
22 | import re
23 | import shutil
24 | import subprocess
25 | import unittest.mock
26 |
27 | import pytest
28 | import yaml
29 |
30 | import local_cloudbuild
31 |
32 |
33 | # Matches script boilerplate
34 | STAGING_DIR_REGEX = re.compile(
35 |     b'(?m)Copying source to staging directory (.+)$')
36 |
37 |
38 | @pytest.fixture
39 | def testdata_dir():
40 |     testdata_dir = os.path.join(os.path.dirname(__file__), 'testdata')
41 |     assert os.path.isdir(testdata_dir), (
42 |         'Could not run test: testdata directory not found')
43 |     return testdata_dir
44 |
45 |
46 | @pytest.mark.parametrize('s, subs, expected, expected_used', [
47 |     # Empty string
48 |     ('', {}, "''", []),
49 |     # No substitutions
50 |     ('a', {}, 'a', []),
51 |     # Unused substitution (ok here but error in generate_script)
52 |     ('a', {'FOO': 'foo'}, 'a', []),
53 |     ('a', {'_FOO': '_foo'}, 'a', []),
54 |     # Defined builtin substitution
55 |     ('a$FOOb', {'FOO': 'foo'}, 'afoob', ['FOO']),
56 |     ('a${FOO}b', {'FOO': 'foo'}, 'afoob', ['FOO']),
57 |     # Defined user substitution
58 |     ('a$_FOOb', {'_FOO': '_foo'}, 'a_foob', ['_FOO']),
59 |     ('a${_FOO}b', {'_FOO': '_foo'}, 'a_foob', ['_FOO']),
60 |     # Multiple substitutions
61 |     ('$FOO${FOO}${BAR}$FOO',
62 |      {'FOO': 'foo', 'BAR': 'bar'},
63 |      'foofoobarfoo',
64 |      ['FOO', 'BAR']),
65 |     # Invalid names
66 |     ('a $ b', {}, "'a $ b'", []),
67 |     ('a$foo b', {}, "'a$foo b'", []),
68 |     ('a$0FOO b', {}, "'a$0FOO b'", []),
69 | ])
70 | def test_sub_and_quote_valid(s, subs, expected, expected_used):
71 |     used = set()
72 |     actual = local_cloudbuild.sub_and_quote(s, subs, used)
73 |     assert actual == expected
74 |     assert used == set(expected_used)
75 |
76 |
77 | @pytest.mark.parametrize('s, subs', [
78 |     # Undefined builtin substitution
79 |     ('a$FOOb', {}),
80 |     ('a${FOO}b', {}),
81 |     # Undefined user substitution
82 |     ('a$_FOOb', {}),
83 |     ('a${_FOO}b', {}),
84 | ])
85 | def test_sub_and_quote_invalid(s, subs):
86 |     with pytest.raises(ValueError):
87 |         used = set()
88 |         local_cloudbuild.sub_and_quote(s, subs, used)
89 |
90 |
91 | def have_docker():
92 |     """Determine if the Docker daemon is present and usable"""
93 |     if ((shutil.which('docker') is not None) and
94 |             (subprocess.call(['docker', 'info'],
95 |                              stdout=subprocess.DEVNULL,
96 |                              stderr=subprocess.DEVNULL) == 0)):
97 |         return True
98 |     return False
99 |
100 |
101 | _args = argparse.Namespace(
102 |     config='some_config_file',
103 |     output_script='some_output_script',
104 |     run=False,
105 |     substitutions={},
106 | )
107 |
108 |
109 | def test_get_cloudbuild_valid():
110 |     raw_yaml = 'steps:\n- name: step1\n- name: step2\n'
111 |     raw_config = yaml.safe_load(raw_yaml)
112 |     actual = local_cloudbuild.get_cloudbuild(raw_config, _args)
113 |     assert len(actual.steps) == 2
114 |
115 |
116 | @pytest.mark.parametrize('raw_yaml', [
117 |     # Empty cloud build
118 |     '',
119 |     # No steps
120 |     'foo: bar\n',
121 |     # Steps not a list
122 |     'steps: astring\n',
123 | ])
124 | def test_get_cloudbuild_invalid(raw_yaml):
125 |
raw_config = yaml.safe_load(raw_yaml) 126 | with pytest.raises(ValueError): 127 | local_cloudbuild.get_cloudbuild(raw_config, _args) 128 | 129 | 130 | @pytest.mark.parametrize('raw_step, expected', [ 131 | # Empty step 132 | ({}, local_cloudbuild.Step( 133 | args=[], 134 | dir_='', 135 | env=[], 136 | name='', 137 | )), 138 | # Full step 139 | ({'name': 'aname', 140 | 'args': ['arg1', 2, 'arg3 with \n newline'], 141 | 'env': ['ENV1=value1', 'ENV2=space in value2'], 142 | 'dir': 'adir', 143 | }, local_cloudbuild.Step( 144 | args=['arg1', '2', 'arg3 with \n newline'], 145 | env=['ENV1=value1', 'ENV2=space in value2'], 146 | dir_='adir', 147 | name='aname', 148 | )), 149 | ]) 150 | def test_get_step_valid(raw_step, expected): 151 | actual = local_cloudbuild.get_step(raw_step) 152 | assert actual == expected 153 | 154 | 155 | @pytest.mark.parametrize('raw_step', [ 156 | # Wrong type 157 | [], 158 | # More wrong types 159 | {'args': 'not_a_list'}, 160 | {'args': [[]]}, 161 | {'env': 'not_a_list'}, 162 | {'env': [{}]}, 163 | {'dir': {}}, 164 | {'name': []}, 165 | ]) 166 | def test_get_step_invalid(raw_step): 167 | with pytest.raises(ValueError): 168 | local_cloudbuild.get_step(raw_step) 169 | 170 | 171 | # Basic valid case 172 | _base_step = local_cloudbuild.Step( 173 | args=['arg1', 'arg2'], 174 | dir_='', 175 | env=['ENV1=value1', 'ENV2=value2'], 176 | name='aname', 177 | ) 178 | _subs = {'BUILTIN': 'builtin', '_USER': '_user'} 179 | 180 | 181 | def test_generate_command_basic(): 182 | command = local_cloudbuild.generate_command(_base_step, _subs, set()) 183 | assert command == [ 184 | 'docker', 185 | 'run', 186 | '--volume', 187 | '/var/run/docker.sock:/var/run/docker.sock', 188 | '--volume', 189 | '/root/.docker:/root/.docker', 190 | '--volume', 191 | '${HOST_WORKSPACE}:/workspace', 192 | '--workdir', 193 | '/workspace', 194 | '--env', 195 | 'ENV1=value1', 196 | '--env', 197 | 'ENV2=value2', 198 | 'aname', 199 | 'arg1', 200 | 'arg2', 201 | ] 202 | 203 | 204 | @pytest.mark.parametrize('step, args', [ 205 | # dir specified 206 | (_base_step._replace(dir_='adir'), 207 | ['--workdir', '/workspace/adir']), 208 | # Shell quoting 209 | (_base_step._replace(args=['arg with \n newline']), 210 | ["'arg with \n newline'"]), 211 | (_base_step._replace(dir_='dir/ with space/'), 212 | ["/workspace/'dir/ with space/'"]), 213 | (_base_step._replace(env=['env with space']), 214 | ["'env with space'"]), 215 | (_base_step._replace(name='a name'), 216 | ["'a name'"]), 217 | # Variable substitution 218 | (_base_step._replace(name='a $BUILTIN substitution'), 219 | ["'a builtin substitution'"]), 220 | (_base_step._replace(name='a $_USER substitution'), 221 | ["'a _user substitution'"]), 222 | (_base_step._replace(name='a curly brace ${BUILTIN} substitution'), 223 | ["'a curly brace builtin substitution'"]), 224 | (_base_step._replace( 225 | name='an escaped $$ or $$$$ or $$FOO or $${_FOO} is unescaped'), 226 | ["'an escaped $ or $$ or $FOO or ${_FOO} is unescaped'"]), 227 | ]) 228 | def test_generate_command_valid(step, args): 229 | command = local_cloudbuild.generate_command(step, _subs, set()) 230 | for arg in args: 231 | assert arg in command 232 | 233 | 234 | @pytest.mark.parametrize('step', [ 235 | _base_step._replace(name='a $UNSET_BUILTIN substitution'), 236 | _base_step._replace(name='a $_UNSET_USER substitution'), 237 | ]) 238 | def test_generate_command_invalid(step): 239 | with pytest.raises(ValueError): 240 | local_cloudbuild.generate_command(step, _subs, set()) 241 | 242 | 243 | def 
test_generate_script_golden(testdata_dir): 244 | config_name = 'cloudbuild_ok.yaml' 245 | expected_output_script = os.path.join( 246 | testdata_dir, config_name + '_golden.sh') 247 | cloudbuild = local_cloudbuild.CloudBuild( 248 | output_script='test_generate_script', 249 | run=False, 250 | steps=[ 251 | local_cloudbuild.Step( 252 | args=['/bin/sh', '-c', 'printenv MESSAGE'], 253 | dir_='', 254 | env=['MESSAGE=Hello World!'], 255 | name='debian', 256 | ), 257 | local_cloudbuild.Step( 258 | args=['/bin/sh', '-c', 'printenv MESSAGE'], 259 | dir_='', 260 | env=['MESSAGE=Goodbye\\n And Farewell!', 'UNUSED=unused'], 261 | name='debian', 262 | ) 263 | ], 264 | substitutions=local_cloudbuild.DEFAULT_SUBSTITUTIONS, 265 | ) 266 | actual = local_cloudbuild.generate_script(cloudbuild) 267 | # Compare output against golden 268 | with open(expected_output_script, 'r', encoding='utf8') as expected_file: 269 | expected = expected_file.read() 270 | assert actual == expected 271 | 272 | 273 | def test_generate_script_unused_user_substitution(): 274 | cloudbuild = local_cloudbuild.CloudBuild( 275 | output_script='', 276 | run=False, 277 | steps=[], 278 | substitutions={'_FOO': '_foo'}, 279 | ) 280 | with pytest.raises(ValueError, match='User substitution variables'): 281 | local_cloudbuild.generate_script(cloudbuild) 282 | 283 | 284 | def test_make_executable(tmpdir): 285 | test_script_filename = tmpdir.join('test_make_executable.sh') 286 | with test_script_filename.open('w', encoding='utf8') as test_script: 287 | test_script.write('#!/bin/sh\necho "Output from test_make_executable"') 288 | local_cloudbuild.make_executable(str(test_script_filename)) 289 | output = subprocess.check_output([str(test_script_filename)]) 290 | assert output.decode('utf8') == "Output from test_make_executable\n" 291 | 292 | 293 | def test_write_script(tmpdir): 294 | contents = 'The contents\n' 295 | output_script_filename = tmpdir.join('test_write_script') 296 | cloudbuild = local_cloudbuild.CloudBuild( 297 | output_script=str(output_script_filename), 298 | run=False, 299 | steps=[], 300 | substitutions={}, 301 | ) 302 | local_cloudbuild.write_script(cloudbuild, contents) 303 | with output_script_filename.open('r', encoding='utf8') as output_script: 304 | actual = output_script.read() 305 | assert actual == contents 306 | 307 | 308 | @contextlib.contextmanager 309 | def chdir(new_dir): 310 | """Not threadsafe""" 311 | old_dir = os.getcwd() 312 | os.chdir(new_dir) 313 | yield 314 | os.chdir(old_dir) 315 | 316 | 317 | @pytest.mark.parametrize('config_name, substitutions, exception, cleanup', [ 318 | # Everything is ok 319 | ('cloudbuild_ok.yaml', None, None, True), 320 | # Builtin substitutions like $PROJECT_ID work 321 | ('cloudbuild_builtin_substitutions.yaml', None, None, True), 322 | # User substitutions like $_FOO work 323 | ('cloudbuild_user_substitutions.yaml', 324 | {'_FOO': 'this is foo value'}, 325 | None, True 326 | ), 327 | # User substitutions like $_FOO fails when undefined 328 | ('cloudbuild_user_substitutions.yaml', None, ValueError, False), 329 | # Exit code 1 (failure) 330 | ('cloudbuild_err_rc1.yaml', None, subprocess.CalledProcessError, True), 331 | # Command not found 332 | ('cloudbuild_err_not_found.yaml', None, subprocess.CalledProcessError, 333 | True), 334 | # Cleaning up files owned by root 335 | ('cloudbuild_difficult_cleanup.yaml', None, None, True), 336 | ]) 337 | def test_local_cloudbuild(testdata_dir, tmpdir, config_name, substitutions, 338 | exception, cleanup): 339 | if not have_docker(): 340 | 
pytest.fail('This test requires a working Docker daemon') 341 | 342 | check_call_output = None 343 | 344 | def check_call(*args, **kw_args): 345 | """Act like subprocess.check_call but store stdout""" 346 | nonlocal check_call_output 347 | try: 348 | check_call_output = subprocess.check_output(*args, **kw_args) 349 | print(check_call_output) 350 | except subprocess.CalledProcessError as e: 351 | check_call_output = e.output 352 | print(check_call_output) 353 | raise 354 | 355 | # Read cloudbuild.yaml from testdata file, write output to 356 | # tempdir, and maybe try to run it 357 | with unittest.mock.patch('subprocess.check_call', check_call): 358 | if substitutions is None: 359 | substitutions = local_cloudbuild.DEFAULT_SUBSTITUTIONS 360 | should_succeed = (exception is None) 361 | config = os.path.join(testdata_dir, config_name) 362 | actual_output_script = tmpdir.join(config_name + '_local.sh') 363 | args = argparse.Namespace( 364 | config=config, 365 | output_script=str(actual_output_script), 366 | run=True, 367 | substitutions=substitutions, 368 | ) 369 | 370 | # The source directory of the build is currently hardcoded as 371 | # '.', so we must chdir there. 372 | with chdir(testdata_dir): 373 | if should_succeed: 374 | local_cloudbuild.local_cloudbuild(args) 375 | else: 376 | with pytest.raises(exception): 377 | local_cloudbuild.local_cloudbuild(args) 378 | 379 | # Check that staging dir was cleaned up 380 | if cleanup: 381 | assert check_call_output is not None 382 | match = re.search(STAGING_DIR_REGEX, check_call_output) 383 | assert match 384 | staging_dir = match.group(1) 385 | assert not os.path.isdir(staging_dir) 386 | 387 | 388 | @pytest.mark.parametrize('argv, expected', [ 389 | # Test explicit output_script 390 | (['argv0', '--output_script=my_output'], 'my_output'), 391 | # Test implicit output_script 392 | (['argv0', '--config=my_config'], 'my_config_local.sh'), 393 | ]) 394 | def test_parse_args_output_script(argv, expected): 395 | args = local_cloudbuild.parse_args(argv) 396 | assert args.output_script == expected 397 | 398 | 399 | @pytest.mark.parametrize('argv, expected', [ 400 | # Test run flag (default) 401 | (['argv0'], True), 402 | (['argv0', '--no-run'], False), 403 | ]) 404 | def test_parse_args_run_flag(argv, expected): 405 | args = local_cloudbuild.parse_args(argv) 406 | assert args.run == expected 407 | 408 | 409 | if __name__ == '__main__': 410 | pytest.main([__file__]) 411 | --------------------------------------------------------------------------------
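A minimal usage sketch for scripts/local_cloudbuild.py (illustrative only; this is not a file in the repository above, and it assumes it is run from the scripts/ directory so that `import local_cloudbuild` resolves):

#!/usr/bin/env python3
# Illustrative sketch only -- not part of the repository. Builds one Step by
# hand and asks generate_command() for the equivalent `docker run` invocation,
# using only names defined in local_cloudbuild.py above.

import local_cloudbuild

# Equivalent to a cloudbuild.yaml entry such as:
#   steps:
#   - name: debian
#     args: ['/bin/sh', '-c', 'echo Hello $PROJECT_ID']
step = local_cloudbuild.Step(
    args=['/bin/sh', '-c', 'echo Hello $PROJECT_ID'],
    dir_='',
    env=[],
    name='debian',
)

used = set()
command = local_cloudbuild.generate_command(
    step, local_cloudbuild.DEFAULT_SUBSTITUTIONS, used)

# Prints the docker run command that generate_script() would embed in the
# generated shell script: $PROJECT_ID becomes the dummy default
# 'dummy-project-id' and each token is shell-quoted.
print(' '.join(command))
print('Substitutions used:', sorted(used))

# The command-line equivalent of the full flow, writing the build script but
# not executing it, would be roughly:
#   python3 local_cloudbuild.py --config=cloudbuild.yaml --no-run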