├── .devcontainer ├── Dockerfile └── devcontainer.json ├── .gitattributes ├── .github ├── ISSUE_TEMPLATE │ ├── bug.md │ ├── feature.md │ ├── proposal.md │ ├── question.md │ ├── task.md │ └── tinytask.md └── workflows │ └── pull_request.yml ├── .gitignore ├── .prospector.yaml ├── .vscode ├── extensions.json ├── launch.json └── tasks.json ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── GOVERNANCE.md ├── LICENSE.txt ├── MAINTAINERS.md ├── MANIFEST.in ├── NOTICE.txt ├── README.md ├── ci ├── Dockerfile ├── evaluate_docs.py ├── release_tests.py ├── test_commit_message.py └── test_files_touched.py ├── docker ├── Dockerfile └── Dockerfile.scancode ├── docker_run.sh ├── docs ├── adding-to-command-library.md ├── architecture.md ├── creating-custom-templates.md ├── creating-tool-extensions.md ├── data-model.md ├── examples │ ├── photon-cdx.json │ ├── photon-spdx.json │ ├── photon.html │ ├── photon.json │ ├── photon.spdx │ ├── photon.txt │ └── photon.yaml ├── faq.md ├── glossary.md ├── img │ ├── arch.png │ ├── tern_data_model.png │ ├── tern_demo_fast.gif │ ├── tern_flow.png │ └── tern_logo.png ├── navigating-the-code.md ├── project-roadmap-archive.md ├── project-roadmap.md ├── releases │ ├── release_checklist.md │ ├── v0_1_0.md │ ├── v0_2_0.md │ ├── v0_3_0.md │ ├── v0_4_0.md │ ├── v0_5_0-requirements.txt │ ├── v0_5_0.md │ ├── v0_5_4-requirements.txt │ ├── v0_5_4.md │ ├── v1_0_0-requirements.txt │ ├── v1_0_0.md │ ├── v1_0_1-requirements.txt │ ├── v1_0_1.md │ ├── v2_0_0-requirements.txt │ ├── v2_0_0.md │ ├── v2_10_0-requirements.txt │ ├── v2_10_0.md │ ├── v2_10_1-requirements.txt │ ├── v2_10_1.md │ ├── v2_11_0-requirements.txt │ ├── v2_11_0.md │ ├── v2_12_0-requirements.txt │ ├── v2_12_0.md │ ├── v2_12_1-requirements.txt │ ├── v2_12_1.md │ ├── v2_1_0-requirements.txt │ ├── v2_1_0.md │ ├── v2_2_0-requirements.txt │ ├── v2_2_0.md │ ├── v2_3_0-requirements.txt │ ├── v2_3_0.md │ ├── v2_4_0-requirements.txt │ ├── v2_4_0.md │ ├── v2_5_0-requirements.txt │ ├── v2_5_0.md │ ├── 
v2_6_1-requirements.txt │ ├── v2_6_1.md │ ├── v2_7_0-requirements.txt │ ├── v2_7_0.md │ ├── v2_8_0-requirements.txt │ ├── v2_8_0.md │ ├── v2_9_0-requirements.txt │ ├── v2_9_0.md │ ├── v2_9_1-requirements.txt │ └── v2_9_1.md ├── spdx-tag-value-mapping.md ├── spdx-tag-value-overview.md ├── tern-lab-tutorial.md └── yaml_output.md ├── requirements.in ├── requirements.txt ├── samples ├── alpine_python │ └── Dockerfile ├── debian_vim │ └── Dockerfile ├── photon_3_layers │ └── Dockerfile ├── photon_git │ └── Dockerfile ├── photon_openjre │ └── Dockerfile └── single_stage_tern │ └── Dockerfile ├── setup.cfg ├── setup.py ├── tern ├── __init__.py ├── __main__.py ├── analyze │ ├── __init__.py │ ├── common.py │ ├── default │ │ ├── __init__.py │ │ ├── bundle.py │ │ ├── collect.py │ │ ├── command_lib │ │ │ ├── __init__.py │ │ │ ├── base.yml │ │ │ ├── command_lib.py │ │ │ ├── common.yml │ │ │ └── snippets.yml │ │ ├── container │ │ │ ├── __init__.py │ │ │ ├── image.py │ │ │ ├── multi_layer.py │ │ │ ├── run.py │ │ │ └── single_layer.py │ │ ├── core.py │ │ ├── debug │ │ │ ├── __init__.py │ │ │ └── run.py │ │ ├── default_common.py │ │ ├── dockerfile │ │ │ ├── __init__.py │ │ │ ├── lock.py │ │ │ ├── parse.py │ │ │ └── run.py │ │ ├── filter.py │ │ └── live │ │ │ ├── __init__.py │ │ │ ├── collect.py │ │ │ └── run.py │ └── passthrough.py ├── classes │ ├── __init__.py │ ├── command.py │ ├── docker_image.py │ ├── file_data.py │ ├── image.py │ ├── image_layer.py │ ├── notice.py │ ├── notice_origin.py │ ├── oci_image.py │ ├── origins.py │ ├── package.py │ └── template.py ├── extensions │ ├── __init__.py │ ├── cve_bin_tool │ │ ├── __init__.py │ │ └── executor.py │ ├── executor.py │ └── scancode │ │ ├── __init__.py │ │ └── executor.py ├── formats │ ├── __init__.py │ ├── consumer.py │ ├── cyclonedx │ │ ├── __init__.py │ │ ├── cyclonedx_common.py │ │ └── cyclonedxjson │ │ │ ├── __init__.py │ │ │ ├── generator.py │ │ │ ├── image_helpers.py │ │ │ └── package_helpers.py │ ├── default │ │ ├── 
__init__.py │ │ └── generator.py │ ├── generator.py │ ├── html │ │ ├── __init__.py │ │ └── generator.py │ ├── json │ │ ├── __init__.py │ │ ├── consumer.py │ │ └── generator.py │ ├── spdx │ │ ├── __init__.py │ │ ├── spdx.py │ │ ├── spdx_common.py │ │ ├── spdxjson │ │ │ ├── __init__.py │ │ │ ├── consumer.py │ │ │ ├── file_helpers.py │ │ │ ├── formats.py │ │ │ ├── generator.py │ │ │ ├── image_helpers.py │ │ │ ├── layer_helpers.py │ │ │ └── package_helpers.py │ │ └── spdxtagvalue │ │ │ ├── __init__.py │ │ │ ├── file_helpers.py │ │ │ ├── formats.py │ │ │ ├── generator.py │ │ │ ├── image_helpers.py │ │ │ ├── layer_helpers.py │ │ │ └── package_helpers.py │ └── yaml │ │ ├── __init__.py │ │ └── generator.py ├── load │ ├── __init__.py │ ├── docker_api.py │ └── skopeo.py ├── prep.py ├── report │ ├── __init__.py │ ├── content.py │ ├── errors.py │ ├── formats.py │ └── report.py ├── scripts │ └── debian │ │ ├── apt_get_sources.sh │ │ └── jessie │ │ └── sources.list ├── tools │ └── fs_hash.sh └── utils │ ├── __init__.py │ ├── cache.py │ ├── constants.py │ ├── general.py │ ├── host.py │ └── rootfs.py ├── tests ├── dockerfiles │ ├── buildpack_deps_jessie_arg │ ├── buildpack_deps_jessie_curl │ ├── buildpack_deps_jessie_pinned │ ├── debian_buster_apt │ ├── fail_build │ ├── golang_1.13_stretch │ ├── pin_add_command_test │ │ ├── pin_add_command_test_dockerfile │ │ └── plain_file │ └── split_shell_script │ │ ├── buildpack_deps_buster │ │ ├── buildpack_deps_buster_scm │ │ └── debian_buster_slim ├── test_analyze_common.py ├── test_analyze_default_common.py ├── test_analyze_default_dockerfile_parse.py ├── test_analyze_default_filter.py ├── test_class_command.py ├── test_class_docker_image.py ├── test_class_file_data.py ├── test_class_image.py ├── test_class_image_layer.py ├── test_class_notice.py ├── test_class_notice_origin.py ├── test_class_oci_image.py ├── test_class_origins.py ├── test_class_package.py ├── test_class_template.py ├── test_fixtures.py ├── test_load_docker_api.py └── 
test_util_general.py ├── tox.ini └── vagrant ├── Vagrantfile └── bootstrap.sh /.devcontainer/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM mcr.microsoft.com/vscode/devcontainers/python:3.9 2 | 3 | RUN apt-get update \ 4 | && export DEBIAN_FRONTEND=noninteractive \ 5 | && apt-get -y --no-install-recommends install \ 6 | attr \ 7 | # Require a more recent version of fuse-overlayfs 8 | && echo 'deb http://deb.debian.org/debian unstable main' >> /etc/apt/sources.list \ 9 | && apt-get update \ 10 | && apt-get -y --no-install-recommends install \ 11 | fuse-overlayfs \ 12 | && rm -rf /var/lib/apt/lists/* -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Python 3", 3 | "dockerFile": "Dockerfile", 4 | "runArgs": ["--privileged"], 5 | "mounts": ["source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind"], 6 | "extensions": ["ms-python.python"], 7 | "settings": { 8 | "terminal.integrated.shell.linux": "/bin/bash", 9 | "python.pythonPath": "/usr/local/bin/python", 10 | "python.linting.enabled": true, 11 | "python.linting.pylintEnabled": true, 12 | "python.linting.pylintPath": "/usr/local/py-utils/bin/pylint" 13 | }, 14 | "postCreateCommand": "pip3 install --user -r requirements.txt", 15 | } 16 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | * text=auto eol=lf -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug Report 3 | about: Report bugs you notice while using Tern 4 | 5 | --- 6 | 7 | **Describe the bug** 8 | A clear and concise description of what the 
bug is. 9 | 10 | **To Reproduce** 11 | Steps to reproduce the behavior: 12 | 1. Go to '...' 13 | 2. Click on '....' 14 | 3. Scroll down to '....' 15 | 4. See error 16 | 17 | **Error in terminal** 18 | ``` 19 | Paste your terminal output here 20 | ``` 21 | 22 | **Expected behavior** 23 | A clear and concise description of what you expected to happen. 24 | 25 | **Environment you are running Tern on** 26 | Enter all that apply 27 | - Output of 'tern --version' 28 | - Operating System (Linux Distro and version or Mac or Windows) 29 | - Vagrant file 30 | - Container OS 31 | - Python version (3.6 or higher) 32 | - Cloud environment (AWS, Azure, GCP) 33 | 34 | **Please attach files if they exist** 35 | - tern.log 36 | - report.* 37 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature Request 3 | about: Request a Feature 4 | 5 | --- 6 | 7 | **Describe the Feature** 8 | A clear and concise description of what you would like Tern to do for you. 9 | 10 | **Use Cases** 11 | Describe the use case(s) where this feature would be helpful. 12 | 13 | **Implementation Changes** 14 | Describe what changes you think would need to happen to the code to implement this feature 15 | ``` 16 | pseudocode or python code is fine here 17 | ``` 18 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/proposal.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Proposal 3 | about: Propose a change to the project. This covers changes to docs, file organization or CI/CD. 4 | 5 | --- 6 | 7 | **Problem Statement** 8 | Describe the problem you are facing with the project. 9 | 10 | **Describe the Proposal** 11 | Describe the change you would like to see in the project. 
12 | 13 | **Steps to Implement Proposal** 14 | List steps required to implement the change you would like to see. 15 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/question.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Question 3 | about: Ask a question about the project 4 | 5 | --- 6 | 7 | **Please read the Code of Conduct before you proceed** 8 | [Code of Conduct](/CODE_OF_CONDUCT.md) 9 | 10 | **Ask your question here** 11 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/task.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Task 3 | about: This is a task that is work towards one or more issues 4 | --- 5 | 6 | **Description** 7 | Description of the task to complete 8 | 9 | **To Do** 10 | List the steps to complete the task 11 | 12 | **Background** 13 | List issues for background on the task 14 | 15 | **Super Issues** 16 | Enter the issue numbers this issue is work towards resolving 17 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/tinytask.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Tiny Task 3 | about: This is a very small task - around a couple of lines of code change 4 | --- 5 | 6 | **Description** 7 | Description of the task to complete 8 | 9 | **Implementation** 10 | List the steps to complete the task 11 | 12 | **References** 13 | Some places in the code base to look for reference implementations if any 14 | 15 | **Super Issues** 16 | Enter the issue numbers this issue is work towards resolving 17 | -------------------------------------------------------------------------------- /.github/workflows/pull_request.yml: -------------------------------------------------------------------------------- 1 | name: Pull Request 
Lint and Test 2 | on: pull_request 3 | 4 | jobs: 5 | Prospector_Linting: 6 | runs-on: ubuntu-latest 7 | steps: 8 | - uses: actions/checkout@v1 #equivlent to running git fetch and git checkout latest 9 | - uses: actions/setup-python@v1 # setup python3 environment 10 | with: 11 | python-version: '3.8' 12 | - name: Setup 13 | run: | 14 | sudo apt-get install -y attr 15 | pip install --upgrade pip 16 | pip install . 17 | - name: Prospector Linting 18 | run: | 19 | pip install prospector>=1.5.1 20 | prospector --version 21 | prospector 22 | Commit_Message_Linting: 23 | runs-on: ubuntu-latest 24 | steps: 25 | - uses: actions/checkout@v1 26 | - uses: actions/setup-python@v1 27 | with: 28 | python-version: '3.8' 29 | - name: Setup 30 | run: | 31 | sudo apt-get install -y attr 32 | pip install --upgrade pip 33 | pip install . 34 | - name: Commit Message Linting 35 | run: python ci/test_commit_message.py 36 | Security_Linting: 37 | runs-on: ubuntu-latest 38 | steps: 39 | - uses: actions/checkout@v1 40 | - uses: actions/setup-python@v1 41 | with: 42 | python-version: '3.8' 43 | - name: Setup 44 | run: | 45 | sudo apt-get install -y attr 46 | pip install --upgrade pip 47 | pip install . 48 | - name: Security Linting 49 | run: | 50 | pip install bandit>=1.6 51 | c=`python ci/evaluate_docs.py`; if [ -z $c ]; then echo "No .py files to lint"; else echo $c | xargs bandit; fi 52 | Test_Changes: 53 | runs-on: ubuntu-latest 54 | steps: 55 | - uses: actions/checkout@v1 56 | - uses: actions/setup-python@v1 57 | with: 58 | python-version: '3.8' 59 | - name: Setup 60 | run: | 61 | sudo apt-get update && sudo apt-get install -y attr openjdk-8-jdk-headless maven 62 | pip install --upgrade pip 63 | pip install . 
64 | docker pull photon:3.0 && docker save photon:3.0 > photon.tar 65 | # build SPDX validation tool from source 66 | git clone https://github.com/spdx/tools-java.git && cd tools-java 67 | export JAVA_HOME=$(readlink -f /usr/bin/javac | sed "s:/bin/javac::") 68 | mvn clean install && cd .. 69 | - name: Test Changes 70 | run: python ci/test_files_touched.py 71 | Test_Coverage: 72 | runs-on: ubuntu-latest 73 | steps: 74 | - uses: actions/checkout@v1 75 | - uses: actions/setup-python@v1 76 | with: 77 | python-version: '3.8' 78 | - name: Setup 79 | run: | 80 | sudo apt-get install -y attr 81 | pip install --upgrade pip 82 | pip install . 83 | - name: Test Coverage 84 | run: | 85 | pip install coverage 86 | pip install . 87 | coverage run -m unittest discover -s tests 88 | coverage report 89 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .ropeproject 2 | __pycache__ 3 | *.pyc 4 | sources 5 | temp 6 | report.txt 7 | report.yml 8 | report.json 9 | tern.log 10 | cache.yml 11 | vagrant/.* 12 | .tox 13 | .coverage 14 | build/ 15 | *.egg-info -------------------------------------------------------------------------------- /.prospector.yaml: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Copyright © 2020, VMware, Inc. All rights reserved. 
3 | # SPDX-License-Identifier: BSD-2-Clause 4 | 5 | strictness: medium 6 | test-warnings: false 7 | 8 | pycodestyle: 9 | full: true 10 | disable: 11 | - N802 # pep8-naming: function name should be lower case 12 | 13 | pydocstyle: 14 | run: false 15 | 16 | pylint: 17 | disable: 18 | - consider-using-f-string 19 | - global-variable-not-assigned 20 | - consider-iterating-dictionary 21 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": ["ms-python.python"] 3 | } 4 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2.0", 3 | "configurations": [ 4 | { 5 | "name": "Tern", 6 | "type": "python", 7 | "preLaunchTask": "build", 8 | "request": "launch", 9 | "program": "tern", 10 | "args": [ 11 | "--driver", 12 | "fuse", 13 | "--clear-cache", 14 | "report", 15 | "--docker-image", 16 | "ubuntu:latest" 17 | ], 18 | "cwd": "${workspaceFolder}", 19 | "env": { "PYTHONPATH": "${cwd}" } 20 | } 21 | ] 22 | } 23 | -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0.0", 3 | "tasks": [ 4 | { 5 | "label": "build", 6 | "command": "python", 7 | "args": ["setup.py", "build"] 8 | } 9 | ] 10 | } -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to making participation in our project and 7 | our community a 
harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, gender identity and expression, level of experience, 9 | education, socio-economic status, nationality, personal appearance, race, 10 | religion, or sexual identity and orientation. 11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | * Using welcoming and inclusive language 18 | * Being respectful of differing viewpoints and experiences 19 | * Gracefully accepting constructive criticism 20 | * Focusing on what is best for the community 21 | * Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | * The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | * Trolling, insulting/derogatory comments, and personal or political attacks 28 | * Public or private harassment 29 | * Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | * Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies both within project spaces and in public spaces 49 | when an individual is representing the project or its community. 
Examples of 50 | representing a project or community include using an official project e-mail 51 | address, posting via an official social media account, or acting as an appointed 52 | representative at an online or offline event. Representation of a project may be 53 | further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by sending an email to nishak@vmware.com and/or tpepper@vmware.com. All 59 | complaints will be reviewed and investigated and will result in a response that 60 | is deemed necessary and appropriate to the circumstances. The project team is 61 | obligated to maintain confidentiality with regard to the reporter of an incident. 62 | Further details of specific enforcement policies may be posted separately. 63 | 64 | Project maintainers who do not follow or enforce the Code of Conduct in good 65 | faith may face temporary or permanent repercussions as determined by other 66 | members of the project's leadership. 67 | 68 | ## Attribution 69 | 70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html 72 | 73 | [homepage]: https://www.contributor-covenant.org 74 | 75 | -------------------------------------------------------------------------------- /GOVERNANCE.md: -------------------------------------------------------------------------------- 1 | # Tern Governance 2 | 3 | This document defines the project governance for Tern. 4 | 5 | ## Overview 6 | 7 | **Tern**, an open source project, is committed to building an open, inclusive, productive and self-governing open source community focused on building a high-quality and high performance container inspection tool. The community is governed by this document with the goal of defining how community should work together to achieve this goal. 
8 | 9 | ## Community Roles 10 | 11 | * **Users:** Members that engage with the Tern community via any medium (Slack, GitHub, mailing lists, etc.). 12 | * **Contributors:** Regular contributions to projects (documentation, code reviews, responding to issues, participation in proposal discussions, contributing code, etc.). 13 | * **Maintainers**: The Tern project leaders. They are responsible for the overall health and direction of the project; final reviewers of PRs and responsible for releases. Maintainers are expected to contribute code and documentation, review PRs including ensuring quality of code, triage issues, proactively fix bugs, and perform maintenance tasks for Tern. 14 | 15 | ### Maintainers 16 | 17 | New maintainers must be nominated by an existing maintainer and must be elected by a supermajority of existing maintainers. Likewise, maintainers can be removed by a supermajority of the existing maintainers or can resign by notifying one of the other maintainers. 18 | 19 | ### Decision Making 20 | 21 | Ideally, all project decisions are resolved by consensus. If impossible, any maintainer may call a vote. Unless otherwise specified in this document, any vote will be decided by a supermajority of maintainers. 22 | 23 | ## Proposal Process 24 | 25 | One of the most important aspects in any open source community is the concept of proposals. Large changes to the codebase and / or new features should be preceded by a proposal in the Tern repo via issue. This process allows for all members of the community to weigh in on the concept (including the technical details), share their comments and ideas, and offer to help. It also ensures that members are not duplicating work or inadvertently stepping on toes by making large conflicting changes. 26 | 27 | The project roadmap is defined by accepted proposals. 28 | 29 | Proposals should cover the high-level objectives, use cases, and technical recommendations on how to implement. 
In general, the community member(s) interested in implementing the proposal should be either deeply engaged in the proposal process or be an author of the proposal. 30 | 31 | 32 | ## Lazy Consensus 33 | 34 | To maintain velocity in the Tern project, the concept of [Lazy Consensus](http://en.osswiki.info/concepts/lazy_consensus) is practiced. Lazy Consensus is practiced for all projects and decisions in the `tern-tools` org. 35 | 36 | Lazy consensus does _not_ apply to the process of: 37 | * Removal of maintainers from Tern 38 | * Governance changes 39 | * Adding new maintainers to Tern 40 | 41 | For the above mentioned processes, consensus between @rnjudge and @nishakm is required until there are enough maintainers for a supermajority vote. 42 | 43 | ## Updating Governance 44 | 45 | All substantive changes in Governance require a supermajority agreement by all maintainers. 46 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | BSD 2-Clause License 2 | 3 | Copyright (c) 2017 VMware, Inc. All rights reserved 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | 1. Redistributions of source code must retain the above copyright notice, this 9 | list of conditions and the following disclaimer. 10 | 11 | 2. Redistributions in binary form must reproduce the above copyright notice, 12 | this list of conditions and the following disclaimer in the documentation 13 | and/or other materials provided with the distribution. 14 | 15 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 16 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 17 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 18 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 19 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 20 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 21 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 22 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 23 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 24 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 25 | -------------------------------------------------------------------------------- /MAINTAINERS.md: -------------------------------------------------------------------------------- 1 | # Tern Maintainers 2 | 3 | [GOVERNANCE.md](https://github.com/tern-tools/tern/blob/main/GOVERNANCE.md) 4 | describes governance guidelines and maintainer responsibilities. 5 | 6 | ## Maintainers 7 | 8 | | Maintainer | GitHub ID | Affiliation | 9 | | --------------- | --------- | ----------- | 10 | | Nisha Kumar | [nishakm](https://github.com/nishakm) | [Oracle](https://www.github.com/oracle/) | 11 | | Rose Judge | [rnjudge](https://github.com/rnjudge/) | [VMware](https://www.github.com/vmware/) | 12 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include tern/command_lib/*.yml 2 | include tern/tools/fs_hash.sh 3 | recursive-include tern/scripts *.sh *.list 4 | prune ci 5 | prune tests 6 | -------------------------------------------------------------------------------- /NOTICE.txt: -------------------------------------------------------------------------------- 1 | Tern 2 | 3 | Copyright (c) 2017 VMware, Inc. All Rights Reserved. 4 | 5 | This product is licensed to you under the BSD-2 license (the "License"). You may not use this product except in compliance with the BSD-2 License. 
6 | 7 | This product may include a number of subcomponents with separate copyright notices and license terms. Your use of these subcomponents is subject to the terms and conditions of the subcomponent's license, as noted in the LICENSE file. 8 | 9 | -------------------------------------------------------------------------------- /ci/Dockerfile: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2019-2021 VMware, Inc. All Rights Reserved. 2 | # SPDX-License-Identifier: BSD-2-Clause 3 | 4 | FROM python:3.9-slim-buster as base 5 | 6 | FROM base as builder 7 | 8 | RUN mkdir /install 9 | WORKDIR /install 10 | 11 | COPY dist/tern-*.tar.gz . 12 | RUN pip install --no-warn-script-location --prefix=/install \ 13 | tern-*.tar.gz 14 | 15 | FROM base 16 | 17 | RUN echo "deb http://deb.debian.org/debian bullseye main" > /etc/apt/sources.list.d/bullseye.list \ 18 | && echo "Package: *\nPin: release n=bullseye\nPin-Priority: 50" > /etc/apt/preferences.d/bullseye \ 19 | && apt-get update \ 20 | && apt-get install -y --no-install-recommends \ 21 | attr \ 22 | findutils \ 23 | fuse-overlayfs/bullseye \ 24 | fuse3/bullseye \ 25 | git \ 26 | jq \ 27 | skopeo \ 28 | && rm -rf /var/lib/apt/lists/* 29 | 30 | COPY --from=builder /install /usr/local 31 | 32 | ENTRYPOINT ["tern"] 33 | CMD ["--help"] 34 | -------------------------------------------------------------------------------- /ci/evaluate_docs.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Copyright (c) 2019-2020 VMware, Inc. All Rights Reserved. 
4 | # SPDX-License-Identifier: BSD-2-Clause 5 | 6 | from git import Repo 7 | from git import GitCommandError 8 | import os 9 | import sys 10 | 11 | # This is meant to run within CI Integration 12 | # Print out only .py files that have changed 13 | # Pipe to any linting tools 14 | # Note that some linting tools will lint everything if the output 15 | # of this script is nothing 16 | 17 | repo = Repo(os.getcwd()) 18 | try: 19 | repo.git.remote('add', 'upstream', 20 | 'https://github.com/tern-tools/tern.git') 21 | except GitCommandError: 22 | pass 23 | repo.git.fetch('upstream') 24 | 25 | hcommit = repo.head.commit 26 | diff = hcommit.diff('upstream/main') 27 | 28 | if not diff: 29 | sys.exit(0) 30 | 31 | for d in diff: 32 | if os.path.exists(d.b_path) and (d.b_path)[-3:] == '.py': 33 | print(d.b_path) 34 | -------------------------------------------------------------------------------- /ci/release_tests.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # SPDX-License-Identifier: BSD-2-Clause 3 | 4 | tests = [ 5 | 'tern report -i photon:3.0', 6 | 'python3 setup.py sdist && ' 7 | 'docker build -t ternd -f ci/Dockerfile . 
&& ' 8 | './docker_run.sh ternd "report -i golang:alpine"', 9 | 'tern report -i golang:alpine', 10 | 'python tests/test_class_command.py', 11 | 'python tests/test_class_file_data.py', 12 | 'python tests/test_class_image.py', 13 | 'python tests/test_class_image_layer.py', 14 | 'python tests/test_class_notice.py', 15 | 'python tests/test_class_notice_origin.py', 16 | 'python tests/test_class_origins.py', 17 | 'python tests/test_class_package.py', 18 | 'python tests/test_class_template.py', 19 | 'tern report -f spdxtagvalue -i photon:3.0', 20 | 'tern lock samples/single_stage_tern/Dockerfile', 21 | 'tern report -i debian:buster', 22 | 'tern report -i alpine:3.9', 23 | 'tern report -i archlinux:latest', 24 | 'tern report -i centos:7', 25 | 'tern report -i node:12.16-alpine', 26 | 'python tests/test_analyze_default_dockerfile_parse.py', 27 | 'python tests/test_analyze_common.py', 28 | 'tern report -i golang:alpine', 29 | 'tern report -d samples/alpine_python/Dockerfile', 30 | 'tern report -w photon.tar', 31 | 'python tests/test_load_docker_api.py', 32 | 'tern report -f yaml -i photon:3.0', 33 | 'tern report -f json -i photon:3.0', 34 | 'tern report -f spdxjson -i photon:3.0', 35 | 'tern report -f html -i photon:3.0', 36 | 'tern report -f spdxtagvalue -i photon:3.0 -o spdx.spdx && ' 37 | 'java -jar tools-java/target/tools-java-*-jar-with-dependencies.jar ' 38 | 'Verify spdx.spdx', 39 | 'tern report -f spdxjson -i photon:3.0 -o spdx.json && ' 40 | 'java -jar tools-java/target/tools-java-*-jar-with-dependencies.jar ' 41 | 'Verify spdx.json', 42 | 'python tests/test_util_general.py', 43 | 'python tests/test_analyze_default_filter.py', 44 | 'python tests/test_class_command.py', 45 | 'python tests/test_class_docker_image.py' 46 | ] 47 | -------------------------------------------------------------------------------- /ci/test_commit_message.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Copyright (c) 
2019-2020 VMware, Inc. All Rights Reserved. 4 | # SPDX-License-Identifier: BSD-2-Clause 5 | 6 | from git import Repo 7 | from git import GitCommandError 8 | import os 9 | import re 10 | import sys 11 | 12 | 13 | # This script is written to be used with CI Integration 14 | 15 | def has_url(string): 16 | # findall() has been used 17 | # with valid conditions for urls in string 18 | regex = r"((?:https?:\/\/|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}\/)" \ 19 | r"(?:[^\s()<>]+|\((?:[^\s()<>]+|(?:\([^\s()<>]+\)))*\))+" \ 20 | r"(?:\((?:[^\s()<>]+|(?:\([^\s()<>]+\)))*\)|" \ 21 | r"[^\s`!()\[\]{};:'\".,<>?«»“”‘’]))" 22 | urls = re.findall(regex, string) 23 | return bool(len(urls) > 0) 24 | 25 | 26 | def lint_commit(commit_id): 27 | """Given a commit ID, determine if the 28 | commit message complies with the tern guidelines. If 29 | it does not comply, output the offending commit and 30 | specify the reason for failure.""" 31 | 32 | r = Repo(os.getcwd()) 33 | check = True 34 | message = r.commit(commit_id).message 35 | sha_short = r.git.rev_parse(commit_id, short=7) 36 | 37 | # Check 1: Subject, body and DCO exist 38 | # Note that git does not allow for empty subjects 39 | msg_list = re.split('\n\n|\r', message) 40 | commit_subject = msg_list[0] 41 | if len(msg_list) <= 2: 42 | print("Commit {} does not have a body.".format(sha_short)) 43 | check = False 44 | try: 45 | # pop the subject and signature 46 | msg_list.pop(0) 47 | msg_list.pop() 48 | except IndexError: 49 | pass 50 | 51 | # Check 2: Subject length is less than about 50 52 | if len(commit_subject) > 54: 53 | print( 54 | "The subject of commit {} should be 50 characters or less.".format( 55 | sha_short)) 56 | check = False 57 | 58 | # Check 3: Each line of the body is less than 72 59 | for msg in msg_list: 60 | for line in msg.split('\n'): 61 | if has_url(line) or line.startswith("[CM]") or line.startswith("[LINK]"): 62 | print("Line contains url(s)/compiler messages. Skipping . . 
.\n" 63 | "Line: {0}\n" 64 | "Commit: {1}\n\n".format(line, sha_short)) 65 | continue 66 | if len(line) > 72: 67 | print("Line exceeds 72 characters.\n" 68 | "Line: {0}\n" 69 | "Commit: {1}\n\n".format(line, sha_short)) 70 | check = False 71 | 72 | if check: 73 | print("Commit message checks pass") 74 | sys.exit(0) 75 | else: 76 | sys.exit(1) 77 | 78 | 79 | if __name__ == '__main__': 80 | # Get the list of commits and loop through them, 81 | # inputting each one into the linting function 82 | 83 | # We are in the 'tern' directory 84 | repo = Repo(os.getcwd()) 85 | try: 86 | repo.git.remote( 87 | 'add', 'upstream', 'https://github.com/tern-tools/tern.git') 88 | except GitCommandError: 89 | pass 90 | repo.git.fetch('upstream') 91 | # Will return commit IDs differentiating HEAD and main 92 | commitstr = repo.git.rev_list('HEAD', '^upstream/main', no_merges=True) 93 | # If we are on the project's main branch then there will be no 94 | # difference and the result will be an empty string 95 | # So we will not proceed if there is no difference 96 | if commitstr: 97 | commits = commitstr.split('\n') 98 | for commit_id in commits: 99 | lint_commit(commit_id) 100 | -------------------------------------------------------------------------------- /docker/Dockerfile: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2019-2021 VMware, Inc. All Rights Reserved. 
2 | # SPDX-License-Identifier: BSD-2-Clause 3 | 4 | FROM python:3.9-slim-buster as base 5 | 6 | FROM base as builder 7 | 8 | RUN mkdir /install 9 | WORKDIR /install 10 | 11 | RUN pip install --no-warn-script-location --prefix=/install \ 12 | tern 13 | 14 | FROM base 15 | 16 | RUN echo "deb http://deb.debian.org/debian bullseye main" > /etc/apt/sources.list.d/bullseye.list \ 17 | && echo "Package: *\nPin: release n=bullseye\nPin-Priority: 50" > /etc/apt/preferences.d/bullseye \ 18 | && apt-get update \ 19 | && apt-get install -y --no-install-recommends \ 20 | attr \ 21 | findutils \ 22 | fuse-overlayfs/bullseye \ 23 | fuse3/bullseye \ 24 | git \ 25 | jq \ 26 | skopeo \ 27 | && rm -rf /var/lib/apt/lists/* 28 | 29 | COPY --from=builder /install /usr/local 30 | 31 | ENTRYPOINT ["tern"] 32 | CMD ["--help"] 33 | -------------------------------------------------------------------------------- /docker/Dockerfile.scancode: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2019-2021 VMware, Inc. All Rights Reserved. 
2 | # SPDX-License-Identifier: BSD-2-Clause 3 | 4 | FROM python:3.9-slim-buster as base 5 | 6 | FROM base as builder 7 | 8 | RUN mkdir /install 9 | WORKDIR /install 10 | 11 | RUN apt-get update \ 12 | && apt-get install -y --no-install-recommends \ 13 | build-essential \ 14 | && rm -rf /var/lib/apt/lists/* 15 | 16 | RUN pip install --no-warn-script-location --prefix=/install \ 17 | tern \ 18 | scancode-toolkit[full] 19 | 20 | FROM base 21 | 22 | RUN echo "deb http://deb.debian.org/debian bullseye main" > /etc/apt/sources.list.d/bullseye.list \ 23 | && echo "Package: *\nPin: release n=bullseye\nPin-Priority: 50" > /etc/apt/preferences.d/bullseye \ 24 | && apt-get update \ 25 | && apt-get install -y --no-install-recommends \ 26 | attr \ 27 | findutils \ 28 | fuse-overlayfs/bullseye \ 29 | fuse3/bullseye \ 30 | git \ 31 | jq \ 32 | libgomp1 \ 33 | skopeo \ 34 | && rm -rf /var/lib/apt/lists/* 35 | 36 | COPY --from=builder /install /usr/local 37 | 38 | ENTRYPOINT ["tern"] 39 | CMD ["--help"] 40 | -------------------------------------------------------------------------------- /docker_run.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # 3 | # Copyright (c) 2019-2021 VMware, Inc. All Rights Reserved. 
4 | # SPDX-License-Identifier: BSD-2-Clause 5 | # 6 | # Script to run Tern within a prebuilt Docker container 7 | # Assume the Tern Docker container exists on the host 8 | # The script will make a directory that you provide 9 | # It will then run a docker container in privileged mode and bind mount to the directory 10 | # 11 | # Usage: ./docker_run.sh > output.txt 12 | # Example: ./docker_run.sh ternd "report -i golang:alpine" > output.txt 13 | 14 | docker run -v /var/run/docker.sock:/var/run/docker.sock --rm $1 $2 15 | -------------------------------------------------------------------------------- /docs/creating-tool-extensions.md: -------------------------------------------------------------------------------- 1 | # Creating a Tool Extension 2 | You can use Tern with another file or filesystem analysis tool to analyze container images. You can find examples of such tools in the `tern/extensions` folder. Currently two external tools are supported: 3 | * [scancode-toolkit](https://github.com/nexB/scancode-toolkit): A license scanning tool that finds licenses in source code and binaries. Although support for formatting is not in place at the moment, it is something that will be completed in subsequent releases. 4 | * [cve-bin-tool](https://github.com/intel/cve-bin-tool): A security vulnerability scanning tool that finds common vulnerabilities. Note that although you can use a security scanner with Tern, there isn't any support for reporting the results beyond printing them to console. This may change as the industry demand for security information in Software Bill of Materials seems to be on the rise. 5 | 6 | If you would like to make a tool extension, here are some general steps to follow: 7 | 8 | ## 1. Familiarize yourself with Tern's Data Model 9 | 10 | The classes for the objects that are used to keep discovered metadata are in the `tern/classes` folder. Check out the [data model document](./data-model.md) for a general layout of the classes. 
Refer to the individual files for a list of properties. These store the supported metadata. If you do not see the metadata you are interested in, please submit a proposal issue to add this property to the appropriate class. This should be a reasonably trivial change with minimal effect on backwards compatibility. 11 | 12 | ## 2. Create a plugin 13 | 14 | To create a plugin for the tool, create a folder under `tern/extensions` with the plugin name. Create an empty `__init__.py` file here and create a file called `executor.py`. This file will contain a class which is derived from the abstract base class `executor.py` located under `tern/extensions`. The `Executor` class requires you to implement the method `execute` which takes an object of type `Image` or any of its derived classes (for example `DockerImage`). You can use this method to call a library function or run a CLI command to collect the required information. Once done, you can set the properties of the `Image` object and the objects within it (see the data model as a reference. See the `.py` files in `tern/classes` for a list of properties you can set. 15 | 16 | You can refer to the existing plugins as a guide for implementing the `execute` method of your executor class. There are helper methods in `tern/analyze/passthrough.py` which you can make use of, or write your own implementation if you need to. 17 | 18 | ## 3. Test your plugin 19 | 20 | To test your plugin, add the plugin to `setup.cfg` under `tern.extensions`. For example, let's say you have created a plugin called "custom" to run a custom script. Your plugin's `executor.py` should live in `tern/extensions/custom`. 
You will add the plugin as follows: 21 | 22 | ``` 23 | tern.extensions = 24 | cve_bin_tool = tern.extensions.cve_bin_tool.executor:CveBinTool 25 | scancode = tern.extensions.scancode.executor:Scancode 26 | custom = tern.extensions.custom.executor:Custom 27 | ``` 28 | 29 | To test out your plugin run: 30 | 31 | ``` 32 | $ pip install -e.[dev] 33 | $ tern report -x custom -i <image> 34 | ``` 35 | 36 | To test out the different formats for your plugin run: 37 | 38 | ``` 39 | $ tern report -x custom -f <format> -i <image> 40 | ``` 41 | 42 | where `<format>` is one of Tern's supported formats. To see what formats are supported, run `tern report --help`. 43 | 44 | If you need a custom report format please refer to the document on [creating custom report formats](./creating-custom-templates.md) 45 | 46 | [Back to the README](../README.md) 47 | -------------------------------------------------------------------------------- /docs/data-model.md: -------------------------------------------------------------------------------- 1 | # Tern's Data Model 2 | 3 | Tern stores metadata about the image and messages during operation in objects described here. The overall data model looks like this: 4 | 5 | ![Tern data model](./img/tern_data_model.png) 6 | 7 | The main class is `Image` and its derived classes. This class contains a list of type `ImageLayer`. `ImageLayer` contains a list of type `Package`. `Image`, `ImageLayer` and `Package` contain a property called `origins` which is an object of type `Origins`. This class is used to record notes while Tern operates on an image such as what tools were used to retrieve the metadata or if the filesystem is of unknown content. `Origins` contains a list of type `NoticeOrigin` which contains a string and a list of type `Notice`. The `Notice` objects are where messages get recorded.
You can easily record a message in the `origins` property of the `Image`, `ImageLayer` and `Package` types of objects by using the `add_notice_to_origins` method which just adds a `Notice` object to the `NoticeOrigin` object containing the origin string you give it ("origin_str" is basically a string indicating where in the image or analysis stage an event that you want recorded occurred). 8 | 9 | You will also see a class called `Template`. This is an abstract base class used to make custom formats. To learn more see the [documentation on creating custom formats](./creating-custom-templates.md). 10 | 11 | [Back to the README](../README.md) 12 | -------------------------------------------------------------------------------- /docs/faq.md: -------------------------------------------------------------------------------- 1 | # FAQ (Work in Progress) 2 | 3 | ## Why Tern? 4 | Tern was created to help developers meet open source compliance requirements for containers. Open source software compliance is a hard problem in general but it gets harder with containers due to the ability to reuse diff filesystems. How those filesystems were created is still an ad hoc process. If you happen to have a LWN subscription you can read an article about it [here](https://lwn.net/Articles/752982/). 5 | 6 | The first step in meeting open source compliance obligations is knowing your container's Bill of Materials or BoM. This knowledge gives you some added benefits like debugging integration and build errors and identifying vulnerable packages in your running containers. 7 | 8 | ## Why not filesystem scanning? 9 | Static analysis is a reasonable approach to find software components and there are tools like [clair](https://github.com/coreos/clair) that create a BoM as part of vulnerability scanning. 
Some things to consider when using static analysis are the number of false positives that are detected, the time it takes to scan numerous files (some of which may not even be needed for an application to work) and the reliance on data that may not be open sourced. Tern is not meant to be a replacement for static analysis but simply a tool that automates some of the methods that developers and sysadmins use anyway. 10 | 11 | ## Why Python? 12 | Python is well suited for easy string formatting which is most of the work that Tern does. 13 | 14 | [Back to the README](../README.md) 15 | -------------------------------------------------------------------------------- /docs/glossary.md: -------------------------------------------------------------------------------- 1 | # Glossary of Terms 2 | 3 | - Command Library: Tern references a database of shell commands to determine what packages got installed. This is called the "Command Library". 4 | - Report: the artifact produced after running Tern. This is either a text document or a machine readable format. 5 | - Image: A container image, typically created by [Docker](https://www.docker.com/) or following the [OCI image specification](https://github.com/opencontainers/image-spec/blob/master/spec.md) 6 | - Layer: A root filesystem or the difference between a previous filesystem and a new filesystem as created by storage drivers like AUFS or OverlayFS. See the [OCI Image Layer specification](https://github.com/opencontainers/image-spec/blob/master/layer.md) for a general overview of how layer filesystems are created. 7 | - Package: A software package or library 8 | - Notice: A record of an incident that Tern came across during execution 9 | - Notice Origin: The location from which the Notice came. This can be the container or Dockerfile or Command Library or something in the development environment. 10 | - Cache: A database that associates container layer filesystems with the packages that were installed on them. 
Currently this is only represented by a yaml file and some CRUD operations against it. 11 | - Dockerfile: A file containing instructions to the [Docker](https://docs.docker.com/engine/reference/commandline/build/) daemon on how to build a container image. 12 | - Extension: An external tool Tern can use to analyze a container image 13 | 14 | [Back to the README](../README.md) 15 | -------------------------------------------------------------------------------- /docs/img/arch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tern-tools/tern/717ea47be7310d055b86fb1b80d39fb472c0ddbf/docs/img/arch.png -------------------------------------------------------------------------------- /docs/img/tern_data_model.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tern-tools/tern/717ea47be7310d055b86fb1b80d39fb472c0ddbf/docs/img/tern_data_model.png -------------------------------------------------------------------------------- /docs/img/tern_demo_fast.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tern-tools/tern/717ea47be7310d055b86fb1b80d39fb472c0ddbf/docs/img/tern_demo_fast.gif -------------------------------------------------------------------------------- /docs/img/tern_flow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tern-tools/tern/717ea47be7310d055b86fb1b80d39fb472c0ddbf/docs/img/tern_flow.png -------------------------------------------------------------------------------- /docs/img/tern_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tern-tools/tern/717ea47be7310d055b86fb1b80d39fb472c0ddbf/docs/img/tern_logo.png -------------------------------------------------------------------------------- 
/docs/project-roadmap.md: -------------------------------------------------------------------------------- 1 | # Project Road Map 2 | 3 | Tern is an open source project with limited resources. In 2023 we will primarily focus on bug fixing and step back from active feature development with the exception of adding SPDX generation to Tern using the [spdx-tools](https://pypi.org/project/spdx-tools/) library instead of the in-house SPDX model. 4 | 5 | We will continue to support the SPDX format for container images. To that end, we will make changes to update the format of the document as the [spec](https://spdx.github.io/spdx-spec/) evolves. 6 | 7 | This timetable is based on time, resources and feedback from you and will change accordingly. 8 | 9 | See archived roadmaps [here](project-roadmap-archive.md) 10 | 11 | [Back to the README](../README.md) 12 | -------------------------------------------------------------------------------- /docs/releases/release_checklist.md: -------------------------------------------------------------------------------- 1 | # Release Checklist 2 | 3 | This is a checklist for cutting a release 4 | 5 | - [ ] Prepare Release PR. 6 | * Freeze development on main. 7 | * Prepare your local development environment by committing or stashing your changes. Work at the tip of main. 8 | * Create a branch for the release: `git checkout -b `. 9 | * In a separate folder, create a fresh environment and activate it. 10 | * Clone the `tern/main` repository by running `git clone --single-branch git@github.com:tern-tools/tern.git` and `cd` into it. 11 | 12 | - [ ] Update direct dependencies and run tests. 13 | * In the fresh environment, run `pip install wheel pip-tools twine`. 14 | * Run `pip-compile --upgrade --output-file upgrade.txt`. 15 | * Compare the module versions in upgrade.txt with requirements.txt in the development environment. Bump up versions if needed. 16 | * In the fresh environment, run `pip install .` to install tern. 
17 | * Run appropriate tests. Roll back requirements if necessary. 18 | * When satisfied, run `pip-compile --generate-hashes --output-file v-requirements.txt` where is of the form `major_minor_patch`. 19 | * Copy this file to the `docs/releases/` folder in the development environment. 20 | 21 | - [ ] Write release notes. 22 | * In the development environment, create a new file for the release notes: `docs/releases/v.md` 23 | * If you are writing release notes for a patched release, only include: 24 | - A link to the primary release notes. 25 | - A brief summary of what the patched release changes do. 26 | - A list of patches since the last release was cut. You can get this information by running `git log --oneline` and finding the commits since the tag. 27 | 28 | * For any other release, include the following in your notes: 29 | - Summary 30 | - New Features (if any) 31 | - Deprecated Features (if any) 32 | - Bug Fixes (if any) 33 | - Resolved Technical Debt (if any) 34 | - Future Work 35 | - Changelog 36 | * "Note: This changelog will not include these release notes" 37 | * "Changelog produced by command: `git log --pretty=format:"%h %s" v..HEAD`" 38 | - Contributors (look at Authors in the changelog `git log --pretty=format:"%an %ae" v..HEAD | sort | uniq`). Remove the maintainers name from the contributor list. 39 | - Contact the Maintainers 40 | 41 | * Update the Project Status part of the README.md to reflect this release and add it to the list of releases. 42 | 43 | - [ ] Commit release notes and submit a PR 44 | * `git add` and `git commit` any changes. This will likely include`v-requirements.txt`, any changes to `requirements.txt` and `v.md`. 45 | * Open a pull request in the Tern project repository for your release changes. 46 | * Request a review from another maintainer. Update PR as needed based on feedback. Merge the PR. This commit is where the release will be tagged. 47 | 48 | - [ ] Tag release on GitHub. 49 | * Navigate to the Tern GitHub page. 
Click on `Releases`. Click on `Draft a new release` to add a new tag. The `tag version` should be `v`. `Release title` field should be `Release `. 50 | * Provide a link to the release notes. 51 | 52 | - [ ] Deploy to PyPI 53 | * Run the following steps in the fresh environment where you first cloned tern/main. 54 | * Run `git fetch --tags` to get the release tag. 55 | * Run `git checkout -b release `. 56 | * Run `pip-compile`. 57 | * Run `python setup.py sdist bdist_wheel`. 58 | * Run `twine check dist/*`. 59 | * Run `twine upload dist/*`. Here enter username and password and verify via 2FA. 60 | 61 | - [ ] Test pip package. 62 | * Create a fresh environment. 63 | * Pip install tern. 64 | * Run appropriate tests. 65 | 66 | - [ ] Prepare sources tarball. 67 | * In the release environment, create a new directory called `vendor`. 68 | * Run `pip download -d vendor --require-hashes --no-binary :all: -r docs/releases/v-requirements.txt`. 69 | * Run `tar cvzf tern--vendor.tar.gz vendor/`. 70 | * Upload the vendor tarball to the GitHub release page. 71 | 72 | - [ ] Upload the wheel package to the GitHub release page. The wheel package can be found under the `dist/` directory in the environment where you first cloned tern/main or it can be downloaded for the PyPI release page. 73 | -------------------------------------------------------------------------------- /docs/releases/v0_1_0.md: -------------------------------------------------------------------------------- 1 | # Release v0.1.0 2 | 3 | ## Summary 4 | This is Tern's first release. 5 | 6 | ## Notes 7 | * Tern now uses Docker CLI only when working with Docker images. At this time, only images built by Docker are supported. 8 | * Tern uses overlayfs to step through container image filesystem layers. OverlayFS is supported in kernel version 4.0 or higher. 9 | * Tern requires root privileges to run because it needs to mount procfs in order to run commands within a chroot environment and call the Docker CLI. 
It is enough if you have configured sudo; Tern will ask for your password before running any priviledged commands. 10 | * You can give Tern a Dockerfile to build and inspect the resulting image. This is helpful either in a development environment or during build and release. This gives you some understanding on how your Dockerfile affects the packages installed in your container during a build. 11 | ``` 12 | $ ./tern report -d 13 | ``` 14 | * You can give Tern a local Docker image that you have pulled from a registry like Dockerhub. Tern will try to give as much information as it can obtain from the container image. 15 | ``` 16 | $ ./tern report -i 17 | ``` 18 | * The Dockerfile and shell parser are still rudimentary. Please file an issue if it does not work for your Dockerfile or image. 19 | * The Command Library is still small. If you would like to contribute to it, please read our [contributing guidelines](CONTRIBUTING.md) and [this document](docs/adding-to-command-library.md). 20 | -------------------------------------------------------------------------------- /docs/releases/v0_3_0.md: -------------------------------------------------------------------------------- 1 | # Release 0.3.0 2 | 3 | ## Summary 4 | This release is much smaller than the previous releases due to holidays in December and work towards conference CFPs. Much as I would have liked to have gotten some big ticket items done this time around, I think we've achieved a really big step in this release by containerizing Tern. This is one feature that a lot of organizations interested in integrating Tern wanted. It makes sense due to infrastructure moving towards running microservices in containers and connecting them together. Tern can still run in a desktop environment, so this is added functionality. 5 | 6 | Also, since Tern is now part of the Linux Foundation, the roadmap has some additional features that need to be factored in. 
See the [project roadmap](/docs/project-roadmap.md) for more details on how the project benefits from this change. 7 | 8 | ## New Features 9 | 10 | - Most of the changes went towards making Tern run in a container. This resulted in the addition of a Dockerfile and instructions to try Tern out using Docker. Here are the functional changes to make this happen: 11 | ``` 12 | 7dbe6c0 Added Dockerfile and script to run in a container 13 | 6f7c0be Report out to stdout by default 14 | 10edeb2 Added -b option to work with bind mount directory 15 | 7cc9518 Enabled console logging as an option 16 | ff99864 Remove logging's console StreamHandler 17 | 6e75f82 Call fs_hash.sh tool using root_command wrapper 18 | 389bafd Pull docker image if not on disk 19 | 6e46f5d Convert Docker CLI calls to Docker API calls 20 | ``` 21 | - The [Precaution app](https://github.com/apps/precaution) was enabled on the project, which caught some assert issues, and some warnings on using the subprocess module. Hence there is a commit to replace `assert` with `if-else` raising a `TypeError`, and one that contains a bunch of `# nosec` comments to ignore these warnings. 22 | ``` 23 | cf3146a Dismiss security issues for subprocess and random 24 | aca9441 Fixed issues discovered by security linter 25 | ``` 26 | - A listing for binary `tdnf` was added to the Command Library. 27 | - There were also a number of changes that were targeted towards project hygiene and readability. 28 | ``` 29 | 659ee65 Added a demo gif 30 | e275193 Updated docs and project roadmap 31 | 0cebdd2 Refactor: Move all source code to src folder 32 | ``` 33 | ## Bug Fixes 34 | * [Tern crashes when running in a container](https://github.com/vmware/tern/issues/153) 35 | * [Tern report fails with error: 'Origins' object has no attribute 'add_notice_origins'](https://github.com/vmware/tern/issues/142) 36 | 37 | ## Future Work 38 | Release 0.4.0 will be focused on enabling SPDX formatted output. 
See the [project roadmap](/docs/project-roadmap.md) for more details. 39 | 40 | ## Changelog 41 | 42 | Note: this changelog will not include these release notes 43 | 44 | ``` 45 | 659ee65 Added a demo gif 46 | e275193 Updated docs and project roadmap 47 | cf3146a Dismiss security issues for subprocess and random 48 | 0cebdd2 Refactor: Move all source code to src folder 49 | aca9441 Fixed issues discovered by security linter 50 | 7dbe6c0 Added Dockerfile and script to run in a container 51 | 6f7c0be Report out to stdout by default 52 | 10edeb2 Added -b option to work with bind mount directory 53 | 1473fa9 Added listing for tdnf command 54 | 7cc9518 Enabled console logging as an option 55 | ff99864 Remove logging's console StreamHandler 56 | 6e75f82 Call fs_hash.sh tool using root_command wrapper 57 | 389bafd Pull docker image if not on disk 58 | 6e46f5d Convert Docker CLI calls to Docker API calls 59 | e6017a8 Updated git stash commands in dealing with cache 60 | 0a77e2d Rename docker.py to docker_helpers.py 61 | 7650ac8 Fix typo for adding a notice to list of origins 62 | ``` 63 | 64 | ## Contact the Maintainer 65 | 66 | Email: nishak@vmware.com 67 | 68 | Twitter: @nishakmr 69 | -------------------------------------------------------------------------------- /docs/releases/v0_5_0-requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile 3 | # To update, run: 4 | # 5 | # pip-compile --output-file=v0_5_0-requirements.txt 6 | # 7 | certifi==2019.6.16 # via requests 8 | chardet==3.0.4 # via requests 9 | docker==4.0.2 10 | idna==2.8 # via requests 11 | pyyaml==5.1.2 12 | requests==2.22.0 13 | six==1.12.0 # via docker, websocket-client 14 | urllib3==1.25.3 # via requests 15 | websocket-client==0.56.0 # via docker 16 | -------------------------------------------------------------------------------- /docs/releases/v0_5_4-requirements.txt: 
-------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile 3 | # To update, run: 4 | # 5 | # pip-compile --output-file=v0_5_0-requirements.txt 6 | # 7 | certifi==2019.6.16 # via requests 8 | chardet==3.0.4 # via requests 9 | docker==4.0.2 10 | idna==2.8 # via requests 11 | pyyaml==5.1.2 12 | requests==2.22.0 13 | six==1.12.0 # via docker, websocket-client 14 | urllib3==1.25.3 # via requests 15 | websocket-client==0.56.0 # via docker 16 | -------------------------------------------------------------------------------- /docs/releases/v0_5_4.md: -------------------------------------------------------------------------------- 1 | # Release 0.5.4 2 | 3 | Please see the [Release 0.5.0 release notes](v0_5_0.md) for details on the first cut. 4 | 5 | Patches on top of v0.5.0 involve fixing our deployment pipeline. 6 | 7 | ## Patches 8 | ``` 9 | 6607e82 docs: Release notes for release 0.5.3 10 | a12fe9c ci/cd: Update description key in setup.cfg 11 | 74cc9fb docs: Release notes for release 0.5.2 12 | ee99d6a ci/cd: Fix typo in pypirc file 13 | 6db943d docs: Release notes for release 0.5.1 14 | 28d7683 ci/cd: Remove 'verify' custom command 15 | ``` 16 | -------------------------------------------------------------------------------- /docs/releases/v1_0_0-requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile 3 | # To update, run: 4 | # 5 | # pip-compile --generate-hashes --output-file=docs/releases/v1_0_0-requirements.txt 6 | # 7 | certifi==2019.9.11 \ 8 | --hash=sha256:e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50 \ 9 | --hash=sha256:fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef \ 10 | # via requests 11 | chardet==3.0.4 \ 12 | --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ 13 | 
--hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 \ 14 | # via requests 15 | docker==4.1.0 \ 16 | --hash=sha256:6e06c5e70ba4fad73e35f00c55a895a448398f3ada7faae072e2bb01348bafc1 \ 17 | --hash=sha256:8f93775b8bdae3a2df6bc9a5312cce564cade58d6555f2c2570165a1270cd8a7 18 | idna==2.8 \ 19 | --hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \ 20 | --hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c \ 21 | # via requests 22 | pbr==5.4.3 \ 23 | --hash=sha256:2c8e420cd4ed4cec4e7999ee47409e876af575d4c35a45840d59e8b5f3155ab8 \ 24 | --hash=sha256:b32c8ccaac7b1a20c0ce00ce317642e6cf231cf038f9875e0280e28af5bf7ac9 25 | pyyaml==5.1.2 \ 26 | --hash=sha256:0113bc0ec2ad727182326b61326afa3d1d8280ae1122493553fd6f4397f33df9 \ 27 | --hash=sha256:01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4 \ 28 | --hash=sha256:5124373960b0b3f4aa7df1707e63e9f109b5263eca5976c66e08b1c552d4eaf8 \ 29 | --hash=sha256:5ca4f10adbddae56d824b2c09668e91219bb178a1eee1faa56af6f99f11bf696 \ 30 | --hash=sha256:7907be34ffa3c5a32b60b95f4d95ea25361c951383a894fec31be7252b2b6f34 \ 31 | --hash=sha256:7ec9b2a4ed5cad025c2278a1e6a19c011c80a3caaac804fd2d329e9cc2c287c9 \ 32 | --hash=sha256:87ae4c829bb25b9fe99cf71fbb2140c448f534e24c998cc60f39ae4f94396a73 \ 33 | --hash=sha256:9de9919becc9cc2ff03637872a440195ac4241c80536632fffeb6a1e25a74299 \ 34 | --hash=sha256:a5a85b10e450c66b49f98846937e8cfca1db3127a9d5d1e31ca45c3d0bef4c5b \ 35 | --hash=sha256:b0997827b4f6a7c286c01c5f60384d218dca4ed7d9efa945c3e1aa623d5709ae \ 36 | --hash=sha256:b631ef96d3222e62861443cc89d6563ba3eeb816eeb96b2629345ab795e53681 \ 37 | --hash=sha256:bf47c0607522fdbca6c9e817a6e81b08491de50f3766a7a0e6a5be7905961b41 \ 38 | --hash=sha256:f81025eddd0327c7d4cfe9b62cf33190e1e736cc6e97502b3ec425f574b3e7a8 39 | requests==2.22.0 \ 40 | --hash=sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4 \ 41 | 
--hash=sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31 42 | six==1.12.0 \ 43 | --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \ 44 | --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \ 45 | # via docker, stevedore, websocket-client 46 | stevedore==1.31.0 \ 47 | --hash=sha256:01d9f4beecf0fbd070ddb18e5efb10567801ba7ef3ddab0074f54e3cd4e91730 \ 48 | --hash=sha256:e0739f9739a681c7a1fda76a102b65295e96a144ccdb552f2ae03c5f0abe8a14 49 | urllib3==1.25.6 \ 50 | --hash=sha256:3de946ffbed6e6746608990594d08faac602528ac7015ac28d33cee6a45b7398 \ 51 | --hash=sha256:9a107b99a5393caf59c7aa3c1249c16e6879447533d0887f4336dde834c7be86 \ 52 | # via requests 53 | websocket-client==0.56.0 \ 54 | --hash=sha256:1151d5fb3a62dc129164292e1227655e4bbc5dd5340a5165dfae61128ec50aa9 \ 55 | --hash=sha256:1fd5520878b68b84b5748bb30e592b10d0a91529d5383f74f4964e72b297fd3a \ 56 | # via docker 57 | -------------------------------------------------------------------------------- /docs/releases/v1_0_1-requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile 3 | # To update, run: 4 | # 5 | # pip-compile --generate-hashes --output-file=docs/releases/v1_0_1-requirements.txt 6 | # 7 | certifi==2019.11.28 \ 8 | --hash=sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3 \ 9 | --hash=sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f \ 10 | # via requests 11 | chardet==3.0.4 \ 12 | --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ 13 | --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 \ 14 | # via requests 15 | docker==4.1.0 \ 16 | --hash=sha256:6e06c5e70ba4fad73e35f00c55a895a448398f3ada7faae072e2bb01348bafc1 \ 17 | --hash=sha256:8f93775b8bdae3a2df6bc9a5312cce564cade58d6555f2c2570165a1270cd8a7 18 | idna==2.8 \ 19 | 
--hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \ 20 | --hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c \ 21 | # via requests 22 | pbr==5.4.4 \ 23 | --hash=sha256:139d2625547dbfa5fb0b81daebb39601c478c21956dc57e2e07b74450a8c506b \ 24 | --hash=sha256:61aa52a0f18b71c5cc58232d2cf8f8d09cd67fcad60b742a60124cb8d6951488 25 | pyyaml==5.2 \ 26 | --hash=sha256:0e7f69397d53155e55d10ff68fdfb2cf630a35e6daf65cf0bdeaf04f127c09dc \ 27 | --hash=sha256:2e9f0b7c5914367b0916c3c104a024bb68f269a486b9d04a2e8ac6f6597b7803 \ 28 | --hash=sha256:35ace9b4147848cafac3db142795ee42deebe9d0dad885ce643928e88daebdcc \ 29 | --hash=sha256:38a4f0d114101c58c0f3a88aeaa44d63efd588845c5a2df5290b73db8f246d15 \ 30 | --hash=sha256:483eb6a33b671408c8529106df3707270bfacb2447bf8ad856a4b4f57f6e3075 \ 31 | --hash=sha256:4b6be5edb9f6bb73680f5bf4ee08ff25416d1400fbd4535fe0069b2994da07cd \ 32 | --hash=sha256:7f38e35c00e160db592091751d385cd7b3046d6d51f578b29943225178257b31 \ 33 | --hash=sha256:8100c896ecb361794d8bfdb9c11fce618c7cf83d624d73d5ab38aef3bc82d43f \ 34 | --hash=sha256:c0ee8eca2c582d29c3c2ec6e2c4f703d1b7f1fb10bc72317355a746057e7346c \ 35 | --hash=sha256:e4c015484ff0ff197564917b4b4246ca03f411b9bd7f16e02a2f586eb48b6d04 \ 36 | --hash=sha256:ebc4ed52dcc93eeebeae5cf5deb2ae4347b3a81c3fa12b0b8c976544829396a4 37 | requests==2.22.0 \ 38 | --hash=sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4 \ 39 | --hash=sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31 40 | six==1.13.0 \ 41 | --hash=sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd \ 42 | --hash=sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66 \ 43 | # via docker, stevedore, websocket-client 44 | stevedore==1.31.0 \ 45 | --hash=sha256:01d9f4beecf0fbd070ddb18e5efb10567801ba7ef3ddab0074f54e3cd4e91730 \ 46 | --hash=sha256:e0739f9739a681c7a1fda76a102b65295e96a144ccdb552f2ae03c5f0abe8a14 47 | urllib3==1.25.7 \ 48 | 
--hash=sha256:a8a318824cc77d1fd4b2bec2ded92646630d7fe8619497b142c84a9e6f5a7293 \ 49 | --hash=sha256:f3c5fd51747d450d4dcf6f923c81f78f811aab8205fda64b0aba34a4e48b0745 \ 50 | # via requests 51 | websocket-client==0.56.0 \ 52 | --hash=sha256:1151d5fb3a62dc129164292e1227655e4bbc5dd5340a5165dfae61128ec50aa9 \ 53 | --hash=sha256:1fd5520878b68b84b5748bb30e592b10d0a91529d5383f74f4964e72b297fd3a \ 54 | # via docker 55 | -------------------------------------------------------------------------------- /docs/releases/v1_0_1.md: -------------------------------------------------------------------------------- 1 | # Release 1.0.1 2 | 3 | Please see the [Release 1.0.0 release notes](v1_0_0.md) for details on the first cut. 4 | 5 | Patches on top of v1.0.0 involve fixing a regression when running Tern inside of a container. 6 | 7 | ## Patches 8 | ``` 9 | 7c74676 cleanup: remove unnecessary blank lines 10 | 1e54353 Delete bind_mount argument from clean_working_dir 11 | e1f11b0 Set working dir based on --bind_mount CLI option 12 | d52bfd1 Clean working dir the same for --bind-mount option 13 | b7cf1c1 Update Dockerfile to run in container 14 | 61a881d Change bind_mount CLI option to store value 15 | a7f4dbf Dockerfile: add tar dependency 16 | 52daca5 release: New post release flow and corrections 17 | 56b18b9 docs: Tern Lab tweaks 18 | 245d6fe docs: install `wheel` as part of setup in the lab 19 | 267497d docs: add git to the install dependencies in the lab tutorial 20 | 88e7d8e docs: fix a typo in the lab tutorial 21 | 791c260 docs: fix JSON report generation command in lab tutorial 22 | ``` 23 | -------------------------------------------------------------------------------- /docs/releases/v2_0_0-requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile 3 | # To update, run: 4 | # 5 | # pip-compile --generate-hashes --output-file=docs/releases/v2_0_0-requirements.txt 6 | # 7 | 
certifi==2019.11.28 \ 8 | --hash=sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3 \ 9 | --hash=sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f \ 10 | # via requests 11 | chardet==3.0.4 \ 12 | --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ 13 | --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 \ 14 | # via requests 15 | docker==4.2.0 \ 16 | --hash=sha256:1c2ddb7a047b2599d1faec00889561316c674f7099427b9c51e8cb804114b553 \ 17 | --hash=sha256:ddae66620ab5f4bce769f64bcd7934f880c8abe6aa50986298db56735d0f722e \ 18 | # via -r requirements.in 19 | dockerfile-parse==0.0.16 \ 20 | --hash=sha256:1e3c6f190eff204ab232ebba34d2f5c68591d22a27a9606bf2612c17499ec30b \ 21 | # via -r requirements.in 22 | idna==2.9 \ 23 | --hash=sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb \ 24 | --hash=sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa \ 25 | # via requests 26 | pbr==5.4.4 \ 27 | --hash=sha256:139d2625547dbfa5fb0b81daebb39601c478c21956dc57e2e07b74450a8c506b \ 28 | --hash=sha256:61aa52a0f18b71c5cc58232d2cf8f8d09cd67fcad60b742a60124cb8d6951488 \ 29 | # via -r requirements.in, stevedore 30 | pyyaml==5.3.1 \ 31 | --hash=sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97 \ 32 | --hash=sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76 \ 33 | --hash=sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2 \ 34 | --hash=sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648 \ 35 | --hash=sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf \ 36 | --hash=sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f \ 37 | --hash=sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2 \ 38 | --hash=sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee \ 39 | 
--hash=sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d \ 40 | --hash=sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c \ 41 | --hash=sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a \ 42 | # via -r requirements.in 43 | requests==2.23.0 \ 44 | --hash=sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee \ 45 | --hash=sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6 \ 46 | # via -r requirements.in, docker 47 | six==1.14.0 \ 48 | --hash=sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a \ 49 | --hash=sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c \ 50 | # via docker, dockerfile-parse, stevedore 51 | stevedore==1.32.0 \ 52 | --hash=sha256:18afaf1d623af5950cc0f7e75e70f917784c73b652a34a12d90b309451b5500b \ 53 | --hash=sha256:a4e7dc759fb0f2e3e2f7d8ffe2358c19d45b9b8297f393ef1256858d82f69c9b \ 54 | # via -r requirements.in 55 | urllib3==1.25.8 \ 56 | --hash=sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc \ 57 | --hash=sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc \ 58 | # via requests 59 | websocket-client==0.57.0 \ 60 | --hash=sha256:0fc45c961324d79c781bab301359d5a1b00b13ad1b10415a4780229ef71a5549 \ 61 | --hash=sha256:d735b91d6d1692a6a181f2a8c9e0238e5f6373356f561bb9dc4c7af36f452010 \ 62 | # via docker 63 | -------------------------------------------------------------------------------- /docs/releases/v2_10_1.md: -------------------------------------------------------------------------------- 1 | # Release 2.10.1 2 | 3 | This is a patched release to address a few important bugs and enhancements. Please see the [Release 2.10.0 release notes](v2_10_0.md) for details on the first cut. 
4 | 5 | Specifically, patches on top of v2.10.0 in this release do the following: 6 | * [Fix rootfs error executing command in chroot in Ubuntu container](https://github.com/tern-tools/tern/issues/1161) 7 | * [Fix wrong case for 'FilesAnalyzed' false value](https://github.com/tern-tools/tern/issues/1170) 8 | * [Use license text instead of LicenseRef when possible for SPDX output](https://github.com/tern-tools/tern/issues/1147) 9 | * [Fix License file formatting to be machine readable](https://github.com/tern-tools/tern/pull/1167) 10 | * [Enable Kaniko image analysis](https://github.com/tern-tools/tern/pull/1162) 11 | 12 | ## Changelog 13 | ``` 14 | 9baf731 Clean up comments for SPDX files 15 | 878aafb Use license text instead of LicenseRef if possible 16 | ad64968 Use correct case for 'false'. Resolves: #1170 17 | edbb48c Fix pylint unnecessary-list-index-lookup 18 | 43fd06f Add subdir for tar files in the working dir 19 | 00a6ac9 Make License data properly normalized 20 | 8b73fa4 Remove license refs for validated SPDX licenses 21 | d935c74 Move SPDX license check to `spdx_common` 22 | 184a6d2 Update `licenseDeclared` to output license info (json) 23 | 6de507c Update PackageLicenseDeclared to output license info (tag/value) 24 | 048ae80 Fix error executing in chroot in Ubuntu container 25 | 27868a3 Fix license file formatting to be machine readable 26 | ``` 27 | 28 | ## Contributors 29 | ``` 30 | Ivana Atanasova iyovcheva@vmware.com 31 | Marc-Etienne Vargenau marc-etienne.vargenau@nokia.com 32 | Roger Lehmann roger.lehmann@newtron.de 33 | ``` 34 | 35 | 36 | -------------------------------------------------------------------------------- /docs/releases/v2_11_0.md: -------------------------------------------------------------------------------- 1 | # Release 2.11.0 2 | 3 | ## Summary 4 | This is a small but mighty release with several important bug fixes and much needed updates to dependencies. 
Specifically, this release adds functionality to pull local images using Skopeo. Previously, Tern would always pull the image provided using the `report -i` option from a registry. Additionally, this release updates the SPDX reports to include license information from Debian packages. Previously, Tern omitted this information as it was not provided by a package manager but, rather, abstracted from copyright text. Tern will now use the license text found in Debian copyright files and list it as the declared package license as either an SPDX license identifier (if applicable), or, as an SPDX LicenseRef. 5 | 6 | **NOTE**: At the time this release was cut there was an open CVE for GitPython ([CVE-2022-24439](https://nvd.nist.gov/vuln/detail/CVE-2022-24439)) with no fix available. As soon as a fix is available, Tern will publish a patched release to include an updated version of GitPython. 7 | 8 | ## New Features 9 | * [Add functionality to pull local images with skopeo](https://github.com/tern-tools/tern/pull/1198): In order to analyze local images with Tern, prefix your local image with `docker-daemon:` when generating a report. 
10 | * [Include license info for deb pkgs in SPDX reports](https://github.com/tern-tools/tern/issues/1188) 11 | 12 | ## Bug Fixes 13 | * [Replace invalid license key characters](https://github.com/tern-tools/tern/issues/1199) 14 | 15 | ## Technical Debt 16 | * [Update debian-inspector requirement](https://github.com/tern-tools/tern/issues/1185) 17 | * [Update SPDX license list to 3.19](https://github.com/tern-tools/tern/pull/1192) 18 | 19 | ## Changelog 20 | Note: This changelog will not include these release notes 21 | 22 | Changelog generated by command: `git log --pretty=format:"%h %s" v2.10.1..HEAD` 23 | 24 | ``` 25 | 48e22cf Replace inalid license key characters 26 | 40b981c Add functionality to pull local images with skopeo 27 | c5919fa Include license info for deb pkgs in SPDX reports 28 | 8a1a75c Update debian-inspector requirement 29 | 23aec2f Update LicenseListVersion to 3.19 30 | 3e7ee23 Fix complaints from Prospector 31 | 38f20a6 Update LicenseListVersion to 3.19 32 | 2dd3599 Update LicenseListVersion: 3.17 33 | d916d77 Updates maintainers affiliation 34 | 0bb0e90 LicenseListVersion: 3.17 35 | f7003e7 Update README.md: SPDX is now an ISO standard 36 | 385f2e6 LicenseListVersion: 3.17 37 | 3b737fb LicenseListVersion: 3.17 38 | ``` 39 | 40 | ## Contributors 41 | ``` 42 | Ivana Atanasova iyovcheva@vmware.com 43 | Marc-Etienne Vargenau marc-etienne.vargenau@nokia.com 44 | Rui Valim ruivalim@protonmail.com 45 | ``` 46 | 47 | ## Contact the Maintainers 48 | 49 | Nisha Kumar: nishak@vmware.com 50 | Rose Judge: rjudge@vmware.com 51 | 52 | -------------------------------------------------------------------------------- /docs/releases/v2_12_0.md: -------------------------------------------------------------------------------- 1 | # Release 2.12.0 2 | 3 | ## Summary 4 | This release is a mix of features, bug fixes, and technical debt cleanup. 
In accordance with [EO 14028](https://www.whitehouse.gov/briefing-room/presidential-actions/2021/05/12/executive-order-on-improving-the-nations-cybersecurity/), this release added functionality to ensure that Tern's SPDX reports include all of the [NTIA's minimum elements for an SBOM](https://spdx.github.io/spdx-spec/v2.3/how-to-use/#k22-mapping-ntia-minimum-elements-to-spdx-fields). This release also adds [Package URL](https://github.com/package-url/purl-spec) (purl) [external references](https://spdx.github.io/spdx-spec/v2.3/package-information/#721-external-reference-field) to SPDX reports. Finally, this release includes an important security update for GitPython to address [CVE-2022-24439](https://nvd.nist.gov/vuln/detail/CVE-2022-24439). 5 | 6 | ## New Features 7 | * [Add purl ExternalRefs to SPDX reports](https://github.com/tern-tools/tern/issues/1206) 8 | * [Add package architecture info to data model](https://github.com/tern-tools/tern/commit/3624b3020773e5b2f51fa8f8e5aa01825355aff8) 9 | * [Add package supplier info to SPDX reports](https://github.com/tern-tools/tern/issues/1205) 10 | 11 | ## Bug Fixes 12 | * [Catch all invalid license key characters](https://github.com/tern-tools/tern/issues/1203) 13 | * [Fix container package version when no tag provided](https://github.com/tern-tools/tern/issues/1214) 14 | * [Fix container package name when analyzing local image](https://github.com/tern-tools/tern/issues/1212) 15 | * [Commas included in SPDX license expressions instead of 'AND'](https://github.com/tern-tools/tern/issues/1223) 16 | * [Remove slashes from SPDX package refs](https://github.com/tern-tools/tern/issues/1220) 17 | 18 | 19 | ## Technical Debt 20 | * [Add version info to layer Packages in SPDX reports](https://github.com/tern-tools/tern/issues/1205) 21 | * [Update SPDX LicenseListVersion to 3.20](https://github.com/tern-tools/tern/commit/ba67656e1672588094f7c5665bde1076ea31409c) 22 | 23 | 24 | ## Changelog 25 | Note: This changelog will not 
include these release notes 26 | 27 | Changelog generated by command: `git log --pretty=format:"%h %s" v2.11.0..HEAD` 28 | 29 | ``` 30 | ba67656 Update LicenseListVersion to 3.20 31 | ea97fb6 Remove slashes from SPDX package refs 32 | 6be6976 Invalid chars included in SPDX declared licenses 33 | c4b3508 Add purl information to SPDX reports 34 | a5ebbc1 Add purl information to SPDX reports 35 | eec8761 Add pkg_supplier collection method for tdnf 36 | 3624b30 Add package architecture info to data model 37 | 5ab79f3 Change pacman and go pkg_format to mirror PURL 38 | df242ba Correctly parse and report local image names 39 | b45e584 Add package supplier info to Tern reports 40 | ede4645 Add package supplier info to SPDX reports 41 | bb2a724 Add package supplier info to package objects 42 | 2e51f67 Add version info to layer Packages in SPDX reports 43 | 700df46 Catch all invalid license key characters 44 | ``` 45 | 46 | ## Contributors 47 | ``` 48 | Marc-Etienne Vargenau marc-etienne.vargenau@nokia.com 49 | Ivana Atanasova iyovcheva@vmware.com 50 | ``` 51 | 52 | ## Contact the Maintainers 53 | 54 | Rose Judge: rjudge@vmware.com 55 | Nisha Kumar: nishak@vmware.com 56 | -------------------------------------------------------------------------------- /docs/releases/v2_12_1.md: -------------------------------------------------------------------------------- 1 | # Release 2.12.1 2 | 3 | This is a patched release to address a few important bugs. Please see the [Release 2.12.0 release notes](v2_12_0.md) for details on the first cut. 4 | 5 | Specifically, patches on top of v2.12.0 in this release do the following: 6 | * [Error if Tern calls Scancode-Toolkit](https://github.com/tern-tools/tern/issues/1202): The Scancode library updated several of the attribute names in its output which was causing errors when Tern would run with Scancode. 
This fix now accommodates the new attribute property names in the newer versions of Scancode, as well as the older value names (in case we have users still using older Scancode versions). 7 | * [Can't build images with Dockerfile samples](https://github.com/tern-tools/tern/issues/1235): Several of the `samples` Dockerfiles were using stale base images, causing the image build to fail. This fix updates the base images. 8 | 9 | ## Changelog 10 | ``` 11 | d3dd148 Accomodate updated Scancode attribute names 12 | 852af8c Update `samples` dfiles to use correct base images 13 | ``` 14 | 15 | ## Contributors 16 | ``` 17 | Rose Judge rjudge@vmware.com 18 | ``` 19 | -------------------------------------------------------------------------------- /docs/releases/v2_1_0-requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile 3 | # To update, run: 4 | # 5 | # pip-compile --generate-hashes --output-file=docs/releases/v2_1_0-requirements.txt 6 | # 7 | attrs==19.3.0 \ 8 | --hash=sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c \ 9 | --hash=sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72 \ 10 | # via debut 11 | certifi==2020.4.5.1 \ 12 | --hash=sha256:1d987a998c75633c40847cc966fcf5904906c920a7f17ef374f5aa4282abd304 \ 13 | --hash=sha256:51fcb31174be6e6664c5f69e3e1691a2d72a1a12e90f872cbdb1567eb47b6519 \ 14 | # via requests 15 | chardet==3.0.4 \ 16 | --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ 17 | --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 \ 18 | # via debut, requests 19 | debut==0.9.4 \ 20 | --hash=sha256:218d64f72602d6f8a690c7af3e735b2e338a3b52040f797ea56ee873629b9fd6 \ 21 | --hash=sha256:4bf44200da54b8cfc730a236f76de883663cd1edb15e9ecdc060a5a791f863dc \ 22 | # via -r requirements.in 23 | docker==4.2.0 \ 24 | 
--hash=sha256:1c2ddb7a047b2599d1faec00889561316c674f7099427b9c51e8cb804114b553 \ 25 | --hash=sha256:ddae66620ab5f4bce769f64bcd7934f880c8abe6aa50986298db56735d0f722e \ 26 | # via -r requirements.in 27 | dockerfile-parse==0.0.17 \ 28 | --hash=sha256:7b8ab184c24ab35c2a0af47b1766dfeeeb7f47da42197ee9756aa4695c60c775 \ 29 | --hash=sha256:868a6a00db2150ae92af177757eb35210f54243f6d8b2c362fe777e44fc98279 \ 30 | --hash=sha256:a69d4ed44c4a890c16437327009ae59ec3a3afeb1abc3819d0c1b14a46099220 \ 31 | # via -r requirements.in 32 | idna==2.9 \ 33 | --hash=sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb \ 34 | --hash=sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa \ 35 | # via requests 36 | pbr==5.4.5 \ 37 | --hash=sha256:07f558fece33b05caf857474a366dfcc00562bca13dd8b47b2b3e22d9f9bf55c \ 38 | --hash=sha256:579170e23f8e0c2f24b0de612f71f648eccb79fb1322c814ae6b3c07b5ba23e8 \ 39 | # via -r requirements.in, stevedore 40 | pyyaml==5.3.1 \ 41 | --hash=sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97 \ 42 | --hash=sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76 \ 43 | --hash=sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2 \ 44 | --hash=sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648 \ 45 | --hash=sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf \ 46 | --hash=sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f \ 47 | --hash=sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2 \ 48 | --hash=sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee \ 49 | --hash=sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d \ 50 | --hash=sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c \ 51 | --hash=sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a \ 52 | # via -r requirements.in 53 | requests==2.23.0 \ 54 | 
--hash=sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee \ 55 | --hash=sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6 \ 56 | # via -r requirements.in, docker 57 | six==1.15.0 \ 58 | --hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \ 59 | --hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced \ 60 | # via docker, dockerfile-parse, stevedore 61 | stevedore==1.32.0 \ 62 | --hash=sha256:18afaf1d623af5950cc0f7e75e70f917784c73b652a34a12d90b309451b5500b \ 63 | --hash=sha256:a4e7dc759fb0f2e3e2f7d8ffe2358c19d45b9b8297f393ef1256858d82f69c9b \ 64 | # via -r requirements.in 65 | urllib3==1.25.9 \ 66 | --hash=sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527 \ 67 | --hash=sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115 \ 68 | # via requests 69 | websocket-client==0.57.0 \ 70 | --hash=sha256:0fc45c961324d79c781bab301359d5a1b00b13ad1b10415a4780229ef71a5549 \ 71 | --hash=sha256:d735b91d6d1692a6a181f2a8c9e0238e5f6373356f561bb9dc4c7af36f452010 \ 72 | # via docker 73 | -------------------------------------------------------------------------------- /docs/releases/v2_7_0.md: -------------------------------------------------------------------------------- 1 | # Release 2.7.0 2 | 3 | ## Summary 4 | This is a smaller release in terms of commits but still manages to introduce three new features and a few bug fixes. Namely, the `dockerfile lock` functionality now works for multistage Dockerfiles. We also added a complementary feature to the `--live` functionality that can take in one or more layer SBOMs and reason about them in context with the current layer during a `--live` Tern run. Currently, this feature only works for Tern-produced JSON formatted SBOMs. Lastly, the package type was added to the default report which indicates to the user the method of metadata collection that was used for the layer. 
A bug was also fixed that should enable the Tern + Scancode execution path to run without error when collecting package metadata. 5 | 6 | ## New Features 7 | * [Enable Dockerfile "locking" for multistage builds](https://github.com/tern-tools/tern/issues/969): Tern's `dockerfile lock` command now works for multistage Dockerfiles. 8 | * [Add functionality for consuming JSON reports](https://github.com/tern-tools/tern/issues/946): This feature introduces the CLI argument `--with-context` or `-ctx` which takes a list of reports that can provide previous context for container builds. This argument is meant to be used with the `--live` option to input reports from previous runs. 9 | * [Show package type in default report](https://github.com/tern-tools/tern/issues/984): This feature updates the default report to include the package type in the report (i.e. deb, rpm, etc). This might be helpful for users who want to look for source code for the package or those who simply want to know what package manager was used to collect the information outputted in the report. 
10 | 11 | ## Bug Fixes 12 | * [Error getting package licenses with Scancode](https://github.com/tern-tools/tern/issues/985) 13 | * [Can't generate html output when running Scancode](https://github.com/tern-tools/tern/issues/844) 14 | * [Fix live execution if no previous SBOMs are given](https://github.com/tern-tools/tern/commit/273e3c8cd8969df3dacc56c5d878d65378d8e4bf) 15 | 16 | ## Future Work 17 | * Use skopeo to pull container images 18 | * Automate parts of the release process 19 | * Investigation for potential support of CycloneDX BOM format 20 | * Add functionality for consuming SPDX JSON reports 21 | 22 | ## Changelog 23 | Note: This changelog will not include these release notes 24 | 25 | Changelog generated by command: `git log --pretty=format:"%h %s" v2.6.1..main` 26 | 27 | ``` 28 | 273e3c8 Fix live execution if no previous sboms are given 29 | 20573c4 Fix Scancode collection of package licenses 30 | b8e7837 Show package type in default report 31 | e62a6c1 Bump debian-inspector version 32 | 81f441c Make "tern lock" work for multistage docker file 33 | 8ae40cc Fix prereqs.fs_shell variable naming 34 | 383905a Introduce JSON consumer 35 | 2d295cf Fix diffing of packages in layers 36 | fce138b Connect machinery to ingest previous reports 37 | 1e167fa main: Add command line arg for consuming reports 38 | ccec6cd Install scancode in /install 39 | 95acf0a Install scancode with fixed dependencies 40 | 0c46292 formats: Add layer level JSON consumer 41 | 837f3aa formats: Add consumer abstract base class 42 | ``` 43 | 44 | ## Contributors 45 | ``` 46 | Jeroen Knoops jeroen.knoops@philips.com 47 | Mukul Taneja mtaneja@vmware.com 48 | ``` 49 | 50 | ## Contact the Maintainers 51 | 52 | Nisha Kumar: nishak@vmware.com 53 | Rose Judge: rjudge@vmware.com 54 | 55 | -------------------------------------------------------------------------------- /docs/releases/v2_8_0.md: -------------------------------------------------------------------------------- 1 | # Release 2.8.0 2 | 3 
| ## Summary 4 | This release contains a new feature and several bug fixes. Tern now supports a CycloneDX JSON reporting format. This capability now gives users the option between two SBOM standards for output reports -- SPDX or CycloneDX. There were several Scancode related fixes that were resolved in this release. Additionally, a fix for the situation where Tern was yielding different results with the `-c` and `-r` command line options, which in theory should produce the same results. Lastly, six new contributors were a part of this release, many of whom were completely new to open source. 5 | 6 | ## New Features 7 | * [Add CycloneDX JSON Format](https://github.com/tern-tools/tern/issues/987): Tern can now generate [CycloneDX](https://cyclonedx.org/) JSON reports. 8 | 9 | ## Bug Fixes 10 | * [Duplicate scancode files being reported when cache is empty](https://github.com/tern-tools/tern/issues/1000) 11 | * [Running Tern with -r and -c gives different results](https://github.com/tern-tools/tern/issues/999) 12 | * [Add pkg_format values for missing package managers in base.yml](https://github.com/tern-tools/tern/issues/994) 13 | * [Remove `/` from image SPDX Identifier Reference](https://github.com/tern-tools/tern/commit/f5eb1abdbc637005bbfb429127b056876c2d52c8) 14 | 15 | ## Future Work 16 | * Enable Tern to run without root privileges 17 | 18 | ## Changelog 19 | Note: This changelog will not include these release notes 20 | 21 | Changelog generated by command: `git log --pretty=format:"%h %s" v2.7.0..main` 22 | 23 | ``` 24 | 5927427 Cleanup unecessary files 25 | b32745e Add cyclonedxjson to help menu 26 | c90cf6e Fix: duplicate scancode files being reported 27 | 6a2abfe Add Maintainer and Governance Info 28 | 5dbb44b Update docs around getting started in VS Code 29 | 2186c1a Suppress some pylint warnings 30 | 6855f1e Force prospector version 1.5.1 to be installed 31 | dfc84d5 fix: Pass the redo flag to the executor 32 | f5eb1ab Remove `/` from image SPDX 
Identifier Reference 33 | 4c4b2a8 Prospector 1.4.1 fixes 34 | 9bbb5dd Add CycloneDX JSON output support 35 | a0c08ba Fix: Ignore newlines in os_release file 36 | 75bd6ac Explain commit message guidelines better 37 | 4719f62 Fix duplicate line in Dockerfile.scancode 38 | e1ba6a5 formats: Add spdxjson consumer 39 | 3dce966 Remove requirements.scancode.txt 40 | c6d26fa Add pkg_format values to base.yml 41 | c8817fd Identify Distroless version in os-release file 42 | fc4a876 Added test for the pkg_format property 43 | 2828ec7 Created a functional test suite for releases 44 | 0fd02ec Deprecate run_on_image() 45 | ``` 46 | 47 | ## Contributors 48 | ``` 49 | Daneshwari K. kankanwadidaneshwari55555@gmail.com 50 | Jamila Ritter jamila.ritter@rutgers.edu 51 | Kerin Pithawala kerinpithawala7@gmail.com 52 | Patrick Dwyer patrick.dwyer@owasp.org 53 | Sayantani Saha ii.sayantani.ii@gmail.com 54 | Trang trangology@gmail.com 55 | ``` 56 | 57 | ## Contact the Maintainers 58 | 59 | Nisha Kumar: nishak@vmware.com 60 | Rose Judge: rjudge@vmware.com 61 | -------------------------------------------------------------------------------- /docs/releases/v2_9_1.md: -------------------------------------------------------------------------------- 1 | # Release 2.9.1 2 | 3 | This is a patched release to address a few important bugs. Please see the [Release 2.9.0 release notes](v2_9_0.md) for details on the first cut. 
4 | 5 | Specifically, patches on top of v2.9.0 in this release do the following: 6 | * [Use Skopeo to fix the retrieval method for the image digest](https://github.com/tern-tools/tern/issues/1101) 7 | * [Parse extended attributes using new parse_hash_content() method](https://github.com/tern-tools/tern/issues/1100) 8 | * [Fix CycloneDX report generation](https://github.com/tern-tools/tern/issues/1097) 9 | * [Fix Scancode parsing TypeError](https://github.com/tern-tools/tern/issues/1063) 10 | 11 | ## Patches 12 | ``` 13 | ce5c763 Fix scancode KeyError during license parsing 14 | 57c644c classes: Parse extended attributes 15 | e74466b Fix retrieving image digest 16 | 16db01a Fix CycloneDX report generation 17 | ``` 18 | -------------------------------------------------------------------------------- /docs/yaml_output.md: -------------------------------------------------------------------------------- 1 | # YAML Format Output 2 | 3 | You can get the results in a YAML file to be consumed by a downstream tool or script. 4 | 5 | `$ tern report -f yaml -i golang:1.12-alpine -o output.yaml` 6 | 7 | A YAML format output file starts with line comments indicating the version of Tern. It has an `image` key(image.py) corresponding to the analyzed image. Here are the subkeys and their description: 8 | 9 | - `config`: the image config metadata 10 | - `image_id`: this is a unique identifier for the image - for OCI spec 11 | this could be the digest. 
For now this is the sha256sum of the 12 | config.json 13 | - `manifest`: the json object representing the image manifest 14 | - `layers`: list of layer objects in the image(image_layer.py) 15 | - `diff_id`: the sha256 of the layer filesystem 16 | - `fs_hash`: the hashed contents of the layer filesystem - default to empty string if there is no tarball of the layer filesystem 17 | - `packages`: list of objects of type Package (package.py) 18 | - `name`: package name 19 | - `version`: package version 20 | - `pkg_license`: package license that is declared 21 | - `copyright`: copyright text 22 | - `proj_url`: package source url 23 | - `download_url`: package download url 24 | - `origins`: a list of NoticeOrigin objects(notice_origin.py, expanded below) 25 | - `checksum`: checksum as package property 26 | - `origins`: list of NoticeOrigin objects (notice_origin.py, expanded below) 27 | - `tar_file`: the path to the layer filesystem tarball 28 | - `created_by`: sometimes the metadata will contain a created_by 29 | key containing the command that created the filesystem layer 30 | - `import_image`: if the layer is imported from another image this is a pointer to that image. In Python terms it is just the name of the Image object or any object that uses this layer. Based on how container image layers are created, this is usually the last layer of the image that was imported 31 | - `import_str`: The string from a build tool (like a Dockerfile) that created this layer by importing it from another image 32 | - `files_analyzed`: whether the files in this layer are analyzed or not 33 | - `analyzed_output`: the result of the file analysis 34 | 35 | `origins` is the origin of a notice, which is expanded as follows. 
36 | 37 | - origins 38 | - origin_str: the origin string, from the input or the environment or the configuration 39 | - notices 40 | - message: the notice message 41 | - level: notice level - error, warning or hint 42 | * error: cannot continue further 43 | * warning: will try to continue from here 44 | * info: information only 45 | * hint: message on how to make the results better 46 | -------------------------------------------------------------------------------- /requirements.in: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2019-2020 VMware, Inc. All Rights Reserved. 2 | # SPDX-License-Identifier: BSD-2-Clause 3 | # 4 | # This file is used by pip-tools for release management 5 | # 6 | # Please only add direct dependencies here, i.e., do not update with the 7 | # output of `pip freeze`. 8 | 9 | PyYAML 10 | docker 11 | requests 12 | stevedore 13 | pbr 14 | dockerfile-parse 15 | debian-inspector 16 | regex 17 | GitPython 18 | prettytable 19 | packageurl-python 20 | license-expression 21 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017-2021 VMware, Inc. All Rights Reserved. 2 | # SPDX-License-Identifier: BSD-2-Clause 3 | # 4 | # Please only add direct dependencies here, i.e., do not update with the 5 | # output of `pip freeze`. When updating dependency versions, having the 6 | # transitive dependencies listed make it more difficult to figure out 7 | # what should be updated. 
8 | 9 | PyYAML>=6.0 10 | docker~=6.1 11 | dockerfile-parse~=2.0 12 | requests~=2.31 13 | stevedore>=5.1 14 | pbr>=5.11 15 | debian-inspector>=31.0 16 | regex>=2023.6 17 | GitPython~=3.1 18 | prettytable~=3.8 19 | packageurl-python>=0.11.1 20 | license-expression>=30.1 21 | 22 | -------------------------------------------------------------------------------- /samples/alpine_python/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine:3.7 2 | RUN set -ex && apk add --no-cache python2 3 | CMD ["python2"] 4 | -------------------------------------------------------------------------------- /samples/debian_vim/Dockerfile: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 VMware, Inc. All Rights Reserved. 2 | # SPDX-License-Identifier: BSD-2-Clause 3 | FROM debian:bullseye 4 | RUN apt-get update && apt-get install -y vim && apt-get clean 5 | -------------------------------------------------------------------------------- /samples/photon_3_layers/Dockerfile: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 VMware, Inc. All Rights Reserved 2 | # SPDX-License-Identifier: BSD-2-Clause 3 | FROM photon:3.0 4 | RUN tyum install -y git && tyum clean all 5 | RUN tyum install -y vim && tyum clean all 6 | -------------------------------------------------------------------------------- /samples/photon_git/Dockerfile: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 VMware, Inc. All Rights Reserved 2 | # SPDX-License-Identifier: BSD-2-Clause 3 | FROM photon:5.0 4 | RUN tyum install -y git && tyum clean all 5 | -------------------------------------------------------------------------------- /samples/photon_openjre/Dockerfile: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017 VMware, Inc. 
All Rights Reserved 2 | # SPDX-License-Identifier: BSD-2-Clause 3 | FROM photon:5.0 4 | RUN tyum install -y openjre && tyum clean all 5 | -------------------------------------------------------------------------------- /samples/single_stage_tern/Dockerfile: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2019-2021 VMware, Inc. All Rights Reserved. 2 | # SPDX-License-Identifier: BSD-2-Clause 3 | 4 | FROM debian:buster 5 | 6 | # Install fuse-overlayfs and Tern dependencies 7 | RUN apt-get update && \ 8 | apt-get -y install \ 9 | attr \ 10 | findutils \ 11 | git \ 12 | gnupg2 \ 13 | jq \ 14 | python3 \ 15 | python3-pip \ 16 | python3-setuptools \ 17 | tar \ 18 | util-linux \ 19 | wget && \ 20 | echo 'deb http://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/stable/Debian_10/ /' > /etc/apt/sources.list.d/devel:kubic:libcontainers:stable.list && \ 21 | wget --no-verbose https://download.opensuse.org/repositories/devel:kubic:libcontainers:stable/Debian_10/Release.key -O - | apt-key add - && \ 22 | apt-get update && \ 23 | apt-get -y install \ 24 | buildah \ 25 | fuse-overlayfs && \ 26 | apt-get clean && \ 27 | rm -rf /var/lib/apt/lists/* 28 | 29 | # Adjust storage.conf to enable Fuse storage. 30 | RUN sed -i -e 's|^#mount_program|mount_program|g' -e '/additionalimage.*/a "/var/lib/shared",' /etc/containers/storage.conf 31 | 32 | # Install tern 33 | RUN pip3 install --upgrade pip && \ 34 | pip3 install --no-cache-dir \ 35 | tern 36 | 37 | ENTRYPOINT ["tern", "--driver", "fuse"] 38 | CMD ["-h"] 39 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # 4 | # Copyright (c) 2019-2020 VMware, Inc. All Rights Reserved. 
5 | # SPDX-License-Identifier: BSD-2-Clause 6 | 7 | [metadata] 8 | name = tern 9 | author = VMware Inc 10 | author_email = nishak@vmware.com 11 | summary = An inspection tool to find the OSS compliance metadata of the packages installed in a container image. 12 | long_description = file: README.md 13 | long_description_content_type = text/markdown; charset=UTF-8 14 | home_page = https://github.com/tern-tools/tern/ 15 | project_urls = 16 | Documentation = https://github.com/tern-tools/tern/tree/master/docs 17 | Source Code = https://github.com/tern-tools/tern 18 | Issues = https://github.com/tern-tools/tern/issues 19 | license = BSD-2.0 20 | keywords = 21 | Distribution 22 | Container 23 | Cloud-Native 24 | classifier = 25 | Development Status :: 3 - Alpha 26 | Environment :: Console 27 | Intended Audience :: Developers 28 | License :: OSI Approved :: BSD License 29 | Natural Language :: English 30 | Operating System :: POSIX 31 | Operating System :: POSIX :: Linux 32 | Programming Language :: Python :: 3.6 33 | Programming Language :: Python :: 3.7 34 | Programming Language :: Python :: Implementation :: CPython 35 | Topic :: Software Development 36 | 37 | [files] 38 | packages = 39 | tern 40 | 41 | [options] 42 | include_package_data = True 43 | 44 | [entry_points] 45 | tern.formats = 46 | default = tern.formats.default.generator:Default 47 | spdxtagvalue = tern.formats.spdx.spdxtagvalue.generator:SpdxTagValue 48 | spdxjson = tern.formats.spdx.spdxjson.generator:SpdxJSON 49 | spdxjsonc = tern.formats.spdx.spdxjson.consumer:SpdxJSON 50 | json = tern.formats.json.generator:JSON 51 | jsonc = tern.formats.json.consumer:JSON 52 | yaml = tern.formats.yaml.generator:YAML 53 | html = tern.formats.html.generator:HTML 54 | cyclonedxjson = tern.formats.cyclonedx.cyclonedxjson.generator:CycloneDXJSON 55 | tern.extensions = 56 | cve_bin_tool = tern.extensions.cve_bin_tool.executor:CveBinTool 57 | scancode = tern.extensions.scancode.executor:Scancode 58 | console_scripts = 59 | 
tern = tern.__main__:main 60 | 61 | [options.extras_require] 62 | dev = bandit>=1.6; prospector>=1.5.1; tox>=3.14 63 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # 4 | # Copyright (c) 2019 VMware, Inc. All Rights Reserved. 5 | # SPDX-License-Identifier: BSD-2-Clause 6 | 7 | from setuptools import setup 8 | 9 | 10 | setup( 11 | setup_requires=['pbr'], 12 | pbr=True, 13 | test_suite="tests.runtests", 14 | ) 15 | -------------------------------------------------------------------------------- /tern/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Copyright (c) 2019 VMware, Inc. All Rights Reserved. 4 | # SPDX-License-Identifier: BSD-2-Clause 5 | -------------------------------------------------------------------------------- /tern/analyze/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Copyright (c) 2019 VMware, Inc. All Rights Reserved. 4 | # SPDX-License-Identifier: BSD-2-Clause 5 | -------------------------------------------------------------------------------- /tern/analyze/default/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Copyright (c) 2020 VMware, Inc. All Rights Reserved. 4 | # SPDX-License-Identifier: BSD-2-Clause 5 | -------------------------------------------------------------------------------- /tern/analyze/default/bundle.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Copyright (c) 2017-2020 VMware, Inc. All Rights Reserved. 
def get_pkg_dict_for_index(attr_list, index):
    """Given the package dictionary with attribute lists of the form
    {'name': [...], 'version': [...],...} and an index, return
    a package dictionary of the form {'name': x1, 'version': x2,...} for
    that index.

    File paths under the 'files' key are converted into FileData
    dictionaries; all other attributes are copied through as-is."""
    pkg_dict = {}
    for key in attr_list.keys():
        if key == 'files':
            # convert file paths into FileData dictionaries
            fd_list = []
            for filepath in attr_list['files'][index]:
                # the FileData name is the basename of the path
                fd_dict = FileData(os.path.split(
                    filepath)[1], filepath).to_dict()
                fd_list.append(fd_dict)
            pkg_dict.update({'files': fd_list})
        else:
            pkg_dict.update({key: attr_list[key][index]})
    return pkg_dict


def convert_to_pkg_dicts(attr_lists):
    '''attr_lists is what gets returned after collecting individual
    metadata as a list. It looks like this if property collected:
        {'names': [....], 'versions': [...], 'licenses': [...], ....}
    Convert these into a list of package dictionaries as expected by the
    Package object'''
    # maps Package attribute name -> collection key in attr_lists
    mapping = {'name': 'names',
               'version': 'versions',
               'pkg_license': 'licenses',
               'copyright': 'copyrights',
               'proj_url': 'proj_urls',
               'pkg_licenses': 'pkg_licenses',
               'files': 'files',
               'src_name': 'source_names',
               'src_version': 'source_versions',
               'pkg_supplier': 'pkg_suppliers',
               'arch': 'archs'}
    pkg_list = []
    len_names = len(attr_lists['names'])
    # keep only the attribute lists whose length is consistent with the
    # number of package names; anything else is logged and dropped
    filtered_attr_list = {}
    for key, value in mapping.items():
        if value in attr_lists.keys():
            if len(attr_lists[value]) == len_names:
                filtered_attr_list.update({key: attr_lists[value]})
            else:
                logger.warning("Inconsistent lengths for key: %s", value)
    # convert each index into a package dictionary
    for index in range(len_names):
        pkg_list.append(get_pkg_dict_for_index(filtered_attr_list, index))
    return pkg_list


def fill_pkg_results(image_layer, pkg_list_dict, pkg_format):
    """Fill results from collecting package information into the image
    layer object.

    pkg_list_dict is the attribute-list dictionary produced during
    collection; pkg_format is recorded on every created Package."""
    # BUGFIX: the old check required len(pkg_list_dict['names']) > 1,
    # which silently dropped all metadata for a layer containing exactly
    # one package; any non-empty names list is now processed
    if 'names' in pkg_list_dict and len(pkg_list_dict['names']) > 0:
        pkg_list = convert_to_pkg_dicts(pkg_list_dict)
        for pkg_dict in pkg_list:
            pkg = Package(pkg_dict['name'])
            pkg.fill(pkg_dict)
            pkg.pkg_format = pkg_format
            image_layer.add_package(pkg)
4 | # SPDX-License-Identifier: BSD-2-Clause 5 | -------------------------------------------------------------------------------- /tern/analyze/default/command_lib/common.yml: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020 VMware, Inc. All Rights Reserved. 2 | # SPDX-License-Identifier: BSD-2-Clause 3 | # 4 | # Known common aspects of container image filesystems 5 | 6 | # possible base OS shells 7 | shells: 8 | - /bin/sh 9 | - /usr/bin/sh 10 | - /bin/bash 11 | - /usr/bin/bash 12 | - /bin/dash 13 | -------------------------------------------------------------------------------- /tern/analyze/default/command_lib/snippets.yml: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2017-2020 VMware, Inc. All Rights Reserved. 2 | # SPDX-License-Identifier: BSD-2-Clause 3 | # 4 | # general commands to be invoked when retrieving package information 5 | # Key-Value substitutions: 6 | # package: the package name 7 | # image: the image name that the tool uses 8 | # tag: the image tag name that the tool uses 9 | apt-get: 10 | install: 11 | - 'install' # subcommand to install 12 | remove: 13 | - 'purge' # subcommand to remove a package 14 | ignore: # list of subcommands that don't add or remove packages 15 | - 'update' 16 | packages: 'dpkg' # refer to base.yml's method of collection 17 | 18 | apt: 19 | install: 20 | - 'install' # subcommand to install 21 | remove: 22 | - 'purge' # subcommand to remove a package 23 | ignore: # list of subcommands that don't add or remove packages 24 | - 'update' 25 | packages: 'dpkg' # refer to base.yml's method of collection 26 | 27 | tyum: 28 | install: 29 | - 'install' 30 | remove: 31 | - 'remove' 32 | ignore: 33 | - 'check-update' 34 | - 'clean' 35 | packages: 'tdnf' 36 | 37 | tdnf: 38 | install: 39 | - 'install' 40 | remove: 41 | - 'remove' 42 | ignore: 43 | - 'check-update' 44 | - 'clean' 45 | packages: 'tdnf' 46 | 47 | apk: 48 | 
install: 49 | - 'add' 50 | remove: 51 | - 'del' 52 | packages: 'apk' 53 | 54 | pacman: 55 | install: 56 | - '-Syu' 57 | remove: 58 | - '-Rcs' 59 | packages: 'pacman' 60 | 61 | yum: 62 | install: 63 | - 'install' 64 | remove: 65 | - 'remove' 66 | ignore: 67 | - 'check-update' 68 | - 'clean' 69 | packages: 'rpm' 70 | 71 | dnf: 72 | install: 73 | - 'install' 74 | remove: 75 | - 'remove' 76 | ignore: 77 | - 'check-update' 78 | - 'clean' 79 | packages: 'rpm' 80 | 81 | rpm: 82 | install: 83 | - '-i' 84 | - '-U' 85 | remove: 86 | - '-e' 87 | packages: 'rpm' 88 | 89 | pip: 90 | install: 91 | - 'install' 92 | remove: 93 | - 'uninstall' 94 | ignore: 95 | - 'freeze' 96 | - 'list' 97 | - 'download' 98 | - 'show' 99 | - 'check' 100 | - 'config' 101 | - 'hash' 102 | - 'wheel' 103 | packages: 'pip' 104 | 105 | pip3: 106 | install: 107 | - 'install' 108 | remove: 109 | - 'uninstall' 110 | ignore: 111 | - 'freeze' 112 | - 'list' 113 | - 'download' 114 | - 'show' 115 | - 'check' 116 | - 'config' 117 | - 'hash' 118 | - 'wheel' 119 | packages: 'pip3' 120 | 121 | python3: 122 | install: 123 | - 'install' 124 | remove: 125 | - 'uninstall' 126 | ignore: 127 | - 'freeze' 128 | - 'list' 129 | - 'download' 130 | - 'show' 131 | - 'check' 132 | - 'config' 133 | - 'hash' 134 | - 'wheel' 135 | packages: 'pip3' 136 | 137 | gem: 138 | install: 139 | - 'install' 140 | remove: 141 | - 'uninstall' 142 | ignore: 143 | - 'fetch' 144 | - 'build' 145 | - 'lock' 146 | - 'unpack' 147 | - 'cleanup' 148 | - 'check' 149 | - 'mirror' 150 | packages: 'gem' 151 | 152 | bundle: 153 | install: 154 | - 'install' 155 | remove: 156 | - 'remove' 157 | ignore: 158 | - 'update' 159 | - 'config' 160 | - 'add' 161 | - 'init' 162 | - 'package' 163 | - 'exec' 164 | packages: 'gem' 165 | 166 | npm: 167 | install: 168 | - 'install' 169 | remove: 170 | - 'uninstall' 171 | ignore: 172 | - 'ping' 173 | packages: 'npm' 174 | 175 | yarn: 176 | install: 177 | - 'install' 178 | remove: 179 | - 'uninstall' 180 | ignore: 181 | - 
'publish' 182 | - 'add' 183 | - 'remove' 184 | packages: 'npm' 185 | 186 | zypper: 187 | install: 188 | - 'in' 189 | - 'install' 190 | remove: 191 | - 'rm' 192 | - 'remove' 193 | ignore: 194 | - 'clean' 195 | packages: 'rpm' 196 | 197 | microdnf: 198 | install: 199 | - 'install' 200 | remove: 201 | - 'remove' 202 | ignore: 203 | - 'check-update' 204 | - 'clean' 205 | packages: 'rpm' 206 | 207 | go: 208 | install: 209 | - 'build' 210 | - 'mod' 211 | remove: 212 | - 'remove' 213 | ignore: 214 | - 'clean' 215 | packages: 'go' 216 | -------------------------------------------------------------------------------- /tern/analyze/default/container/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Copyright (c) 2020 VMware, Inc. All Rights Reserved. 4 | # SPDX-License-Identifier: BSD-2-Clause 5 | -------------------------------------------------------------------------------- /tern/analyze/default/container/image.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Copyright (c) 2019-2021 VMware, Inc. All Rights Reserved. 
def load_full_image(image_tag_string, image_type='oci', load_until_layer=0):
    """Create an image object from an image name and tag and return it.

    * The kind of image object created is based on image_type:
      'oci' or 'docker'.
    * Loads only as many layers as needed (load_until_layer; 0 = all).
    * Removes the docker-daemon prefix for local images.

    Raises ValueError if image_type is not one of the supported types."""
    repotag = image_tag_string.replace('docker-daemon:', '')
    if image_type == 'oci':
        image = OCIImage(repotag)
    elif image_type == 'docker':
        image = DockerImage(repotag)
    else:
        # BUGFIX: previously an unrecognized image_type left `image`
        # unbound and the function crashed with UnboundLocalError at the
        # image.repotag access below; fail fast with a clear error
        raise ValueError(
            "Unsupported image type: {}".format(image_type))
    failure_origin = formats.image_load_failure.format(
        testimage=image.repotag)
    try:
        image.load_image(load_until_layer)
    except (NameError,
            subprocess.CalledProcessError,
            IOError,
            docker.errors.APIError,
            ValueError,
            EOFError) as error:
        logger.warning('Error in loading image: %s', str(error))
        # record the failure on the image so reporting can surface it
        image.origins.add_notice_to_origins(
            failure_origin, Notice(str(error), 'error'))
    return image


def default_analyze(image_obj, options):
    """Analyze a container image in default mode (assumed to be a
    DockerImage object for now).

    1. Analyze the first layer to get a baseline list of packages
    2. Analyze subsequent loaded layers
    3. Return the final image with all metadata filled in

    Options used:
        redo: do not use the cache; False by default
        extend: name of the extension to run on each layer, if any
        driver: mount using the chosen driver; if no driver is provided,
            the kernel's overlayfs driver is used (only available with
            Linux mainline kernel version 4.0 or later)"""
    # master list of packages accumulated across layers
    master_list = []
    # analyze the first layer and get prerequisites for the next layers
    prereqs = single_layer.analyze_first_layer(image_obj, master_list, options)
    if options.extend:
        # run the extension the user has chosen for the first layer
        passthrough.run_extension_layer(image_obj.layers[0], options.extend,
                                        options.redo)
    # analyze the remaining layers if there are more
    if prereqs and len(image_obj.layers) > 1:
        multi_layer.analyze_subsequent_layers(
            image_obj, prereqs, master_list, options)
    return image_obj
def extract_image(args):
    """Extract a container image into the working directory.

    The image is either downloaded from a container registry
    (args.image) or provided as an image tarball (args.raw_image).
    Returns a (image_type, image_string) tuple on success, and
    (None, None) when nothing could be extracted."""
    if args.image:
        # pull the image from a registry
        if skopeo.pull_image(args.image, args.no_tls):
            return 'oci', args.image
        logger.critical("Cannot download Container image: \"%s\"", args.image)
    if args.raw_image:
        # for now we assume that the raw image tarball is always the
        # product of "docker save", hence it will be in the docker
        # style layout
        if rootfs.extract_tarfile(args.raw_image, rootfs.get_working_dir()):
            return 'docker', args.raw_image
        logger.critical("Cannot extract raw Docker image")
    return None, None


def setup(image_obj):
    """Prepare the image object and the environment for analysis."""
    # give every layer its own notice origin for reporting
    for layer in image_obj.layers:
        layer.origins.add_notice_origin(
            'Layer {}'.format(layer.layer_index))
    # create working directories and mount points
    rootfs.set_up()


def teardown(image_obj):
    """Clean up after analysis is done."""
    # cache the layer results for future runs
    common.save_to_cache(image_obj)
    # remove working directories and mount points
    rootfs.clean_up()


def execute_image(args):
    """Execution path for container images."""
    logger.debug('Starting analysis...')
    image_type, image_string = extract_image(args)
    if not (image_type and image_string):
        # nothing was extracted, so there is no image to analyze
        return
    full_image = cimage.load_full_image(
        image_string, image_type, args.load_until_layer)
    # check if the image was loaded successfully
    if full_image.origins.is_empty():
        # record where this image came from
        full_image.origins.add_notice_origin(
            formats.docker_image.format(imagetag=image_string))
        setup(full_image)
        # analyze the image and report the results out
        cimage.default_analyze(full_image, args)
        report.report_out(args, full_image)
        teardown(full_image)
    else:
        # we cannot load the full image
        logger.error('Cannot retrieve full image metadata')
    if not args.keep_wd:
        prep.clean_image_tars(full_image)
def create_script(command, prereqs, method):
    """Write a shell script that runs `command` in an unshared
    environment and return its path.

    method == 'container' mounts /proc and chroots into the target
    filesystem first; method == 'host' runs the command directly with
    the host shell."""
    chroot_script = """#!{host_shell}

mount -t proc /proc {mnt}/proc
chroot {mnt} {fs_shell} -c "{snip}"
"""
    host_script = """#!{host_shell}
{host_shell} -c "{snip}"
"""
    script = ''
    if method == 'container':
        script = chroot_script.format(host_shell=prereqs.host_shell,
                                      mnt=prereqs.host_path,
                                      fs_shell=prereqs.fs_shell,
                                      snip=command)
    elif method == 'host':
        script = host_script.format(host_shell=prereqs.host_shell,
                                    snip=command)
    script_path = os.path.join(rootfs.get_working_dir(), constants.script_file)
    with open(script_path, 'w', encoding='utf-8') as handle:
        handle.write(script)
    # make the script executable by the owner only
    os.chmod(script_path, 0o700)
    return script_path


def snippets_to_script(snippet_list):
    """Combine the snippet list into a single string that can be handed
    to a shell via its -c option."""
    # escape the characters that the wrapping shell would otherwise
    # expand before the snippets run
    escaped = [snippet.replace('$', '\\$').replace('`', '\\`')
               for snippet in snippet_list]
    return " && ".join(escaped)


def invoke_live(snippet_list, prereqs, method):
    """Run the given command snippets in an unshared namespace and
    return the (output, error) pair from the invocation."""
    # build one shell command out of the snippet list
    command = snippets_to_script(snippet_list)
    logger.debug("Invoking command: %s", command)
    # wrap the command in a script suitable for unshare
    script_path = create_script(command, prereqs, method)
    if method == 'container':
        full_cmd = ['unshare', '-mpf', '-r', script_path]
    elif method == 'host':
        full_cmd = ['unshare', '-pf', '-r', script_path]
    # invoke the script, then remove it
    output, error = rootfs.shell_command(False, full_cmd)
    os.remove(script_path)
    return output, error
def get_exec_command(command_string):
    '''Given a command as a string, find out if the command exists on the
    system. If it does exist, return a subprocess invokable command list
    where the command is the absolute path of the binary existing on the
    system. Raises OSError if the binary cannot be found.'''
    # ROBUSTNESS: split on runs of whitespace; split(' ') left empty
    # tokens in the argument list when the command string contained
    # consecutive spaces
    cmd_list = command_string.split()
    if not cmd_list:
        # keep the OSError contract for an empty command string
        raise OSError("Command {} not found".format(command_string))
    # we first find if the command exists on the system
    run_bin = cmd_list.pop(0)
    bin_path = shutil.which(run_bin)
    if not bin_path:
        raise OSError("Command {} not found".format(run_bin))
    cmd_list.insert(0, bin_path)
    return cmd_list


def get_filesystem_command(layer_obj, command):
    '''Given an ImageLayer object and a command in the form of a string,
    return the command in list form with the target directory of the layer
    appended. This assumes that the layer tarball is untarred, which should
    have happened during the loading of the Image object'''
    # in most cases, the external tool has a CLI where the target
    # directory is the last token in the command, so we simply append it
    cmd_list = get_exec_command(command)
    cmd_list.append(layer_obj.get_untar_dir())
    return cmd_list


def execute_external_command(layer_obj, command, is_sudo=False):
    '''Given an ImageLayer object and a command in the form of a list,
    execute the command and store the results in the ImageLayer object
    either as results or as a Notice object. Returns True on success and
    False when the command reported an error.'''
    origin_layer = 'Layer {}'.format(layer_obj.layer_index)
    result, error = rootfs.shell_command(is_sudo, command)
    if error:
        msg = error.decode('utf-8')
        logger.error("Error in executing external command: %s", msg)
        layer_obj.origins.add_notice_to_origins(origin_layer, Notice(
            msg, 'error'))
        return False
    layer_obj.analyzed_output = result.decode('utf-8')
    return True


def execute_and_pass(layer_obj, command, is_sudo=False):
    '''Similar to execute_external_command, but the results and the errors
    are stored together in layer_obj's analyzed_output property to be
    post-processed. The error and result are separated by two newline
    characters \\n\\n'''
    full_cmd = get_filesystem_command(layer_obj, command)
    result, error = rootfs.shell_command(is_sudo, full_cmd)
    layer_obj.analyzed_output = error.decode(
        'utf-8') + '\n\n' + result.decode('utf-8')


def run_extension_layer(image_layer, ext_string, redo=False):
    '''Depending on what tool the user has chosen to extend with, load that
    extension and run it on the given layer. Exits the program if the
    extension name is not recognized.'''
    try:
        mgr = driver.DriverManager(
            namespace='tern.extensions',
            name=ext_string,
            invoke_on_load=True,
        )
        return mgr.driver.execute_layer(image_layer, redo)
    except NoMatches:
        msg = errors.unrecognized_extension.format(ext=ext_string)
        logger.critical(msg)
        # clean up any working state before bailing out
        rootfs.clean_up()
        prep.clean_working_dir()
        sys.exit(1)
class NoticeException(Exception):
    '''Base notice exception'''


class LevelException(NoticeException):
    '''Raised when a Notice is given a level outside the allowed set'''
    def __init__(self, level, message):
        self.level = level
        self.message = message


class Notice:
    '''A notice for reporting purposes
    attributes:
        message: the notice message
        level: notice level, one of:
            error: cannot continue further
            warning: will try to continue from here
            info: information only
            hint: message on how to make the results better
    methods:
        to_dict: returns a dict representation of the object
    '''
    def __init__(self, message='', level='info'):
        self.__message = message
        self.__level = ''
        self.__levels = ['error', 'warning', 'hint', 'info']
        # route through the property setter so the level gets validated
        self.level = level

    @property
    def message(self):
        return self.__message

    @message.setter
    def message(self, message):
        self.__message = message

    @property
    def level(self):
        return self.__level

    @level.setter
    def level(self, level):
        # reject anything outside the known set of levels
        if level not in self.__levels:
            raise LevelException(level, 'Illegal Level')
        self.__level = level

    def to_dict(self, template=None):
        '''Return a dict representation of the notice. With a template,
        keys are renamed via its notice() mapping and unmapped properties
        are dropped; without one, plain property names are used.'''
        if template:
            mapping = template.notice()
            return {mapping[prop]: self.__dict__[key]
                    for key, prop in prop_names(self)
                    if prop in mapping}
        notice_dict = {prop: self.__dict__[key]
                       for key, prop in prop_names(self)}
        # special case - 'levels' is internal bookkeeping, not report data
        notice_dict.pop('levels')
        return notice_dict
class NoticeOrigin:
    '''The origin of a set of notices
    attributes:
        origin_str: the origin string, from the input or the environment
            or the configuration
        notices: a list of Notice objects
    methods:
        add_notice: append a Notice (type-checked)
        print_notices: return all the notices formatted as one string
        to_dict: return a dict representation of the object
    '''
    def __init__(self, origin_str):
        self.__origin_str = origin_str
        self.__notices = []

    @property
    def origin_str(self):
        return self.__origin_str

    @property
    def notices(self):
        return self.__notices

    def add_notice(self, notice):
        '''Append a Notice; reject any other type'''
        if not isinstance(notice, Notice):
            raise TypeError('Object type is {0}, should be Notice'.format(
                type(notice)))
        self.__notices.append(notice)

    def print_notices(self):
        '''Using the notice format, return a formatted string'''
        # bucket the messages by level, then join each bucket in order
        buckets = {'info': [], 'warning': [], 'error': [], 'hint': []}
        for notice in self.notices:
            if notice.level in buckets:
                buckets[notice.level].append(notice.message)
        return formats.notice_format.format(
            origin=self.origin_str,
            info=''.join(buckets['info']),
            warnings=''.join(buckets['warning']),
            errors=''.join(buckets['error']),
            hints=''.join(buckets['hint']))

    def to_dict(self, template=None):
        '''Return a dict representation; each notice is serialized through
        its own to_dict. With a template, key names come from its
        notice_origin() mapping.'''
        notice_list = [notice.to_dict(template) for notice in self.notices]
        if template:
            mapping = template.notice_origin()
            no_dict = {mapping[prop]: self.__dict__[key]
                       for key, prop in prop_names(self)
                       if prop in mapping}
            # 'notices' needs the serialized list, not the raw objects
            if 'notices' in mapping:
                no_dict[mapping['notices']] = notice_list
        else:
            no_dict = {prop: self.__dict__[key]
                       for key, prop in prop_names(self)}
            no_dict['notices'] = notice_list
        return no_dict
class Origins:
    '''A container holding a list of NoticeOrigin objects
    attributes:
        origins: a list of NoticeOrigin objects
    methods:
        get_origin: look a NoticeOrigin up by its origin string
        add_notice_to_origins: attach a Notice to the matching
            NoticeOrigin, creating the NoticeOrigin first if needed
        add_notice_origin: add an empty NoticeOrigin if not present
        is_empty: True when no origin carries any notices
        to_dict: return a list of dict representations
    '''
    def __init__(self):
        self.__origins = []

    @property
    def origins(self):
        return self.__origins

    def get_origin(self, string):
        '''Return the NoticeOrigin whose origin_str matches, else None'''
        return next(
            (orig for orig in self.__origins if orig.origin_str == string),
            None)

    def add_notice_to_origins(self, orig_string, notice):
        '''Attach notice to the origin named orig_string; create the
        origin on first use'''
        existing = self.get_origin(orig_string)
        if existing:
            existing.add_notice(notice)
        else:
            fresh = NoticeOrigin(orig_string)
            fresh.add_notice(notice)
            self.__origins.append(fresh)

    def add_notice_origin(self, orig_string):
        '''Add an empty NoticeOrigin unless one already exists'''
        if self.get_origin(orig_string) is None:
            self.__origins.append(NoticeOrigin(orig_string))

    def is_empty(self):
        '''True when there are no notices anywhere'''
        return not any(orig.notices for orig in self.__origins)

    def to_dict(self, template=None):
        return [origin.to_dict(template) for origin in self.origins]
class Template(metaclass=ABCMeta):
    '''This is an abstract base class for reporting templates.
    A specific type of template needs to be a subclass of this class.
    Each method returns a mapping (dict) from Tern's internal property
    names to the names used by the target report format.
    methods:
        must implement:
            file_data: mappings for the properties under 'FileData'
            package: mappings for the properties under 'Package'
            image_layer: mappings for the properties under 'ImageLayer'
            image: mappings for the properties under 'Image'
        should implement:
            notice: mappings for the properties under 'Notice'
            notice_origin: mappings for the properties under 'NoticeOrigin'
            origins: mappings for the properties under 'Origins' '''

    @abstractmethod
    def file_data(self):
        '''Must implement a mapping for 'FileData' class properties'''

    @abstractmethod
    def package(self):
        '''Must implement a mapping for 'Package' class properties'''

    @abstractmethod
    def image_layer(self):
        '''Must implement a mapping for 'ImageLayer' class properties'''

    @abstractmethod
    def image(self):
        '''Must implement a mapping for 'Image' class properties'''

    def notice(self):
        '''Should implement a mapping for 'Notice' class properties'''

    def notice_origin(self):
        '''Should implement a mapping for 'NoticeOrigin' class properties'''

    def origins(self):
        '''Should implement a mapping for 'Origins' class properties'''
class CveBinTool(Executor):
    '''Execute cve-bin-tool (https://github.com/intel/cve-bin-tool) on
    container image layers. The tool is assumed to be globally executable;
    this plugin does not install it.'''

    def execute(self, image_obj, redo=False):
        '''Run cve-bin-tool on every layer of the image:
            cve-bin-tool -x -u now /path/to/layer/dir
        Delegates to execute_layer so the command string and the result
        handling live in exactly one place (the previous version duplicated
        both and silently dropped the redo flag).'''
        for layer in image_obj.layers:
            self.execute_layer(layer, redo)

    def execute_layer(self, image_layer, redo=False):
        '''Run cve-bin-tool against a single untarred layer filesystem.
        The combined stderr/stdout that execute_and_pass stores on the
        layer is printed for now.'''
        command = 'cve-bin-tool -x -u now'
        logger.debug("Analyzing layer %s", image_layer.layer_index)
        # run privileged since layer contents may not be world-readable
        passthrough.execute_and_pass(image_layer, command, True)
        # for now we just print the results for each image_layer
        print(image_layer.analyzed_output)
class Consume(metaclass=ABCMeta):
    """Base class for report consuming plugins"""
    @abstractmethod
    def consume_layer(self, reports):
        """Ingest the contents of the list of report files into a list of
        ImageLayer objects according to the plugin type. Each plugin is
        responsible for implementing the reading and assimilation of the
        report metadata"""
"""
Common functions that are useful for CycloneDX document creation
"""

import datetime
import uuid
from tern.utils import general


###################
# General Helpers #
###################


# document level tool information, embedded in the BOM metadata
metadata_tool = {
    'vendor': 'Tern Tools',
    'name': 'Tern',
    'version': general.get_git_rev_or_version()[1]
}


# checksum algorithm names: keys are what Tern uses, values what
# CycloneDX uses
hash_type_mapping = {
    'md5': 'MD5',
    'sha1': 'SHA-1',
    'sha256': 'SHA-256',
}


# purl types whose identifiers carry a namespace (the distro name)
purl_types_with_namespaces = [
    'deb',
    'rpm',
    'apk',
]


# purl types whose spec requires lowercase package names
purl_names_in_lowercase = [
    'deb',
    'go',
    'npm',
    'pypi',
    'rpm',
]


def get_serial_number():
    ''' Return a randomly generated CycloneDX BOM serial number '''
    return 'urn:uuid:' + str(uuid.uuid4())


def get_purl_name(name, pkg_format):
    '''Some purl types require that package names always be lowercased.
    Given a package format and a corresponding name for a package of that
    format, return a lowercased version of the package name if the purl
    spec requires it. Otherwise, just return the original package name.'''
    if pkg_format in purl_names_in_lowercase:
        return name.lower()
    return name


def get_timestamp():
    ''' Return a UTC timestamp string suitable for the BOM timestamp '''
    # use an aware datetime: datetime.utcnow() is deprecated since
    # Python 3.12; the formatted output is identical
    return datetime.datetime.now(
        datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')


def get_hash(checksum_type, checksum):
    ''' Return a CycloneDX hash object from Tern checksum values, or None
    when the checksum type has no CycloneDX equivalent '''
    hash_algorithm = hash_type_mapping.get(checksum_type)
    if hash_algorithm is None:
        return None
    return {'alg': hash_algorithm, 'content': checksum}


def get_property(name, value):
    ''' Return a CycloneDX property object '''
    return {'name': name, 'value': value}


def get_purl_namespace(os_guess, pkg_format):
    '''For purl types that use a namespace, derive it from the first word
    of the OS guess (e.g. "debian" from "debian 10"); otherwise None'''
    if pkg_format in purl_types_with_namespaces:
        return os_guess.partition(' ')[0].lower()
    return None


def get_os_guess(image_obj):
    '''Return the OS guess recorded on the image's first (base) layer,
    or None when it is empty'''
    return image_obj.layers[0].os_guess or None


def get_license_from_name(name):
    '''Wrap a license id in the CycloneDX license object shape'''
    return {'license': {'id': name}}
def get_document_dict(image_obj_list):
    ''' Return the top-level CycloneDX BOM info as a dictionary '''
    metadata = {
        'timestamp': cyclonedx_common.get_timestamp(),
        'tools': [cyclonedx_common.metadata_tool],
    }
    docu_dict = {
        'bomFormat': 'CycloneDX',
        'specVersion': '1.3',
        'serialNumber': cyclonedx_common.get_serial_number(),
        'version': 1,
        'metadata': metadata,
        'components': [],
    }

    if len(image_obj_list) == 1:
        # a single image becomes the BOM metadata component and its
        # packages form the top-level component list
        only_image = image_obj_list[0]
        metadata['component'] = mhelpers.get_image_dict(only_image)
        docu_dict['components'] = phelpers.get_packages_list(only_image)
    else:
        # multiple images: each image is a component with its packages
        # nested underneath it
        for image_obj in image_obj_list:
            image_component = mhelpers.get_image_dict(image_obj)
            image_component['components'] = phelpers.get_packages_list(
                image_obj)
            docu_dict['components'].append(image_component)

    return docu_dict
''' 58 | logger.debug('Generating CycloneDX JSON document...') 59 | 60 | report = get_document_dict(image_obj_list) 61 | 62 | return json.dumps(report, indent=2) 63 | -------------------------------------------------------------------------------- /tern/formats/cyclonedx/cyclonedxjson/image_helpers.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Copyright (c) 2021 Patrick Dwyer. All Rights Reserved. 4 | # SPDX-License-Identifier: BSD-2-Clause 5 | 6 | ''' 7 | Helper functions for image level JSON CycloneDX document dictionaries 8 | ''' 9 | 10 | 11 | from tern.formats.cyclonedx import cyclonedx_common 12 | from packageurl import PackageURL 13 | from tern.classes.oci_image import OCIImage 14 | from tern.classes.docker_image import DockerImage 15 | 16 | 17 | def get_image_dict(image_obj): 18 | ''' Given an image object return the CycloneDX document dictionary for the 19 | given image. For CycloneDX, the image is a component and hence follows the 20 | JSON spec for components. 
def get_package_dict(os_guess, package):
    ''' Given the image's OS guess string and a package object, return a
    CycloneDX JSON dictionary representation of the package. '''
    package_dict = {
        'name': package.name,
        'version': package.version,
        'type': 'application',
    }

    purl_type = package.pkg_format
    purl_namespace = cyclonedx_common.get_purl_namespace(
        os_guess, package.pkg_format)
    if purl_type:
        purl_name = cyclonedx_common.get_purl_name(package.name,
                                                   package.pkg_format)
        if purl_type == "apk":
            # apk purls drop the "apk" type and use the distro name
            # (e.g. "alpine") as the purl type, with no namespace.
            # BUG FIX: the previous code called
            # PackageURL(purl_namespace, purl_name, package.version),
            # which bound purl_name to the `namespace` parameter and the
            # version to `name` positionally, yielding a malformed purl
            # with no version component.
            purl = PackageURL(purl_namespace, None, purl_name,
                              package.version)
        else:
            purl = PackageURL(purl_type, purl_namespace, purl_name,
                              package.version)
        package_dict['purl'] = str(purl)

    # declared license, if any
    if package.pkg_license:
        package_dict['licenses'] = [
            cyclonedx_common.get_license_from_name(package.pkg_license)]

    # license strings found in the package files go under 'evidence'
    if package.pkg_licenses:
        package_dict['evidence'] = {'licenses': []}
        for pkg_license in package.pkg_licenses:
            package_dict['evidence']['licenses'].append(
                cyclonedx_common.get_license_from_name(pkg_license))

    return package_dict
class Generate(metaclass=ABCMeta):
    '''Base class for report plugins'''
    @abstractmethod
    def generate(self, image_obj_list, print_inclusive=False):
        '''Given a list of Image objects, format the report according to
        the plugin style and return it as a string.
        Each subclass is responsible for their own formatting.'''
def create_image_layer(report):
    """Given a report file, create an ImageLayer object with the metadata.

    Raises ConsumerError when the file cannot be read, is not valid JSON,
    or is empty. Returns None when expected keys are missing."""
    # expect a json input, raise an error if it is not
    content = {}
    try:
        with open(os.path.abspath(report), encoding='utf-8') as f:
            content = json.load(f)
    except OSError as err:
        logger.critical("Cannot access file %s: %s", report, err)
        raise ConsumerError(f"Error with given report file: {report}") from err
    except json.JSONDecodeError as err:
        logger.critical("Cannot parse JSON in file %s: %s", report, err)
        raise ConsumerError(f"Error with given report file: {report}") from err
    # we should have some content but it may be empty
    if not content:
        raise ConsumerError("No content consumed from given report file")
    # instantiate a layer and fill it
    layer = ImageLayer("")
    try:
        layer.os_guess = content['os_guess']
        for pkg in content['packages']:
            pkg_obj = Package(pkg['name'])
            pkg_obj.fill(pkg)
            layer.add_package(pkg_obj)
        for filedict in content['files']:
            file_obj = FileData(filedict['name'], filedict['path'])
            file_obj.fill(filedict)
            layer.add_file(file_obj)
        return layer
    except (KeyError, ValueError) as err:
        # BUG FIX: a missing key ('os_guess', 'packages', 'files', 'name',
        # 'path') raises KeyError, which the previous except ValueError
        # did not catch, crashing instead of reporting malformed input
        logger.critical("Cannot find required data in report: %s", err)
        return None
"""Given a list json report files, created by the json generator, 65 | create a corresponding list of image layer objects. We assume the 66 | layers are ordered in the order or report files""" 67 | layer_list = [] 68 | layer_count = 1 69 | for report in reports: 70 | layer = create_image_layer(report) 71 | layer.layer_index = layer_count 72 | layer_list.append(layer) 73 | layer_count += 1 74 | return layer_list 75 | -------------------------------------------------------------------------------- /tern/formats/json/generator.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Copyright (c) 2019 VMware, Inc. All Rights Reserved. 4 | # SPDX-License-Identifier: BSD-2-Clause 5 | 6 | """ 7 | JSON document generator 8 | """ 9 | 10 | import json 11 | from tern.formats import generator 12 | 13 | 14 | class JSON(generator.Generate): 15 | def generate(self, image_obj_list, print_inclusive=False): 16 | '''Given a list of image objects, create a json object string''' 17 | image_list = [] 18 | for image in image_obj_list: 19 | image_list.append({'image': image.to_dict()}) 20 | image_dict = {'images': image_list} 21 | return json.dumps(image_dict) 22 | 23 | def generate_layer(self, layer): 24 | """Create a json object for one layer""" 25 | return json.dumps(layer.to_dict()) 26 | -------------------------------------------------------------------------------- /tern/formats/spdx/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Copyright (c) 2019 VMware, Inc. All Rights Reserved. 4 | # SPDX-License-Identifier: BSD-2-Clause 5 | -------------------------------------------------------------------------------- /tern/formats/spdx/spdx.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Copyright (c) 2019-2020 VMware, Inc. All Rights Reserved. 
class SPDX(Template):
    '''This is the SPDX Template class
    It provides mappings for the SPDX tag-value document format.
    Each method maps Tern property names (keys) to the SPDX tag names
    (values) used when writing the tag-value report.'''

    def file_data(self):
        # FileData properties -> SPDX file tags
        return {'path': 'FileName',
                'short_file_type': 'FileType'}

    def package(self):
        # Package properties -> SPDX package tags
        return {'name': 'PackageName',
                'version': 'PackageVersion',
                'pkg_license': 'PackageLicenseDeclared',
                'copyright': 'PackageCopyrightText',
                'download_url': 'PackageDownloadLocation',
                'src_name': 'SourcePackageName',
                'src_version': 'SourcePackageVersion',
                'pkg_supplier': 'PackageSupplier'}

    def image_layer(self):
        # ImageLayer properties -> SPDX package tags (a layer is
        # represented as an SPDX Package)
        return {'tar_file': 'PackageFileName'}

    def image(self):
        # Image properties -> SPDX package tags (the image is also
        # represented as an SPDX Package)
        return {'name': 'PackageName',
                'tag': 'PackageVersion'}
def get_package_from_dict(pkg_dict):
    """The SPDX JSON format contains a list of dictionaries, each
    containing the package metadata. For one package dictionary, return a
    Package object.

    SPDX uses the placeholders 'NOASSERTION' and 'NONE' where no real
    value exists; Tern represents missing metadata as an empty string.
    The previous version checked only one of the two placeholders per
    field, so e.g. a 'NOASSERTION' downloadLocation leaked through as a
    literal string."""
    pkg_obj = Package(pkg_dict['name'])
    pkg_obj.version = ("" if pkg_dict['versionInfo'] in (
        'NOASSERTION', 'NONE') else pkg_dict['versionInfo'])
    pkg_obj.proj_url = ("" if pkg_dict['downloadLocation'] in (
        'NOASSERTION', 'NONE') else pkg_dict['downloadLocation'])
    pkg_obj.copyright = ("" if pkg_dict['copyrightText'] in (
        'NOASSERTION', 'NONE') else pkg_dict['copyrightText'])
    return pkg_obj
def create_image_layer(report):
    """Given a report file, create an ImageLayer object with the metadata.

    Raises ConsumerError when the file cannot be read, is not valid JSON,
    or is empty. Returns None when expected keys are missing."""
    # expect a json input, raise an error if it is not
    content = {}
    try:
        with open(os.path.abspath(report), encoding='utf-8') as f:
            content = json.load(f)
    except OSError as err:
        logger.critical("Cannot access file %s: %s", report, err)
        raise ConsumerError(f"Error with given report file: {report}") from err
    except json.JSONDecodeError as err:
        logger.critical("Cannot parse JSON in file %s: %s", report, err)
        raise ConsumerError(f"Error with given report file: {report}") from err
    # we should have some content but it may be empty
    if not content:
        raise ConsumerError("No content consumed from given report file")
    # instantiate a layer and fill it
    layer = ImageLayer("")
    # if there are license refs, make a dictionary with license refs to
    # extracted content
    refs_license = get_license_refs_dict(
        content.get('hasExtractedLicensingInfos', []))
    try:
        # we ignore the document level information and go straight
        # to the packages
        for pkg in content['packages']:
            pkg_obj = get_package_from_dict(pkg)
            pkg_obj.pkg_license = refs_license.get(pkg['licenseDeclared'])
            layer.add_package(pkg_obj)
        return layer
    except (KeyError, ValueError) as err:
        # BUG FIX: a missing key ('packages', 'licenseDeclared', or any
        # package field) raises KeyError, which the previous
        # except ValueError did not catch, crashing instead of reporting
        # the malformed input
        logger.critical("Cannot find required data in report: %s", err)
        return None
class SpdxJSON(consumer.Consume):
    def consume_layer(self, reports):
        """Given a list of report files in the SPDX JSON format, created by
        the spdxjson generator, create a total list of image layer objects.
        We assume the layers are ordered in the order of report files.
        Reports that cannot be parsed into a layer are skipped with a
        warning instead of crashing the whole consume operation."""
        layer_list = []
        for layer_count, report in enumerate(reports, start=1):
            layer = create_image_layer(report)
            if layer is None:
                # create_image_layer returns None when required package
                # data is missing; previously this crashed on attribute
                # assignment below
                logger.warning("Skipping unusable report file: %s", report)
                continue
            layer.layer_index = layer_count
            layer_list.append(layer)
        return layer_list
def get_extracted_text_dict(extracted_text, license_ref):
    '''Build the SPDX JSON extracted licensing info entry that pairs a
    plain text license string with its LicenseRef identifier:
    {
        "extractedText" : "extracted_text"
        "licenseId": "license_ref"
    }'''
    entry = {}
    entry["extractedText"] = extracted_text
    entry["licenseId"] = license_ref
    return entry
The dictionaries will 18 | contain the following information: 19 | { 20 | "extractedText": "Plain text of license", 21 | "licenseId": "Corresponding LicenseRef" 22 | }''' 23 | 24 | unique_licenses = set() 25 | for layer in image_obj.layers: 26 | # Get all of the unique file licenses, if they exist 27 | unique_licenses.update(spdx_common.get_layer_licenses(layer)) 28 | # Next, collect any package licenses not already accounted for 29 | for package in layer.packages: 30 | if package.pkg_license: 31 | unique_licenses.add(package.pkg_license) 32 | # Add debian licenses from copyright text as one license 33 | if package.pkg_licenses: 34 | unique_licenses.add(", ".join(package.pkg_licenses)) 35 | extracted_texts = [] 36 | for lic in list(unique_licenses): 37 | valid_spdx, _ = spdx_common.is_spdx_license_expression(lic) 38 | if not valid_spdx: 39 | extracted_texts.append(json_formats.get_extracted_text_dict( 40 | extracted_text=lic, license_ref=spdx_common.get_license_ref( 41 | lic))) 42 | return extracted_texts 43 | 44 | 45 | def get_image_layer_relationships(image_obj): 46 | '''Given an image object, return a list of dictionaries describing the 47 | relationship between each layer "package" and the image "package". 48 | For SPDX JSON format this will typically look like: 49 | { 50 | "spdxElementId" : "SPDXRef-image", 51 | "relatedSpdxElement" : "SPDXRef-layer", 52 | "relationshipType" : "CONTAINS" 53 | }''' 54 | layer_relationships = [] 55 | image_ref = spdx_common.get_image_spdxref(image_obj) 56 | 57 | for index, layer in enumerate(image_obj.layers): 58 | layer_ref = spdx_common.get_layer_spdxref(layer) 59 | # Create a list of dictionaries. 
def get_image_dict(image_obj, template):
    '''Given an image object and the template object for SPDX, return the
    SPDX document dictionary for the given image. For SPDX, the image is a
    package and hence follows the JSON spec for packages.
    The mapping for images should have these keys:
        name
        versionInfo
        downloadLocation'''
    mapping = image_obj.to_dict(template)
    # The image is described as an SPDX package; fields we cannot assert
    # anything about are reported as NOASSERTION
    return {
        'name': mapping['PackageName'],
        'SPDXID': spdx_common.get_image_spdxref(image_obj),
        'versionInfo': mapping['PackageVersion'],
        'supplier': 'NOASSERTION',          # always NOASSERTION
        'downloadLocation': 'NOASSERTION',  # always NOASSERTION
        'filesAnalyzed': False,             # always false
        'licenseConcluded': 'NOASSERTION',  # always NOASSERTION
        'licenseDeclared': 'NOASSERTION',   # always NOASSERTION
        'copyrightText': 'NOASSERTION'      # always NOASSERTION
    }
def get_file_comment(filedata):
    '''Return a formatted comment string with all file level notices. Return
    an empty string if no notices are present'''
    lines = []
    for origin in filedata.origins.origins:
        lines.append('{}:'.format(origin.origin_str) + '\n')
        for notice in origin.notices:
            lines.append('{}: {}'.format(notice.level, notice.message) + '\n')
    return ''.join(lines)
def get_file_contributor_block(filedata):
    '''The SPDX spec allows for an optional block listing file contributors.
    If there are any authors found in the file, return a formatted SPDX text
    block with the list of authors. If empty, return an empty string'''
    return ''.join(
        'FileContributor: {}\n'.format(author) for author in filedata.authors)
return block 91 | -------------------------------------------------------------------------------- /tern/formats/spdx/spdxtagvalue/formats.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Copyright (c) 2019-2020 VMware, Inc. All Rights Reserved. 4 | # SPDX-License-Identifier: BSD-2-Clause 5 | 6 | """ 7 | SPDX document formatting 8 | """ 9 | 10 | # basic strings 11 | tag_value = '{tag}: {value}' 12 | block_text = '\n{message}\n' 13 | 14 | # document level strings 15 | spdx_version = 'SPDXVersion: SPDX-2.2' 16 | data_license = 'DataLicense: CC0-1.0' 17 | spdx_id = 'SPDXID: SPDXRef-DOCUMENT' 18 | document_name = 'DocumentName: Tern report for {image_name}' 19 | document_comment = 'DocumentComment: This document was generated by ' \ 20 | 'the Tern Project: https://github.com/tern-tools/tern' 21 | document_namespace = 'DocumentNamespace: https://spdx.org/spdxdocs/tern-' \ 22 | 'report-{version}-{image}-{uuid}' 23 | license_list_version = 'LicenseListVersion: 3.20' 24 | creator = 'Creator: Tool: tern-{version}' 25 | created = 'Created: {timestamp}' 26 | 27 | # Package level strings 28 | package_comment = 'PackageComment: \n{comment}\n' 29 | source_comment = 'PackageComment: \nThis package refers to a source ' \ 30 | 'package associated with one or more binary packages installed in this ' \ 31 | 'container. 
This source package is NOT installed in the container but ' \ 32 | 'may be useful for CVE lookups.\n' 33 | package_id = '{name}-{ver}' 34 | 35 | # Relationship strings 36 | contains = 'Relationship: {outer} CONTAINS {inner}' 37 | prereq = 'Relationship: {after} HAS_PREREQUISITE {before}' 38 | describes = 'Relationship: {doc} DESCRIBES {image}' 39 | generates = 'Relationship: {pkg_ref} GENERATED_FROM {src_ref}' 40 | 41 | # License Reference Information 42 | license_id = 'LicenseID: {license_ref}' 43 | extracted_text = 'ExtractedText: Original license: {orig_license}' 44 | -------------------------------------------------------------------------------- /tern/formats/yaml/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Copyright (c) 2019 VMware, Inc. All Rights Reserved. 4 | # SPDX-License-Identifier: BSD-2-Clause 5 | -------------------------------------------------------------------------------- /tern/formats/yaml/generator.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Copyright (c) 2019 VMware, Inc. All Rights Reserved. 
class YAML(generator.Generate):
    def generate(self, image_obj_list, print_inclusive=False):
        '''Generate a yaml report: the Tern disclaimer header followed by
        one yaml document section per image object'''
        pieces = [formats.disclaimer_yaml.format(
            version_info=get_git_rev_or_version())]
        for image_obj in image_obj_list:
            pieces.append(print_yaml_report(image_obj))
        return ''.join(pieces)

    def generate_layer(self, layer):
        """Generate a yaml report for the given layer object"""
        return yaml.dump(layer.to_dict(), default_flow_style=False)
def pull_image(image_tag_string, no_tls=False):
    """Use skopeo to pull a remote image into the working directory.
    Returns the skopeo output on success, None on failure."""
    # Check if skopeo is set up
    check_skopeo_setup()
    # assume the docker transport unless the user points at the local daemon
    if image_tag_string.split(':')[0] == "docker-daemon":
        remote = image_tag_string
    else:
        remote = f'docker://{image_tag_string}'
    local = f'dir:{rootfs.get_working_dir()}'
    logger.debug("Attempting to pull image \"%s\"", image_tag_string)
    # build the command once instead of duplicating the shell invocation
    cmd = ['skopeo', 'copy']
    if no_tls:
        cmd.append('--src-tls-verify=false')
    cmd.extend([remote, local])
    result, error = rootfs.shell_command(False, cmd)
    if error:
        logger.error("Error when downloading image: \"%s\"", error)
        return None
    return result
def setup(working_dir=None):
    """Environment setup
    working_dir: a directory path other than the default directory"""
    logger.debug("Setting up...")
    # create the top directory (and thereby the cache file location)
    top_dir = general.get_top_dir(working_dir)
    if not os.path.isdir(top_dir):
        os.makedirs(top_dir)
    # set the working directory according to user input
    rootfs.set_working_dir(working_dir)
    # load the cache
    cache.load()
    # required to run in a container natively on Windows
    hash_script = pkg_resources.resource_filename(
        "tern", "tools/fs_hash.sh")
    rootfs.root_command(["chmod", "+x", hash_script])
def clean_working_dir():
    """Remove the working directory and everything under it, if present"""
    working_path = rootfs.get_working_dir()
    if not os.path.exists(working_path):
        return
    shutil.rmtree(working_path)
4 | # SPDX-License-Identifier: BSD-2-Clause 5 | 6 | """ 7 | Report formatting for different types of reports and report content 8 | """ 9 | 10 | 11 | # formatting variables 12 | layer_id = '' 13 | package_name = '' 14 | package_version = '' 15 | package_url = '' 16 | package_license = '' 17 | package_info_retrieval_errors = '' 18 | package_info_reporting_improvements = '' 19 | 20 | # general formatting 21 | # report disclaimer 22 | disclaimer = '''This report was generated by the Tern Project\n''' \ 23 | '''{version_info}\n\n''' 24 | disclaimer_yaml = '''# This report was generated by the Tern Project\n''' \ 25 | '''# {version_info}\n\n''' 26 | 27 | commit_version = "https://github.com/tern-tools/tern/commit/{commit_sha}" 28 | packaged_version = "Version: {version}" 29 | 30 | # cache 31 | retrieve_from_cache = '''Retrieving packages from cache for layer ''' \ 32 | '''{layer_id}:\n\n''' 33 | # command library 34 | base_listing = '''Direct listing in command_lib/base.yml''' 35 | snippet_listing = '''Direct listing in command_lib/snippets.yml''' 36 | invoke_for_base = '''Retrieved package metadata using {binary} ''' \ 37 | '''default method. 
\n''' 38 | invoke_for_snippets = '''Retrieved by invoking listing in command_lib/''' \ 39 | '''snippets.yml''' 40 | invoke_in_container = '''\tin container:\n''' 41 | invoke_on_host = '''\ton host:\n''' 42 | # package information 43 | package_name = '''Package: {package_name}\n''' 44 | package_version = '''Version: {package_version}\n''' 45 | package_url = '''Project URL: {package_url}\n''' 46 | package_license = '''License: {package_license}\n''' 47 | package_copyright = '''Copyright Text: {package_copyright}\n''' 48 | layer_packages_header = '''\tPackages found in Layer: {}''' 49 | layer_licenses_list = '''\tLicenses found in Layer: {list}\n''' 50 | layer_file_licenses_list = '''\tFile licenses found in Layer: {list}\n''' 51 | full_licenses_list = '''###########################################\n'''\ 52 | '''# Summary of licenses found in Container: #\n'''\ 53 | '''###########################################\n{list}\n''' 54 | 55 | # notes 56 | package_notes = '''Errors: {package_info_retrieval_errors}\n''' \ 57 | '''Improvements: {package_info_reporting_improvements}\n''' 58 | # demarkation 59 | package_demarkation = '''================================================='''\ 60 | '''======================================\n\n''' 61 | 62 | # informational 63 | loading_from_cache = '''Loading packages from cache for layer {layer_id}''' 64 | invoking_base_commands = '''Invoking commands from command_lib/base.yml''' 65 | invoking_snippet_commands = '''Invoking commands from ''' \ 66 | '''command_lib/snippets.yml''' 67 | ignored = '''\nIgnored Commands:''' 68 | unrecognized = '''\nUnrecognized Commands:''' 69 | os_style_guess = '''Found {package_manager} in filesystem. 
''' \ 70 | '''Possible OS(es) might be: {os_list}''' 71 | os_release = '''Found '{os_style}' in /etc/os-release.''' 72 | 73 | # report formatting for dockerfiles 74 | 75 | # dockerfile variables 76 | base_image_instructions = '' 77 | dockerfile_instruction = '' 78 | 79 | # dockerfile report sections 80 | dockerfile_image = '''Image built from Dockerfile {dockerfile}''' 81 | dockerfile_base = '''Base Image: {base_image_instructions}''' 82 | dockerfile_line = '''Instruction Line: {dockerfile_instruction}''' 83 | image_build_failure = '''Failed to build image from Dockerfile''' 84 | image_load_failure = '''Failed to load metadata for built image {testimage}''' 85 | layer_created_by = '''Layer created by commands: {created_by}''' 86 | 87 | # docker image report 88 | docker_image = '''Docker image: {imagetag}''' 89 | 90 | # format for notices 91 | notice_format = '''{origin}:\n\t{info}\n\twarnings:{warnings}''' \ 92 | '''\n\terrors:{errors}\n\thints:{hints}\n''' 93 | -------------------------------------------------------------------------------- /tern/report/report.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Copyright (c) 2017-2020 VMware, Inc. All Rights Reserved. 
def write_report(report, args):
    '''Write the report string to the file named by args.output_file.
    If no output file was requested, do nothing.'''
    if not args.output_file:
        return
    with open(args.output_file, 'w', encoding='utf-8') as out:
        out.write(report)
def report_out(args, *images):
    """Generate a report for the given images and emit it.
    Warns when args.load_until_layer asks for more layers than an image
    has (the actual truncation happens in docker_image.py); then writes
    the report to args.output_file, or prints it to stdout."""
    for img in images:
        if (args.load_until_layer > img.total_layers and
                args.load_until_layer != 0):
            # The actual ignoring is done in docker_image.py
            # Warning is given here for visibility to user.
            # Use lazy %-style logging args for consistency with the rest
            # of this module (and to avoid formatting when not emitted)
            logger.warning(
                'Given layer %d exceeds total number of layers in image '
                '(%d). Ignoring --layer option and generating report for '
                '%d total layers',
                args.load_until_layer, img.total_layers, img.total_layers)
    report = generate_report(args, *images)
    if not report:
        logger.error("%s not a recognized plugin.", args.report_format)
    elif args.output_file:
        write_report(report, args)
    else:
        print(report)
#!/bin/bash
# Copyright (c) 2017-2019 VMware, Inc. All Rights Reserved.
# SPDX-License-Identifier: BSD-2-Clause
#
# Get sources for a list of packages.
# NOTE: the shebang must be the very first line of the file; previously it
# appeared after the license comments, so the kernel ignored it and the
# script could run under a POSIX sh that lacks pushd/popd.

apt-get update
mkdir apt_sources
pushd apt_sources
for package in "$@"
do
    # quote expansions so package names cannot be word-split or globbed
    mkdir "$package"
    pushd "$package"
    apt-get source -d "$package"
    popd
done
popd
tar cvzf apt_sources.tar.gz /apt_sources/*
def load():
    '''Load the cache from the cache file in the working directory'''
    global cache

    cache_path = os.path.join(rootfs.working_dir, cache_file)
    # Nothing to populate if there is no cache file yet
    if not os.path.exists(cache_path):
        return

    with open(cache_path, encoding='utf-8') as f:
        cache = json.load(f)
def get_packages(layer_hash):
    '''Given the layer hash, retrieve the cached package list for that
    layer. Return an empty list if the layer is not in the cache (or the
    cached record has no package list).'''
    # dict.get chains replace the non-idiomatic `in cache.keys()` test and
    # avoid a KeyError for records cached without a 'packages' key
    return cache.get(layer_hash, {}).get('packages', [])
def check_shell():
    """Check if any shell binary is available on the host.

    Walk the list of known shell paths from the command library and
    return the first one present on the filesystem, or "" if none are.
    """
    known_shells = command_lib.command_lib["common"]["shells"]
    return next((path for path in known_shells if os.path.exists(path)), "")
2 | # SPDX-License-Identifier: BSD-2-Clause 3 | 4 | FROM debian:buster 5 | 6 | # Install some dependencies 7 | RUN apt-get update && \ 8 | apt-get -y install tar && \ 9 | apt-get clean && \ 10 | rm -rf /var/lib/apt/lists/* 11 | 12 | # Make a default command 13 | CMD ["bash"] 14 | -------------------------------------------------------------------------------- /tests/dockerfiles/fail_build: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020 VMware, Inc. All Rights Reserved. 2 | # SPDX-License-Identifier: BSD-2-Clause 3 | 4 | FROM debian:buster 5 | 6 | # Install some dependencies 7 | RUN apt-get update && \ 8 | apt-get -y install tar && \ 9 | apt-get clean && \ 10 | rm -rf /var/lib/apt/lists/* 11 | 12 | # Try to copy in a file that doesn't exist 13 | COPY noexist . 14 | 15 | # Build shouldn't get here 16 | CMD ["bash"] 17 | -------------------------------------------------------------------------------- /tests/dockerfiles/golang_1.13_stretch: -------------------------------------------------------------------------------- 1 | FROM buildpack-deps:stretch-scm 2 | 3 | # gcc for cgo 4 | RUN apt-get update && apt-get install -y --no-install-recommends \ 5 | g++ \ 6 | gcc \ 7 | libc6-dev \ 8 | make \ 9 | pkg-config \ 10 | && rm -rf /var/lib/apt/lists/* 11 | 12 | ENV GOLANG_VERSION 1.13.6 13 | 14 | RUN set -eux; \ 15 | \ 16 | # this "case" statement is generated via "update.sh" 17 | dpkgArch="$(dpkg --print-architecture)"; \ 18 | case "${dpkgArch##*-}" in \ 19 | amd64) goRelArch='linux-amd64'; goRelSha256='a1bc06deb070155c4f67c579f896a45eeda5a8fa54f35ba233304074c4abbbbd' ;; \ 20 | armhf) goRelArch='linux-armv6l'; goRelSha256='37a1a83e363dcf146a67fa839d170fd1afb13009585fdd493d0a3370fbe6f785' ;; \ 21 | arm64) goRelArch='linux-arm64'; goRelSha256='0a18125c4ed80f9c3045cf92384670907c4796b43ed63c4307210fe93e5bbca5' ;; \ 22 | i386) goRelArch='linux-386'; 
goRelSha256='27feb013106da784f09e560720aa41ab395c67f7eed4c4a0fce04bc6e3d01c7d' ;; \ 23 | ppc64el) goRelArch='linux-ppc64le'; goRelSha256='26a977a8af5dc50a562f0a57b58dded5fa3bacfe77722cf8a84ea54ca54728dd' ;; \ 24 | s390x) goRelArch='linux-s390x'; goRelSha256='5cd9900a1fa0f0cac657930b648381cad9b8c5e2bbc77caf86a6fb5cedad0017' ;; \ 25 | *) goRelArch='src'; goRelSha256='aae5be954bdc40bcf8006eb77e8d8a5dde412722bc8effcdaf9772620d06420c'; \ 26 | echo >&2; echo >&2 "warning: current architecture ($dpkgArch) does not have a corresponding Go binary release; will be building from source"; echo >&2 ;; \ 27 | esac; \ 28 | \ 29 | url="https://golang.org/dl/go${GOLANG_VERSION}.${goRelArch}.tar.gz"; \ 30 | wget -O go.tgz "$url"; \ 31 | echo "${goRelSha256} *go.tgz" | sha256sum -c -; \ 32 | tar -C /usr/local -xzf go.tgz; \ 33 | rm go.tgz; \ 34 | \ 35 | if [ "$goRelArch" = 'src' ]; then \ 36 | echo >&2; \ 37 | echo >&2 'error: UNIMPLEMENTED'; \ 38 | echo >&2 'TODO install golang-any from jessie-backports for GOROOT_BOOTSTRAP (and uninstall after build)'; \ 39 | echo >&2; \ 40 | exit 1; \ 41 | fi; \ 42 | \ 43 | export PATH="/usr/local/go/bin:$PATH"; \ 44 | go version 45 | 46 | ENV GOPATH /go 47 | ENV PATH $GOPATH/bin:/usr/local/go/bin:$PATH 48 | 49 | RUN mkdir -p "$GOPATH/src" "$GOPATH/bin" && chmod -R 777 "$GOPATH" 50 | WORKDIR $GOPATH 51 | -------------------------------------------------------------------------------- /tests/dockerfiles/pin_add_command_test/pin_add_command_test_dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu 2 | ADD plain_file /tmp 3 | -------------------------------------------------------------------------------- /tests/dockerfiles/pin_add_command_test/plain_file: -------------------------------------------------------------------------------- 1 | This is a test file. 
2 | -------------------------------------------------------------------------------- /tests/dockerfiles/split_shell_script/buildpack_deps_buster: -------------------------------------------------------------------------------- 1 | # 2 | # NOTE: THIS DOCKERFILE IS GENERATED VIA "update.sh" 3 | # 4 | # PLEASE DO NOT EDIT IT DIRECTLY. 5 | # 6 | 7 | FROM buildpack-deps:buster 8 | 9 | # ensure local python is preferred over distribution python 10 | ENV PATH /usr/local/bin:$PATH 11 | 12 | # http://bugs.python.org/issue19846 13 | # > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK. 14 | ENV LANG C.UTF-8 15 | 16 | # extra dependencies (over what buildpack-deps already includes) 17 | RUN apt-get update && apt-get install -y --no-install-recommends \ 18 | libbluetooth-dev \ 19 | tk-dev \ 20 | uuid-dev \ 21 | && rm -rf /var/lib/apt/lists/* 22 | 23 | ENV GPG_KEY E3FF2839C048B25C084DEBE9B26995E310250568 24 | ENV PYTHON_VERSION 3.9.0a6 25 | 26 | RUN set -ex \ 27 | \ 28 | && wget -O python.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \ 29 | && wget -O python.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \ 30 | && export GNUPGHOME="$(mktemp -d)" \ 31 | && gpg --batch --keyserver ha.pool.sks-keyservers.net --recv-keys "$GPG_KEY" \ 32 | && gpg --batch --verify python.tar.xz.asc python.tar.xz \ 33 | && { command -v gpgconf > /dev/null && gpgconf --kill all || :; } \ 34 | && rm -rf "$GNUPGHOME" python.tar.xz.asc \ 35 | && mkdir -p /usr/src/python \ 36 | && tar -xJC /usr/src/python --strip-components=1 -f python.tar.xz \ 37 | && rm python.tar.xz \ 38 | \ 39 | && cd /usr/src/python \ 40 | && gnuArch="$(dpkg-architecture --query DEB_BUILD_GNU_TYPE)" \ 41 | && ./configure \ 42 | --build="$gnuArch" \ 43 | --enable-loadable-sqlite-extensions \ 44 | --enable-optimizations \ 45 | --enable-option-checking=fatal \ 46 | 
--enable-shared \ 47 | --with-system-expat \ 48 | --with-system-ffi \ 49 | --without-ensurepip \ 50 | && make -j "$(nproc)" \ 51 | && make install \ 52 | && ldconfig \ 53 | \ 54 | && find /usr/local -depth \ 55 | \( \ 56 | \( -type d -a \( -name test -o -name tests -o -name idle_test \) \) \ 57 | -o \ 58 | \( -type f -a \( -name '*.pyc' -o -name '*.pyo' \) \) \ 59 | \) -exec rm -rf '{}' + \ 60 | && rm -rf /usr/src/python \ 61 | \ 62 | && python3 --version 63 | 64 | # make some useful symlinks that are expected to exist 65 | RUN cd /usr/local/bin \ 66 | && ln -s idle3 idle \ 67 | && ln -s pydoc3 pydoc \ 68 | && ln -s python3 python \ 69 | && ln -s python3-config python-config 70 | 71 | # if this is called "PIP_VERSION", pip explodes with "ValueError: invalid truth value ''" 72 | ENV PYTHON_PIP_VERSION 20.1 73 | # https://github.com/pypa/get-pip 74 | ENV PYTHON_GET_PIP_URL https://github.com/pypa/get-pip/raw/1fe530e9e3d800be94e04f6428460fc4fb94f5a9/get-pip.py 75 | ENV PYTHON_GET_PIP_SHA256 ce486cddac44e99496a702aa5c06c5028414ef48fdfd5242cd2fe559b13d4348 76 | 77 | RUN set -ex; \ 78 | \ 79 | wget -O get-pip.py "$PYTHON_GET_PIP_URL"; \ 80 | echo "$PYTHON_GET_PIP_SHA256 *get-pip.py" | sha256sum --check --strict -; \ 81 | \ 82 | python get-pip.py \ 83 | --disable-pip-version-check \ 84 | --no-cache-dir \ 85 | "pip==$PYTHON_PIP_VERSION" \ 86 | ; \ 87 | pip --version; \ 88 | \ 89 | find /usr/local -depth \ 90 | \( \ 91 | \( -type d -a \( -name test -o -name tests -o -name idle_test \) \) \ 92 | -o \ 93 | \( -type f -a \( -name '*.pyc' -o -name '*.pyo' \) \) \ 94 | \) -exec rm -rf '{}' +; \ 95 | rm -f get-pip.py 96 | 97 | CMD ["python3"] 98 | -------------------------------------------------------------------------------- /tests/dockerfiles/split_shell_script/buildpack_deps_buster_scm: -------------------------------------------------------------------------------- 1 | FROM buildpack-deps:buster-scm 2 | 3 | # gcc for cgo 4 | RUN apt-get update && apt-get install -y 
--no-install-recommends \ 5 | g++ \ 6 | gcc \ 7 | libc6-dev \ 8 | make \ 9 | pkg-config \ 10 | && rm -rf /var/lib/apt/lists/* 11 | 12 | ENV GOLANG_VERSION 1.14 13 | 14 | RUN set -eux; \ 15 | \ 16 | # this "case" statement is generated via "update.sh" 17 | dpkgArch="$(dpkg --print-architecture)"; \ 18 | case "${dpkgArch##*-}" in \ 19 | amd64) goRelArch='linux-amd64'; goRelSha256='08df79b46b0adf498ea9f320a0f23d6ec59e9003660b4c9c1ce8e5e2c6f823ca' ;; \ 20 | armhf) goRelArch='linux-armv6l'; goRelSha256='b5e682176d7ad3944404619a39b585453a740a2f82683e789f4279ec285b7ecd' ;; \ 21 | arm64) goRelArch='linux-arm64'; goRelSha256='cd813387f770c07819912f8ff4b9796a4e317dee92548b7226a19e60ac79eb27' ;; \ 22 | i386) goRelArch='linux-386'; goRelSha256='cdcdab6c8d1f2dcea3bbec793352ef84db167a2eb6c60ff69e5cf94dca575f9a' ;; \ 23 | ppc64el) goRelArch='linux-ppc64le'; goRelSha256='b896b5eba616d27fd3bb8218de6bef557cb62221e42f73c84ae4b89cdb602dec' ;; \ 24 | s390x) goRelArch='linux-s390x'; goRelSha256='22e67470fe872c893face196f02323a11ffe89999260c136b9c50f06619e0243' ;; \ 25 | *) goRelArch='src'; goRelSha256='6d643e46ad565058c7a39dac01144172ef9bd476521f42148be59249e4b74389'; \ 26 | echo >&2; echo >&2 "warning: current architecture ($dpkgArch) does not have a corresponding Go binary release; will be building from source"; echo >&2 ;; \ 27 | esac; \ 28 | \ 29 | url="https://golang.org/dl/go${GOLANG_VERSION}.${goRelArch}.tar.gz"; \ 30 | wget -O go.tgz "$url"; \ 31 | echo "${goRelSha256} *go.tgz" | sha256sum -c -; \ 32 | tar -C /usr/local -xzf go.tgz; \ 33 | rm go.tgz; \ 34 | \ 35 | if [ "$goRelArch" = 'src' ]; then \ 36 | echo >&2; \ 37 | echo >&2 'error: UNIMPLEMENTED'; \ 38 | echo >&2 'TODO install golang-any from jessie-backports for GOROOT_BOOTSTRAP (and uninstall after build)'; \ 39 | echo >&2; \ 40 | exit 1; \ 41 | fi; \ 42 | \ 43 | export PATH="/usr/local/go/bin:$PATH"; \ 44 | go version 45 | 46 | ENV GOPATH /go 47 | ENV PATH $GOPATH/bin:/usr/local/go/bin:$PATH 48 | 49 | RUN mkdir -p 
class TestAnalyzeDefaultCommon(unittest.TestCase):
    """Tests for update_master_list in tern.analyze.default.default_common"""

    def setUp(self):
        # fixture image with a fixed image id (see test_fixtures.TestImage)
        self.image = TestImage('5678efgh')

    def tearDown(self):
        del self.image

    def testUpdateMasterListWithoutPackages(self):
        """After updating an empty master list, its length matches the
        layer's package list"""
        self.image.load_image()
        layer = self.image.layers[0]
        master_list = []  # idiomatic literal instead of list()
        common.update_master_list(master_list, layer)
        self.assertEqual(len(master_list), len(layer.packages))

    def testUpdateMasterListWithPackages(self):
        """If the master list already holds the layer's packages, its
        length is unchanged and the layer's package list is emptied"""
        self.image.load_image()
        layer = self.image.layers[0]
        master_list = []
        older_master_list = []
        # single loop replaces the original's two identical append loops
        for pkg in layer.packages:
            master_list.append(pkg)
            older_master_list.append(pkg)

        common.update_master_list(master_list, layer)
        self.assertEqual(len(master_list), len(older_master_list))
        self.assertEqual(len(layer.packages), 0)

        # every package recorded before the update must still be present
        for old_pkg in older_master_list:
            exists = False
            for pkg in master_list:
                if old_pkg.is_equal(pkg):
                    exists = True
                    break
            self.assertTrue(exists)
class TestAnalyzeDefaultFilter(unittest.TestCase):
    """Tests for the command filtering helpers in
    tern.analyze.default.filter"""

    def setUp(self):
        # one install and one remove command for the same package
        self.command1 = Command("yum install nfs-utils")
        self.command2 = Command("yum remove nfs-utils")

    def tearDown(self):
        del self.command1
        del self.command2

    def testFilterInstallCommands(self):
        """Install and remove commands each yield one filtered command;
        other yum invocations yield none"""
        commands, _ = fltr.filter_install_commands("yum install")
        self.assertEqual(len(commands), 1)
        commands, _ = fltr.filter_install_commands("yum remove")
        self.assertEqual(len(commands), 1)

        # Negative Scenarios
        commands, _ = fltr.filter_install_commands("yum clean")
        self.assertEqual(len(commands), 0)
        commands, _ = fltr.filter_install_commands("yum")
        self.assertEqual(len(commands), 0)

    def testConsolidateCommandsWithDifferentCommands(self):
        """An install and a remove command consolidate into a single
        command object with 3 words"""
        commands_list = fltr.consolidate_commands(
            [self.command1, self.command2])
        self.assertEqual(len(commands_list), 1)
        self.assertEqual(len(commands_list[0].words), 3)

    def testConsolidateCommandsWithSameCommands(self):
        """Two identical commands consolidate into one command object
        with 2 words"""
        command = Command("yum install nfs-utils")
        commands_list = fltr.consolidate_commands([self.command1, command])
        self.assertEqual(len(commands_list), 1)
        self.assertEqual(len(commands_list[0].words), 2)

        command = Command("yum remove nfs-utils")
        commands_list = fltr.consolidate_commands([self.command2, command])
        self.assertEqual(len(commands_list), 1)
        self.assertEqual(len(commands_list[0].words), 2)

    def testRemoveIgnoredCommandsWithIgnoreFlag(self):
        """A command marked with set_ignore() is filtered out"""
        self.command1.set_ignore()
        _, c = fltr.remove_ignored_commands([self.command1])
        self.assertEqual(len(c), 0)

    def testRemoveIgnoredCommandsWithoutIgnoreFlag(self):
        """An unmarked command passes through unchanged"""
        # Negative Scenarios
        _, c = fltr.remove_ignored_commands([self.command1])
        self.assertEqual(len(c), 1)

    def testRemoveUnrecognizedCommandsWithoutFlag(self):
        """A command with no install flag is treated as unrecognized
        and dropped"""
        _, c = fltr.remove_unrecognized_commands([self.command1])
        self.assertEqual(len(c), 0)

    def testRemoveUnrecognizedCommandsWithFlag(self):
        """A command flagged with set_install() is kept"""
        # Negative Scenarios
        self.command1.set_install()
        _, c = fltr.remove_unrecognized_commands([self.command1])
        self.assertEqual(len(c), 1)

    def testGetInstalledPackageNamesWithInstallFlag(self):
        """An install command yields a non-empty package name list"""
        self.command1.set_install()
        self.assertGreater(
            len(fltr.get_installed_package_names(self.command1)), 0)

    def testGetInstalledPackageNamesWithRemoveFlag(self):
        """A remove command yields no installed package names"""
        # Negative Scenarios
        self.command2.set_remove()
        self.assertEqual(
            len(fltr.get_installed_package_names(self.command2)), 0)
class TestClassNotice(unittest.TestCase):
    """Tests for tern.classes.notice.Notice"""

    def setUp(self):
        self.notice = Notice()

    def tearDown(self):
        del self.notice

    def testInstance(self):
        # a fresh Notice has no message and defaults to the 'info' level
        self.assertFalse(self.notice.message)
        self.assertEqual(self.notice.level, 'info')

    def testSetters(self):
        self.notice.message = 'tag'
        self.assertEqual(self.notice.message, 'tag')
        # an unknown level string is rejected with a NoticeException
        with self.assertRaises(NoticeException):
            self.notice.level = 'something'
        self.notice.level = 'warning'

    def testGetters(self):
        self.notice.message = 'tag'
        self.notice.level = 'warning'
        self.assertEqual(self.notice.message, 'tag')
        self.assertEqual(self.notice.level, 'warning')

    def testToDict(self):
        # without a template, to_dict keys are the plain attribute names
        self.notice.message = 'tag'
        self.notice.level = 'warning'
        a_dict = self.notice.to_dict()
        self.assertEqual(a_dict['message'], 'tag')
        self.assertEqual(a_dict['level'], 'warning')

    def testToDictTemplate(self):
        # with a template, keys come from the template's notice() mapping
        template = TestTemplate2()
        self.notice.message = 'tag'
        self.notice.level = 'warning'
        a_dict = self.notice.to_dict(template)
        self.assertEqual(a_dict['note.message'], 'tag')
        self.assertEqual(a_dict['note.level'], 'warning')
4 | # SPDX-License-Identifier: BSD-2-Clause 5 | 6 | import unittest 7 | 8 | from test_fixtures import TestTemplate1 9 | from test_fixtures import TestTemplate2 10 | 11 | 12 | class TestClassTemplate(unittest.TestCase): 13 | '''Note that we cannot instantiate the abstract base class so we 14 | will use a test fixture which is a subclass of Template''' 15 | 16 | def setUp(self): 17 | self.template1 = TestTemplate1() 18 | self.template2 = TestTemplate2() 19 | 20 | def tearDown(self): 21 | del self.template1 22 | del self.template2 23 | 24 | def testPackage(self): 25 | mapping = self.template1.package() 26 | self.assertEqual(mapping['name'], 'package.name') 27 | self.assertEqual(mapping['version'], 'package.version') 28 | self.assertEqual(mapping['pkg_license'], 'package.license') 29 | 30 | def testImageLayer(self): 31 | mapping = self.template1.image_layer() 32 | self.assertEqual(mapping['diff_id'], 'layer.diff') 33 | self.assertEqual(mapping['tar_file'], 'layer.tarfile') 34 | self.assertEqual(mapping['packages'], 'layer.packages') 35 | 36 | def testImage(self): 37 | mapping = self.template1.image() 38 | self.assertEqual(mapping['repotag'], 'image.repotag') 39 | self.assertEqual(mapping['layers'], 'image.layers') 40 | 41 | def testNotice(self): 42 | mapping = self.template2.notice() 43 | self.assertEqual(mapping['level'], 'note.level') 44 | self.assertEqual(mapping['message'], 'note.message') 45 | 46 | def testNoticeOrigin(self): 47 | mapping = self.template2.notice_origin() 48 | self.assertEqual(mapping['origin_str'], 'note.source') 49 | self.assertEqual(mapping['notices'], 'note.messages') 50 | 51 | def testOrigins(self): 52 | mapping = self.template2.origins() 53 | self.assertEqual(mapping['origins'], 'notes') 54 | 55 | 56 | if __name__ == '__main__': 57 | unittest.main() 58 | -------------------------------------------------------------------------------- /tests/test_fixtures.py: -------------------------------------------------------------------------------- 1 | 
def create_working_dir():
    """Create the working directory for tests"""
    working_dir = os.path.join(general.get_top_dir(), constants.temp_folder)
    # exist_ok avoids the TOCTOU race between an isdir check and makedirs
    os.makedirs(working_dir, exist_ok=True)


def remove_working_dir():
    """Remove the working directory for tests"""
    # don't remove the cache, just the temp folder
    working_dir = os.path.join(general.get_top_dir(), constants.temp_folder)
    if os.path.exists(working_dir):
        shutil.rmtree(working_dir)


class TestImage(Image):
    """Image fixture whose load_image() installs one layer with two
    packages. The redundant pass-through __init__ was removed; the
    inherited Image.__init__(image_id) is used directly."""

    def load_image(self):
        l1 = ImageLayer('123abc', 'path/to/tar')
        self.name = 'testimage'
        self.tag = 'testtag'
        l1.add_package(Package('p1'))
        l1.add_package(Package('p2'))
        self._layers.append(l1)
class TestTemplate2(Template):
    '''Template with origins mapping: each data-level mapping is merged
    with the origins() mapping'''

    def file_data(self):
        '''Mapping for FileData properties'''
        mapping = {'name': 'file.name',
                   'path': 'file.path',
                   'licenses': 'file.licenses'}
        # we update the mapping with another defined mapping
        mapping.update(self.origins())
        return mapping

    def package(self):
        '''Mapping for Package properties'''
        mapping = {'name': 'package.name',
                   'version': 'package.version',
                   'pkg_license': 'package.license',
                   'proj_url': 'package.url',
                   'files': 'package.files'}
        # we update the mapping with another defined mapping
        mapping.update(self.origins())
        return mapping

    def image_layer(self):
        '''Mapping for ImageLayer properties'''
        mapping = {'diff_id': 'layer.diff',
                   'tar_file': 'layer.tarfile',
                   'packages': 'layer.packages',
                   'files': 'layer.files'}
        # we update the mapping with another defined mapping
        mapping.update(self.origins())
        return mapping

    def image(self):
        '''Mapping for Image properties'''
        mapping = {'repotag': 'image.repotag',
                   'layers': 'image.layers'}
        # we update the mapping with another defined mapping
        mapping.update(self.origins())
        return mapping

    def notice(self):
        '''Mapping for Notice properties'''
        return {'level': 'note.level',
                'message': 'note.message'}

    def notice_origin(self):
        '''Mapping for NoticeOrigin properties'''
        return {'origin_str': 'note.source',
                'notices': 'note.messages'}

    def origins(self):
        '''Mapping for the top-level origins key'''
        return {'origins': 'notes'}
class TestLoadDockerAPI(unittest.TestCase):
    """This test case requires a temporary folder to be set up and the Docker
    daemon to be up and running properly"""

    def setUp(self):
        self.client = docker_api.check_docker_setup()
        create_working_dir()
        rootfs.set_working_dir()

    def tearDown(self):
        # should not do anything if the client is already closed
        docker_api.close_client(self.client)
        # clean up working directory
        remove_working_dir()

    def testBuildAndRemoveImage(self):
        """Build from a known-good Dockerfile, then exercise remove_image
        success and failure paths and two failing build paths"""
        # NOTE(review): dockerfile paths are relative to the repo root —
        # this suite assumes it is run from there
        # working dockerfile
        dockerfile_path = 'tests/dockerfiles/debian_buster_apt'
        image_obj = docker_api.build_image(dockerfile_path, self.client)
        self.assertTrue(image_obj)
        # successful remove
        self.assertTrue(docker_api.remove_image(image_obj, self.client))
        # remove an image that is not there
        self.assertFalse(docker_api.remove_image(image_obj, self.client))
        # no dockerfile
        image_obj = docker_api.build_image(
            'dockerfiles/not_there', self.client)
        self.assertFalse(image_obj)
        # failed build
        image_obj = docker_api.build_image(
            'tests/dockerfiles/fail_build', self.client)
        self.assertFalse(image_obj)

    def testExtractImage(self):
        """A freshly built image can be extracted; clean up afterwards"""
        # successful save
        dockerfile_path = 'tests/dockerfiles/debian_buster_apt'
        image_obj = docker_api.build_image(dockerfile_path, self.client)
        self.assertTrue(docker_api.extract_image(image_obj))
        docker_api.remove_image(image_obj, self.client)
class TestUtilGeneral(unittest.TestCase):
    """Tests for the image string helpers in tern.utils.general"""

    def testImageString(self):
        """check_image_string accepts 'name:tag' and
        'name@digest_type:digest' forms and rejects bare or malformed
        strings"""
        correct_strings = [
            'image@digest_type:digest',
            'image:tag',
            'debian:buster',
            'golang:1.12-alpine',
            ('p12/test@sha256:737aaa0caf3b8f64baa41ebf78c6cd0c43f34fadccc1275'
             'a32b8ab5d5b75c344')
        ]

        incorrect_strings = [
            'debian',
            'image',
            'debian@sha',
            'test/v1.56'
        ]

        for image_str in correct_strings:
            self.assertTrue(general.check_image_string(image_str))

        for image_str in incorrect_strings:
            self.assertFalse(general.check_image_string(image_str))

    def testParseImageString(self):
        """parse_image_string splits an image reference into name, tag,
        digest_type and digest, leaving absent parts as empty strings"""
        hello = 'hello-world'
        debian = 'debian:9.8-slim'
        distroless = 'gcr.io/distroless/static'
        resizer = 'gcr.io/google-containers/addon-resizer:2.3'
        etcd = ('bitnami/etcd@sha256:35862e29b27efd97cdf4a1fc79abc1341feac556'
                '32e4256b02e6cfee9a4b6455')
        # registry with a port plus a digest in one reference
        nexus = ('nexus3.onap.org:10001/onap/so/so-oof-adapter@sha256:d7e1f739ba732c'
                 '853a638f9c90becd5e0f8d313c8d506567b0b83ac38a1d53cb')
        self.assertEqual(general.parse_image_string(hello),
                         {'name': 'hello-world',
                          'tag': '',
                          'digest_type': '',
                          'digest': ''})
        self.assertEqual(general.parse_image_string(debian),
                         {'name': 'debian',
                          'tag': '9.8-slim',
                          'digest_type': '',
                          'digest': ''})
        self.assertEqual(general.parse_image_string(distroless),
                         {'name': 'gcr.io/distroless/static',
                          'tag': '',
                          'digest_type': '',
                          'digest': ''})
        self.assertEqual(general.parse_image_string(resizer),
                         {'name': 'gcr.io/google-containers/addon-resizer',
                          'tag': '2.3',
                          'digest_type': '',
                          'digest': ''})
        self.assertEqual(general.parse_image_string(etcd),
                         {'name': 'bitnami/etcd',
                          'tag': '',
                          'digest_type': 'sha256',
                          'digest': ('35862e29b27efd97cdf4a1fc79abc1341fe'
                                     'ac55632e4256b02e6cfee9a4b6455')})
        self.assertEqual(general.parse_image_string(nexus),
                         {'name': 'nexus3.onap.org:10001/onap/so/so-oof-adapter',
                          'tag': '',
                          'digest_type': 'sha256',
                          'digest': ('d7e1f739ba732c853a638f9c90becd5e0f8'
                                     'd313c8d506567b0b83ac38a1d53cb')})

# Customize the amount of memory on the VM:
VM_MEMORY = 4 * 1024
# Customize amount of cores for the VM:
VM_CPUS = 2

Vagrant.configure("2") do |config|
  # The most common configuration options are documented and commented below.
  # For a complete reference, please see the online documentation at
  # https://docs.vagrantup.com.

  # Every Vagrant development environment requires a box. You can search for
  # boxes at https://vagrantcloud.com/search.
  config.vm.box = "generic/ubuntu2004"
  # Provision the VM with the sibling bootstrap.sh script (installs
  # Python, skopeo, Docker and tern -- see vagrant/bootstrap.sh).
  config.vm.provision :shell, path: "bootstrap.sh"

  # Mount the repository root (one level up) at /tern instead of the
  # default /vagrant mount of this directory.
  config.vm.synced_folder '.', '/vagrant', disabled: true
  config.vm.synced_folder '../', '/tern'
  # Disable automatic box update checking. If you disable this, then
  # boxes will only be checked for updates when the user runs
  # `vagrant box outdated`. This is not recommended.
  # config.vm.box_check_update = false

  # Create a forwarded port mapping which allows access to a specific port
  # within the machine from a port on the host machine. In the example below,
  # accessing "localhost:8080" will access port 80 on the guest machine.
  # NOTE: This will enable public access to the opened port
  # config.vm.network "forwarded_port", guest: 80, host: 8080

  # Create a forwarded port mapping which allows access to a specific port
  # within the machine from a port on the host machine and only allow access
  # via 127.0.0.1 to disable public access
  # config.vm.network "forwarded_port", guest: 80, host: 8080, host_ip: "127.0.0.1"

  # Create a private network, which allows host-only access to the machine
  # using a specific IP.
  # config.vm.network "private_network", ip: "192.168.33.10"

  # Create a public network, which generally matched to bridged network.
  # Bridged networks make the machine appear as another physical device on
  # your network.
  # config.vm.network "public_network"

  # Share an additional folder to the guest VM. The first argument is
  # the path on the host to the actual folder. The second argument is
  # the path on the guest to mount the folder. And the optional third
  # argument is a set of non-required options.
  # config.vm.synced_folder "../data", "/vagrant_data"

  # Provider-specific configuration so you can fine-tune various
  # backing providers for Vagrant. These expose provider-specific options.
  # Example for VirtualBox:
  #
  config.vm.provider "virtualbox" do |vb|
    # # Display the VirtualBox GUI when booting the machine
    # vb.gui = true
    #
    # Resources come from the VM_MEMORY / VM_CPUS constants above so both
    # providers stay in sync.
    vb.memory = VM_MEMORY
    vb.cpus = VM_CPUS
  end

  config.vm.provider :libvirt do |lv|
    lv.memory = VM_MEMORY
    lv.cpus = VM_CPUS
    lv.storage_pool_name = "default"
  end

  # View the documentation for the provider you are using for more
  # information on available options.

  # Enable provisioning with a shell script. Additional provisioners such as
  # Puppet, Chef, Ansible, Salt, and Docker are also available. Please see the
  # documentation for more information about their specific syntax and use.
  # config.vm.provision "shell", inline: <<-SHELL
  #   apt-get update
  #   apt-get install -y apache2
  # SHELL
end
--------------------------------------------------------------------------------
/vagrant/bootstrap.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
#
# Copyright (c) 2018-2021 VMware, Inc. All Rights Reserved.
# SPDX-License-Identifier: BSD-2-Clause
#
# Provisioning script for the Tern development VM: installs Python 3,
# skopeo, Docker and tern itself on Ubuntu 20.04.

# Update the Ubuntu repositories
sudo apt-get update

# Upgrade all currently installed packages
sudo apt-get -y upgrade

# Python3 versions and system dependencies
sudo apt-get install -y python3 python3-pip python3-venv attr jq

# Install skopeo for Ubuntu 20.04 from the openSUSE kubic repository
# NOTE(review): apt-key is deprecated on newer Ubuntu releases; this repo
# setup likely needs migrating to /etc/apt/keyrings + signed-by if the
# base box moves past 20.04 -- confirm against the target Ubuntu version.
echo "deb https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/stable/xUbuntu_20.04/ /" | sudo tee /etc/apt/sources.list.d/devel:kubic:libcontainers:stable.list
curl -L https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/stable/xUbuntu_20.04/Release.key | sudo apt-key add -
# Refresh the package index again so the newly added repository is visible
sudo apt-get update
sudo apt-get -y upgrade
sudo apt-get -y install skopeo

# Install Docker
sudo apt-get install -y docker.io

# Docker adjustments

# Optional: Make it easier to run Docker commands by adding
# the vagrant user to the Docker group. If you want to run commands
# against Docker without doing this, you will need to use sudo.

sudo usermod -a -G docker vagrant

# Install tern
pip3 install tern