├── .github
├── ISSUE_TEMPLATE
│ ├── bug_report.md
│ └── feature_request.md
└── workflows
│ └── flow.yml
├── .gitignore
├── CONTRIBUTING.md
├── Dockerfile
├── LICENSE
├── README.md
├── buildConfig.json
├── flow
├── __init__.py
├── aggregator.py
├── artifactstorage
│ ├── __init__.py
│ ├── artifact_storage_abc.py
│ └── artifactory
│ │ ├── __init__.py
│ │ └── artifactory.py
├── buildconfig.py
├── cloud
│ ├── __init__.py
│ ├── cloud_abc.py
│ ├── cloudfoundry
│ │ ├── __init__.py
│ │ └── cloudfoundry.py
│ └── gcappengine
│ │ ├── __init__.py
│ │ └── gcappengine.py
├── coderepo
│ ├── __init__.py
│ ├── code_repo_abc.py
│ └── github
│ │ ├── __init__.py
│ │ └── github.py
├── communications
│ ├── __init__.py
│ ├── communications_abc.py
│ └── slack
│ │ ├── __init__.py
│ │ └── slack.py
├── logger.py
├── metrics
│ ├── __init__.py
│ ├── graphite
│ │ ├── __init__.py
│ │ └── graphite.py
│ └── metrics_abc.py
├── pluginloader.py
├── plugins
│ ├── __init__.py
│ └── foo
│ │ ├── __init__.py
│ │ └── foo.py
├── projecttracking
│ ├── __init__.py
│ ├── jira
│ │ ├── __init__.py
│ │ └── jira.py
│ ├── project_tracking_abc.py
│ └── tracker
│ │ ├── __init__.py
│ │ └── tracker.py
├── settings.ini
├── staticqualityanalysis
│ ├── __init__.py
│ ├── sonar
│ │ ├── __init__.py
│ │ └── sonarmodule.py
│ └── static_quality_analysis_abc.py
├── utils
│ ├── __init__.py
│ └── commons.py
└── zipit
│ ├── __init__.py
│ └── zipit.py
├── images
├── PyCharm_Preferences.png
├── PyCharm_Select_PyTest.png
├── iTerm-Flow_Output.png
├── iTerm_reg-ex.png
├── iTerm_triggers.png
├── pycharm_edit_config.jpg
├── pycharm_env_variables.png
└── tracker.png
├── requirements.txt
├── scripts
├── buildtar.sh
├── buildwheel.sh
├── test-results
│ └── results.txt
├── unittest.sh
├── unittest_continous.sh
└── unittest_coverage.sh
├── setup.py
├── sonar-project.properties
└── tests
├── __init__.py
├── artifactstorage
├── __init__.py
└── artifactory
│ ├── __init__.py
│ └── test_artifactory.py
├── cloud
├── __init__.py
├── cloudfoundry
│ ├── __init__.py
│ └── test_cloudfoundry.py
└── gcappengine
│ ├── __init__.py
│ └── test_gcappengine.py
├── coderepo
├── __init__.py
└── github
│ ├── __init__.py
│ ├── git_command_raw_history_output.txt
│ ├── git_command_raw_history_output_multiline.txt
│ ├── git_tag_last_was_release.txt
│ ├── git_tag_last_was_snapshot.txt
│ ├── git_tag_mock_output.txt
│ ├── git_tag_mock_output_calver.txt
│ ├── git_tag_mock_output_random.txt
│ ├── git_tag_mock_output_small.txt
│ ├── git_tag_one_release.txt
│ ├── git_tag_three_release.txt
│ ├── git_tag_unordered_manual_versions.txt
│ ├── github_commit_history_output.txt
│ ├── github_commit_history_output_multiline.txt
│ ├── test_github.py
│ ├── tracker_stories.json
│ └── tracker_stories_github_format.txt
├── communications
├── __init__.py
└── slack
│ ├── __init__.py
│ ├── test_slack.py
│ └── tracker_stories.json
├── plugins
└── __init__.py
├── projecttracking
├── __init__.py
├── jira
│ ├── __init__.py
│ ├── jira_projects.json
│ ├── jira_stories_bug.json
│ ├── jira_stories_major.json
│ ├── jira_stories_minor.json
│ └── test_jira.py
└── tracker
│ ├── __init__.py
│ ├── test_tracker.py
│ ├── tracker_stories_bug.json
│ ├── tracker_stories_major.json
│ └── tracker_stories_minor.json
├── staticqualityanalysis
├── __init__.py
└── sonar
│ ├── __init__.py
│ └── test_sonar.py
├── test_aggregator.py
└── utils
├── __init__.py
└── test_commons.py
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **To Reproduce**
14 | Steps to reproduce the behavior:
15 | 1. Go to '...'
16 | 2. Click on '....'
17 | 3. Scroll down to '....'
18 | 4. See error
19 |
20 | **Expected behavior**
21 | A clear and concise description of what you expected to happen.
22 |
23 | **Screenshots**
24 | If applicable, add screenshots to help explain your problem.
25 |
26 | **Desktop (please complete the following information):**
27 | - OS:
28 | - Flow Version:
29 |
30 | **Additional context**
31 | Add any other context about the problem here.
32 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 |
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 |
--------------------------------------------------------------------------------
/.github/workflows/flow.yml:
--------------------------------------------------------------------------------
1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions
2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
3 |
4 | name: Python package
5 |
6 | on:
7 | push:
8 | branches: [ master ]
9 | pull_request:
10 | branches: [ master ]
11 |
12 | jobs:
13 | build:
14 |
15 | runs-on: ubuntu-latest
16 | strategy:
17 | matrix:
18 | python-version: ['3.5', '3.6', '3.7', '3.8']
19 |
20 | steps:
21 | - uses: actions/checkout@v2
22 | - name: Set up Python ${{ matrix.python-version }}
23 | uses: actions/setup-python@v2
24 | with:
25 | python-version: ${{ matrix.python-version }}
26 | - name: Install dependencies
27 | run: |
28 | pip install coveralls
29 | pip install bandit
30 | pip install -e .
31 | - name: Unit Tests
32 | run: |
33 | py.test -s -v ./tests --capture=sys
34 | - name: Coverage Reports
35 | run: |
36 | coverage run --source flow -m py.test
37 | - name: Bandit Security Tests
38 | run: |
39 | bandit -r -ll -ii -x tests .
40 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Stuff
2 | .DS_Store
3 | .idea
4 | build/
5 | *.swp
6 | .vscode
7 |
8 | # Byte-compiled / optimized / DLL files
9 | __pycache__/
10 | *.py[cod]
11 | *$py.class
12 |
13 | # C extensions
14 | *.so
15 |
16 | # Distribution / packaging
17 | .Python
18 | env/
19 | build/
20 | develop-eggs/
21 | dist/
22 | downloads/
23 | eggs/
24 | .eggs/
25 | lib/
26 | lib64/
27 | parts/
28 | sdist/
29 | var/
30 | *.egg-info/
31 | .installed.cfg
32 | *.egg
33 |
34 | # PyInstaller
35 | # Usually these files are written by a python script from a template
36 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
37 | *.manifest
38 | *.spec
39 |
40 | # Installer logs
41 | pip-log.txt
42 | pip-delete-this-directory.txt
43 |
44 | # Unit test / coverage reports
45 | htmlcov/
46 | .tox/
47 | .coverage
48 | .coverage.*
49 | .cache
50 | nosetests.xml
51 | coverage.xml
52 | *,cover
53 | .hypothesis/
54 | /test-results/
55 |
56 | # Translations
57 | *.mo
58 | *.pot
59 |
60 | # Django stuff:
61 | *.log
62 | local_settings.py
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # IPython Notebook
78 | .ipynb_checkpoints
79 |
80 | # pyenv
81 | .python-version
82 |
83 | # celery beat schedule file
84 | celerybeat-schedule
85 |
86 | # dotenv
87 | .env
88 |
89 | # virtualenv
90 | venv/
91 | ENV/
92 |
93 | # Spyder project settings
94 | .spyderproject
95 |
96 | # Rope project settings
97 | .ropeproject
98 |
99 | cf-linux-amd64.tgz
100 | cr/
101 | fordeployment/
102 | .flow.log.txt
103 | .deployment.log.txt
104 | envp3/*
105 |
106 | #Quality Hub tests - temp file
107 | tests/plugins/sqh/qh_testresults/qh_test_results.tar.gz
108 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | ## Things to know before contributing code
4 |
5 | * Fork the repository and make changes
6 | * After your changes are complete, submit a pull request to the develop branch.
7 | * Pull requests with an excessive number of commits will typically be squashed before merging.
8 | * Include the story number in at least one of your commit messages like so `git commit -m "blah [#123456]"`
9 | * **After** your changes have been propagated to develop and you have tested, you are ready to promote to master. This is done via another pull request by you.
10 | * Many changes will require unit tests. If you submit significant changes with unit tests you may be asked to add them before the changes are accepted. This is a perfect opportunity to practice [TDD](https://en.wikipedia.org/wiki/Test-driven_development) if you are not already!
11 |
12 |
13 | # Adding New Modules
14 |
15 | ### Reusable Modules
16 |
17 | Most modules should be written in a manner that allows for reuse by the general public and other organizations. These modules should be added to the flow folder.
18 | ### Proprietary Modules
19 |
20 | At times, teams may wish to add functionality to Flow that includes proprietary logic and not meant for use outside of their company. This can easily be achieved by following the Flow plugin architecture. These changes should *not* be contributed back to this project.
21 |
22 | First, ensure that your modules are created under the "plugins" directory. An example, [foo](flow/plugins/foo/foo.py) is included in the project.
23 |
24 | Second, when you create your module, ensure that you have also created a \__init__.py file. This file should implement:
25 | * `parser` variable defining the parser name you plan to use when your module is called via flow
26 | * method `register_parser` is required. This is where you add the arguments for your parser.
27 | * method `run_action` is required. Define the code that should be run when your parser option is activated.
28 |
29 | ```
30 | from plugins.foo.foo import Foo
31 |
32 | parser = 'foo'
33 |
34 | def register_parser(new_parser):
35 | new_parser.add_argument('action', help='Action to take, possible values: bar, baz')
36 | new_parser.add_argument('-v', '--version', help='Version to use')
37 |
38 | def run_action(args):
39 | foo = Foo()
40 |
41 | if args.action == 'bar':
42 | foo.bar()
43 | elif args.action == 'baz':
44 | foo.baz()
45 | ```
46 |
47 | #### Hooking into other module events
48 |
49 | There may be times when you need to trigger code in your module based on other system events, e.g. attaching the flow.log.txt to a change request after deployment.
50 |
51 | The preferred approach is to utilize [PyDispatch](http://pydispatcher.sourceforge.net) to emit events that require a dependency.
52 |
53 |
54 | # Coding standards
55 |
56 | The biggest rule is that you attempt to follow existing patterns and conventions in the code. The authors of Flow attempted to follow [common patterns and best practices](https://www.python.org/dev/peps/pep-0008). With that said, there are a few opinions that had to be decided on in order to be consistent.
57 |
58 | #### Single vs Double Quote
59 |
60 | In general, single quotes are used throughout this code unless the string is used for interpolation. This is an opinion that we recommend following to maintain consistency.
61 |
62 | #### Abstract Base Classes
63 |
64 | [Abstract Base Classes](https://docs.python.org/2/library/abc.html) are used to define contracts and maintain standards across like classes - similar to interfaces in many OOP languages. They provide Flow users the ability to add components to fit their needs. For instance, swapping out artifact storage library Artifactory for Nexus.
65 |
66 | #### String Concatenation
67 |
68 | Our preferred method for string concatenation is to use the format method of the [string](https://docs.python.org/2/library/string.html) class.
69 |
70 | ```
71 | "Failed publishing with error: {error}".format(error = str(ex))
72 | ```
73 |
74 | #### Logging
75 |
76 | A common logging method has been established and should be utilized in new code. This logs messages in a standard fashion:
77 | `[ERROR] BuildConfig checkFileExists Cannot find buildConfig.json`
78 |
79 | The equivalent code to log the message above would be:
80 | ```
81 | commons.print_msg(BuildConfig.clazz, method, 'Cannot find buildConfig.json', 'ERROR')
82 | ```
83 |
84 | Logging should be done when there is information of significance that could help in troubleshooting. Use your best judgement. The following will log a debug statement:
85 | ```
86 | commons.print_msg(BuildConfig.clazz, method, 'Cannot find buildConfig.json') # DEBUG is default
87 |
88 | commons.print_msg(BuildConfig.clazz, method, 'Cannot find buildConfig.json', 'DEBUG') # or you can explicitly define DEBUG
89 | ```
90 |
91 | Warnings should be used when there is a likelihood that something seems abnormal but you do not want to fail.
92 | ```
93 | commons.print_msg(BuildConfig.clazz, method, 'No token was defined. Attempting to use anonymous authentication.', 'WARN')
94 | ```
95 |
96 | #### Exiting on Error
97 |
98 | If an error occurs that should halt flow CLI from continuing, in addition to logging an ERROR you will need to call `sys.exit(1)`.
99 |
100 | #### Coupling
101 |
102 | Please ensure that we don't tightly couple our classes together. Flow is meant to be modular, allowing teams to pick/choose/create modules that work together.
103 |
104 | ## Environment Setup (Docker)
105 |
106 | Instead of installing dependencies natively on your machine use docker to ease your configuration woes!
107 |
108 | This will allow you to make code changes locally on your machine, but run unit tests and smoke test on the officially maintained flow docker container(s).
109 |
110 | 1. Install [docker](https://docs.docker.com/install/)
111 | 2. Clone this repo
112 | 3. Create a local docker container and mount it to your freshly cloned flow repo.
113 |
114 | ```shell
115 | docker run -d --name flow-dev -ti -v [path-to-flow-repo]:/flow [container image with flow installed, e.g. `flow:latest`]
116 | ```
117 |
118 | 4. Jump into that docker container
119 |
120 | ```shell
121 | docker exec -it flow-dev bash
122 | ```
123 |
124 | 5. Get the environment ready for local development.
125 |
126 | * We need to unset some proxies that allow the build servers to talk to resources outside of a work network; since this container is running on your dev machine we don't need those proxies anymore.
127 |
128 | ```shell
129 | unset http_proxy https_proxy HTTPS_PROXY HTTP_PROXY
130 | ```
131 |
132 | * We may also need to create the virtualenv directory (a fix is out for this but if you get an error in the next step come back and run the below command).
133 |
134 | ```shell
135 | mkdir /.virtualenvs
136 | ```
137 |
138 | 6. Jump into the source directory and start unit tests!
139 |
140 | ```shell
141 | cd /flow
142 | bash ./scripts/unittest.sh
143 | ```
144 |
145 | Once set up, any changes that you make on your local machine will be reflected within the docker container. Changes are also immediately reflected in the `flow` command that is run within the container. You can also install flow locally on your machine using `pip3 install -e ./`. This of course requires the installation of [pip](https://pip.pypa.io/en/stable/installing/).
146 |
147 | # Project Setup
148 |
149 | ## Environment Setup for Mac (Tested with 10.11.6)
150 |
151 | * Using homebrew install python 3.
152 | * `/usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"`
153 | * `brew install python3`
154 |
155 | * Install PIP 3
156 | * `curl -Lk https://bootstrap.pypa.io/get-pip.py -o get-pip.py`
157 | * `python3 get-pip.py`
158 |
159 | * Install Virtual Environment Wrapper
160 | * `sudo -H pip install virtualenvwrapper --ignore-installed six `
161 | * `source /usr/local/bin/virtualenvwrapper.sh`
162 | * If you want this to run automatically on future terminals then put the above command at end of `~/.bash_profile`
163 | * This errors out in mac OS with both Python 2 & 3. `export VIRTUALENVWRAPPER_PYTHON=/usr/local/bin/python3` and then run `source /usr/local/bin/virtualenvwrapper.sh`
164 | * `mkvirtualenv -p $(which python3) flow`
165 | * `workon flow`
166 |
167 | * Clone the flow repo and cd into that directory.
168 |
169 | * Build flow from local code:
170 | `pip install -e ./`
171 |
172 | ## Environment Setup for Mac (10.11.6, imaged with pivotal/workstation-setup)
173 |
174 | * Install Homebrew:
175 | * `/usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"`
176 |
177 | * Install Python/Python3:
178 | * `brew install python python3`
179 |
180 | * Install virtualenv and virtualenvwrapper in Python3:
181 | * `pip3 install virtualenv virtualenvwrapper`
182 |
183 | * Edit `~/.bash_profile` and add the following to interact with the virtual environment:
184 |
185 | ```
186 | VIRTUALENVWRAPPER_PYTHON=/usr/local/bin/python3
187 | WORKON_HOME=~/.virtualenvs
188 | source /usr/local/bin/virtualenvwrapper.sh
189 | ```
190 |
191 | * Source the newly updated bash_profile:
192 | * `source ~/.bash_profile`
193 |
194 | * Create the flow virtual environment:
195 | * `mkvirtualenv -p $(which python3) flow`
196 |
197 | * Tell your system to use the flow virtual environment:
198 | * `workon flow`
199 |
200 | * Clone the `ci-cd/flow` repo and cd into that directory:
201 | * `git clone ...`
202 |
203 | * Build flow from local code:
204 | `pip install -e ./`
205 |
206 | ## Environment Setup for Windows 10 Using Windows Subsystem for Linux
207 |
208 | * Install Python
209 | * `https://www.python.org/downloads/`
210 |
211 | * Install python3-distutils and python3-apt
212 | * `sudo apt-get install python3-distutils`
213 | * `sudo apt-get install python3-apt`
214 |
215 | * Download & Install pip
216 | * `curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py`
217 | * `python3 get-pip.py`
218 |
219 | * Set up virtualenvwrapper
220 | * `sudo -H pip install virtualenvwrapper --ignore-installed six`
221 | * `source /usr/local/bin/virtualenvwrapper.sh`
222 | ### Troubleshooting virtualenvwrapper
223 |
224 | * Error
225 | ```
226 | virtualenvwrapper_run_hook:12: permission denied: virtualenvwrapper.sh: There was a problem running the initialization hooks.
227 |
228 | If Python could not import the module virtualenvwrapper.hook_loader, check that virtualenvwrapper has been installed for VIRTUALENVWRAPPER_PYTHON= and that PATH is set properly.
229 | ```
230 | * Solution: Add the following lines to the end of your ~/.bash_profile or ~/.zshrc and restart your terminal
231 | ```
232 | VIRTUALENVWRAPPER_PYTHON=/usr/local/bin/python3
233 | WORKON_HOME=~/.virtualenvs
234 | source /usr/local/bin/virtualenvwrapper.sh
235 | ```
236 | * Error
237 | ```
238 | virtualenvwrapper_run_hook:12: no such file or directory: /usr/local/bin/python3
239 | virtualenvwrapper.sh: There was a problem running the initialization hooks.
240 |
241 | If Python could not import the module virtualenvwrapper.hook_loader,
242 | check that virtualenvwrapper has been installed for
243 | VIRTUALENVWRAPPER_PYTHON=/usr/local/bin/python3 and that PATH is
244 | set properly.
245 | ```
246 | * Solution: Create a symlink to your python installation
247 | * `ln -s $(which python3) /usr/local/bin/python3`
248 |
249 | * Create the flow virtual environment
250 | * `mkvirtualenv -p $(which python3) flow`
251 | * Tell your system to use the flow virtual environment:
252 | * `workon flow`
253 | * Clone the flow repo and cd into that directory:
254 | * `git clone ...`
255 | * Build flow from local code:
256 | * `pip install -e ./`
257 | * Verify that Flow built correctly
258 | * `flow --help`
259 |
260 |
261 | # Running Tests
262 |
263 | ## Running Unit Tests on a Mac/Linux (Not Using PyCharm Or Pytest)
264 |
265 | * In an effort to make things simple, take a look at the `scripts/unittest.sh` script. You should just be able to run this and it will setup everything. This can only be run after all of the Environment Setup is complete.
266 |
267 | ## Continuous Unit Testing on a Mac/Linux (Not Using PyCharm or Pytest)
268 |
269 | * Run the `scripts/unittest_continous.sh` script, which just runs `scripts/unittest.sh` in a while loop forever. Press ctrl-c to quit.
270 | * To continuously run tests while making code changes use `pytest-watch`, or if you don't feel like typing all of that then `ptw` will suffice.
271 |
272 | ## Using Pytest
273 |
274 | * Install Pytest
275 | * `pip install -U pytest`
276 | * Verify installation pytest
277 | * `pytest -v`
278 | * Run all unit tests
279 | * `pytest`
280 |
281 | ## Using PyCharm with Pytest
282 |
283 | * Open Project in PyCharm
284 |
285 | * Click On PyCharm > Preferences...
286 | 
287 |
288 | * Go To Python Integrated Tools and Select *pytest* as Default Test Runner
289 | 
290 |
291 | * Select Pytest And click OK
292 |
293 | ## Testing Flow Changes Locally
294 |
295 | * Clone flow code
296 | ```
297 | cd ~/Documents/workspace/
298 | git clone ...
299 | cd flow
300 | ```
301 | * Make your enhancement or bug fixes to flow locally
302 | * Build flow from local code:
303 | `pip install -e ./`
304 | * Clone a project that you want to test with your flow changes
305 | ```
306 | cd ~/Documents/workspace/
307 | git clone ...
308 | cd ...
309 | ```
310 | * Run a flow command, testing against the test project cloned above.
311 | ```
312 | #change to work on your local python virtualenv
313 | workon flow
314 | ```
315 | ```
316 | #these tokens may be required for your github task to run. Set the environment variable, similar to concourse parameters/secrets.
317 | export GITHUB_TOKEN= <>
318 | export SLACK_WEBHOOK_URL= <>
319 | export TRACKER_TOKEN= <>
320 | ```
321 | ```
322 | #run the flow task, pointing to the version of flow locally with the task of choice. Below runs the github version task but this could be any flow task
323 | flow github version -v v0.1.0 development
324 | ```
325 |
326 | ## Configuring iTerm on Mac to Color Code Warnings & Errors
327 |
328 | 
329 | In iTerm, go to `profiles` => `Open Profiles`.
330 | Select the profile to modify and click `Edit Profiles...`
331 | Choose the `Advanced` tab and then under `Triggers` click on `Edit`
332 | 
333 | Add the following reg exs with colors of your choice:
334 | ```
335 | ^.*\[ERROR\].*$
336 | ^.*\[WARN\].*$
337 | ```
338 | 
339 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:alpine
2 |
3 | RUN apk add --no-cache --virtual .build-deps \
4 | gcc libxml2-dev libxslt-dev musl-dev linux-headers \
5 | && apk add --no-cache \
6 | wget \
7 | bash \
8 | curl \
9 | git \
10 | jq \
11 | libxml2 \
12 | libxslt \
13 | musl \
14 | zlib \
15 | openssh-client \
16 | zip \
17 | openjdk8-jre
18 |
19 | COPY . /source
20 |
21 | RUN wget https://bootstrap.pypa.io/get-pip.py --no-check-certificate \
22 | && python3 get-pip.py \
23 | && pip3 install -e /source
24 |
25 | RUN apk del .build-deps
26 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright 2016 Home Depot
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
--------------------------------------------------------------------------------
/buildConfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "projectInfo": {
3 | "name": "flow",
4 | "language": "python",
5 | "versionStrategy": "tracker"
6 | },
7 |
8 | "artifact": {
9 | "artifactType": "tar"
10 | },
11 |
12 | "github": {
13 | "org": "homedepot",
14 | "repo": "flow",
15 | "URL": "https://api.github.com/repos"
16 | },
17 |
18 | "tracker": {
19 | "projectId": 2135573
20 | },
21 |
22 | "slack": {
23 | "botName": "Flow",
24 | "emoji": ":robot_face:",
25 | "channel": "#spigot-ci"
26 | },
27 |
28 | "environments": {
29 | "development": {
30 | "artifactCategory": "snapshot",
31 | "associatedBranchName": "develop"
32 | },
33 | "production": {
34 | "artifactCategory": "release",
35 | "associatedBranchName": "master"
36 | }
37 | }
38 | }
--------------------------------------------------------------------------------
/flow/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/flow/__init__.py
--------------------------------------------------------------------------------
/flow/artifactstorage/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/flow/artifactstorage/__init__.py
--------------------------------------------------------------------------------
/flow/artifactstorage/artifact_storage_abc.py:
--------------------------------------------------------------------------------
1 | from abc import ABCMeta, abstractmethod
2 |
3 |
class Artifact_Storage(metaclass=ABCMeta):
    """Abstract interface for artifact storage backends (e.g. Artifactory)."""

    @abstractmethod
    def publish(self, file, file_name):
        """Publish a single file object to storage under *file_name*."""
        pass

    @abstractmethod
    def publish_build_artifact(self):
        """Publish the build's primary artifact to storage."""
        pass

    @abstractmethod
    def get_artifact_home_url(self):
        """Return the URL of the artifact's home location in storage."""
        pass

    @abstractmethod
    def get_artifact_url(self):
        """Return the download URL for the artifact."""
        pass

    @abstractmethod
    def get_urls_of_artifacts(self):
        """Return the download URLs for all of the build's artifacts."""
        pass

    @abstractmethod
    def download_and_extract_artifacts_locally(self, download_dir, extract):
        """Download artifacts into *download_dir*; extract them when *extract* is true."""
        pass
28 |
--------------------------------------------------------------------------------
/flow/artifactstorage/artifactory/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/flow/artifactstorage/artifactory/__init__.py
--------------------------------------------------------------------------------
/flow/buildconfig.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # buildconfig.py
3 |
4 | import configparser
5 | import json
6 | import os.path
7 |
8 | import flow.utils.commons as commons
9 |
10 |
class BuildConfig:
    """Loads and exposes the build configuration from buildConfig.json and settings.ini.

    All configuration is stored on class-level attributes so any module can
    read it via ``BuildConfig.<attr>`` after a single instantiation.  Any
    configuration error terminates the process with exit status 1.
    """

    clazz = 'BuildConfig'
    json_config = None            # parsed contents of buildConfig.json
    build_env = None              # environment name passed on the command line
    build_env_info = None         # 'environments' sub-dict for build_env
    version_number = None
    project_name = None
    artifact_category = None
    calver_bump_type = None
    calver_year_format = None
    settings = None               # ConfigParser over flow/settings.ini
    language = None
    version_strategy = None
    artifact_extension = None
    artifact_extensions = None
    push_location = 'fordeployment'
    sonar_project_key = None
    project_tracker = None        # the single configured tracking tool, if any

    def __init__(self, args):
        """Initialize class-level configuration from ``args.env`` and the config files.

        Args:
            args: parsed CLI arguments; only ``args.env`` is read here.
        """
        method = '__init__'

        commons.print_msg(BuildConfig.clazz, method, 'begin')
        commons.print_msg(BuildConfig.clazz, method, "environment is set to: {}".format(args.env))

        BuildConfig.build_env = args.env

        BuildConfig.settings = configparser.ConfigParser()

        script_dir = os.path.dirname(__file__)  # absolute dir this module lives in
        rel_path = 'settings.ini'
        abs_file_path = os.path.join(script_dir, rel_path)
        commons.print_msg(BuildConfig.clazz, method, abs_file_path)
        BuildConfig.settings.read(abs_file_path)

        self._load_build_config()

        BuildConfig.build_env_info = BuildConfig.json_config['environments'][args.env]

        BuildConfig.language = BuildConfig.json_config['projectInfo']['language'].lower()

        if 'artifactoryConfig' in BuildConfig.json_config:
            commons.print_msg(self.clazz, method, 'Detected artifactoryConfig block. Retrieving artifactType.')
            # TODO get rid of artifact_extension in place of artifact_extensions if possible.
            BuildConfig.artifact_extension = BuildConfig.json_config['artifactoryConfig'].get("artifactType", None)
            BuildConfig.artifact_extensions = BuildConfig.json_config['artifactoryConfig'].get("artifactTypes", None)
        elif 'artifact' in BuildConfig.json_config:
            commons.print_msg(self.clazz, method, 'Detected artifactory block. Retrieving artifactType.')
            BuildConfig.artifact_extension = BuildConfig.json_config['artifact'].get("artifactType", None)
            BuildConfig.artifact_extensions = BuildConfig.json_config['artifact'].get("artifactTypes", None)

        # Collect every project tracking tool mentioned anywhere in the config;
        # exactly one may be configured.
        project_trackers = []
        if 'tracker' in BuildConfig.json_config:
            project_trackers.append('tracker')
        if 'projectTracking' in BuildConfig.json_config:
            for tool in ('tracker', 'jira'):
                if tool in BuildConfig.json_config['projectTracking'] and tool not in project_trackers:
                    project_trackers.append(tool)

        if len(project_trackers) > 1:
            trackers = ','.join(project_trackers)
            # fix: the original message ran "tools: {}Please" together with no
            # separator between the two sentences
            commons.print_msg(BuildConfig.clazz, method, "The build config json contains configuration for "
                                                         "multiple project tracking tools: {}. "
                                                         "Please remove all but one project tracker from the "
                                                         "configuration".format(trackers), 'ERROR')
            exit(1)
        elif len(project_trackers) == 1:
            BuildConfig.project_tracker = project_trackers[0]

        try:
            BuildConfig.version_strategy = BuildConfig.json_config['projectInfo']['versionStrategy']
        except KeyError:
            commons.print_msg(BuildConfig.clazz, method, "The build config json does not contain projectInfo => "
                                                         "versionStrategy. 'manual', 'calver_year', 'tracker' or 'jira' values can be "
                                                         "used.", 'ERROR')
            exit(1)

        # Any strategy other than 'manual'/'calver_year' must name the single
        # configured tracking tool.  (fix: the original indexed
        # project_trackers[0] unconditionally and raised IndexError when no
        # tracker was configured at all.)
        if BuildConfig.version_strategy not in ('manual', 'calver_year') and \
                (not project_trackers or BuildConfig.version_strategy != project_trackers[0]):
            commons.print_msg(BuildConfig.clazz, method, "The versionStrategy in build config json is not "
                                                         "manual or calver_year and does not match the "
                                                         "defined project tracking tool: {}.".format(
                                                             project_trackers[0] if project_trackers else 'none'),
                              'ERROR')
            exit(1)

        commons.print_msg(BuildConfig.clazz, method, 'end')

    def _load_build_config(self):
        """Read and parse buildConfig.json once, caching the result on the class.

        Returns:
            dict: the parsed build configuration.
        """
        method = 'loadBuildConfig'
        commons.print_msg(BuildConfig.clazz, method, 'begin')
        commons.print_msg(BuildConfig.clazz, method, "The run time environment {}".format(BuildConfig.build_env))

        if BuildConfig.build_env is None:
            commons.print_msg(BuildConfig.clazz, method, 'Environment was not passed in.', 'ERROR')
            exit(1)

        if BuildConfig.json_config is None:
            self.__check_file_exists(commons.build_config_file)
            # context manager so the config file handle is closed promptly
            # (the original open() was never closed)
            with open(commons.build_config_file) as config_file:
                build_config = json.loads(config_file.read())

            if build_config == '':
                # fix: the original reported 'Environment was not passed in.'
                # here, a copy/paste of the unrelated message above
                commons.print_msg(BuildConfig.clazz, method, 'The buildConfig.json file was empty.', 'ERROR')
                exit(1)

            BuildConfig.json_config = build_config

        try:
            BuildConfig.project_name = BuildConfig.json_config['projectInfo']['name']
            BuildConfig.artifact_category = BuildConfig.json_config['environments'][BuildConfig.build_env][
                'artifactCategory'].lower()
            if ('versionStrategy' in BuildConfig.json_config['projectInfo'].keys() and
                    BuildConfig.json_config['projectInfo']['versionStrategy'] == 'calver_year'):
                bump_type = BuildConfig.json_config['environments'][BuildConfig.build_env]['calverBumpType'].lower()
                commons.print_msg(BuildConfig.clazz, method, "The calver bump type is {}".format(bump_type))
                BuildConfig.calver_bump_type = bump_type
                # check if year format is defined
                if 'calverYearFormat' in BuildConfig.json_config['environments'][BuildConfig.build_env].keys():
                    calver_year_format = BuildConfig.json_config['environments'][BuildConfig.build_env]['calverYearFormat'].lower()
                    if calver_year_format != 'short' and calver_year_format != 'long':
                        commons.print_msg(BuildConfig.clazz, method, "The calverYearFormat in build config json must be either 'short' "
                                                                     "or 'long'.", 'ERROR')
                        exit(1)
                    commons.print_msg(BuildConfig.clazz, method, "The calver year format is {}".format(calver_year_format))
                    BuildConfig.calver_year_format = calver_year_format
                else:
                    # default to long (4 digit) calver year format to match github module
                    commons.print_msg(BuildConfig.clazz, method, "The calver year format is long (4 digit years).")
                    BuildConfig.calver_year_format = "long"
        except KeyError as e:
            commons.print_msg(BuildConfig.clazz, method, "The buildConfig.json is missing a key. {}".format(e),
                              'ERROR')
            exit(1)

        commons.print_msg(BuildConfig.clazz, method, 'end')

        return BuildConfig.json_config

    def __check_file_exists(self, file):
        """Exit with status 1 (after listing the cwd) when *file* does not exist."""
        method = "checkFileExists"

        if not os.path.isfile(file):
            for f in os.listdir('.'):
                commons.print_msg(BuildConfig.clazz, method, 'Listing files found.')
                commons.print_msg(BuildConfig.clazz, method, f)
            commons.print_msg(BuildConfig.clazz, method, 'Cannot find buildConfig.json. Only the files above were '
                                                         'found in the current directory.', 'ERROR')

            exit(1)
160 |
--------------------------------------------------------------------------------
/flow/cloud/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/flow/cloud/__init__.py
--------------------------------------------------------------------------------
/flow/cloud/cloud_abc.py:
--------------------------------------------------------------------------------
1 | import os
2 | import subprocess
3 | from abc import ABCMeta, abstractmethod
4 |
5 | import requests
6 |
7 | from flow.utils import commons
8 |
9 |
class Cloud(metaclass=ABCMeta):
    """Common behavior for cloud deployment targets.

    Subclasses implement :meth:`deploy`; this base class provides helpers for
    fetching custom deploy scripts, locating deployable artifacts, and running
    deployment shell scripts.
    """

    clazz = 'Cloud'
    http_timeout = 30  # seconds allowed for each HTTP fetch of a deploy script

    def _write_deploy_script(self, script_text):
        """Persist fetched deploy-script text to ./custom_deploy.sh."""
        with os.fdopen(os.open('custom_deploy.sh', os.O_WRONLY | os.O_CREAT), 'w') as handle:
            handle.write(script_text)

    def download_custom_deployment_script(self, custom_deploy_script):
        """Fetch a deploy script from GitHub or the web, or verify a local path.

        Remote scripts are written to ./custom_deploy.sh.  Does nothing when no
        script is configured; exits with status 1 on any retrieval failure.
        """
        method = 'download_custom_deployment_script'
        commons.print_msg(Cloud.clazz, method, 'begin')

        if custom_deploy_script is None or len(custom_deploy_script.strip()) == 0:
            return

        if 'github' in custom_deploy_script:
            commons.print_msg(Cloud.clazz, method, "Looking for deploy script from GitHub {}"
                              .format(custom_deploy_script))

            try:
                if os.getenv("GITHUB_TOKEN"):
                    headers = {'Authorization': ("Bearer " + os.getenv("GITHUB_TOKEN"))}

                    resp = requests.get(custom_deploy_script, headers=headers, timeout=self.http_timeout)
                else:
                    # fix: the original call omitted the 'method' argument,
                    # shifting the warning message into print_msg's method slot
                    commons.print_msg(Cloud.clazz, method, 'No GITHUB_TOKEN detected in environment. Attempting to '
                                                           'access deploy script anonymously.', 'WARN')
                    resp = requests.get(custom_deploy_script, timeout=self.http_timeout)

            except Exception:  # fix: narrowed from a bare except
                commons.print_msg(Cloud.clazz, method, "Failed retrieving custom deploy script from GitHub {}".format(
                    custom_deploy_script), 'ERROR')
                exit(1)

            # noinspection PyUnboundLocalVariable
            if resp.status_code == 200:
                self._write_deploy_script(resp.text)
                commons.print_msg(Cloud.clazz, method, resp.text)
            else:
                commons.print_msg(Cloud.clazz, method, "Failed retrieving custom web deploy script from {script}. "
                                                       "\r\n Response: {response}".format(script=custom_deploy_script,
                                                                                          response=resp.text), 'ERROR')
                exit(1)

        elif 'http' in custom_deploy_script or 'www' in custom_deploy_script:
            commons.print_msg(Cloud.clazz, method, "Looking for deploy script from web at {}"
                              .format(custom_deploy_script))

            try:
                resp = requests.get(custom_deploy_script, timeout=self.http_timeout)
            except Exception as e:
                # fix: the original formatted resp.text here, but resp is None
                # when requests.get itself raised, so the handler crashed with
                # AttributeError and masked the real failure
                commons.print_msg(Cloud.clazz, method, "Failed retrieving custom web deploy script from {script}. "
                                                       "\r\n Error: {error}".format(script=custom_deploy_script,
                                                                                    error=e), 'ERROR')
                exit(1)

            if resp.status_code == 200:
                self._write_deploy_script(resp.text)
                commons.print_msg(Cloud.clazz, method, resp.text)
            else:
                commons.print_msg(Cloud.clazz, method, "Failed retrieving custom web deploy script from {script}. "
                                                       "\r\n Response: {response}".format(script=custom_deploy_script,
                                                                                          response=resp.text), 'ERROR')
                exit(1)

        else:
            # Local path.  (The original elif re-tested non-emptiness, which is
            # always true after the early return above.)
            commons.print_msg(Cloud.clazz, method, "Looking for deploy script locally {}".format(custom_deploy_script))

            if not os.path.isfile(custom_deploy_script.strip()):
                # fix: the original bundled the message and the 'ERROR' level
                # into one tuple argument, so the failure was logged as a tuple
                # at default level instead of at ERROR
                commons.print_msg(Cloud.clazz, method, "Failed retrieving custom deploy script locally from {}"
                                  .format(custom_deploy_script), 'ERROR')
                exit(1)

        commons.print_msg(Cloud.clazz, method, 'end')

    def find_deployable(self, file_ext, dir_to_look_in):
        """Return the single artifact of type *file_ext* found in *dir_to_look_in*.

        Exits with status 1 when zero or more than one matching file exists.
        """
        method = 'find_deployable'
        commons.print_msg(Cloud.clazz, method, 'begin')

        commons.print_msg(Cloud.clazz, method, "Looking for a {ext} in {dir}".format(ext=file_ext, dir=dir_to_look_in))

        deployable_files = commons.get_files_of_type_from_directory(file_ext.lower(), dir_to_look_in)

        if len(deployable_files) > 1:
            commons.print_msg(Cloud.clazz, method, "Found more than 1 artifact in {}".format(dir_to_look_in), 'ERROR')
            exit(1)
        elif len(deployable_files) == 0:
            commons.print_msg(Cloud.clazz, method, "Could not find file of type {ext} in {dir}".format(
                ext=file_ext, dir=dir_to_look_in), 'ERROR')
            exit(1)

        commons.print_msg(Cloud.clazz, method, 'end')

        return deployable_files[0]

    def run_deployment_script(self, custom_deploy_script):
        """Execute ./<custom_deploy_script>, streaming its output to the log.

        Returns True on a zero exit status, False otherwise.
        """
        method = 'run_deployment_script'
        commons.print_msg(Cloud.clazz, method, 'begin')

        cmd = "./" + custom_deploy_script

        # shell=False with an argv list avoids shell injection via the script path
        execute_custom_script = subprocess.Popen(cmd.split(), shell=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

        while execute_custom_script.poll() is None:
            line = execute_custom_script.stdout.readline().decode('utf-8').strip(' \r\n')
            commons.print_msg(Cloud.clazz, method, line)

        execute_custom_script_output, execute_custom_script_error = execute_custom_script.communicate(timeout=120)

        for line in execute_custom_script_output.splitlines():
            commons.print_msg(Cloud.clazz, method, line.decode("utf-8"))

        if execute_custom_script.returncode != 0:
            commons.print_msg(Cloud.clazz, method, "Failed calling {command}. Return code of {rtn}".format(
                command=cmd, rtn=execute_custom_script.returncode), 'ERROR')
            return False

        commons.print_msg(Cloud.clazz, method, 'end')

        return True

    @abstractmethod
    def deploy(self):
        """Deploy the application to the concrete cloud target."""
        pass
136 |
--------------------------------------------------------------------------------
/flow/cloud/cloudfoundry/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/flow/cloud/cloudfoundry/__init__.py
--------------------------------------------------------------------------------
/flow/cloud/gcappengine/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/flow/cloud/gcappengine/__init__.py
--------------------------------------------------------------------------------
/flow/cloud/gcappengine/gcappengine.py:
--------------------------------------------------------------------------------
1 | import os
2 | import platform
3 | import subprocess
4 | import tarfile
5 | import urllib.request
6 | import ssl
7 |
8 | from subprocess import TimeoutExpired
9 |
10 | from flow.buildconfig import BuildConfig
11 | from flow.cloud.cloud_abc import Cloud
12 |
13 | import flow.utils.commons as commons
14 |
15 |
class GCAppEngine(Cloud):
    """Deploys applications to Google Cloud App Engine via the gcloud CLI."""

    clazz = 'GCAppEngine'
    config = BuildConfig           # overridable build-configuration source
    path_to_google_sdk = None      # path prefix used to invoke the gcloud binary

    def __init__(self, config_override=None):
        """Optionally override the BuildConfig and determine the SDK path prefix."""
        method = '__init__'
        commons.print_msg(GCAppEngine.clazz, method, 'begin')

        if config_override is not None:
            self.config = config_override

        if os.environ.get('WORKSPACE'):  # for Jenkins
            GCAppEngine.path_to_google_sdk = os.environ.get('WORKSPACE') + '/'
        else:
            GCAppEngine.path_to_google_sdk = ""

        commons.print_msg(GCAppEngine.clazz, method, 'end')

    def _download_google_sdk(self):
        """Download and extract the Google Cloud SDK when gcloud is not on PATH."""
        method = '_download_google_sdk'
        commons.print_msg(GCAppEngine.clazz, method, 'begin')

        # NOTE(review): certificate verification is deliberately disabled for
        # the SDK download below — confirm whether the mirror's cert chain can
        # be validated instead.
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        # noinspection PyUnresolvedReferences
        ctx.verify_mode = ssl.CERT_NONE

        cmd = "where" if platform.system() == "Windows" else "which"
        rtn = subprocess.call([cmd, 'gcloud'])

        gcloud_location = self.config.settings.get('googlecloud', 'cloud_sdk_path') + self.config.settings.get('googlecloud', 'gcloud_version')

        if rtn == 0:
            commons.print_msg(GCAppEngine.clazz, method, 'gcloud already installed')
        else:
            commons.print_msg(GCAppEngine.clazz, method, "gcloud CLI was not installed on this image. "
                                                         "Downloading Google Cloud SDK from {}".format(
                                                          gcloud_location))

            with urllib.request.urlopen(gcloud_location, context=ctx) as u, open(self.config.settings.get('googlecloud', 'gcloud_version'), 'wb') as f:  # nosec
                f.write(u.read())

            GCAppEngine.path_to_google_sdk = 'google-cloud-sdk/bin/'
            # context manager guarantees the archive handle is closed even if
            # extraction raises (original relied on an explicit close())
            with tarfile.open('./' + self.config.settings.get('googlecloud', 'gcloud_version')) as tar:
                tar.extractall()  # nosec - archive comes from the configured SDK location

        commons.print_msg(GCAppEngine.clazz, method, 'end')

    def _verify_required_attributes(self):
        """Exit with status 1 unless GCAPPENGINE_USER_JSON is set in the environment."""
        # fix: log label previously misspelled as '_verfify_required_attributes'
        method = '_verify_required_attributes'

        if not os.getenv('GCAPPENGINE_USER_JSON'):
            commons.print_msg(GCAppEngine.clazz, method, 'Credentials not loaded. Please define ''environment variable '
                              '\'GCAPPENGINE_USER_JSON\'', 'ERROR')
            exit(1)

    def _write_service_account_json_to_file(self):
        """Write the GCAPPENGINE_USER_JSON credentials to ./gcloud.json."""
        method = '_write_service_account_json_to_file'
        commons.print_msg(GCAppEngine.clazz, method, 'begin')

        try:
            # fix: use a context manager — the original left the file handle
            # open, so the JSON might not be flushed before gcloud reads it
            with open('gcloud.json', 'w+') as file:
                file.write(os.getenv('GCAPPENGINE_USER_JSON'))
        except Exception as e:
            commons.print_msg(GCAppEngine.clazz, method, "Failed writing gcloud auth json to gcloud.json from "
                                                         "'GCAPPENGINE_USER_JSON'. Error: {}'".format(e), 'ERROR')
            exit(1)

        commons.print_msg(GCAppEngine.clazz, method, 'end')

    def _gcloud_login(self):
        """Activate the service account via gcloud; exit with status 1 on failure."""
        method = '_gcloud_login'
        commons.print_msg(GCAppEngine.clazz, method, 'begin')

        cmd = "{path}gcloud auth activate-service-account --key-file {keyfile} --quiet".format(
            path=GCAppEngine.path_to_google_sdk,
            keyfile='gcloud.json')

        gcloud_login = subprocess.Popen(cmd.split(), shell=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

        login_failed = False

        # stream gcloud output to the log while it runs
        while gcloud_login.poll() is None:
            line = gcloud_login.stdout.readline().decode('utf-8').strip(' \r\n')

            commons.print_msg(GCAppEngine.clazz, method, line)

        try:
            gcloud_login_output, errs = gcloud_login.communicate(timeout=120)

            for line in gcloud_login_output.splitlines():
                commons.print_msg(GCAppEngine.clazz, method, line.decode('utf-8'))

            if gcloud_login.returncode != 0:
                commons.print_msg(GCAppEngine.clazz, method, "Failed calling cloud auth. Return code of {}. Make "
                                                             "sure the user has proper permission to deploy.".format(
                                                              gcloud_login.returncode), 'ERROR')
                login_failed = True

        except TimeoutExpired:
            commons.print_msg(GCAppEngine.clazz, method, "Timed out calling GCLOUD AUTH.", 'ERROR')
            login_failed = True

        if login_failed:
            gcloud_login.kill()
            os.system('stty sane')  # restore terminal state after a killed child
            exit(1)

        commons.print_msg(GCAppEngine.clazz, method, 'end')

    def _determine_app_yml(self):
        """Locate the environment-specific app yaml (app-<env>.yml/.yaml).

        Checks the working directory first, then the push location; exits with
        status 1 when none is found.
        """
        method = '_determine_app_yml'
        commons.print_msg(GCAppEngine.clazz, method, 'begin')

        if os.path.isfile("app-{}.yml".format(self.config.build_env)):
            app_yaml = "app-{}.yml".format(self.config.build_env)
        elif os.path.isfile("{dir}/app-{env}.yml".format(dir=self.config.push_location, env=self.config.build_env)):
            app_yaml = "{dir}/app-{env}.yml".format(dir=self.config.push_location, env=self.config.build_env)
        elif os.path.isfile("app-{}.yaml".format(self.config.build_env)):
            app_yaml = "app-{}.yaml".format(self.config.build_env)
        elif os.path.isfile("{dir}/app-{env}.yaml".format(dir=self.config.push_location, env=self.config.build_env)):
            app_yaml = "{dir}/app-{env}.yaml".format(dir=self.config.push_location, env=self.config.build_env)
        else:
            commons.print_msg(GCAppEngine.clazz, method, "Failed to find app_yaml file app-{}.yml/yaml".format(
                self.config.build_env), 'ERROR')
            exit(1)

        # noinspection PyUnboundLocalVariable
        commons.print_msg(GCAppEngine.clazz, method, "Using app_yaml {}".format(app_yaml))

        commons.print_msg(GCAppEngine.clazz, method, 'end')

        return app_yaml

    def _gcloud_deploy(self, app_yaml, promote=True):
        """Run gcloud app deploy for *app_yaml*; exit with status 1 on failure."""
        method = '_gcloud_deploy'
        commons.print_msg(GCAppEngine.clazz, method, 'begin')

        promote_flag = "--no-promote" if promote is False else "--promote"
        # gcloud versions may not contain '+' or '.', so map them to '--'/'-'
        cmd = "{path}gcloud app deploy {dir}/{env} --quiet --version {ver} {promote}".format(
            path=GCAppEngine.path_to_google_sdk,
            dir=self.config.push_location,
            env=app_yaml,
            ver=self.config.version_number.replace('+', '--').replace('.', '-'),
            promote=promote_flag)

        commons.print_msg(GCAppEngine.clazz, method, cmd)

        gcloud_app_deploy = subprocess.Popen(cmd.split(), shell=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

        deploy_failed = False

        while gcloud_app_deploy.poll() is None:
            line = gcloud_app_deploy.stdout.readline().decode('utf-8').strip(' \r\n')
            commons.print_msg(GCAppEngine.clazz, method, line)

        try:
            gcloud_app_deploy.communicate(timeout=300)

            if gcloud_app_deploy.returncode != 0:
                commons.print_msg(GCAppEngine.clazz, method, "Failed calling {command}. Return code of {rtn}."
                                  .format(command=cmd,
                                          rtn=gcloud_app_deploy.returncode),
                                  'ERROR')
                deploy_failed = True

        except TimeoutExpired:
            commons.print_msg(GCAppEngine.clazz, method, "Timed out calling {}".format(cmd), 'ERROR')
            deploy_failed = True

        if deploy_failed:
            os.system('stty sane')  # restore terminal state before exiting
            exit(1)

        commons.print_msg(GCAppEngine.clazz, method, 'end')

    def deploy(self, app_yaml=None, promote=True):
        """Full deployment flow: credentials, SDK, login, artifact check, deploy."""
        method = 'deploy'
        commons.print_msg(GCAppEngine.clazz, method, 'begin')

        self._verify_required_attributes()

        self._write_service_account_json_to_file()

        self._download_google_sdk()

        self._gcloud_login()

        if self.config.artifact_extension is not None:
            self.find_deployable(self.config.artifact_extension, self.config.push_location)

        if app_yaml is None:
            app_yaml = self._determine_app_yml()

        self._gcloud_deploy(app_yaml, promote)

        commons.print_msg(GCAppEngine.clazz, method, 'DEPLOYMENT SUCCESSFUL')

        commons.print_msg(GCAppEngine.clazz, method, 'end')
227 |
--------------------------------------------------------------------------------
/flow/coderepo/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/flow/coderepo/__init__.py
--------------------------------------------------------------------------------
/flow/coderepo/code_repo_abc.py:
--------------------------------------------------------------------------------
1 | from abc import ABCMeta, abstractmethod
2 |
3 |
4 | # TODO need to come back to this and add more after additional github refactor
class Code_Repo(metaclass=ABCMeta):
    """Abstract interface for code repository providers (e.g. GitHub)."""

    @abstractmethod
    def _verify_repo_existence(self, url, org, repo, token=None):
        """Verify that *org*/*repo* exists at *url*, optionally authenticating with *token*."""
        pass

    @abstractmethod
    def calculate_next_semver(self, tag_type, bump_type, highest_version_array):
        """Compute the next semantic version from the highest existing version components."""
        pass
13 |
--------------------------------------------------------------------------------
/flow/coderepo/github/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/flow/coderepo/github/__init__.py
--------------------------------------------------------------------------------
/flow/communications/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/flow/communications/__init__.py
--------------------------------------------------------------------------------
/flow/communications/communications_abc.py:
--------------------------------------------------------------------------------
1 | from abc import ABCMeta, abstractmethod
2 |
3 |
class communications(metaclass=ABCMeta):
    """Abstract interface for outbound notifications (e.g. Slack)."""

    @abstractmethod
    def publish_deployment(self, story_details):
        """Announce a deployment along with its story details."""
        pass

    @abstractmethod
    def publish_error(sender, message, class_name, method_name):
        """Announce an error raised in *class_name*.*method_name* with *message*."""
        # NOTE(review): the first parameter is the implicit instance but is
        # named 'sender' instead of 'self' — renaming would break overriders
        # that call by keyword; confirm before changing.
        pass
--------------------------------------------------------------------------------
/flow/communications/slack/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/flow/communications/slack/__init__.py
--------------------------------------------------------------------------------
/flow/logger.py:
--------------------------------------------------------------------------------
class Logger:
    """Process-wide singleton that appends messages to .flow.log.txt.

    ``Logger(msg)`` lazily opens the log file on first use and appends a
    CRLF-prefixed ``msg`` on that and every subsequent call.
    """

    instance = None  # the one shared inner writer, created on first use

    class __Logger:
        # holds the append-mode file handle for the life of the process
        log_file = None

        def __init__(self, message):
            self.log_file = open(".flow.log.txt", "a")
            self.log_file.write('\r\n' + message)

    def __init__(self, message):
        writer = Logger.instance
        if writer is None:
            # first call: create the writer, which also logs this message
            Logger.instance = Logger.__Logger(message)
        else:
            writer.log_file.write('\r\n' + message)

    def __getattr__(self, name):
        # delegate unknown attribute access to the shared writer
        return getattr(self.instance, name)
20 |
--------------------------------------------------------------------------------
/flow/metrics/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/flow/metrics/__init__.py
--------------------------------------------------------------------------------
/flow/metrics/graphite/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/flow/metrics/graphite/__init__.py
--------------------------------------------------------------------------------
/flow/metrics/graphite/graphite.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # graphite.py
3 |
4 | from flow.buildconfig import BuildConfig
5 | from flow.metrics.metrics_abc import Metrics
6 |
7 | import flow.utils.commons as commons
8 |
9 |
class Graphite(Metrics):
    """Graphite metrics back-end.

    Reads the endpoint and metric-name prefix from the ``[metrics]``
    section of settings.ini at construction time.  ``write_metric``
    currently only logs begin/end; the actual POST to Graphite is not
    implemented yet.
    """

    clazz = 'Graphite'
    endpoint = None
    config = BuildConfig
    prefix = None

    def __init__(self):
        method = '__init__'

        settings = self.config.settings
        self.endpoint = settings.get('metrics', 'endpoint')
        commons.print_msg(self.clazz, method, "Metrics Endpoint {}".format(self.endpoint))

        self.prefix = settings.get('metrics', 'prefix')
        commons.print_msg(self.clazz, method, "Metrics Prefix {}".format(self.prefix))

    def write_metric(self, task, action):
        """Record a metric event for *task*/*action* (currently a no-op)."""
        method = 'write_metric'
        commons.print_msg(self.clazz, method, 'begin')

        # TODO: post "<prefix>.<task>.<action>.<project>.count 1 <ts>" to
        # self.endpoint once the Graphite write path is implemented.

        commons.print_msg(self.clazz, method, 'end')
42 |
--------------------------------------------------------------------------------
/flow/metrics/metrics_abc.py:
--------------------------------------------------------------------------------
1 | from abc import ABCMeta, abstractmethod
2 |
3 |
class Metrics(metaclass=ABCMeta):
    """Abstract base class for metrics back-ends (e.g. Graphite)."""

    @abstractmethod
    def write_metric(self, task, action):
        """Record a single metric event for *task*/*action*."""
        pass
8 |
--------------------------------------------------------------------------------
/flow/pluginloader.py:
--------------------------------------------------------------------------------
1 | import imp
2 | import inspect
3 | import os
4 |
5 | import flow.utils.commons as commons
6 |
# Plugins live under flow/plugins/<name>/; each plugin package must
# provide an __init__ module (checked by get_plugins below).
plugin_folder = "plugins"
MainModule = "__init__"
9 |
10 |
def get_plugins():
    """Discover candidate plugin packages under ``flow/plugins``.

    Scans each subdirectory of the plugin folder for an ``__init__.py``
    and collects what :func:`load_plugin` needs to import it later.

    Fixes: ``commons.printMSG`` did not match the ``commons.print_msg``
    helper used everywhere else in the codebase (it would raise an
    AttributeError on the error path), and ``method`` was mislabeled
    ``'load_plugin'``.

    Returns:
        list[dict]: one entry per plugin with keys ``name``,
        ``module_hdl``, ``path_name`` and ``description`` (the tuple
        returned by ``imp.find_module``).
    """
    clazz = 'plugin_loader'
    method = 'get_plugins'

    plugins = []

    plugin_root = os.path.join(os.path.dirname(__file__), plugin_folder)
    for candidate in os.listdir(plugin_root):
        location = os.path.join(plugin_root, candidate)
        if not os.path.isdir(location) or '__pycache__' in location:
            continue
        if MainModule + ".py" not in os.listdir(location):  # no __init__.py file
            commons.print_msg(clazz, method, "Failed to load plugin {}. Missing __init__ method".format(candidate), 'ERROR')
            continue

        module_hdl, path_name, description = imp.find_module(MainModule, [location])
        plugins.append({"name": candidate, "module_hdl": module_hdl, "path_name": path_name, "description": description})

        module_hdl.close()
    return plugins
31 |
32 |
def load_plugin(plugin):
    """Import a plugin discovered by :func:`get_plugins` and validate it.

    A valid plugin module must define ``run_action()``, ``register_parser()``
    and a module-level ``parser`` variable; otherwise the process exits.

    Fix: ``commons.printMSG`` did not match the ``commons.print_msg`` helper
    used everywhere else in the codebase (it would raise an AttributeError
    on the error path).

    Args:
        plugin: dict produced by get_plugins (name/module_hdl/path_name/description).

    Returns:
        The imported plugin module.
    """
    clazz = 'plugin_loader'
    method = 'load_plugin'

    current_plugin = imp.load_module(plugin['name'], plugin["module_hdl"], plugin["path_name"], plugin["description"])

    plugin_members = inspect.getmembers(current_plugin)
    plugin_methods = inspect.getmembers(current_plugin, inspect.isfunction)

    method_names = tuple(x[0] for x in plugin_methods)
    if 'run_action' not in method_names or 'register_parser' not in method_names:
        commons.print_msg(clazz, method, "Failed to find method run_action() and/or register_parser() in plugin {"
                          "}.".format(plugin), 'ERROR')
        exit(1)

    if 'parser' not in tuple(x[0] for x in plugin_members):
        commons.print_msg(clazz, method, "Failed to find variable 'parser' in plugin {}.".format(plugin), 'ERROR')
        exit(1)

    return current_plugin
--------------------------------------------------------------------------------
/flow/plugins/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/flow/plugins/__init__.py
--------------------------------------------------------------------------------
/flow/plugins/foo/__init__.py:
--------------------------------------------------------------------------------
1 | from flow.plugins.foo.foo import Foo
2 |
# Sub-command name this plugin registers with flow's CLI.  pluginloader's
# load_plugin() requires every plugin module to expose a module-level
# 'parser' variable and exits if it is missing.
parser = 'foo'
4 |
def register_parser(new_parser):
    """Attach the foo plugin's CLI arguments to *new_parser*."""
    argument_table = (
        (('action',), {'help': 'Action to take, possible values: bar, baz'}),
        (('-v', '--version'), {'help': 'Version to use'}),
    )
    for flags, options in argument_table:
        new_parser.add_argument(*flags, **options)
8 |
9 |
def run_action(args):
    """Dispatch the parsed CLI action to the Foo implementation.

    Fix: this previously compared against 'fooa'/'foob', which can never
    match the values advertised by register_parser ("possible values:
    bar, baz"), so neither documented action could ever run.  Dispatch
    now matches the documented action names.
    """
    foo_instance = Foo()

    if args.action == 'bar':
        foo_instance.bar()
    elif args.action == 'baz':
        foo_instance.baz()
17 |
--------------------------------------------------------------------------------
/flow/plugins/foo/foo.py:
--------------------------------------------------------------------------------
class Foo:
    """Sample implementation backing the demo 'foo' plugin."""

    def __init__(self):
        self._say("initialized foo")

    def bar(self):
        self._say('foo bar')

    def baz(self):
        self._say('foo baz')

    @staticmethod
    def _say(message):
        # Single funnel for the plugin's demo output.
        print(message)
--------------------------------------------------------------------------------
/flow/projecttracking/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/flow/projecttracking/__init__.py
--------------------------------------------------------------------------------
/flow/projecttracking/jira/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/flow/projecttracking/jira/__init__.py
--------------------------------------------------------------------------------
/flow/projecttracking/jira/jira.py:
--------------------------------------------------------------------------------
1 | import base64
2 | import json
3 | import os
4 | import re
5 |
6 | import requests
7 | from flow.buildconfig import BuildConfig
8 | from flow.projecttracking.project_tracking_abc import Project_Tracking
9 |
10 | import flow.utils.commons as commons
11 | from flow.utils.commons import Object
12 |
13 | #https:///rest/api/3/
class Jira(Project_Tracking):
    """Project-tracking integration for Jira (REST API v3).

    Credentials come from the JIRA_USER/JIRA_TOKEN environment variables;
    the base url comes from buildConfig (falling back to settings.ini).
    Configured project key(s) are resolved to (id, key) tuples at
    construction time.

    Fixes in this revision: ``resp`` could be referenced before assignment
    in the generic except of ``_determine_if_project_version_exists``;
    regex literals are now raw strings (invalid-escape deprecation); a
    misleading comment in ``extract_story_id_from_commit_messages`` was
    corrected; the stray class-level note string was folded into
    ``flatten_story_details``'s docstring.
    """
    clazz = 'Jira'
    token = None              # value of $JIRA_TOKEN
    user = None               # value of $JIRA_USER
    project_keys = None       # list of (project_id, project_key) tuples
    jira_url = None           # Jira base url
    jira_basic_auth = None    # base64 "user:token" for Basic auth
    config = BuildConfig
    http_timeout = 30         # seconds, applied to every HTTP call

    def __init__(self, config_override=None):
        """Validate credentials/config and resolve project keys, exiting on error."""
        method = '__init__'
        commons.print_msg(Jira.clazz, method, 'begin')

        if config_override is not None:
            self.config = config_override

        Jira.token = os.getenv('JIRA_TOKEN')
        Jira.user = os.getenv('JIRA_USER')

        if not Jira.token:
            if not Jira.user:
                commons.print_msg(Jira.clazz, method, 'No jira user, jira token found in environment. Did you define '
                                  'environment variables \'JIRA_USER\' and \'JIRA_TOKEN\'?', 'ERROR')
            else:
                commons.print_msg(Jira.clazz, method, 'No jira token found in environment. Did you define '
                                  'environment variable \'JIRA_TOKEN\'?', 'ERROR')
            exit(1)
        elif not Jira.user:
            commons.print_msg(Jira.clazz, method, 'No jira user found in environment. Did you define '
                              'environment variable \'JIRA_USER\'?', 'ERROR')
            exit(1)

        # Check for jira url first in buildConfig, second try settings.ini
        try:
            jira_json_config = self.config.json_config['projectTracking']['jira']
            commons.print_msg(Jira.clazz, method, jira_json_config)
            # noinspection PyUnboundLocalVariable
            Jira.jira_url = jira_json_config['url']
        except KeyError as e:
            if e.args[0] == 'url':
                if self.config.settings.has_section('jira') and self.config.settings.has_option('jira', 'url'):
                    Jira.jira_url = self.config.settings.get('jira', 'url')
                else:
                    commons.print_msg(Jira.clazz, method, 'No jira url found in buildConfig or settings.ini.',
                                      'ERROR')
                    exit(1)
            else:
                commons.print_msg(Jira.clazz,
                                  method,
                                  "The build config associated with projectTracking is missing key {}".format(str(e)),
                                  'ERROR')
                exit(1)

        Jira.jira_basic_auth = base64.b64encode("{0}:{1}".format(Jira.user, Jira.token).encode('ascii')).decode('ascii')

        try:
            # since api call to get project data uses key or id just always call to fetch id.
            if jira_json_config.get('projectKey') is not None and jira_json_config.get('projectKeys') is not None:
                raise KeyError('projectKeys')
            elif jira_json_config.get('projectKey') is not None:
                project_data = self._retrieve_project_info(str(jira_json_config['projectKey']))
                Jira.project_keys = [(project_data['id'], project_data['key'])]
            elif jira_json_config.get('projectKeys') is not None:
                Jira.project_keys = []
                for project_key in jira_json_config.get('projectKeys'):
                    project_data = self._retrieve_project_info(str(project_key))
                    Jira.project_keys.append((project_data['id'], project_data['key']))
            else:
                raise KeyError('projectKey')

            commons.print_msg(Jira.clazz, method, Jira.project_keys)
        except KeyError as e:
            if e.args[0] == 'projectKeys':
                commons.print_msg(Jira.clazz,
                                  method,
                                  "The build config may only contain 'projectKey' for single project key"
                                  " or 'projectKeys' containing an array of project keys",
                                  'ERROR')
            else:
                commons.print_msg(Jira.clazz,
                                  method,
                                  "The build config associated with projectTracking is missing key {}".format(str(e)),
                                  'ERROR')
            exit(1)

        commons.print_msg(Jira.clazz, method, 'end')

    def get_details_for_all_stories(self, story_list):
        """Fetch Jira issue detail for every id in *story_list*.

        Returns a list of the parsed issue JSON dicts; ids whose lookup
        failed (non-200 response) are silently skipped.
        """
        method = 'get_details_for_all_stories'
        commons.print_msg(Jira.clazz, method, 'begin')

        story_details = []
        commons.print_msg(Jira.clazz, method, story_list)

        for i, story_id in enumerate(story_list):
            story_detail = self._retrieve_story_detail(story_id)

            if story_detail is not None:
                story_details.append(story_detail)

        commons.print_msg(Jira.clazz, method, story_details)
        commons.print_msg(Jira.clazz, method, 'end')
        return story_details

    def _retrieve_project_info(self, project_id):
        """GET project metadata by key or id; exit on connection error.

        Returns the parsed project JSON, or None on a non-200 response.
        """
        method = '_retrieve_project_info'
        commons.print_msg(Jira.clazz, method, 'begin')

        json_data = None
        resp = None

        project_detail = {'url': '{0}/rest/api/3/project/{1}'.format(Jira.jira_url, project_id)}
        headers = {'Content-type': 'application/json', 'Accept': 'application/json', 'Authorization': 'Basic {0}'.format(Jira.jira_basic_auth)}

        try:
            commons.print_msg(Jira.clazz, method, project_detail['url'])
            resp = requests.get(project_detail['url'], headers=headers, timeout=self.http_timeout)
        except requests.ConnectionError as e:
            commons.print_msg(Jira.clazz, method, "Failed retrieving project detail from call to {}".format(
                project_detail.get('url', '')), 'ERROR')
            commons.print_msg(Jira.clazz, method, e, 'ERROR')
            exit(1)

        if resp.status_code == 200:
            json_data = json.loads(resp.text)
            commons.print_msg(Jira.clazz, method, "Project Key: {key}, Project Id: {id}".format(key=json_data['key'],
                                                                                                id=json_data['id']))
        else:
            commons.print_msg(Jira.clazz, method, "Failed retrieving project detail from call to {url}. \r\n "
                              "Response: {response}".format(url=project_detail.get('url', ''),
                                                            response=resp.text), 'WARN')

        commons.print_msg(Jira.clazz, method, 'end')
        return json_data

    def _retrieve_story_detail(self, story_id):
        """GET a single issue by key/id; exit on connection error.

        Returns the parsed issue JSON, or None on a non-200 response.
        """
        method = '_retrieve_story_detail'
        commons.print_msg(Jira.clazz, method, 'begin')

        json_data = None
        resp = None

        story_detail = {'url': '{0}/rest/api/3/issue/{1}'.format(Jira.jira_url, story_id)}

        headers = {'Content-type': 'application/json', 'Accept': 'application/json', 'Authorization': 'Basic {0}'.format(Jira.jira_basic_auth)}

        try:
            commons.print_msg(Jira.clazz, method, story_detail['url'])
            resp = requests.get(story_detail['url'], headers=headers, timeout=self.http_timeout)
        except requests.ConnectionError as e:
            commons.print_msg(Jira.clazz, method, "Failed retrieving story detail from call to {}".format(
                story_detail.get('url', '')), 'ERROR')
            commons.print_msg(Jira.clazz, method, e, 'ERROR')
            exit(1)

        if resp.status_code == 200:
            json_data = json.loads(resp.text)
            commons.print_msg(Jira.clazz, method, json_data)
        else:
            commons.print_msg(Jira.clazz, method, "Failed retrieving story detail from call to {url}. \r\n "
                              "Response: {response}".format(url=story_detail.get('url', ''),
                                                            response=resp.text), 'WARN')

        commons.print_msg(Jira.clazz, method, 'end')
        return json_data

    def tag_stories_in_commit(self, story_list):
        """Create the '<project>-<version>' fix version and attach it to each story."""
        method = 'tag_stories_in_commit'
        commons.print_msg(Jira.clazz, method, 'begin')

        version = '{0}-{1}'.format(self.config.project_name, self.config.version_number)
        self._add_version_to_project(version)

        for story in story_list:
            self._add_version_to_story(story, version)

        commons.print_msg(Jira.clazz, method, 'end')

    def _add_version_to_project(self, version):
        """Create *version* (lower-cased) in each configured project unless it already exists."""
        method = '_add_version_to_project'
        commons.print_msg(Jira.clazz, method, 'begin')

        for idx, project_id in enumerate(self.project_keys):
            does_version_exist = self._determine_if_project_version_exists(project_id[0], version.lower())
            if does_version_exist:
                commons.print_msg(Jira.clazz, method, 'Version {version} already exists for project {project}, skipping.'.format(version=version.lower(), project=project_id[1]))
            else:
                version_to_post = Object()
                version_to_post.projectId = project_id[0]
                version_to_post.name = version.lower()

                jira_url = "{url}/rest/api/3/version".format(url=Jira.jira_url)

                headers = {'Content-type': 'application/json', 'Accept': 'application/json',
                           'Authorization': 'Basic {0}'.format(Jira.jira_basic_auth)}

                commons.print_msg(Jira.clazz, method, 'Post body for create project version:\n{}'.format(version_to_post.to_JSON()))

                try:
                    resp = requests.post(jira_url, version_to_post.to_JSON(), headers=headers, timeout=self.http_timeout)

                    if resp.status_code != 201:
                        commons.print_msg(Jira.clazz, method, "Unable to create version {version} for project {project} \r\n "
                                          "Response: {response}".format(version=version, project=project_id[1], response=resp.text), 'WARN')
                    else:
                        commons.print_msg(Jira.clazz, method, resp.text)
                except requests.ConnectionError as e:
                    commons.print_msg(Jira.clazz, method, 'Connection error. ' + str(e), 'WARN')
                except Exception as e:
                    commons.print_msg(Jira.clazz, method, "Unable to create version {version} for project {project}".format(
                        version=version, project=project_id[1]), 'WARN')
                    commons.print_msg(Jira.clazz, method, e, 'WARN')

        commons.print_msg(Jira.clazz, method, 'end')

    def _determine_if_project_version_exists(self, project_id, version):
        """Return True if *version* already exists in *project_id* (False on any failure).

        Fix: ``resp`` is initialized before the try block; previously a
        non-ConnectionError raised by ``requests.get`` (e.g. a timeout)
        reached the generic except and referenced ``resp`` before
        assignment, raising a second, unrelated error.
        """
        method = '_determine_if_project_version_exists'
        commons.print_msg(Jira.clazz, method, 'begin')

        jira_url = "{url}/rest/api/3/project/{project}/versions".format(url=Jira.jira_url, project=project_id)

        headers = {'Content-type': 'application/json', 'Accept': 'application/json',
                   'Authorization': 'Basic {0}'.format(Jira.jira_basic_auth)}

        version_exists = False
        resp = None

        try:
            resp = requests.get(jira_url, headers=headers, timeout=self.http_timeout)
            if resp.status_code != 200:
                commons.print_msg(Jira.clazz, method, "Unable to fetch versions for project {project} \r\n "
                                  "Response: {response}".format(project=project_id, response=resp.text), 'WARN')
                return False
            else:
                project_versions = json.loads(resp.text)
                version_exists = any(v['name'] == version for v in project_versions)
        except requests.ConnectionError as e:
            commons.print_msg(Jira.clazz, method, 'Connection error. ' + str(e), 'WARN')
        except Exception as e:
            commons.print_msg(Jira.clazz, method, "Unable to fetch versions for project {project} \r\n "
                              "Response: {response}".format(project=project_id,
                                                            response=resp.text if resp is not None else ''), 'WARN')
            commons.print_msg(Jira.clazz, method, e, 'WARN')

        commons.print_msg(Jira.clazz, method, 'end')
        return version_exists

    def _add_version_to_story(self, story_id, version):
        """PUT *version* (lower-cased) onto the issue's fixVersions list."""
        method = '_add_version_to_story'
        commons.print_msg(Jira.clazz, method, 'begin')

        jira_url = "{url}/rest/api/3/issue/{id}".format(url = Jira.jira_url, id = story_id)

        headers = {'Content-type': 'application/json', 'Accept': 'application/json',
                   'Authorization': 'Basic {0}'.format(Jira.jira_basic_auth)}

        data = {
            "update": {
                "fixVersions": [
                    {
                        "add": {
                            "name": version.lower()
                        }
                    }
                ]
            }
        }

        put_data = json.dumps(data, default=lambda o: o.__dict__, sort_keys=False, indent=4)

        commons.print_msg(Jira.clazz, method, jira_url)

        try:
            resp = requests.put(jira_url, put_data, headers=headers, timeout=self.http_timeout)

            if resp.status_code != 204:
                commons.print_msg(Jira.clazz, method, "Unable to add version {version} to issue {story} \r\n "
                                  "Response: {response}".format(version=version, story=story_id, response=resp.text), 'WARN')
            else:
                commons.print_msg(Jira.clazz, method, resp.text)
        except requests.ConnectionError as e:
            commons.print_msg(Jira.clazz, method, 'Connection error. ' + str(e), 'WARN')
        except Exception as e:
            commons.print_msg(Jira.clazz, method, "Unable to add version {version} for story {story}".format(
                version=version, story=story_id), 'WARN')
            commons.print_msg(Jira.clazz, method, e, 'WARN')

        commons.print_msg(Jira.clazz, method, 'end')

    def determine_semantic_version_bump(self, story_details):
        """Derive the semver bump ('major'/'minor'/'bug') from issue metadata.

        A 'major' label or component on any issue wins outright; story,
        chore and release issue types force 'minor'; bugs yield 'bug'
        unless something stronger was already seen; default is 'minor'.
        """
        method = 'determine_semantic_version_bump'
        commons.print_msg(Jira.clazz, method, 'begin')

        bump_type = None

        for i, story in enumerate(story_details):
            #jira labels are global across all projects but could still be used
            for j, label in enumerate(story.get('fields').get('labels')):
                if label.lower() == 'major':
                    return 'major'

            #jira components behave closest to tracker labels, are per project
            for k, component in enumerate(story.get('fields').get('components')):
                if component.get('name') == 'major':
                    return 'major'

            story_type = story.get('fields').get('issuetype').get('name').lower()

            if story_type == 'story' or story_type == 'chore' or story_type == 'release':
                bump_type = 'minor'
            elif story_type == 'bug' and bump_type is None:
                bump_type = 'bug'

        # This fall-through rule is needed because if there are no tracker
        # stories present in the commits, we need to default to something,
        # else calculate_next_semver will throw an error about getting 'None'
        if bump_type is None:
            bump_type = 'minor'

        commons.print_msg(Jira.clazz, method, "bump type: {}".format(bump_type))

        commons.print_msg(Jira.clazz, method, 'end')

        return bump_type

    def extract_story_id_from_commit_messages(self, commit_messages):
        """Collect unique Jira issue keys found in [bracketed] commit text.

        Only commit messages with balanced brackets are scanned; bracketed
        spans containing a nested '[' are skipped; keys must match
        PROJECT-123 style and may be comma-separated within one span.
        """
        method = 'extract_story_id_from_commit_messages'
        commons.print_msg(Jira.clazz, method, 'begin')

        story_list = []

        for commit_string in commit_messages:

            # check if there is a starting bracket and if there are balanced brackets
            if commit_string.count('[') > 0 and commit_string.count('[') == commit_string.count(']'):
                # for each starting bracket
                for m in re.finditer(r'\[', commit_string):
                    # find the next subsequent ending bracket
                    ending_bracket = commit_string.find(']', m.start())
                    # find the contents between the brackets
                    stories = commit_string[m.start()+1:ending_bracket]

                    # verify there isn't an embedded bracket, if so just skip this one and keep marching.
                    if stories.find('[') == -1:  # no nested starting bracket
                        # now dig out the tracker number or jira key in single number format or multiple separated by commas.
                        r = re.compile(r'(?:[a-zA-Z]+\-[0-9]+,?)+(,([a-zA-Z]+\-[0-9]+,?))*,?')
                        stories_array = stories.split(',')
                        stories = list(filter(r.match, stories_array))
                        for story in stories:
                            # split out by comma.
                            if story not in story_list:
                                story_list.append(story)

        commons.print_msg(Jira.clazz, method, "Story list: {}".format(story_list))
        commons.print_msg(Jira.clazz, method, 'end')
        return story_list

    def flatten_story_details(self, story_details):
        """Flatten Jira issue JSON into release-note dicts.

        Each returned dict carries six fields at its top level:
        story_type, id, name, description, url and current_state.
        Description text is assembled from the paragraph/text nodes of
        Jira's Atlassian-document-format 'description' field.
        """
        method = 'flatten_story_details'
        commons.print_msg(Jira.clazz, method, 'begin')

        if story_details is None:
            return None
        story_release_notes = []
        for story in story_details:
            story_release_note_summary = {}
            story_release_note_summary['story_type'] = story.get('fields').get('issuetype').get('name').lower()
            story_release_note_summary['id'] = story.get('key').upper()
            story_release_note_summary['name'] = story.get('fields').get('summary')
            story_release_note_summary['url'] = '{0}/browse/{1}'.format(Jira.jira_url, story.get('key').upper())
            story_release_note_summary['current_state'] = story.get('fields').get('status').get('name')
            description_text = []
            if story.get('fields').get('description') is not None:
                for i, description_content in enumerate(story.get('fields').get('description').get('content')):
                    if description_content.get('type') == 'paragraph':
                        for j, paragraph_content in enumerate(description_content.get('content')):
                            if paragraph_content.get('type') == 'text':
                                description_text.append(paragraph_content.get('text'))
            if len(description_text) > 0:
                description_text = ' '.join(description_text)
            else:
                description_text = None
            story_release_note_summary['description'] = description_text
            story_release_notes.append(story_release_note_summary)

        commons.print_msg(Jira.clazz, method, story_release_notes)
        commons.print_msg(Jira.clazz, method, 'end')
        return story_release_notes
412 |
--------------------------------------------------------------------------------
/flow/projecttracking/project_tracking_abc.py:
--------------------------------------------------------------------------------
1 | from abc import ABCMeta, abstractmethod
2 |
3 |
class Project_Tracking(metaclass=ABCMeta):
    """Abstract interface implemented by project trackers (Jira, Tracker)."""

    @abstractmethod
    def get_details_for_all_stories(self, story_list):
        """Return detail records for every story id in *story_list*."""
        pass

    @abstractmethod
    def determine_semantic_version_bump(self, story_details):
        """Return the semver bump type derived from *story_details*."""
        pass

    @abstractmethod
    def extract_story_id_from_commit_messages(self, commit_messages):
        """Return story ids referenced in the given commit messages."""
        pass

    @abstractmethod
    def tag_stories_in_commit(self, story_list):
        """Tag each story in *story_list* with the current project version."""
        pass

    @abstractmethod
    def flatten_story_details(self, story_details):
        """Flatten tracker-specific story detail into a common shape."""
        pass
--------------------------------------------------------------------------------
/flow/projecttracking/tracker/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/flow/projecttracking/tracker/__init__.py
--------------------------------------------------------------------------------
/flow/projecttracking/tracker/tracker.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import re
4 |
5 | import requests
6 | from flow.buildconfig import BuildConfig
7 | from flow.projecttracking.project_tracking_abc import Project_Tracking
8 |
9 | import flow.utils.commons as commons
10 | from flow.utils.commons import Object
11 |
12 |
13 | class Tracker(Project_Tracking):
14 | clazz = 'Tracker'
15 | token = None
16 | project_ids = None
17 | tracker_url = None
18 | config = BuildConfig
19 | http_timeout = 30
20 |
21 | def __init__(self, config_override=None):
22 | method = '__init__'
23 | commons.print_msg(Tracker.clazz, method, 'begin')
24 |
25 | if config_override is not None:
26 | self.config = config_override
27 |
28 | Tracker.token = os.getenv('TRACKER_TOKEN')
29 |
30 | if not Tracker.token:
31 | commons.print_msg(Tracker.clazz, method, 'No tracker token found in environment. Did you define '
32 | 'environment variable \'TRACKER_TOKEN\'?', 'ERROR')
33 | exit(1)
34 |
35 | try:
36 | # below line is to maintain backwards compatibility since stanza was renamed
37 | tracker_json_config = self.config.json_config['tracker'] if 'tracker' in self.config.json_config else \
38 | self.config.json_config['projectTracking']["tracker"]
39 |
40 | if tracker_json_config.get('projectId') is not None and tracker_json_config.get('projectIds') is not None:
41 | raise KeyError('projectIds')
42 | elif tracker_json_config.get('projectId') is not None:
43 | Tracker.project_ids = [str(tracker_json_config['projectId'])]
44 | elif tracker_json_config.get('projectIds') is not None:
45 | Tracker.project_ids = []
46 | for project_id in tracker_json_config.get('projectIds'):
47 | Tracker.project_ids.append(str(project_id))
48 | else:
49 | raise KeyError('projectId')
50 |
51 | commons.print_msg(Tracker.clazz, method, Tracker.project_ids)
52 | except KeyError as e:
53 | if e.args[0] == 'projectIds':
54 | commons.print_msg(Tracker.clazz,
55 | method,
56 | "The build config may only contain 'projectId' for single project id"
57 | "or 'projectIds' containing an array of project ids",
58 | 'ERROR')
59 | else:
60 | commons.print_msg(Tracker.clazz,
61 | method,
62 | "The build config associated with projectTracking is missing key {}".format(str(e)),
63 | 'ERROR')
64 | exit(1)
65 |
66 | # Check for tracker url first in buildConfig, second try settings.ini
67 |
68 | try:
69 | # noinspection PyUnboundLocalVariable
70 | Tracker.tracker_url = tracker_json_config['url']
71 | except KeyError:
72 | if self.config.settings.has_section('tracker') and self.config.settings.has_option('tracker', 'url'):
73 | Tracker.tracker_url = self.config.settings.get('tracker', 'url')
74 | else:
75 | commons.print_msg(Tracker.clazz, method, 'No tracker url found in buildConfig or settings.ini.',
76 | 'ERROR')
77 | exit(1)
78 |
79 | def get_details_for_all_stories(self, story_list):
80 | method = 'get_details_for_all_stories'
81 | commons.print_msg(Tracker.clazz, method, 'begin')
82 |
83 | story_details = []
84 | commons.print_msg(Tracker.clazz, method, story_list)
85 |
86 | for i, story_id in enumerate(story_list):
87 | story_detail = self._retrieve_story_detail(story_id)
88 |
89 | if story_detail is not None:
90 | story_details.append(story_detail)
91 |
92 | commons.print_msg(Tracker.clazz, method, story_details)
93 | commons.print_msg(Tracker.clazz, method, 'end')
94 | return story_details
95 |
    def _retrieve_story_detail(self, story_id):
        """Fetch one story's JSON detail from Tracker.

        Builds a candidate URL per configured project id and tries each in
        turn, returning the parsed JSON of the first 200 response (a story
        lives in exactly one project, so non-matching projects answer
        non-200 and are warned about).  Exits the process on connection or
        unexpected errors.

        Returns:
            dict | None: parsed story JSON, or None if no project returned 200.
        """
        method = '_retrieve_story_detail'
        commons.print_msg(Tracker.clazz, method, 'begin')

        tracker_story_details = []
        json_data = None
        resp = None

        # One candidate URL per configured project; the story id is tried
        # against each project until one responds 200.
        if Tracker.project_ids is not None:
            for project_id in Tracker.project_ids:
                tracker_story_details.append(
                    {'url': Tracker.tracker_url + '/services/v5/projects/' + project_id + '/stories/' + story_id})

        headers = {'Content-type': 'application/json', 'Accept': 'application/json', 'X-TrackerToken': Tracker.token}

        for story_detail in tracker_story_details:
            try:
                commons.print_msg(Tracker.clazz, method, story_detail['url'])
                resp = requests.get(story_detail['url'], headers=headers, timeout=self.http_timeout)
            except requests.ConnectionError as e:
                commons.print_msg(Tracker.clazz, method, 'Connection error. ' + str(e), 'ERROR')
                exit(1)
            except Exception as e:
                commons.print_msg(Tracker.clazz, method, "Failed retrieving story detail from call to {} ".format(
                    story_detail.get('url', '')), 'ERROR')
                commons.print_msg(Tracker.clazz, method, e, 'ERROR')
                exit(1)

            if resp.status_code == 200:
                json_data = json.loads(resp.text)
                commons.print_msg(Tracker.clazz, method, json_data)
                # first project that knows this story wins
                break
            else:
                commons.print_msg(Tracker.clazz, method, "Failed retrieving story detail from call to {url}. \r\n "
                                  "Response: {response}".format(url=story_detail.get('url', ''),
                                                                response=resp.text), 'WARN')

        commons.print_msg(Tracker.clazz, method, 'end')
        return json_data
135 |
136 | def tag_stories_in_commit(self, story_list):
137 | method = 'tag_stories_in_commit'
138 | commons.print_msg(Tracker.clazz, method, 'begin')
139 |
140 | for story in story_list:
141 | label = self.config.project_name + '-' + self.config.version_number
142 |
143 | self._add_label_to_tracker(story, label)
144 |
145 | commons.print_msg(Tracker.clazz, method, 'end')
146 |
147 | def _add_label_to_tracker(self, story_id, label):
148 | method = '_add_label_to_tracker'
149 | commons.print_msg(Tracker.clazz, method, 'begin')
150 |
151 | label_to_post = Object()
152 | label_to_post.name = label.lower()
153 |
154 | for project_id in Tracker.project_ids:
155 | tracker_url = "{url}/services/v5/projects/{projid}/stories/{storyid}/labels".format(url=Tracker.tracker_url,
156 | projid=project_id,
157 | storyid=story_id)
158 |
159 | headers = {'Content-type': 'application/json', 'Accept': 'application/json',
160 | 'X-TrackerToken': Tracker.token}
161 |
162 | commons.print_msg(Tracker.clazz, method, tracker_url)
163 | commons.print_msg(Tracker.clazz, method, label_to_post.to_JSON())
164 |
165 | try:
166 | resp = requests.post(tracker_url, label_to_post.to_JSON(), headers=headers, timeout=self.http_timeout)
167 |
168 | if resp.status_code != 200:
169 | commons.print_msg(Tracker.clazz, method, "Unable to tag story {story} with label {lbl} \r\n "
170 | "Response: {response}".format(story=story_id, lbl=label,
171 | response=resp.text), 'WARN')
172 | else:
173 | commons.print_msg(Tracker.clazz, method, resp.text)
174 | except requests.ConnectionError as e:
175 | commons.print_msg(Tracker.clazz, method, 'Connection error. ' + str(e), 'WARN')
176 | except Exception as e:
177 | commons.print_msg(Tracker.clazz, method, "Unable to tag story {story} with label {lbl}".format(
178 | story=story_id, lbl=label), 'WARN')
179 | commons.print_msg(Tracker.clazz, method, e, 'WARN')
180 |
181 | commons.print_msg(Tracker.clazz, method, 'end')
182 |
183 | def determine_semantic_version_bump(self, story_details):
184 | method = 'determine_semantic_version_bump'
185 | commons.print_msg(Tracker.clazz, method, 'begin')
186 |
187 | bump_type = None
188 |
189 | for i, story in enumerate(story_details):
190 | for j, label in enumerate(story.get('labels')):
191 | if label.get('name') == 'major':
192 | return 'major'
193 |
194 | if story.get('story_type') == 'feature' or story.get('story_type') == 'chore' or story.get(
195 | 'story_type') == 'release':
196 | bump_type = 'minor'
197 | elif story.get('story_type') == 'bug' and bump_type is None:
198 | bump_type = 'bug'
199 |
200 | # This fall-through rule is needed because if there are no tracker
201 | # stories present in the commits, we need to default to something,
202 | # else calculate_next_semver will throw an error about getting 'None'
203 | if bump_type is None:
204 | bump_type = 'minor'
205 |
206 | commons.print_msg(Tracker.clazz, method, "bump type: {}".format(bump_type))
207 |
208 | commons.print_msg(Tracker.clazz, method, 'end')
209 |
210 | return bump_type
211 |
212 | def extract_story_id_from_commit_messages(self, commit_messages):
213 | method = 'extract_story_id_from_commit_messages'
214 | commons.print_msg(Tracker.clazz, method, 'begin')
215 |
216 | story_list = []
217 |
218 | for commit_string in commit_messages:
219 | # check if there is a starting bracket and if there are balanced brackets
220 | if commit_string.count('[') > 0 and commit_string.count('[') == commit_string.count(']'):
221 | # for each starting bracket
222 | for m in re.finditer('\[', commit_string):
223 | # find the next subsequent ending bracket
224 | ending_bracket = commit_string.find(']', m.start())
225 | # find the contents between the brackets
226 | stories = commit_string[m.start()+1:ending_bracket]
227 |
228 | # verify there isn't a embedded bracket, if so just skip this one and keep marching.
229 | if stories.find('[') == -1: # there is a nested starting bracket
230 | # now dig out the tracker number or jira key in single number format or multiple separated by commas.
231 | r = re.compile('[0-9,]+(,[0-9]+)*,?')
232 | stories = ''.join(filter(r.match, stories))
233 |
234 | for story in [_f for _f in stories.split(',') if _f]:
235 | # split out by comma.
236 | if story not in story_list:
237 | story_list.append(story)
238 |
239 | commons.print_msg(Tracker.clazz, method, "Story list: {}".format(story_list))
240 | commons.print_msg(Tracker.clazz, method, 'end')
241 | return story_list
242 |
243 | """
244 | This methods needs to flatten an array of stories to ensure 6 specific
245 | fields exist at the top level of the dictionary for each story:
246 | story_type
247 | id
248 | name
249 | description
250 | url
251 | current_state
252 | """
253 | def flatten_story_details(self, story_details):
254 | method = 'flatten_story_details'
255 | commons.print_msg(Tracker.clazz, method, 'begin')
256 |
257 | if story_details is None:
258 | return None
259 |
260 | story_release_notes = []
261 | for story in story_details:
262 | story_release_note_summary = {}
263 | story_release_note_summary['story_type'] = story.get('story_type')
264 | story_release_note_summary['id'] = story.get('id')
265 | story_release_note_summary['name'] = story.get('name')
266 | story_release_note_summary['description'] = story.get('description')
267 | story_release_note_summary['url'] = story.get('url')
268 | story_release_note_summary['current_state'] = story.get('current_state')
269 | story_release_notes.append(story_release_note_summary)
270 |
271 | commons.print_msg(Tracker.clazz, method, story_release_notes)
272 | commons.print_msg(Tracker.clazz, method, 'end')
273 | return story_release_notes
--------------------------------------------------------------------------------
/flow/settings.ini:
--------------------------------------------------------------------------------
1 | [project]
2 | retry_sleep_interval = 5
3 | http_timeout_default_seconds = 60
4 |
5 | [sonar]
6 | sonar_runner = #TODO add location to sonar runner
7 |
8 | [tracker]
9 | url = https://www.pivotaltracker.com
10 |
11 | [jira]
12 | # This should be updated to the site-specific URL for the cloud Jira instance,
13 | # e.g. https://<your-site>.atlassian.net
14 | url = https://www.atlassian.net
15 |
16 | [github]
17 |
18 | [slack]
19 | bot_name = DeployBot
20 | emoji = :robot_face:
21 | release_note_attachment_color = #4286f4
22 | error_attachment_color = #FF0000
23 | generic_message_slack_url =
24 | #generic message url lets us send messages to channels even in cases where the user has not injected a slack webhook
25 |
26 | [cloudfoundry]
27 | cli_download_path = #TODO add location to download path
28 |
29 | [googlecloud]
30 | cloud_sdk_path = https://storage.googleapis.com/cloud-sdk-release/
31 | gcloud_version = google-cloud-sdk-182.0.0-linux-x86_64.tar.gz
32 |
33 | [metrics]
34 | endpoint =
35 | prefix =
--------------------------------------------------------------------------------
/flow/staticqualityanalysis/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/flow/staticqualityanalysis/__init__.py
--------------------------------------------------------------------------------
/flow/staticqualityanalysis/sonar/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/flow/staticqualityanalysis/sonar/__init__.py
--------------------------------------------------------------------------------
/flow/staticqualityanalysis/sonar/sonarmodule.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # sonarmodule.py
3 |
4 | import os
5 | import subprocess
6 | import time
7 |
8 | from flow.buildconfig import BuildConfig
9 | from flow.staticqualityanalysis.static_quality_analysis_abc import Static_Quality_Analysis
10 |
11 | import flow.utils.commons as commons
12 |
13 |
class SonarQube(Static_Quality_Analysis):
    """Static-quality-analysis provider that shells out to the SonarQube
    runner jar for the current project."""

    clazz = 'SonarQube'
    config = BuildConfig

    def __init__(self, config_override=None):
        """Optionally replace the shared BuildConfig with *config_override*."""
        method = '__init__'
        commons.print_msg(SonarQube.clazz, method, 'begin')

        if config_override is not None:
            self.config = config_override

        commons.print_msg(SonarQube.clazz, method, 'end')

    def scan_code(self):
        """Submit a sonar scan, retrying up to 3 times with a linearly
        increasing sleep between attempts; exits the process when all
        retries are exhausted."""
        method = 'scan_code'
        commons.print_msg(SonarQube.clazz, method, 'begin')

        retries = 0

        keep_retrying = True

        while keep_retrying:
            try:
                sleep_timer = 0

                # Optional [project] retry_sleep_interval from settings.ini.
                if self.config.settings.has_section('project') and self.config.settings.has_option('project',
                                                                                                   'retry_sleep_interval'):
                    sleep_timer = int(self.config.settings.get('project', 'retry_sleep_interval'))

                if retries > 0:
                    # Back off longer on each successive retry.
                    time.sleep(sleep_timer * retries)

                self._submit_scan()
                keep_retrying = False
            except Exception:
                retries += 1

                if retries > 3:
                    commons.print_msg(SonarQube.clazz, method, 'Could not connect to Sonar. Maximum number of retries '
                                                              'reached.', "ERROR")
                    keep_retrying = False
                    exit(1)
                else:
                    commons.print_msg(SonarQube.clazz, method, "Attempting retry number {}".format(retries), "WARN")

        commons.print_msg(SonarQube.clazz, method, 'end')

    def _build_sonar_cmd(self, sonar_user, sonar_pwd, sonar_runner_executable, custom_sonar_file=None):
        """Assemble the sonar-runner command line.

        The login/password options are included only when both credentials
        are present, and the custom properties file only when configured —
        replacing four near-identical hard-coded command strings.
        """
        cmd = 'java -Dsonar.projectKey="' + self.config.sonar_project_key + \
              '" -Dsonar.projectName="' + self.config.sonar_project_key + \
              '" -Dsonar.projectVersion="' + self.config.version_number + '"'

        if sonar_user is not None and sonar_pwd is not None:
            cmd += ' -Dsonar.login=$SONAR_USER -Dsonar.password=$SONAR_PWD'

        if custom_sonar_file is not None:
            cmd += ' -Dproject.settings="' + custom_sonar_file + '"'

        return cmd + ' -Dproject.home="$PWD" -jar $SONAR_HOME/' + sonar_runner_executable + ' -e -X'

    def _submit_scan(self):
        """Locate the sonar runner, build the command line and execute it,
        raising an Exception on failure so scan_code can retry."""
        method = '_submit_scan'
        commons.print_msg(SonarQube.clazz, method, 'begin')

        process_failed = False

        sonar_user = os.environ.get('SONAR_USER')
        sonar_pwd = os.environ.get('SONAR_PWD')

        if sonar_user is None or sonar_pwd is None or len(sonar_user.strip()) == 0 or len(sonar_pwd.strip()) == 0:
            commons.print_msg(SonarQube.clazz, method, 'No sonar name/pwd supplied. If your sonar instance does not '
                                                      'support anonymous access then this operation may fail', 'WARN')

        if not os.getenv("SONAR_HOME"):
            commons.print_msg(SonarQube.clazz, method, '\'SONAR_HOME\' environment variable must be defined', 'ERROR')
            exit(1)

        # Prefer the newest runner jar found in SONAR_HOME; fall back to the
        # path configured in settings.ini.
        sonar_jar_files = commons.get_files_of_type_from_directory('jar', os.environ.get('SONAR_HOME'))
        if len(sonar_jar_files) > 0:
            sonar_jar_files.sort(reverse=True)
            sonar_runner_executable = sonar_jar_files[0]
        elif not self.config.settings.has_section('sonar') or not self.config.settings.has_option('sonar',
                                                                                                  'sonar_runner'):
            commons.print_msg(SonarQube.clazz, method, 'Sonar runner undefined. Please define path to sonar '
                                                      'runner in settings.ini.', 'ERROR')
            exit(1)
        else:
            sonar_runner_executable = self.config.settings.get('sonar', 'sonar_runner')

        if not os.path.isfile('sonar-project.properties'):
            commons.print_msg(SonarQube.clazz, method, 'No sonar-project.properties file was found. Please include in the root of your project with a valid value for \'sonar.host.url\'', 'ERROR')
            exit(1)

        # Was previously left unbound when no propertiesFile was configured.
        custom_sonar_file = None
        if 'sonar' in self.config.json_config and 'propertiesFile' in self.config.json_config['sonar']:
            custom_sonar_file = self.config.json_config['sonar']['propertiesFile']

        # NOTE(review): the $SONAR_USER/$SONAR_PWD/$PWD/$SONAR_HOME tokens are
        # NOT expanded by a shell because Popen below runs with shell=False —
        # confirm the runner resolves them, or expand them here.
        sonar_cmd = self._build_sonar_cmd(sonar_user, sonar_pwd, sonar_runner_executable, custom_sonar_file)
        commons.print_msg(SonarQube.clazz, method, sonar_cmd)

        p = subprocess.Popen(sonar_cmd.split(), shell=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

        # Stream the runner's output while it is still executing, watching
        # for an explicit failure marker in the log.
        while p.poll() is None:
            line = p.stdout.readline().decode('utf-8').strip(' \r\n')

            commons.print_msg(SonarQube.clazz, method, line)

            if 'EXECUTION FAILURE' in line:
                commons.print_msg(SonarQube.clazz, method, "Failed to execute Sonar: {}".format(line), 'ERROR')
                process_failed = True

        # Drain anything written after the polling loop exited and reap the
        # child process.
        p_output, errs = p.communicate(timeout=120)

        for line in p_output.splitlines():
            commons.print_msg(SonarQube.clazz, method, line.decode('utf-8'))

        if p.returncode != 0:
            commons.print_msg(SonarQube.clazz, method, "Failed calling sonar runner. Return code of {}"
                              .format(p.returncode),
                              'ERROR')
            process_failed = True

        if process_failed:
            raise Exception('Failed uploading')

        commons.print_msg(SonarQube.clazz, method, 'end')
136 |
--------------------------------------------------------------------------------
/flow/staticqualityanalysis/static_quality_analysis_abc.py:
--------------------------------------------------------------------------------
1 | from abc import ABCMeta, abstractmethod
2 |
3 |
class Static_Quality_Analysis(metaclass=ABCMeta):
    """Abstract base class for static-quality-analysis providers.

    Concrete implementations (e.g. SonarQube) must supply scan_code().
    """

    @abstractmethod
    def scan_code(self):
        # Run the provider's code scan against the current project.
        pass
8 |
--------------------------------------------------------------------------------
/flow/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/flow/utils/__init__.py
--------------------------------------------------------------------------------
/flow/utils/commons.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #commons.py
3 |
4 | import json
5 | import os
6 | import re
7 | import subprocess
8 | import sys
9 | from enum import Enum
10 |
11 | from pydispatch import dispatcher
12 |
13 | from flow.logger import Logger
14 |
15 |
class Commons:
    # When True, print_msg suppresses everything below ERROR level.
    quiet = False
18 |
# Shared constant values used across flow modules.
content_json = 'application/json'                  # common Content-Type header value
build_config_file = 'buildConfig.json'             # default build configuration file name
forward_slash = '/'
content_oct_stream = 'application/octet-stream'    # binary upload Content-Type
clazz = 'commons'                                  # class-name tag used in this module's log output
24 |
25 |
def flush_out(string):
    """Emit *string* through print_msg and force stdout to flush."""
    print_msg(clazz, 'flush_out', string)
    sys.stdout.flush()
30 |
31 |
def byteify(input_str):
    """Recursively encode every str inside *input_str* to UTF-8 bytes.

    Dicts and lists are rebuilt with byteified contents; anything that is
    neither a str, list nor dict is returned untouched.
    """
    if isinstance(input_str, str):
        return input_str.encode('utf-8')
    if isinstance(input_str, list):
        return [byteify(element) for element in input_str]
    if isinstance(input_str, dict):
        return {byteify(key): byteify(value) for key, value in input_str.items()}
    return input_str
42 |
def print_msg(class_name, method, message, level='DEBUG'):
    """Log a formatted message to stdout and the Logger.

    Non-error output is suppressed when Commons.quiet is set.  Messages at
    exactly level 'ERROR' additionally publish a 'publish-error-signal'
    dispatch event so communication plugins can react.
    """
    if level.lower() != 'error' and Commons.quiet:
        return

    log_level = '[' + level + ']'
    log_message = '{:7s} {:11s} {:35s} {!s:s}'.format(log_level, class_name, method, message)
    try:
        print(log_message)
        Logger(log_message)
    except Exception:
        # Fall back to raw UTF-8 bytes when the console or logger cannot
        # handle the text (previously a bare except, which also swallowed
        # SystemExit/KeyboardInterrupt).
        print(log_message.encode('utf-8'))

    # Note: the dispatch fires only for the exact string 'ERROR', unlike the
    # case-insensitive quiet check above.
    if level == 'ERROR':
        SIGNAL = 'publish-error-signal'
        sender = {}
        # message may be any iterable; join its stringified elements.
        new_message = ''.join(str(v) for v in message)
        dispatcher.send(signal=SIGNAL, sender=sender, message=new_message, class_name=class_name, method_name=method)
60 |
61 |
def write_to_file(path, text, open_func=open, mode="a"):
    """Write *text* to *path* (appending by default).

    *open_func* is injectable so tests can substitute a fake opener.
    """
    with open_func(path, mode) as handle:
        handle.write(text)
65 |
66 |
def get_files_of_type_from_directory(file_type, directory):
    """Return base names of regular files in *directory* whose lower-cased
    name ends with *file_type* (e.g. 'jar'), in os.listdir order.

    Directories are excluded.  The original implementation re-filtered the
    already-filtered list through os.path.isfile a second time; this does a
    single pass.
    """
    matches = []
    for entry in os.listdir(directory):
        full_path = os.path.join(directory, entry)
        # Regular files only; suffix match is case-insensitive on the name.
        if os.path.isfile(full_path) and full_path.lower().endswith(file_type):
            matches.append(entry)
    return matches
74 |
75 |
# TODO convert all popens that need decoding to call this
def execute_command(cmd):
    """Run *cmd* (an argv sequence; no shell) and return its combined
    stdout+stderr output decoded as UTF-8.

    Args:
        cmd: sequence of program arguments passed straight to Popen.

    Returns:
        str: everything the child wrote to stdout/stderr.
    """
    process = subprocess.Popen(cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    # output from subprocess is always a unicode bytearray and not ascii,
    # so decode it to a string.  communicate() reads to EOF *and* waits on
    # the child, so the process is reaped (the previous stdout.read() left
    # a zombie behind).
    output, _ = process.communicate()
    return output.decode("UTF8")
83 |
84 |
def verify_version(config):
    """Exit with an error when the build config carries no version number."""
    if config.version_number is None:
        print_msg(clazz, 'verify_version',
                  'Version not defined. Is your repo tagged with a version number?', 'ERROR')
        exit(1)
91 |
92 |
class DeploymentState(Enum):
    """Enumerates deployment outcome values ('fail' / 'success')."""
    failure = 'fail'
    success = 'success'
96 |
97 |
class Object:
    """Base class whose instances can serialize their attributes to JSON."""

    def to_JSON(self):
        """Return a pretty-printed JSON dump of this object's __dict__."""
        return json.dumps(
            self,
            default=lambda obj: obj.__dict__,
            sort_keys=False,
            indent=4,
        )
101 |
--------------------------------------------------------------------------------
/flow/zipit/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/flow/zipit/__init__.py
--------------------------------------------------------------------------------
/flow/zipit/zipit.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # zipit.py
3 |
4 | import tarfile
5 |
6 | import flow.utils.commons as commons
7 |
8 | from flow.artifactstorage.artifactory.artifactory import Artifactory
9 |
10 |
class ZipIt:
    """Tar up build contents and ship the archive to artifact storage."""

    clazz = 'ZipIt'

    def __init__(self, mode, name, contents):
        """Create the archive and, when *mode* is 'artifactory', publish it.

        Args:
            mode: storage backend selector; only 'artifactory' is handled.
            name: path/name of the archive to create.
            contents: file or directory to include in the archive.
        """
        method = '__init__'
        commons.print_msg(ZipIt.clazz, method, 'begin')

        # NOTE(review): stored on the class, not the instance — shared across
        # all ZipIt objects; confirm this is intentional.
        ZipIt.file_name = name

        if mode == 'artifactory':
            ZipIt.zip_contents = contents
            self._zip_it(name, contents)
            self._ship_it_artifactory(name)

        commons.print_msg(ZipIt.clazz, method, 'end')

    def _zip_it(self, name, contents):
        """Create a tar archive, named after the last path segment of *name*,
        containing *contents*; exits the process on failure."""
        method = '_zip_it'
        commons.print_msg(ZipIt.clazz, method, 'begin')

        file_with_path = name.split('/')

        try:
            with tarfile.open(file_with_path[-1], 'w') as tar:
                tar.add(contents, name)
        except FileNotFoundError as e:
            # Fixed: these errors were previously logged under
            # Artifactory.clazz, mislabeling the failing component.
            commons.print_msg(ZipIt.clazz, method, "Could not locate files to zip. {}".format(e), 'ERROR')
            exit(1)
        except Exception as e:
            commons.print_msg(ZipIt.clazz, method, "Failure during zip process: {}".format(e), 'ERROR')
            exit(1)

        commons.print_msg(ZipIt.clazz, method, 'end')

    def _ship_it_artifactory(self, name):
        """Publish the local archive to Artifactory under *name*."""
        method = '_ship_it_artifactory'
        commons.print_msg(ZipIt.clazz, method, 'begin')

        file_with_path = name.split('/')

        ar = Artifactory()
        ar.publish(file_with_path[-1], name)

        commons.print_msg(ZipIt.clazz, method, 'end')
55 |
--------------------------------------------------------------------------------
/images/PyCharm_Preferences.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/images/PyCharm_Preferences.png
--------------------------------------------------------------------------------
/images/PyCharm_Select_PyTest.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/images/PyCharm_Select_PyTest.png
--------------------------------------------------------------------------------
/images/iTerm-Flow_Output.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/images/iTerm-Flow_Output.png
--------------------------------------------------------------------------------
/images/iTerm_reg-ex.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/images/iTerm_reg-ex.png
--------------------------------------------------------------------------------
/images/iTerm_triggers.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/images/iTerm_triggers.png
--------------------------------------------------------------------------------
/images/pycharm_edit_config.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/images/pycharm_edit_config.jpg
--------------------------------------------------------------------------------
/images/pycharm_env_variables.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/images/pycharm_env_variables.png
--------------------------------------------------------------------------------
/images/tracker.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/images/tracker.png
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | PyDispatcher==2.0.5
2 | requests>=2.20.0
3 | pytest==2.9.2
4 | pytest-cov==2.12.1
5 | responses==0.8.1
6 | pytest-mock==1.2
--------------------------------------------------------------------------------
/scripts/buildtar.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Package the flow sources into ../dist/ci-python.tar.
# -p: do not fail when the dist directory already exists (plain mkdir
# returned non-zero on re-runs).
mkdir -p ../dist

tar -cvf ../dist/ci-python.tar -C flow .
--------------------------------------------------------------------------------
/scripts/buildwheel.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Build the flow wheel and publish it to the configured PyPI repository.

source "../ci/scripts/flow-env.sh"

# Write the current version (from GitHub) into the VERSION file that
# setup.py reads.
flow github -o VERSION getversion $ENVIRONMENT

# Pinned build/publish tooling.
pip3 install setuptools==38.5.2
pip3 install twine==1.10.0
pip3 install wheel==0.30.0

python3 setup.py bdist_wheel

# Credentials come from the environment (PYPI_USER / PYPI_PWD).
twine upload --config-file scripts/.pypirc -r local -u $PYPI_USER -p $PYPI_PWD dist/*
14 |
--------------------------------------------------------------------------------
/scripts/test-results/results.txt:
--------------------------------------------------------------------------------
1 | ============================= test session starts ==============================
2 | platform darwin -- Python 3.5.2, pytest-2.9.2, py-1.4.31, pluggy-0.3.1 -- /Users/nnp0333/.virtualenvs/flow/bin/python3.5
3 | cachedir: ../.cache
4 | rootdir: /Users/nnp0333/silverlining/flow/flow-2, inifile:
5 | plugins: mock-1.2
6 | collecting ...
7 | ========================= no tests ran in 0.00 seconds =========================
8 |
--------------------------------------------------------------------------------
/scripts/unittest.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Run the unit-test suite inside whichever known virtualenv is available.

echo "Setting up the environment via ~/.virtualenvs/flow/bin/activate"

# Try the known virtualenv locations in order of preference.
if [ -d /root/.virtualenvs ]; then
    . /root/.virtualenvs/ci/bin/activate
elif [ -d /.virtualenvs ] ; then
    . /.virtualenvs/ci/bin/activate
elif [ -d ~/.virtualenvs/flow ] ; then
    . ~/.virtualenvs/flow/bin/activate
elif [ -d ~/.virtualenvs/ci ] ; then
    . ~/.virtualenvs/ci/bin/activate
else
    # (typo fix: was "virutal")
    echo "Cannot locate virtual env directory"
    exit 1
fi

pip install -r requirements.txt
pip install -e flow/
pip list --format=columns

# (message fix: results actually land in test-results/, not test/)
echo "Executing the tests and placing the output in test-results/results.txt"
mkdir -p test-results
# the --capture=sys allows mocking of sys objects.
py.test -s -v ./tests --capture=sys | tee test-results/results.txt

# Propagate py.test's exit code rather than tee's.
ret_cd=${PIPESTATUS[0]}
if [ "${ret_cd}" == "0" ]; then
    echo "Unit Tests Passed"
else
    echo "Unit Tests Failed"
fi

deactivate

exit ${ret_cd}
37 |
--------------------------------------------------------------------------------
/scripts/unittest_continous.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Continuously re-run the unit tests every 15 seconds (poor-man's watch mode).
while :; do
    ./scripts/unittest.sh
    sleep 15
done
7 |
--------------------------------------------------------------------------------
/scripts/unittest_coverage.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Run the unit-test suite with coverage reporting (terminal + HTML) inside
# whichever known virtualenv is available.

echo "Setting up the environment via ~/.virtualenvs/flow/bin/activate"

# Try the known virtualenv locations in order of preference.
if [ -d /root/.virtualenvs ]; then
    . /root/.virtualenvs/ci/bin/activate
elif [ -d /.virtualenvs ] ; then
    . /.virtualenvs/ci/bin/activate
elif [ -d ~/.virtualenvs/flow ] ; then
    . ~/.virtualenvs/flow/bin/activate
elif [ -d ~/.virtualenvs/ci ] ; then
    . ~/.virtualenvs/ci/bin/activate
else
    # (typo fix: was "virutal")
    echo "Cannot locate virtual env directory"
    exit 1
fi

pip install -r requirements.txt
pip install -e flow/
pip list --format=columns

# (message fix: results actually land in test-results/, not test/)
echo "Executing the tests and placing the output in test-results/results.txt"
mkdir -p test-results
# the --capture=sys allows mocking of sys objects.
py.test -s -v --cov-report term-missing --cov-report html:test-results/coverage --cov=flow ./tests --capture=sys | tee test-results/results.txt

# Propagate py.test's exit code rather than tee's.
ret_cd=${PIPESTATUS[0]}
if [ "${ret_cd}" == "0" ]; then
    echo "Unit Tests Passed"
else
    echo "Unit Tests Failed"
fi

deactivate

exit ${ret_cd}
37 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | # from distutils.core import setup
2 | from setuptools import find_packages, setup
3 | import os
4 |
# Package metadata for distributing flow as a wheel/sdist.
setup(name='THD-Flow',
      # Version is read from a VERSION file when present (written during the
      # build); falls back to 'UNKNOWN' for local installs.
      version=open('VERSION').read().strip() if os.path.isfile('VERSION') else 'UNKNOWN',
      description='A modular CI CLI providing versioning, quality analysis, security analysis, packaging, building, '
                  'deployment and communication',
      author='Andrew Turner',
      author_email='andrew_m_turner@homedepot.com',
      url='https://www.homedepot.com',
      license='Apache',
      keywords='ci cd continuous integration deployment delivery flow jenkins concourse',
      # Ship only the flow package itself; exclude repo scaffolding.
      packages=find_packages(exclude=['images', 'scripts', 'test-results', 'tests*']),
      # Pinned runtime dependencies.
      install_requires = ["appdirs==1.4.3",
                          "argh==0.26.2",
                          "colorama==0.3.7",
                          "colorlog==2.10.0",
                          "cookies==2.2.1",
                          "docopt==0.6.2",
                          "gitdb==0.6.4",
                          "GitPython==2.0.2",
                          "mock==2.0.0",
                          "multi-key-dict==2.0.3",
                          "packaging==16.8",
                          "pathtools==0.1.2",
                          "pbr==1.10.0",
                          "py==1.10.0",
                          "PyDispatcher==2.0.5",
                          "pyparsing==2.2.0",
                          "pytest==2.9.2",
                          "pytest-mock==1.2",
                          "pytest-watch==4.1.0",
                          "python-jenkins==0.4.13",
                          "PyYAML>=4.2b1",
                          "requests>=2.20.0",
                          "requests-toolbelt==0.7.1",
                          "responses==0.5.1",
                          "six==1.10.0",
                          "smmap==0.9.0",
                          "splunk-handler==1.1.3",
                          "urllib3==1.26.5",
                          "watchdog==0.8.3",
                          ],
      # Bundle the default settings file alongside the package.
      data_files=['flow/settings.ini'],
      include_package_data=True,
      entry_points={
          'console_scripts': [
              # Expose the CLI as the `flow` command.
              'flow=flow.aggregator:main',
          ],
      },
      )
53 |
--------------------------------------------------------------------------------
/sonar-project.properties:
--------------------------------------------------------------------------------
1 | #TODO provide sonar host
2 | sonar.host.url=
3 |
4 | #----- Default source code encoding
5 | sonar.sourceEncoding=UTF-8
6 |
7 | sonar.sources=flow
8 | sonar.exclusions=
9 | sonar.projectBaseDir=.
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/tests/__init__.py
--------------------------------------------------------------------------------
/tests/artifactstorage/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/tests/artifactstorage/__init__.py
--------------------------------------------------------------------------------
/tests/artifactstorage/artifactory/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/tests/artifactstorage/artifactory/__init__.py
--------------------------------------------------------------------------------
/tests/artifactstorage/artifactory/test_artifactory.py:
--------------------------------------------------------------------------------
1 | import os
2 | import configparser
3 | from unittest.mock import MagicMock
4 | from unittest.mock import patch
5 |
6 | import pytest
7 | import responses
8 | from flow.buildconfig import BuildConfig
9 | from requests.exceptions import HTTPError
10 |
11 | from flow.artifactstorage.artifactory.artifactory import Artifactory, ArtifactException
12 |
# ---------------------------------------------------------------------------
# Fixture data shared by the tests below.
# ---------------------------------------------------------------------------

# buildConfig with the standard 'artifact' section.
mock_build_config_dict = {
    "projectInfo": {
        "name": "testproject"
    },
    "artifact": {
        "artifactoryDomain": "https://testdomain/artifactory",
        "artifactoryRepoKey": "release-repo",
        "artifactoryRepoKeySnapshot": "snapshot-repo",
        "artifactoryGroup": "group",
        "artifactType": "type",
        "artifactDirectory": "directory"
    },
    "environments": {
        "unittest": {
            "artifactCategory": "release"
        }
    },
    "slack": {
        "botName": "Flow",
        "emoji": ":robot_face:",
        "channel": "#spigot-ci"
    }
}

# Same shape but using the 'artifactoryConfig' section name, with includePom.
mock_build_config_artifactoryConfig_include_POM = {
    "projectInfo": {
        "name": "testproject"
    },
    "artifactoryConfig": {
        "artifactoryDomain": "https://testdomain/artifactory",
        "artifactoryRepoKey": "release-repo",
        "artifactoryRepoKeySnapshot": "snapshot-repo",
        "artifactoryGroup": "group",
        "artifactType": "type",
        "artifactDirectory": "directory",
        "includePom": "true"
    },
    "environments": {
        "unittest": {
            "artifactCategory": "release"
        }
    },
    "slack": {
        "botName": "Flow",
        "emoji": ":robot_face:",
        "channel": "#spigot-ci"
    }
}



# buildConfig with no artifact/artifactoryConfig section at all.
mock_build_config_missing_artifact_dict = {
    "projectInfo": {
        "name": "testproject"
    },
    "environments": {
        "unittest": {
            "artifactCategory": "release"
        }
    },
    "slack": {
        "botName": "Flow",
        "emoji": ":robot_face:",
        "channel": "#spigot-ci"
    }
}
# Artifactory storage-API listing with two matching file children
# (.bob and .vcl) plus one folder child.
response_body_artifactory = """
{
  "repo" : "release-repo",
  "path" : "/group/testproject/v1.0.0",
  "created" : "2016-09-09T13:02:49.851-04:00",
  "createdBy" : "svc_cicd",
  "lastModified" : "2016-09-09T13:02:49.851-04:00",
  "modifiedBy" : "svc_cicd",
  "lastUpdated" : "2016-09-09T13:02:49.851-04:00",
  "children" : [ {
    "uri" : "/unittest",
    "folder" : true
  }, {
    "uri" : "/testproject.bob",
    "folder" : false
  },
  {
    "uri" : "/testproject.vcl",
    "folder" : false
  }
  ],
  "uri" : "https://maven.artifactory.fake.com/artifactory/api/storage/libs-release-local/com/fake/thd-store-info-service/v1.2.0"
}
"""

# Listing whose only file child has an extension the fixtures never request.
response_body_artifactory_no_matching_children = """
{
  "repo" : "release-repo",
  "path" : "/group/testproject/v1.0.0",
  "created" : "2016-09-09T13:02:49.851-04:00",
  "createdBy" : "svc_cicd",
  "lastModified" : "2016-09-09T13:02:49.851-04:00",
  "modifiedBy" : "svc_cicd",
  "lastUpdated" : "2016-09-09T13:02:49.851-04:00",
  "children" : [ {
    "uri" : "/unittest",
    "folder" : true
  }, {
    "uri" : "/testproject.nonexistenttype",
    "folder" : false
  } ],
  "uri" : "https://maven.artifactory.fake.com/artifactory/api/storage/libs-release-local/com/fake/thd-store-info-service/v1.2.0"
}
"""

# Listing with an empty children array.
response_body_artifactory_no_children = """
{
  "repo" : "release-repo",
  "path" : "/group/testproject/v1.0.0",
  "created" : "2016-09-09T13:02:49.851-04:00",
  "createdBy" : "svc_cicd",
  "lastModified" : "2016-09-09T13:02:49.851-04:00",
  "modifiedBy" : "svc_cicd",
  "lastUpdated" : "2016-09-09T13:02:49.851-04:00",
  "children" : [ ],
  "uri" : "https://maven.artifactory.fake.com/artifactory/api/storage/libs-release-local/com/fake/thd-store-info-service/v1.2.0"
}
"""

# 404 error payload as returned by Artifactory.
response_body_artifactory_not_found = """
{
  "errors" : [ {
    "status" : 404,
    "message" : "Unable to find item"
  } ]
}
"""
146 |
147 |
# noinspection PyUnresolvedReferences
@responses.activate
def test_get_urls_of_artifacts(monkeypatch):
    """get_urls_of_artifacts builds one URL per configured extension and sends
    an Authorization header only once ARTIFACTORY_USER/ARTIFACTORY_TOKEN are
    set in the environment."""
    # Stub BuildConfig with the fixture values the Artifactory code reads.
    _b = MagicMock(BuildConfig)
    _b.build_env_info = mock_build_config_dict['environments']['unittest']
    _b.json_config = mock_build_config_dict
    _b.project_name = mock_build_config_dict['projectInfo']['name']
    _b.version_number = 'v1.0.0'
    _b.artifact_extension = None
    _b.artifact_extensions = ["bob", "vcl"]
    art = Artifactory(config_override=_b)

    test_url = "https://testdomain/artifactory/api/storage/release-repo/group/testproject/v1.0.0"

    # Intercept the storage-API GET with the two-children fixture payload.
    responses.add(responses.GET,
                  test_url,
                  body=response_body_artifactory,
                  status=200,
                  content_type="application/json")

    urls = art.get_urls_of_artifacts()

    # One invocation issues two GETs (asserted here); no credentials are in
    # the environment yet, so no Authorization header may be present.
    assert len(responses.calls) == 2
    assert 'Authorization' not in responses.calls[0].request.headers
    assert 'Authorization' not in responses.calls[1].request.headers
    assert urls == ["https://testdomain/artifactory/release-repo/group/testproject/v1.0.0/testproject.bob",
                    "https://testdomain/artifactory/release-repo/group/testproject/v1.0.0/testproject.vcl"]

    # With credentials exported, every subsequent request must carry the
    # bearer token.
    artifactory_token = 'fake_token_a'
    monkeypatch.setenv('ARTIFACTORY_USER', 'fake_user')
    monkeypatch.setenv('ARTIFACTORY_TOKEN', artifactory_token)

    urls = art.get_urls_of_artifacts()

    assert len(responses.calls) == 4
    assert responses.calls[2].request.headers['Authorization'] == 'Bearer ' + artifactory_token
    assert responses.calls[3].request.headers['Authorization'] == 'Bearer ' + artifactory_token
    assert urls == ["https://testdomain/artifactory/release-repo/group/testproject/v1.0.0/testproject.bob", "https://testdomain/artifactory/release-repo/group/testproject/v1.0.0/testproject.vcl"]
186 |
187 |
# noinspection PyUnresolvedReferences
@responses.activate
def test_get_artifact_url(monkeypatch):
    """Single-extension artifact URL: anonymous, then X-Api-Key auth.

    With no user configured (empty [artifactory] settings section) the
    request is anonymous; once ARTIFACTORY_TOKEN is exported the request
    must carry the token in the X-Api-Key header.
    """
    config = MagicMock(BuildConfig)
    config.build_env_info = mock_build_config_dict['environments']['unittest']
    config.json_config = mock_build_config_dict
    config.project_name = mock_build_config_dict['projectInfo']['name']
    config.version_number = 'v1.0.0'
    config.artifact_extension = 'bob'
    config.artifact_extensions = None
    settings = configparser.ConfigParser()
    settings.add_section('artifactory')
    config.settings = settings
    artifactory = Artifactory(config_override=config)

    storage_url = "https://testdomain/artifactory/api/storage/release-repo/group/testproject/v1.0.0"
    responses.add(responses.GET,
                  storage_url,
                  body=response_body_artifactory,
                  status=200,
                  content_type="application/json")

    expected = "https://testdomain/artifactory/release-repo/group/testproject/v1.0.0/testproject.bob"

    # No token exported yet: the request must be anonymous.
    assert artifactory.get_artifact_url() == expected
    assert len(responses.calls) == 1
    assert 'X-Api-Key' not in responses.calls[0].request.headers

    artifactory_token = 'fake_token_b'
    monkeypatch.setenv('ARTIFACTORY_TOKEN', artifactory_token)

    # Token without a user: the token travels as X-Api-Key.
    assert artifactory.get_artifact_url() == expected
    assert len(responses.calls) == 2
    assert responses.calls[1].request.headers['X-Api-Key'] == artifactory_token
223 |
224 |
# noinspection PyUnresolvedReferences
@responses.activate
def test_get_artifact_with_include_pom(monkeypatch):
    """With a settings-file user plus ARTIFACTORY_TOKEN, requests use HTTP
    Basic auth; the asserted header is base64('fake_user:fake_token_c')."""
    config = MagicMock(BuildConfig)
    config.build_env_info = mock_build_config_artifactoryConfig_include_POM['environments']['unittest']
    config.json_config = mock_build_config_artifactoryConfig_include_POM
    config.project_name = mock_build_config_artifactoryConfig_include_POM['projectInfo']['name']
    config.include_pom = mock_build_config_artifactoryConfig_include_POM['artifactoryConfig']['includePom']
    config.version_number = 'v1.0.0'
    config.artifact_extension = 'bob'
    config.artifact_extensions = None
    settings = configparser.ConfigParser()
    settings.add_section('artifactory')
    settings.set('artifactory', 'user', 'fake_user')
    config.settings = settings
    artifactory = Artifactory(config_override=config)

    storage_url = "https://testdomain/artifactory/api/storage/release-repo/group/testproject/v1.0.0"
    responses.add(responses.GET,
                  storage_url,
                  body=response_body_artifactory,
                  status=200,
                  content_type="application/json")

    expected = "https://testdomain/artifactory/release-repo/group/testproject/v1.0.0/testproject.bob"

    # No token in the environment: anonymous request.
    assert artifactory.get_artifact_url() == expected
    assert len(responses.calls) == 1
    assert 'Authorization' not in responses.calls[0].request.headers

    artifactory_token = 'fake_token_c'
    monkeypatch.setenv('ARTIFACTORY_TOKEN', artifactory_token)

    # settings user + env token -> Basic auth credentials.
    assert artifactory.get_artifact_url() == expected
    assert len(responses.calls) == 2
    assert responses.calls[1].request.headers['Authorization'] == 'Basic ZmFrZV91c2VyOmZha2VfdG9rZW5fYw=='
262 |
263 |
# noinspection PyUnresolvedReferences
@responses.activate
def test_get_artifact_url_failure():
    """A transport-level HTTPError surfaces as ArtifactException and the
    failing storage path is logged at ERROR level."""
    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
        config = MagicMock(BuildConfig)
        config.build_env_info = mock_build_config_dict['environments']['unittest']
        config.json_config = mock_build_config_dict
        config.project_name = mock_build_config_dict['projectInfo']['name']
        config.version_number = 'v1.0.0'

        artifactory = Artifactory(config_override=config)

        storage_url = "https://testdomain/artifactory/api/storage/release-repo/group/testproject/v1.0.0"

        # responses raises the given exception instead of returning a body.
        responses.add(responses.GET,
                      storage_url,
                      body=HTTPError('Something went wrong'))

        with pytest.raises(ArtifactException):
            artifactory.get_artifact_url()

        print(str(mock_printmsg_fn.mock_calls))
        mock_printmsg_fn.assert_called_with('Artifactory', 'get_artifact_url', 'Unable to locate artifactory path https://testdomain/artifactory/api/storage/release-repo/group/testproject/v1.0.0', 'ERROR')
289 |
290 |
# noinspection PyUnresolvedReferences
@responses.activate
def test_get_artifact_url_not_found():
    """A 404 from the storage API raises ArtifactException and the logged
    message includes the raw response body."""
    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
        config = MagicMock(BuildConfig)
        config.build_env_info = mock_build_config_dict['environments']['unittest']
        config.json_config = mock_build_config_dict
        config.project_name = mock_build_config_dict['projectInfo']['name']
        config.version_number = 'v1.0.0'

        artifactory = Artifactory(config_override=config)

        storage_url = "https://testdomain/artifactory/api/storage/release-repo/group/testproject/v1.0.0"
        responses.add(responses.GET,
                      storage_url,
                      body=response_body_artifactory_not_found,
                      status=404,
                      content_type="application/json")

        with pytest.raises(ArtifactException):
            artifactory.get_artifact_url()

        print(str(mock_printmsg_fn.mock_calls))
        # Expected message embeds the 404 fixture body verbatim.
        mock_printmsg_fn.assert_called_with('Artifactory', 'get_artifact_url', 'Unable to locate artifactory path https://testdomain/artifactory/api/storage/release-repo/group/testproject/v1.0.0\r\n Response: \n{\n "errors" : [ {\n "status" : 404,\n "message" : "Unable to find item"\n } ]\n}\n', 'ERROR')
317 |
318 |
# noinspection PyUnresolvedReferences
@responses.activate
def test_get_artifact_url_specified_type_does_not_exist():
    """A 200 listing that contains no child with the requested extension
    ('bob') must raise ArtifactException and log the miss."""
    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
        config = MagicMock(BuildConfig)
        config.build_env_info = mock_build_config_dict['environments']['unittest']
        config.json_config = mock_build_config_dict
        config.project_name = mock_build_config_dict['projectInfo']['name']
        config.version_number = 'v1.0.0'
        config.artifact_extension = 'bob'

        artifactory = Artifactory(config_override=config)

        storage_url = "https://testdomain/artifactory/api/storage/release-repo/group/testproject/v1.0.0"
        responses.add(responses.GET,
                      storage_url,
                      body=response_body_artifactory_no_matching_children,
                      status=200,
                      content_type="application/json")

        with pytest.raises(ArtifactException):
            artifactory.get_artifact_url()

        print(str(mock_printmsg_fn.mock_calls))
        mock_printmsg_fn.assert_called_with('Artifactory', 'get_artifact_url', 'Could not locate artifact bob',
                                            'ERROR')
347 |
348 |
# noinspection PyUnresolvedReferences
@responses.activate
def test_get_artifact_url_specified_path_has_no_children():
    """A 200 listing with an empty "children" array must raise
    ArtifactException and log the same 'Could not locate artifact' error."""
    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
        config = MagicMock(BuildConfig)
        config.build_env_info = mock_build_config_dict['environments']['unittest']
        config.json_config = mock_build_config_dict
        config.project_name = mock_build_config_dict['projectInfo']['name']
        config.version_number = 'v1.0.0'
        config.artifact_extension = 'bob'
        config.artifact_extensions = None

        artifactory = Artifactory(config_override=config)

        storage_url = "https://testdomain/artifactory/api/storage/release-repo/group/testproject/v1.0.0"
        responses.add(responses.GET,
                      storage_url,
                      body=response_body_artifactory_no_children,
                      status=200,
                      content_type="application/json")

        with pytest.raises(ArtifactException):
            artifactory.get_artifact_url()

        print(str(mock_printmsg_fn.mock_calls))
        mock_printmsg_fn.assert_called_with('Artifactory', 'get_artifact_url', 'Could not locate artifact bob',
                                            'ERROR')
378 |
379 |
def test__get_artifactory_file_name_directory_not_defined(monkeypatch):
    """_get_artifactory_files_name_from_build_dir() must exit with an error
    when ARTIFACT_BUILD_DIRECTORY is not set.

    Improvement: the previous ``if os.getenv(...)`` guard around
    ``monkeypatch.delenv`` is replaced by ``delenv(..., raising=False)`` —
    the pytest-idiomatic way to say "ensure absent", with no separate
    existence check, and the variable is still restored after the test.
    """
    _b = MagicMock(BuildConfig)
    _b.build_env_info = mock_build_config_dict['environments']['unittest']
    _b.json_config = mock_build_config_dict
    _b.project_name = mock_build_config_dict['projectInfo']['name']
    _b.version_number = 'v1.0.0'

    art = Artifactory(config_override=_b)

    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
        # raising=False tolerates the variable already being unset.
        monkeypatch.delenv('ARTIFACT_BUILD_DIRECTORY', raising=False)

        with pytest.raises(SystemExit):
            art._get_artifactory_files_name_from_build_dir()

        print(str(mock_printmsg_fn.mock_calls))
        mock_printmsg_fn.assert_called_with('Artifactory', '_get_artifactory_files_name_from_build_dir', 'Missing artifact build path. Did you forget to define the environment variable \'ARTIFACT_BUILD_DIRECTORY\'? ', 'ERROR')
398 |
399 |
400 | # def foo(self, test1, test2):
401 | # print(test1)
402 | # print(test2)
403 | # pass
404 |
405 | # @patch('utils.commons.CommonUtils.get_files_of_type_from_directory', new=foo)
def test__get_artifactory_files_name_no_artifact_found(monkeypatch):
    """When the build directory contains no file of the wanted type, the
    lookup must exit and log 'Failed to find artifact of type bob in mydir'."""
    config = MagicMock(BuildConfig)
    config.build_env_info = mock_build_config_dict['environments']['unittest']
    config.json_config = mock_build_config_dict
    config.project_name = mock_build_config_dict['projectInfo']['name']
    config.version_number = 'v1.0.0'
    config.artifact_extension = 'bob'
    config.artifact_extensions = None

    artifactory = Artifactory(config_override=config)

    def _stub_get_files_of_type_from_directory(filetype, directory):
        # Echo the arguments and return nothing, simulating a build
        # directory with no matching artifacts.
        print(filetype)
        print(directory)

    with patch('flow.utils.commons.get_files_of_type_from_directory', new=_stub_get_files_of_type_from_directory):
        with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
            monkeypatch.setenv('ARTIFACT_BUILD_DIRECTORY', 'mydir')

            with pytest.raises(SystemExit):
                artifactory._get_artifactory_files_name_from_build_dir()

            print(str(mock_printmsg_fn.mock_calls))
            mock_printmsg_fn.assert_called_with('Artifactory', '_get_artifactory_files_name_from_build_dir',
                                                'Failed to find artifact of type bob in mydir', 'ERROR')
431 |
def test_get_artifact_home_url_no_defined_version():
    """get_artifact_home_url() exits when the build has no version number."""
    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
        with pytest.raises(SystemExit):
            config = MagicMock(BuildConfig)
            config.build_env_info = mock_build_config_dict['environments']['unittest']
            config.json_config = mock_build_config_dict
            config.project_name = mock_build_config_dict['projectInfo']['name']
            config.version_number = None

            artifactory = Artifactory(config_override=config)

            artifactory.get_artifact_home_url()
        print(str(mock_printmsg_fn.mock_calls))
        # Single literal == the original's implicit string concatenation.
        mock_printmsg_fn.assert_called_with('commons', 'verify_version',
                                            'Version not defined. Is your repo tagged with a version number?',
                                            'ERROR')
448 |
def test_download_and_extract_artifacts_locally_no_defined_version():
    """download_and_extract_artifacts_locally() exits when the build has no
    version number."""
    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
        with pytest.raises(SystemExit):
            config = MagicMock(BuildConfig)
            config.build_env_info = mock_build_config_dict['environments']['unittest']
            config.json_config = mock_build_config_dict
            config.project_name = mock_build_config_dict['projectInfo']['name']
            config.version_number = None

            artifactory = Artifactory(config_override=config)

            artifactory.download_and_extract_artifacts_locally('download_dir')
        print(str(mock_printmsg_fn.mock_calls))

        # Single literal == the original's implicit string concatenation.
        mock_printmsg_fn.assert_called_with('commons', 'verify_version',
                                            'Version not defined. Is your repo tagged with a version number?',
                                            'ERROR')
466 |
def test_init_missing_artifactory():
    """Constructing Artifactory from a build config that lacks the 'artifact'
    key must exit and log the missing-key error."""
    config = MagicMock(BuildConfig)
    config.json_config = mock_build_config_missing_artifact_dict

    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
        with pytest.raises(SystemExit):
            Artifactory(config_override=config)

        mock_printmsg_fn.assert_called_with('Artifactory', '__init__', "The build config associated with artifactory is missing key 'artifact'", 'ERROR')
476 |
477 |
478 |
--------------------------------------------------------------------------------
/tests/cloud/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/tests/cloud/__init__.py
--------------------------------------------------------------------------------
/tests/cloud/cloudfoundry/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/tests/cloud/cloudfoundry/__init__.py
--------------------------------------------------------------------------------
/tests/cloud/gcappengine/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/tests/cloud/gcappengine/__init__.py
--------------------------------------------------------------------------------
/tests/cloud/gcappengine/test_gcappengine.py:
--------------------------------------------------------------------------------
1 | import os
2 | from unittest.mock import patch
3 | import pytest
4 | import subprocess
5 | from flow.cloud.gcappengine.gcappengine import GCAppEngine
6 | from unittest.mock import MagicMock
7 | from flow.buildconfig import BuildConfig
8 |
# In-memory stand-in for a parsed buildConfig.json, shared by the GCAppEngine
# tests below; only the 'environments.develop' branch and 'projectInfo.name'
# are read by these tests.
mock_build_config_dict = {
    "projectInfo": {
        "name": "MyProjectName",
        "language": "java",
        "versionStrategy": "tracker"
    },
    "artifact": {
        "artifactoryDomain": "https://maven.artifactory.fake.com/artifactory",
        "artifactoryRepoKey": "libs-release-local",
        "artifactoryRepoKeySnapshot": "libs-snapshot-local",
        "artifactoryGroup": "com/fake/team",
        "artifactType": "tar.gz"
    },
    "github": {
        "org": "Org-GitHub",
        "repo": "Repo-GitHub",
        "URL": "https://github.fake.com/api/v3/repos"
    },
    "tracker": {
        "projectId": 2222222,
        "url": "https://www.pivotaltracker.com"
    },
    "slack": {
        "channel": "fake-deployments",
        "botName": "DeployBot",
        "emoji": ":package:"
    },
    "environments": {
        "develop": {
            "cf": {
                "apiEndpoint": "api.run-np.fake.com",
                "domain": "apps-np.fake.com",
                "space": "develop",
                "org": "org-awesome"
            },
            "artifactCategory": "snapshot",
            "associatedBranchName": "develop"
        }
    }
}
49 |
def test_init_missing_json_login(monkeypatch):
    """deploy() must exit when GCAPPENGINE_USER_JSON is absent from the env.

    Improvement: the previous ``if os.getenv(...)`` guard around
    ``monkeypatch.delenv`` is replaced by ``delenv(..., raising=False)`` —
    the pytest-idiomatic "ensure absent" form, with automatic restoration
    after the test.
    """
    monkeypatch.delenv('GCAPPENGINE_USER_JSON', raising=False)

    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
        with pytest.raises(SystemExit):
            _gcAppEngine = GCAppEngine()
            _gcAppEngine.deploy()

        # NOTE(review): '_verfify_required_attributes' appears misspelled but
        # must match the production method name — confirm before "fixing".
        mock_printmsg_fn.assert_called_with('GCAppEngine', '_verfify_required_attributes', 'Credentials not loaded. Please define '
                                            'environment variable '
                                            '\'GCAPPENGINE_USER_JSON\'', 'ERROR')
62 |
def test_no_promote(monkeypatch):
    """_gcloud_deploy with promote=False must build a gcloud command ending in
    --no-promote, with dots in the version converted to dashes."""
    monkeypatch.setenv("PROMOTE", "false")

    config = MagicMock(BuildConfig)
    config.push_location = 'fordeployment'
    config.build_env_info = mock_build_config_dict['environments']['develop']
    config.json_config = mock_build_config_dict
    config.project_name = mock_build_config_dict['projectInfo']['name']
    config.version_number = 'v1.0.0'

    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
        with patch.object(subprocess, 'Popen') as mocked_popen:
            # Fake a successful gcloud invocation.
            mocked_popen.return_value.returncode = 0
            mocked_popen.return_value.communicate.return_value = ("EVERYTHING IS AWESOME", 'FAKE_RETURN')
            app_engine = GCAppEngine(config_override=config)
            app_engine._gcloud_deploy('dummy.yml', promote=False)

        # Single literal == the original's implicit string concatenation.
        mock_printmsg_fn.assert_any_call('GCAppEngine', '_gcloud_deploy',
                                         'gcloud app deploy fordeployment/dummy.yml --quiet --version v1-0-0 --no-promote')
82 |
83 |
def test_promote(monkeypatch):
    """_gcloud_deploy with promote=True must build a gcloud command ending in
    --promote (the PROMOTE env var is set as in test_no_promote; the explicit
    promote=True argument drives the behavior asserted here)."""
    monkeypatch.setenv("PROMOTE", "false")

    config = MagicMock(BuildConfig)
    config.push_location = 'fordeployment'
    config.build_env_info = mock_build_config_dict['environments']['develop']
    config.json_config = mock_build_config_dict
    config.project_name = mock_build_config_dict['projectInfo']['name']
    config.version_number = 'v1.0.0'

    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
        with patch.object(subprocess, 'Popen') as mocked_popen:
            # Fake a successful gcloud invocation.
            mocked_popen.return_value.returncode = 0
            mocked_popen.return_value.communicate.return_value = ("EVERYTHING IS AWESOME", 'FAKE_RETURN')
            app_engine = GCAppEngine(config_override=config)
            app_engine._gcloud_deploy('dummy.yml', promote=True)

        mock_printmsg_fn.assert_any_call('GCAppEngine', '_gcloud_deploy', 'gcloud app deploy fordeployment/dummy.yml --quiet --version v1-0-0 --promote')
102 |
--------------------------------------------------------------------------------
/tests/coderepo/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/tests/coderepo/__init__.py
--------------------------------------------------------------------------------
/tests/coderepo/github/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/tests/coderepo/github/__init__.py
--------------------------------------------------------------------------------
/tests/coderepo/github/git_command_raw_history_output.txt:
--------------------------------------------------------------------------------
1 | &&&98db8af&&&@@@Update README for setup and testing instructions@@@
2 | &&&da1bc5e&&&@@@refactor all the mdm tests@@@
3 | &&&20d837b&&&@@@restructure artifactory tests@@@
4 | &&&97b4e72&&&@@@first working pass with minimal changes@@@
5 | &&&adb2746&&&@@@Merge pull request #35 from fake/snapshot-manual-versioning@@@
6 | &&&482263d&&&@@@Adding double % signs to fix syntax@@@
7 | &&&12eb7ee&&&@@@Merge pull request #34 from fake/snapshot-manual-versioning@@@
8 | &&&808b0d0&&&@@@Find snapshot releases that match version_number@@@
9 | &&&3fb35b2&&&@@@Allow snapshot builds to set version_number@@@
10 | &&&2453021&&&@@@Merge pull request #30 from fake/patch-2@@@
11 | &&&f7d9258&&&@@@Adding MDM to tasks_requiring_github@@@
12 | &&&cfa4aeb&&&@@@Merge pull request #29 from fake/patch-1@@@
13 | &&&950893f&&&@@@Added cut to git for-each-ref calls to avoid pulling back the timezone offset.@@@
14 | &&&c67b3e0&&&@@@fixed bug with snapshot version not being pulled as latest when there was a recent release version. [#134114167]@@@
15 | &&&0763f7a&&&@@@Merge pull request #27 from ci-cd/artifactory-bug-fix-134082057@@@
16 | &&&00431a9&&&@@@correcting priority of checking environment variables@@@
17 | &&&223342f&&&@@@Adding ability to specify artifactory user [#134082057]@@@
18 | &&&4326d00&&&@@@Adding slack channel option for errors [#130798449]@@@
19 | &&&09c1983&&&@@@Merge pull request #25 from ci-cd/revert-18-github-version-fix@@@
20 | &&&445fd02&&&@@@Revert "GitHub version fix"@@@
21 | &&&65c05e9&&&@@@Update buildConfig.json@@@
22 | &&&623d518&&&@@@Merge pull request #18 from ci-cd/github-version-fix@@@
23 | &&&3ca339f&&&@@@removing virutalenv that snuck in@@@
24 | &&&9803e38&&&@@@fixed parsing issue of github tag@@@
25 | &&&8a3f3e7&&&@@@cleaning up how we find the latest tag@@@
26 | &&&7b19416&&&@@@reorganized code to make diff prettier@@@
27 | &&&2e1e149&&&@@@refactored github to deal with +'s in the timezone offset@@@
28 | &&&0ae49fa&&&@@@fixed bug requring snowfield user [#133455353]@@@
29 | &&&7fc6615&&&@@@moved artifactory key to environment variable@@@
30 | &&&e6cc51a&&&@@@Moved github api to env variable@@@
31 | &&&fadad65&&&@@@removed print in favor of commons.print_msg [#133448693]@@@
32 | &&&e6bcbdf&&&@@@moved all snowfield url's to settings.ini and added catch for token [#133446639]@@@
33 | &&&4c9b4d2&&&@@@Now you should use ARTIFACTORY_TOKEN env variable instead of settings.ini or buildConfig.json [#133445393]@@@
34 | &&&55b2373&&&@@@No longer using fake by default for github api token. Use GITHUB_TOKEN pwd parameter or secrets file to inject [#133436887]@@@
35 | &&&eabab9b&&&@@@No longer using fake by default for github api token. Use GITHUB_TOKEN pwd parameter or secrets file to inject [#133436887]@@@
36 | &&&894a65c&&&@@@Merge pull request #15 from mc331w/patch-1@@@
37 | &&&8fb551c&&&@@@Changing fortify email address [#132965521]@@@
38 | &&&f57349f&&&@@@adding sqp link to cr [#133064795]@@@
39 | &&&a247b37&&&@@@Fixing bug with SQP and cr task@@@
40 | &&&30c3ac0&&&@@@Merge branch 'develop' of https://github.fake.com/ci-cd/flow into develop@@@
41 | &&&99d6a7e&&&@@@fixing issue calling cr creation/close@@@
42 | &&&b30dd71&&&@@@Updated the descriptions of the values needed to run MDM.@@@
43 | &&&44c9c07&&&@@@fixing bug where a task requiring github also requires a version arg@@@
44 | &&&58ef4f2&&&@@@testing with random buid number to see if that's why sqp is appending results@@@
45 | &&&bc0cc67&&&@@@fixed bug wth completed tasks without reports [#132967929]@@@
46 | &&&7239b99&&&@@@SQP doesn't allow plus sign in version. Results weren't posting but no error was thrown. [#132967211]@@@
47 | &&&90543d6&&&@@@Adding support for uploading to software quality portal from flow [#132964471]@@@
48 | &&&029a13e&&&@@@Adding support for uploading to software quality portal from flow [#132964471]@@@
49 | &&&a94ce59&&&@@@Adding support for uploading to software quality portal from flow [#132964471]@@@
50 | &&&17f0b3e&&&@@@Merge pull request #14 from ci-cd/subcommand-fix@@@
51 | &&&6e5b5c4&&&@@@fixing slack task bug@@@
52 | &&&44e3f73&&&@@@Merge pull request #13 from ci-cd/subcommand-fix@@@
53 | &&&ed10e85&&&@@@Merge branch 'develop' of github.fake.com:ci-cd/flow into develop@@@
54 | &&&303bce4&&&@@@triggering build to validate https://github.fake.com/ci-cd/concourse-common/commit/32c801e9ad4c1ba2e74157f0a8838c573fe885ab@@@
55 | &&&38333d2&&&@@@updating tasks names@@@
56 | &&&b31caf4&&&@@@adding back change that vanished@@@
57 | &&&811b151&&&@@@adding back change that vanished@@@
58 | &&&6574d69&&&@@@Merge pull request #11 from ci-cd/subcommand-fix@@@
59 | &&&405186b&&&@@@fixing merge conflict@@@
60 | &&&c968da6&&&@@@putting proper branch name back@@@
61 |
--------------------------------------------------------------------------------
/tests/coderepo/github/git_command_raw_history_output_multiline.txt:
--------------------------------------------------------------------------------
1 | &&&838f192&&&@@@Fix sync time queries [Finishes] (#53)[Finishes #145753803]@@@
2 | &&&01ee887&&&@@@set MasterDataCommon to use latest version to fix sync time issue [Finishes] (#52)[Finishes #145560051]@@@
3 | &&&4e17d72&&&@@@final fix [ci skip]@@@
4 | &&&f4044b8&&&@@@another fix [ci skip]@@@
5 | &&&381b760&&&@@@[ci skip] fixed readme@@@
6 | &&&ba38106&&&@@@Merge pull request #51 from fake/chore/spring-boot-1.5.3-145272703Welcome to Spring Boot 1.5.3@@@
7 | &&&c2da381&&&@@@Welcome to Spring Boot 1.5.3
8 | further cleanup of pipeline given removal of latest-dev branch
9 | removing latest-dev branch
10 | catch latest-dev up with master (#86)
11 | updating fly_commands shell script [#123555544]
12 | github custom version make executable :disappointed:
13 | @@@
14 | &&&83e4098&&&@@@Merge pull request #50 from fake/build-config-#144197567added buildConfig.json@@@
15 | &&&6379fc8&&&@@@added buildConfig.json[#144197567]
16 | @@@
17 | &&&230b7d2&&&@@@add q3 to DB2 support@@@
18 | &&&c8abe35&&&@@@version set for spring security web@@@
19 | &&&8382fc7&&&@@@Merge pull request #49 from fake/chore/prepare-for-spring-boot-1.5-145029157Move deprecated function overrides into appropriate classes@@@
20 | &&&597238c&&&@@@delete commented out dependencies[#145029157]
21 | @@@
22 | &&&441062a&&&@@@Move deprecated function overrides into appropriate classes[#145029157]
23 | @@@
24 | &&&1a2cdde&&&@@@Merge pull request #48 from fake/chore/upgrade-spring-boot-1.4-140439405Chore/upgrade spring boot 1.4 140439405@@@
25 | &&&be31433&&&@@@Welcome to Spring Boot 1.4.6![#140439405]
26 | @@@
27 | &&&5060238&&&@@@Welcome to Spring Boot 1.4.6![#140439405]
28 | @@@
29 | &&&7ef6950&&&@@@delete unused interface@@@
30 | &&&a06218c&&&@@@Merge pull request #47 from fake/bug/enterprise-store-timezone-144706665Convert Enterprise store service to use QueryDSL@@@
31 | &&&563207b&&&@@@add check the other tables[#144706665]
32 | @@@
33 | &&&124f875&&&@@@Set the same padding logic for Business unit merchandising Heirarchy as added to Enterprise Store Catalog[#144706665]
34 | @@@
35 | &&&bac30fb&&&@@@Convert Enterprise store service to use QueryDSL Add padding of store numbers to make them have leading zeros until 4 digits ar reached Add test of padding of store numbers [Fixes][Fixes #144706665]
36 | @@@
37 | &&&85c24e6&&&@@@rename health endpoint location@@@
38 | &&&2dcc739&&&@@@production upgrade buildpack [ci skip]@@@
39 | &&&d1972b8&&&@@@Merge pull request #46 from fake/chore/upgrade-buildpackupgrade buildpack@@@
40 | &&&841bbc7&&&@@@disable spring boot actuator endpoints except health and info@@@
41 | &&&9298ca3&&&@@@upgrade buildpack@@@
42 | &&&69b98b3&&&@@@Merge pull request #45 from fake/feature/fake-fake-of-sale-service @@@
43 | &&&79d2017&&&@@@removing .idea@@@
44 | &&&fc09c58&&&@@@Cleaning up MasterDataLocationSimulation@@@
45 | &&&fef9880&&&@@@Update application-fake.properties@@@
46 | &&&881f404&&&@@@Creating CSV file for performance testing.@@@
47 | &&&5b874fc&&&@@@Store Point of Sale Profile ready for performance test@@@
48 | &&&c564a1a&&&@@@Updating ASCII doc with path parameters for each test.@@@
49 | &&&3e84016&&&@@@Updated asciidoc for store point of sale profile@@@
50 | &&&7400998&&&@@@Update README.mdKicking Concourse@@@
51 | &&&076d4c2&&&@@@Added more for store point of profile service.@@@
52 | &&&9afcf22&&&@@@Update README.mdKicking Concourse@@@
53 | &&&f0be0c5&&&@@@Initial implementation for Store Point of Sale Profile service@@@
54 | &&&5f257c1&&&@@@Merge pull request #44 from fake/feature/location_bugfixfixed location Bug@@@
55 | &&&074075b&&&@@@location bug fix changes committed@@@
56 | &&&b0c0573&&&@@@Update pom.xml@@@
57 | &&&56016ea&&&@@@YOLO Common Bump.@@@
58 | &&&a3ab3de&&&@@@Merge pull request #43 from fake/feature/add-id-for-fake-centersAdd new field to api documentation [Fix broken test]@@@
59 | &&&9ea512a&&&@@@Add new field to api documentation@@@
60 | &&&0d9dc59&&&@@@Merge pull request #42 from fake/feature/add-id-for-fake-centersAdd DistributionCenters to expose ID list@@@
61 | &&&a69e908&&&@@@Add DistributionCenters to expose ID@@@
62 | &&&e43403d&&&@@@Merge pull request #41 from fake/feature/added-store-fake Store domain from Common Repository@@@
63 | &&&5194296&&&@@@Replaced Store domain from Common Repository@@@
64 | &&&31b9498&&&@@@Merge pull request #40 from fake/update-documentationadded instructions for fetching all stores with pagination@@@
65 | &&&8235dea&&&@@@added instructions for fetching all stores with pagination@@@
66 | &&&14517b6&&&@@@Merge pull request #39 from fake/pagination-documentationPagination documentation@@@
67 | &&&1b1126c&&&@@@added documentation for /location/stores/offsetpaged endpoint@@@
68 | &&&50d8f43&&&@@@YOLO bump common@@@
69 | &&&86018c5&&&@@@removed shah's email from the doc's@@@
70 | &&&ea4aa06&&&@@@production endpoint client logging@@@
71 | &&&098aef6&&&@@@Update README.md@@@
72 | &&&30c8c60&&&@@@YOLO killing off Rethink completely.@@@
73 | &&&e1fd602&&&@@@YOLO Bump Common and enable Rethink-like logging.@@@
74 | &&&4a3ddfb&&&@@@Merge pull request #38 from fake/bugfix/UnusedNot used pageables.@@@
75 |
--------------------------------------------------------------------------------
/tests/coderepo/github/git_tag_last_was_release.txt:
--------------------------------------------------------------------------------
1 | v0.0.3
2 | v0.0.4
3 | v0.1.0
4 | v0.2.0
5 | v0.1.0+1
6 | v0.1.0+2
7 |
--------------------------------------------------------------------------------
/tests/coderepo/github/git_tag_last_was_snapshot.txt:
--------------------------------------------------------------------------------
1 | v0.0.3
2 | v0.0.4
3 | v0.2.0+1
4 | v0.2.0+2
5 | v0.1.0
6 | v0.2.0
7 | v0.1.0+1
8 | v0.1.0+2
9 |
--------------------------------------------------------------------------------
/tests/coderepo/github/git_tag_mock_output.txt:
--------------------------------------------------------------------------------
1 | 1.67
2 | 1.67.0
3 | 1.67.0+1
4 | 1.67.0+10
5 | 1.67.0+11
6 | 1.67.0+12
7 | 1.67.0+13
8 | 1.67.0+14
9 | 1.67.0+15
10 | 1.67.0+16
11 | 1.67.0+17
12 | 1.67.0+18
13 | 1.67.0+19
14 | 1.67.0+2
15 | 1.67.0+20
16 | 1.67.0+21
17 | 1.67.0+22
18 | 1.67.0+23
19 | 1.67.0+3
20 | 1.67.0+4
21 | 1.67.0+5
22 | 1.67.0+6
23 | 1.67.0+7
24 | 1.67.0+8
25 | 1.67.0+9
26 | 1.68.0
27 | 1.68.0+1
28 | 1.68.0+2
29 | 1.68.0+3
30 | 1.68.0+4
31 | 1.68.0+5
32 | 1.68.0+6
33 | 1.68.0+7
34 | 1.69.0
35 | 1.69.0+1
36 | 1.70.0
37 | 1.71.0
38 | 1.71.1
39 | 1.72.0
40 | 1.73.0
41 | 1.73.0+1
42 | 1.73.0+2
43 | 1.73.0+3
44 | 1.73.0+4
45 | 1.73.0+5
46 | 1.73.0+6
47 | v0.0.0+56
48 | v0.0.0+57
49 | v0.0.0+58
50 | v0.0.0+59
51 | v0.0.0+60
52 | v0.0.0+61
53 | v0.0.0+62
54 | v0.0.0+63
55 | v0.0.0+64
56 | v0.0.0+65
57 | v0.0.0+66
58 | v1.0.0
59 | v1.1.0
60 | v1.10.0
61 | v1.11.0
62 | v1.12.0
63 | v1.13.0
64 | v1.14.0
65 | v1.15.0
66 | v1.16.0
67 | v1.17.0
68 | v1.17.0+1
69 | v1.18.0
70 | v1.19.0
71 | v1.2.0
72 | v1.20.0
73 | v1.21.0
74 | v1.22.0
75 | v1.23.0
76 | v1.24.0
77 | v1.25.0
78 | v1.26.0
79 | v1.27.0
80 | v1.28.0
81 | v1.28.0+1
82 | v1.28.0+2
83 | v1.28.0+3
84 | v1.28.0+4
85 | v1.29.0
86 | v1.3.0
87 | v1.30.0
88 | v1.30.0+1
89 | v1.31.0
90 | v1.31.0+1
91 | v1.31.0+10
92 | v1.31.0+2
93 | v1.31.0+3
94 | v1.31.0+4
95 | v1.31.0+5
96 | v1.31.0+6
97 | v1.31.0+7
98 | v1.31.0+8
99 | v1.31.0+9
100 | v1.32.0
101 | v1.4.0
102 | v1.42.0
103 | v1.43.0
104 | v1.44.0
105 | v1.44.0+1
106 | v1.44.0+10
107 | v1.44.0+11
108 | v1.44.0+12
109 | v1.44.0+2
110 | v1.44.0+3
111 | v1.44.0+4
112 | v1.44.0+5
113 | v1.44.0+6
114 | v1.44.0+7
115 | v1.44.0+8
116 | v1.44.0+9
117 | v1.46.0
118 | v1.47.0
119 | v1.49.0
120 | v1.49.0+1
121 | v1.49.0+2
122 | v1.5.0
123 | v1.50.0
124 | v1.51.0
125 | v1.51.0+1
126 | v1.51.0+2
127 | v1.51.0+3
128 | v1.55.0
129 | v1.55.0+1
130 | v1.55.0+10
131 | v1.55.0+11
132 | v1.55.0+12
133 | v1.55.0+13
134 | v1.55.0+2
135 | v1.55.0+3
136 | v1.55.0+4
137 | v1.55.0+5
138 | v1.55.0+6
139 | v1.55.0+7
140 | v1.55.0+8
141 | v1.55.0+9
142 | v1.56.0
143 | v1.56.0+1
144 | v1.56.0+2
145 | v1.56.1
146 | v1.57.0
147 | v1.57.1
148 | v1.58.0
149 | v1.59.0
150 | v1.59.0+1
151 | v1.59.0+2
152 | v1.59.0+3
153 | v1.6.0
154 | v1.60.0
155 | v1.60.0+1
156 | v1.60.1
157 | v1.60.1+1
158 | v1.60.1+2
159 | v1.61.0
160 | v1.61.0+1
161 | v1.61.0+2
162 | v1.61.0+3
163 | v1.61.0+4
164 | v1.62.0
165 | v1.62.0+1
166 | v1.63.0
167 | v1.64.0
168 | v1.64.0+1
169 | v1.64.1
170 | v1.65.0
171 | v1.65.0+1
172 | v1.66.0
173 | v1.66.0+1
174 | v1.67.0
175 | v1.7.0
176 | v1.8.0
177 | v1.9.0
178 |
--------------------------------------------------------------------------------
/tests/coderepo/github/git_tag_mock_output_calver.txt:
--------------------------------------------------------------------------------
1 | v2021.65.2+1
2 | v2021.65.2
3 | v2021.65.1+2
4 | v2021.65.1+1
5 | v2021.65.1
6 | v2021.65.0+2
7 | v2021.65.0+1
8 | v2021.65.0
9 | v21.68.0+1
10 | v21.68.0
11 | v21.67.0+16
12 | v21.67.0+15
13 | v21.67.0+14
14 | v21.67.0+13
15 | v21.67.0+12
16 | v21.67.0+11
17 | v21.67.0+10
18 | v21.67.0+9
19 | v21.67.0+8
20 | v21.67.0+7
21 | v21.67.0+6
22 | v21.67.0+5
23 | v21.67.0+4
24 | v21.67.0+3
25 | v21.67.0+2
26 | v21.67.0+1
27 | v21.67.0
28 | v21.66.0+2
29 | v21.66.0+1
30 | v21.66.0
--------------------------------------------------------------------------------
/tests/coderepo/github/git_tag_mock_output_random.txt:
--------------------------------------------------------------------------------
1 | v1.25.0
2 | v1.5.0
3 | v1.16.0
4 | v1.51.0+2
5 | v1.55.0+12
6 | v1.63.0
7 | v1.66.0
8 | v1.61.0+4
9 | v1.31.0+1
10 | v1.44.0+10
11 | v1.62.0
12 | v1.28.0+4
13 | 1.67.0+18
14 | v1.59.0+1
15 | v1.31.0+6
16 | v0.0.0+58
17 | 1.71.0
18 | v0.0.0+61
19 | v1.44.0+5
20 | 1.67.0+7
21 | v1.21.0
22 | v1.0.0
23 | 1.67.0+17
24 | 1.67.0+19
25 | v1.66.0+1
26 | v0.0.0+56
27 | v1.6.0
28 | v1.56.0+2
29 | v1.51.0
30 | 1.67.0+9
31 | 1.67.0+22
32 | v1.46.0
33 | v1.60.1+1
34 | v1.57.1
35 | v1.44.0+6
36 | v1.56.0+1
37 | v1.65.0
38 | v1.60.1+2
39 | v1.49.0+2
40 | v1.28.0+2
41 | 1.73.0+6
42 | 1.67.0+2
43 | v1.64.1
44 | v1.59.0+2
45 | v1.44.0+3
46 | v1.61.0
47 | v1.31.0+10
48 | v1.59.0+3
49 | 1.68.0+1
50 | v1.65.0+1
51 | v0.0.0+63
52 | v1.4.0
53 | v0.0.0+65
54 | v1.61.0+2
55 | 1.67.0+10
56 | v1.19.0
57 | 1.73.0+3
58 | 1.69.0+1
59 | v1.12.0
60 | v1.55.0+3
61 | v1.42.0
62 | 1.73.0+5
63 | 1.71.1
64 | v1.61.0+1
65 | v1.29.0
66 | 1.73.0+4
67 | v1.44.0+9
68 | v1.31.0+4
69 | 1.67.0+13
70 | v1.55.0+4
71 | 1.67.0+14
72 | 1.70.0
73 | 1.69.0
74 | v1.55.0+8
75 | 1.68.0+4
76 | v1.44.0
77 | v0.0.0+66
78 | v1.44.0+1
79 | v1.44.0+8
80 | v1.30.0
81 | v1.47.0
82 | v1.58.0
83 | v1.55.0+5
84 | v1.8.0
85 | 1.68.0+5
86 | v0.0.0+60
87 | 1.67.0
88 | 1.67.0+21
89 | 1.67.0+23
90 | 1.73.0
91 | v1.31.0+3
92 | 1.68.0+6
93 | v1.22.0
94 | 1.67.0+1
95 | v1.51.0+1
96 | v1.55.0+11
97 | 1.73.0+1
98 | 1.67.0+20
99 | 1.72.0
100 | v1.61.0+3
101 | v1.28.0+3
102 | v1.55.0+1
103 | v1.1.0
104 | 1.67.0+15
105 | v1.56.0
106 | v1.44.0+11
107 | v1.26.0
108 | 1.67.0+6
109 | v1.55.0+13
110 | v1.7.0
111 | v1.3.0
112 | v1.24.0
113 | v1.62.0+1
114 | v1.64.0+1
115 | v1.67.0
116 | v1.44.0+7
117 | v0.0.0+62
118 | v1.60.0+1
119 | v1.10.0
120 | v1.55.0+2
121 | v1.60.0
122 | v1.28.0+1
123 | 1.68.0+7
124 | 1.73.0+2
125 | v1.57.0
126 | v1.50.0
127 | v1.23.0
128 | v1.55.0+9
129 | v1.9.0
130 | v1.27.0
131 | 1.67.0+4
132 | v0.0.0+59
133 | v0.0.0+57
134 | v1.20.0
135 | 1.67.0+3
136 | 1.67.0+16
137 | 1.67
138 | v1.44.0+12
139 | v1.49.0+1
140 | v1.44.0+2
141 | v1.55.0+6
142 | v1.60.1
143 | v1.56.1
144 | 1.68.0
145 | 1.67.0+8
146 | v1.51.0+3
147 | v1.31.0
148 | v1.64.0
149 | v1.59.0
150 | v1.15.0
151 | v1.31.0+5
152 | v1.31.0+7
153 | v1.11.0
154 | v1.31.0+2
155 | v1.55.0
156 | v1.31.0+9
157 | v1.31.0+8
158 | 1.68.0+3
159 | v0.0.0+64
160 | v1.28.0
161 | 1.68.0+2
162 | v1.30.0+1
163 | v1.14.0
164 | 1.67.0+5
165 | v1.32.0
166 | v1.44.0+4
167 | v1.2.0
168 | v1.17.0
169 | v1.55.0+7
170 | 1.67.0+12
171 | v1.18.0
172 | v1.13.0
173 | v1.55.0+10
174 | v1.17.0+1
175 | 1.67.0+11
176 | v1.43.0
177 | v1.49.0
178 |
--------------------------------------------------------------------------------
/tests/coderepo/github/git_tag_mock_output_small.txt:
--------------------------------------------------------------------------------
1 | 1.68.0+1
2 | 1.68.0
3 | 1.67.0+16
4 | 1.67.0+15
5 | 1.67.0+14
6 | 1.67.0+13
7 | 1.67.0+12
8 | 1.67.0+11
9 | 1.67.0+10
10 | 1.67.0+9
11 | 1.67.0+8
12 | 1.67.0+7
13 | 1.67.0+6
14 | 1.67.0+5
15 | 1.67.0+4
16 | 1.67.0+3
17 | 1.67.0+2
18 | 1.67.0+1
19 | 1.67.0
20 | 1.66.0+2
21 | 1.66.0+1
22 | 1.66.0
23 |
--------------------------------------------------------------------------------
/tests/coderepo/github/git_tag_one_release.txt:
--------------------------------------------------------------------------------
1 | v1.2.3+8
2 | v1.2.3+7
3 | v1.2.3+6
4 | v1.2.3+5
5 | v1.2.3+4
6 | v1.2.3+3
7 | v1.2.3+2
8 | v1.2.3+1
9 | v1.2.3
10 | v1.2.2+4
11 | v1.2.2+3
12 |
--------------------------------------------------------------------------------
/tests/coderepo/github/git_tag_three_release.txt:
--------------------------------------------------------------------------------
1 | v1.2.3+8
2 | v1.2.3+7
3 | v1.2.3+6
4 | v1.2.3+5
5 | v1.2.3+4
6 | v1.2.3+3
7 | v1.2.3+2
8 | v1.2.3
9 | v1.2.2+5
10 | v1.2.2+4
11 | v1.2.2
12 | v1.2.1+1
13 | v1.2.1
14 |
--------------------------------------------------------------------------------
/tests/coderepo/github/git_tag_unordered_manual_versions.txt:
--------------------------------------------------------------------------------
1 | v0.0.3
2 | v0.0.4
3 | v0.1.0
4 | v0.2.0
5 | v0.1.0+1
6 | v0.1.0+2
7 | v0.2.0+1
--------------------------------------------------------------------------------
/tests/coderepo/github/github_commit_history_output.txt:
--------------------------------------------------------------------------------
1 | [{"sha":"98db8af","commit":{"message":"Update README for setup and testing instructions"}},{"sha":"da1bc5e","commit":{"message":"refactor all the mdm tests"}},{"sha":"20d837b","commit":{"message":"restructure artifactory tests"}},{"sha":"97b4e72","commit":{"message":"first working pass with minimal changes"}},{"sha":"adb2746","commit":{"message":"Merge pull request #35 from mc331w/snapshot-manual-versioning"}},{"sha":"482263d","commit":{"message":"Adding double % signs to fix syntax"}},{"sha":"12eb7ee","commit":{"message":"Merge pull request #34 from mc331w/snapshot-manual-versioning"}},{"sha":"808b0d0","commit":{"message":"Find snapshot releases that match version_number"}},{"sha":"3fb35b2","commit":{"message":"Allow snapshot builds to set version_number"}},{"sha":"2453021","commit":{"message":"Merge pull request #30 from mc331w/patch-2"}},{"sha":"f7d9258","commit":{"message":"Adding MDM to tasks_requiring_github"}},{"sha":"cfa4aeb","commit":{"message":"Merge pull request #29 from mc331w/patch-1"}},{"sha":"950893f","commit":{"message":"Added cut to git for-each-ref calls to avoid pulling back the timezone offset."}},{"sha":"c67b3e0","commit":{"message":"fixed bug with snapshot version not being pulled as latest when there was a recent release version. 
[#134114167]"}},{"sha":"0763f7a","commit":{"message":"Merge pull request #27 from ci-cd/artifactory-bug-fix-134082057"}},{"sha":"00431a9","commit":{"message":"correcting priority of checking environment variables"}},{"sha":"223342f","commit":{"message":"Adding ability to specify artifactory user [#134082057]"}},{"sha":"4326d00","commit":{"message":"Adding slack channel option for errors [#130798449]"}},{"sha":"09c1983","commit":{"message":"Merge pull request #25 from ci-cd/revert-18-github-version-fix"}},{"sha":"445fd02","commit":{"message":"Revert \"GitHub version fix\""}},{"sha":"65c05e9","commit":{"message":"Update buildConfig.json"}},{"sha":"623d518","commit":{"message":"Merge pull request #18 from ci-cd/github-version-fix"}},{"sha":"3ca339f","commit":{"message":"removing virutalenv that snuck in"}},{"sha":"9803e38","commit":{"message":"fixed parsing issue of github tag"}},{"sha":"8a3f3e7","commit":{"message":"cleaning up how we find the latest tag"}},{"sha":"7b19416","commit":{"message":"reorganized code to make diff prettier"}},{"sha":"2e1e149","commit":{"message":"refactored github to deal with +'s in the timezone offset"}},{"sha":"0ae49fa","commit":{"message":"fixed bug requring snowfield user [#133455353]"}},{"sha":"7fc6615","commit":{"message":"moved artifactory key to environment variable"}},{"sha":"e6cc51a","commit":{"message":"Moved github api to env variable"}},{"sha":"fadad65","commit":{"message":"removed print in favor of commons.print_msg [#133448693]"}},{"sha":"e6bcbdf","commit":{"message":"moved all snowfield url's to settings.ini and added catch for token [#133446639]"}},{"sha":"4c9b4d2","commit":{"message":"Now you should use ARTIFACTORY_TOKEN env variable instead of settings.ini or buildConfig.json [#133445393]"}},{"sha":"55b2373","commit":{"message":"No longer using svc_cicd by default for github api token. 
Use GITHUB_TOKEN pwd parameter or secrets file to inject [#133436887]"}},{"sha":"eabab9b","commit":{"message":"No longer using svc_cicd by default for github api token. Use GITHUB_TOKEN pwd parameter or secrets file to inject [#133436887]"}},{"sha":"894a65c","commit":{"message":"Merge pull request #15 from mc331w/patch-1"}},{"sha":"8fb551c","commit":{"message":"Changing fortify email address [#132965521]"}},{"sha":"f57349f","commit":{"message":"adding sqp link to cr [#133064795]"}},{"sha":"a247b37","commit":{"message":"Fixing bug with SQP and cr task"}},{"sha":"30c3ac0","commit":{"message":"Merge branch 'develop' of https://github.homedepot.com/ci-cd/flow into develop"}},{"sha":"99d6a7e","commit":{"message":"fixing issue calling cr creation/close"}},{"sha":"b30dd71","commit":{"message":"Updated the descriptions of the values needed to run MDM."}},{"sha":"44c9c07","commit":{"message":"fixing bug where a task requiring github also requires a version arg"}},{"sha":"58ef4f2","commit":{"message":"testing with random buid number to see if that's why sqp is appending results"}},{"sha":"bc0cc67","commit":{"message":"fixed bug wth completed tasks without reports [#132967929]"}},{"sha":"7239b99","commit":{"message":"SQP doesn't allow plus sign in version. Results weren't posting but no error was thrown. 
[#132967211]"}},{"sha":"90543d6","commit":{"message":"Adding support for uploading to software quality portal from flow [#132964471]"}},{"sha":"029a13e","commit":{"message":"Adding support for uploading to software quality portal from flow [#132964471]"}},{"sha":"a94ce59","commit":{"message":"Adding support for uploading to software quality portal from flow [#132964471]"}},{"sha":"17f0b3e","commit":{"message":"Merge pull request #14 from ci-cd/subcommand-fix"}},{"sha":"6e5b5c4","commit":{"message":"fixing slack task bug"}},{"sha":"44e3f73","commit":{"message":"Merge pull request #13 from ci-cd/subcommand-fix"}},{"sha":"ed10e85","commit":{"message":"Merge branch 'develop' of github.homedepot.com:ci-cd/flow into develop"}},{"sha":"303bce4","commit":{"message":"triggering build to validate https://github.homedepot.com/ci-cd/concourse-common/commit/32c801e9ad4c1ba2e74157f0a8838c573fe885ab"}},{"sha":"38333d2","commit":{"message":"updating tasks names"}},{"sha":"b31caf4","commit":{"message":"adding back change that vanished"}},{"sha":"811b151","commit":{"message":"adding back change that vanished"}},{"sha":"6574d69","commit":{"message":"Merge pull request #11 from ci-cd/subcommand-fix"}},{"sha":"405186b","commit":{"message":"fixing merge conflict"}},{"sha":"c968da6","commit":{"message":"putting proper branch name back"}}]
2 |
--------------------------------------------------------------------------------
/tests/coderepo/github/github_commit_history_output_multiline.txt:
--------------------------------------------------------------------------------
1 | [{"sha":"838f192","commit":{"message":"Fix sync time queries [Finishes] (#53)[Finishes #145753803]"}},{"sha":"01ee887","commit":{"message":"set MasterDataCommon to use latest version to fix sync time issue [Finishes] (#52)[Finishes #145560051]"}},{"sha":"4e17d72","commit":{"message":"final fix [ci skip]"}},{"sha":"f4044b8","commit":{"message":"another fix [ci skip]"}},{"sha":"381b760","commit":{"message":"[ci skip] fixed readme"}},{"sha":"ba38106","commit":{"message":"Merge pull request #51 from fake/chore/spring-boot-1.5.3-145272703Welcome to Spring Boot 1.5.3"}},{"sha":"c2da381","commit":{"message":"Welcome to Spring Boot 1.5.3\n further cleanup of pipeline given removal of latest-dev branch\n removing latest-dev branch\n catch latest-dev up with master (#86)\n updating fly_commands shell script [#123555544]\n github custom version make executable :disappointed:\n"}},{"sha":"83e4098","commit":{"message":"Merge pull request #50 from fake/build-config-#144197567added buildConfig.json"}},{"sha":"6379fc8","commit":{"message":"added buildConfig.json[#144197567]\n"}},{"sha":"230b7d2","commit":{"message":"add q3 to DB2 support"}},{"sha":"c8abe35","commit":{"message":"version set for spring security web"}},{"sha":"8382fc7","commit":{"message":"Merge pull request #49 from fake/chore/prepare-for-spring-boot-1.5-145029157Move deprecated function overrides into appropriate classes"}},{"sha":"597238c","commit":{"message":"delete commented out dependencies[#145029157]\n"}},{"sha":"441062a","commit":{"message":"Move deprecated function overrides into appropriate classes[#145029157]\n"}},{"sha":"1a2cdde","commit":{"message":"Merge pull request #48 from fake/chore/upgrade-spring-boot-1.4-140439405Chore/upgrade spring boot 1.4 140439405"}},{"sha":"be31433","commit":{"message":"Welcome to Spring Boot 1.4.6![#140439405]\n"}},{"sha":"5060238","commit":{"message":"Welcome to Spring Boot 1.4.6![#140439405]\n"}},{"sha":"7ef6950","commit":{"message":"delete unused 
interface"}},{"sha":"a06218c","commit":{"message":"Merge pull request #47 from fake/bug/enterprise-store-timezone-144706665Convert Enterprise store service to use QueryDSL"}},{"sha":"563207b","commit":{"message":"add check the other tables[#144706665]\n"}},{"sha":"124f875","commit":{"message":"Set the same padding logic for Business unit fake Heirarchy as added to Enterprise Store Catalog[#144706665]\n"}},{"sha":"bac30fb","commit":{"message":"Convert Enterprise store service to use QueryDSL Add padding of store numbers to make them have leading zeros until 4 digits ar reached Add test of padding of store numbers [Fixes][Fixes #144706665]\n"}},{"sha":"85c24e6","commit":{"message":"rename health endpoint location"}},{"sha":"2dcc739","commit":{"message":"production upgrade buildpack [ci skip]"}},{"sha":"d1972b8","commit":{"message":"Merge pull request #46 from fake/chore/upgrade-buildpackupgrade buildpack"}},{"sha":"841bbc7","commit":{"message":"disable spring boot actuator endpoints except health and info"}},{"sha":"9298ca3","commit":{"message":"upgrade buildpack"}},{"sha":"69b98b3","commit":{"message":"Merge pull request #45 from fake/feature/store-point-of-sale-service "}},{"sha":"79d2017","commit":{"message":"removing .idea"}},{"sha":"fc09c58","commit":{"message":"Cleaning up MasterDataLocationSimulation"}},{"sha":"fef9880","commit":{"message":"Update application-fake.properties"}},{"sha":"881f404","commit":{"message":"Creating CSV file for performance testing."}},{"sha":"5b874fc","commit":{"message":"Store Point of Sale Profile ready for performance test"}},{"sha":"c564a1a","commit":{"message":"Updating ASCII doc with path parameters for each test."}},{"sha":"3e84016","commit":{"message":"Updated asciidoc for store point of sale profile"}},{"sha":"7400998","commit":{"message":"Update README.mdKicking Concourse"}},{"sha":"076d4c2","commit":{"message":"Added more for store point of profile service."}},{"sha":"9afcf22","commit":{"message":"Update README.mdKicking 
Concourse"}},{"sha":"f0be0c5","commit":{"message":"Initial implementation for Store Point of Sale Profile service"}},{"sha":"5f257c1","commit":{"message":"Merge pull request #44 from fake/feature/location_bugfixfixed location Bug"}},{"sha":"074075b","commit":{"message":"location bug fix changes committed"}},{"sha":"b0c0573","commit":{"message":"Update pom.xml"}},{"sha":"56016ea","commit":{"message":"YOLO Common Bump."}},{"sha":"a3ab3de","commit":{"message":"Merge pull request #43 from fake/feature/add-id-for-distribution-centersAdd new field to api documentation [Fix broken test]"}},{"sha":"9ea512a","commit":{"message":"Add new field to api documentation"}},{"sha":"0d9dc59","commit":{"message":"Merge pull request #42 from fake/feature/add-id-for-distribution-centersAdd DistributionCenters to expose ID list"}},{"sha":"a69e908","commit":{"message":"Add DistributionCenters to expose ID"}},{"sha":"e43403d","commit":{"message":"Merge pull request #41 from fake/feature/added-store-commonReplaced Store domain from Common Repository"}},{"sha":"5194296","commit":{"message":"Replaced Store domain from Common Repository"}},{"sha":"31b9498","commit":{"message":"Merge pull request #40 from fake/update-documentationadded instructions for fetching all stores with pagination"}},{"sha":"8235dea","commit":{"message":"added instructions for fetching all stores with pagination"}},{"sha":"14517b6","commit":{"message":"Merge pull request #39 from fake/pagination-documentationPagination documentation"}},{"sha":"1b1126c","commit":{"message":"added documentation for /location/fake/offsetpaged endpoint"}},{"sha":"50d8f43","commit":{"message":"YOLO bump common"}},{"sha":"86018c5","commit":{"message":"removed shah's email from the doc's"}},{"sha":"ea4aa06","commit":{"message":"production endpoint client logging"}},{"sha":"098aef6","commit":{"message":"Update README.md"}},{"sha":"30c8c60","commit":{"message":"YOLO killing off Rethink completely."}},{"sha":"e1fd602","commit":{"message":"YOLO 
Bump Common and enable Rethink-like logging."}},{"sha":"4a3ddfb","commit":{"message":"Merge pull request #38 from fake/bugfix/UnusedNot used pageables."}}]
2 |
--------------------------------------------------------------------------------
/tests/coderepo/github/tracker_stories.json:
--------------------------------------------------------------------------------
1 | {
2 | "stories":[
3 | {
4 | "current_state":"started",
5 | "project_id":1899551,
6 | "owner_ids":[
7 | 1783060,
8 | 1797262
9 | ],
10 | "created_at":"2016-11-11T19:15:03Z",
11 | "kind":"story",
12 | "updated_at":"2016-11-14T14:16:33Z",
13 | "estimate":0,
14 | "story_type":"bug",
15 | "id":134185571,
16 | "owned_by_id":1783060,
17 | "name":"CCS$ value in Sales Chart metrics is empty for the District Manager view when the value is 0",
18 | "requested_by_id":1783064,
19 | "description":"CCS$ value in Sales Chart metrics is empty for the District Manager view when the value is 0",
20 | "labels":[
21 |
22 | ],
23 | "url":"https://www.pivotaltracker.com/story/show/134185571"
24 | },
25 | {
26 | "kind":"story",
27 | "id":134185571,
28 | "created_at":"2016-11-11T19:15:03Z",
29 | "updated_at":"2016-11-14T14:16:33Z",
30 | "estimate":0,
31 | "story_type":"bug",
32 | "name":"CCS$ value in Sales Chart metrics is empty for the District Manager view when the value is 0",
33 | "description":"CCS$ value in Sales Chart metrics is empty for the District Manager view when the value is 0",
34 | "current_state":"started",
35 | "requested_by_id":1783064,
36 | "url":"https://www.pivotaltracker.com/story/show/134185571",
37 | "project_id":1899551,
38 | "owner_ids":[
39 | 1783060,
40 | 1797262
41 | ],
42 | "labels":[
43 |
44 | ],
45 | "owned_by_id":1783060
46 | }
47 | ]
48 | }
49 |
--------------------------------------------------------------------------------
/tests/coderepo/github/tracker_stories_github_format.txt:
--------------------------------------------------------------------------------
1 | :beetle:bug **134185571**
2 | **CCS$ value in Sales Chart metrics is empty for the District Manager view when the value is 0**
3 | CCS$ value in Sales Chart metrics is empty for the District Manager view when the value is 0
4 |
5 |
6 | :beetle:bug **134185571**
7 | **CCS$ value in Sales Chart metrics is empty for the District Manager view when the value is 0**
8 | CCS$ value in Sales Chart metrics is empty for the District Manager view when the value is 0
9 |
10 |
11 |
--------------------------------------------------------------------------------
/tests/communications/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/tests/communications/__init__.py
--------------------------------------------------------------------------------
/tests/communications/slack/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/tests/communications/slack/__init__.py
--------------------------------------------------------------------------------
/tests/communications/slack/test_slack.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | from unittest.mock import MagicMock
4 | from unittest.mock import patch
5 |
6 | import pytest
7 | from flow.communications.slack.slack import Slack
8 |
9 | from flow.buildconfig import BuildConfig
10 |
# Minimal BuildConfig JSON stub shared by the tests in this module.  Mirrors
# the shape of a real buildConfig.json: project metadata, artifactory
# coordinates, a single "unittest" environment, and the Slack notification
# settings exercised by Slack.publish_deployment / Slack.publish_error.
mock_build_config_dict = {
    "projectInfo": {
        "name": "testproject"
    },
    "artifact": {
        "artifactoryDomain": "https://testdomain/artifactory",
        "artifactoryRepoKey": "release-repo",
        "artifactoryRepoKeySnapshot": "snapshot-repo",
        "artifactoryGroup": "group",
        "artifactType": "type",
        "artifactDirectory": "directory"
    },
    "environments": {
        "unittest": {
            "artifactCategory": "release"
        }
    },
    "slack": {
        "botName": "Flow",
        "emoji": ":robot_face:",
        "channel": "#spigot-ci"
    }
}
34 |
35 |
def test_publish_deployment_missing_version(monkeypatch):
    """publish_deployment must abort when the build has no version number.

    Expects commons.verify_version to log an ERROR message and raise
    SystemExit before any Slack traffic is attempted.
    """
    monkeypatch.setenv('SLACK_WEBHOOK_URL', 'https://hooks.slack.com/services/NOTAREALWEBHOOK')

    current_test_directory = os.path.dirname(os.path.realpath(__file__))

    with open(current_test_directory + "/tracker_stories.json", 'r') as myfile:
        tracker_json_data = json.loads(myfile.read())

    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
        # Build the config stub and the Slack instance OUTSIDE pytest.raises:
        # if setup itself exited, the over-broad raises block would have made
        # the test pass spuriously.  Only the call under test may raise.
        _b = MagicMock(BuildConfig)
        _b.build_env_info = mock_build_config_dict['environments']['unittest']
        _b.json_config = mock_build_config_dict
        _b.project_name = mock_build_config_dict['projectInfo']['name']
        _b.version_number = None  # the condition under test

        slack = Slack(config_override=_b)

        with pytest.raises(SystemExit):
            slack.publish_deployment(tracker_json_data)
        print(str(mock_printmsg_fn.mock_calls))

    mock_printmsg_fn.assert_called_with('commons', 'verify_version', 'Version not defined. Is your repo tagged '
                                        'with a version number?', 'ERROR')
58 |
59 |
def test_publish_deployment_missing_webhook(monkeypatch):
    """publish_deployment must abort when SLACK_WEBHOOK_URL is not set.

    Expects an ERROR log from Slack.publish_deployment followed by SystemExit.
    """
    if os.getenv('SLACK_WEBHOOK_URL'):
        monkeypatch.delenv('SLACK_WEBHOOK_URL')

    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
        # Setup lives OUTSIDE pytest.raises so an unexpected exit during
        # config/instance construction fails the test instead of satisfying
        # the raises block.  Only the call under test may raise.
        _b = MagicMock(BuildConfig)
        _b.build_env_info = mock_build_config_dict['environments']['unittest']
        _b.json_config = mock_build_config_dict
        _b.project_name = mock_build_config_dict['projectInfo']['name']
        _b.version_number = 'v0.0.1'  # valid version so only the webhook check trips

        slack = Slack(config_override=_b)

        with pytest.raises(SystemExit):
            slack.publish_deployment('blah')
        print(str(mock_printmsg_fn.mock_calls))

    mock_printmsg_fn.assert_called_with('Slack', 'publish_deployment', 'No Slack URL was found in the environment. Did you set SLACK_WEBHOOK_URL in your pipeline?', 'ERROR')
78 |
79 | #TODO find out why this is failing with a circular reference
80 | # def test_publish_deployment_no_valid_webhook(monkeypatch):
81 | # monkeypatch.setenv('SLACK_WEBHOOK_URL', 'https://hooks.slack.com/services/T03PB1F2E/B22KH4LAG/NOTAVLIDHOOK')
82 | #
83 | # with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
84 | # with pytest.raises(SystemExit) as cm:
85 | # _b = MagicMock(BuildConfig)
86 | # _b.build_env_info = mock_build_config_dict['environments']['unittest']
87 | # _b.json_config = mock_build_config_dict
88 | # _b.project_name = mock_build_config_dict['projectInfo']['name']
89 | # _b.version_number = 'v0.0.1'
90 | #
91 | # slk = Slack(config_override=_b)
92 | #
93 | # slk.publish_deployment()
94 | # print(str(mock_printmsg_fn.mock_calls))
95 | #
96 | # mock_printmsg_fn.assert_called_with('Slack', 'publish_deployment', 'Failed sending slack message to https://hooks.slack.com/services/NOTAREALWEBHOOK')
97 |
98 |
def test_publish_error_to_slack_missing_webhook(monkeypatch):
    """publish_error should only WARN (not raise) when SLACK_WEBHOOK_URL is unset."""
    if os.getenv('SLACK_WEBHOOK_URL'):
        monkeypatch.delenv('SLACK_WEBHOOK_URL')

    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
        Slack.publish_error('test', 'test', 'test', 'test')
        print(str(mock_printmsg_fn.mock_calls))

        # Mock retains its call history here; verify the WARN was emitted.
        mock_printmsg_fn.assert_any_call(
            'Slack',
            'publish_error',
            'No Slack URL was found in the environment. '
            'Did you set SLACK_WEBHOOK_URL in your pipeline?',
            'WARN')
--------------------------------------------------------------------------------
/tests/communications/slack/tracker_stories.json:
--------------------------------------------------------------------------------
1 | {
2 | "stories":[
3 | {
4 | "current_state":"started",
5 | "project_id":1899551,
6 | "owner_ids":[
7 | 1783060,
8 | 1797262
9 | ],
10 | "created_at":"2016-11-11T19:15:03Z",
11 | "kind":"story",
12 | "updated_at":"2016-11-14T14:16:33Z",
13 | "estimate":0,
14 | "story_type":"bug",
15 | "id":134185571,
16 | "owned_by_id":1783060,
17 | "name":"CCS$ value in Sales Chart metrics is empty for the District Manager view when the value is 0",
18 | "requested_by_id":1783064,
19 | "description":"CCS$ value in Sales Chart metrics is empty for the District Manager view when the value is 0",
20 | "labels":[
21 |
22 | ],
23 | "url":"https://www.pivotaltracker.com/story/show/134185571"
24 | },
25 | {
26 | "kind":"story",
27 | "id":134185571,
28 | "created_at":"2016-11-11T19:15:03Z",
29 | "updated_at":"2016-11-14T14:16:33Z",
30 | "estimate":0,
31 | "story_type":"bug",
32 | "name":"CCS$ value in Sales Chart metrics is empty for the District Manager view when the value is 0",
33 | "description":"CCS$ value in Sales Chart metrics is empty for the District Manager view when the value is 0",
34 | "current_state":"started",
35 | "requested_by_id":1783064,
36 | "url":"https://www.pivotaltracker.com/story/show/134185571",
37 | "project_id":1899551,
38 | "owner_ids":[
39 | 1783060,
40 | 1797262
41 | ],
42 | "labels":[
43 |
44 | ],
45 | "owned_by_id":1783060
46 | }
47 | ]
48 | }
49 |
--------------------------------------------------------------------------------
/tests/plugins/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/tests/plugins/__init__.py
--------------------------------------------------------------------------------
/tests/projecttracking/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/tests/projecttracking/__init__.py
--------------------------------------------------------------------------------
/tests/projecttracking/jira/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/tests/projecttracking/jira/__init__.py
--------------------------------------------------------------------------------
/tests/projecttracking/jira/jira_projects.json:
--------------------------------------------------------------------------------
1 | {
2 | "projects":[
3 | {
4 | "id": "123456",
5 | "self": "https://thd.atlassian.net/rest/api/3/project/fake",
6 | "key": "TEST"
7 | },
8 | {
9 | "id": "1234567",
10 | "self": "https://thd.atlassian.net/rest/api/3/project/fake",
11 | "key": "TEST2"
12 | }
13 | ]
14 | }
15 |
--------------------------------------------------------------------------------
/tests/projecttracking/jira/jira_stories_bug.json:
--------------------------------------------------------------------------------
1 | {
2 | "stories":[
3 | {
4 | "id": "123456",
5 | "self": "https://thd.atlassian.net/rest/api/3/issue/fake",
6 | "key": "TEST-123",
7 | "fields": {
8 | "issuetype": {
9 | "name": "Bug"
10 | },
11 | "project": {
12 | "id": "123456",
13 | "key": "TEST"
14 | },
15 | "created": "2021-05-04T15:45:03Z",
16 | "updated": "2021-05-04T16:23:33Z",
17 | "creator": {
18 | "accountId": "12345:AAAAAAAA-AAAA-AA11-1111-111111111111"
19 | },
20 | "assignee": {
21 | "accountId": "12345:AAAA1111-BBBB-2222-CCCC-3333DDDD4444"
22 | },
23 | "summary": "Test Bug",
24 | "description": {
25 | "content": [
26 | {
27 | "type": "paragraph",
28 | "content": [
29 | {
30 | "type": "text",
31 | "text": "This is a test bug description"
32 | }
33 | ]
34 | }
35 | ]
36 | },
37 | "components": [
38 | { "name": "TEST Component" },
39 | { "name": "TEST Component 2"}
40 | ],
41 | "labels": [
42 | "TEST"
43 | ],
44 | "status": {
45 | "name": "In Progress"
46 | }
47 | }
48 | },
49 | {
50 | "id": "12345678",
51 | "self": "https://thd.atlassian.net/rest/api/3/issue/fake2",
52 | "key": "TEST-456",
53 | "fields": {
54 | "issuetype": {
55 | "name": "Bug"
56 | },
57 | "project": {
58 | "id": "123456",
59 | "key": "TEST"
60 | },
61 | "created": "2021-05-04T15:45:03Z",
62 | "updated": "2021-05-04T16:23:33Z",
63 | "creator": {
64 | "accountId": "12345:AAAAAAAA-AAAA-AA11-1111-111111111111"
65 | },
66 | "assignee": {
67 | "accountId": "12345:AAAA1111-BBBB-2222-CCCC-3333DDDD4444"
68 | },
69 | "summary": "Another test Bug",
70 | "description": {
71 | "content": [
72 | {
73 | "type": "paragraph",
74 | "content": [
75 | {
76 | "type": "text",
77 | "text": "Another test bug"
78 | }
79 | ]
80 | }
81 | ]
82 | },
83 | "components": [
84 | { "name": "TEST Component" },
85 | { "name": "TEST Component 2"}
86 | ],
87 | "labels": [
88 | "TEST"
89 | ],
90 | "status": {
91 | "name": "Code Review"
92 | }
93 | }
94 | }
95 | ]
96 | }
97 |
--------------------------------------------------------------------------------
/tests/projecttracking/jira/jira_stories_major.json:
--------------------------------------------------------------------------------
1 | {
2 | "stories":[
3 | {
4 | "id": "123456",
5 | "self": "https://thd.atlassian.net/rest/api/3/issue/fake",
6 | "key": "TEST-123",
7 | "fields": {
8 | "issuetype": {
9 | "name": "Bug"
10 | },
11 | "project": {
12 | "id": "123456",
13 | "key": "TEST"
14 | },
15 | "created": "2021-05-04T15:45:03Z",
16 | "updated": "2021-05-04T16:23:33Z",
17 | "creator": {
18 | "accountId": "12345:AAAAAAAA-AAAA-AA11-1111-111111111111"
19 | },
20 | "assignee": {
21 | "accountId": "12345:AAAA1111-BBBB-2222-CCCC-3333DDDD4444"
22 | },
23 | "summary": "Test bug",
24 | "description": {
25 | "content": [
26 | {
27 | "type": "paragraph",
28 | "content": [
29 | {
30 | "type": "text",
31 | "text": "This is a test bug"
32 | }
33 | ]
34 | }
35 | ]
36 | },
37 | "components": [
38 | { "name": "TEST Component" },
39 | { "name": "TEST Component 2"}
40 | ],
41 | "labels": [
42 | "TEST"
43 | ],
44 | "status": {
45 | "name": "In Progress"
46 | }
47 | }
48 | },
49 | {
50 | "id": "1234567",
51 | "self": "https://thd.atlassian.net/rest/api/3/issue/fake",
52 | "key": "TEST-123",
53 | "fields": {
54 | "issuetype": {
55 | "name": "Chore"
56 | },
57 | "project": {
58 | "id": "123456",
59 | "key": "TEST"
60 | },
61 | "created": "2021-05-04T15:45:03Z",
62 | "updated": "2021-05-04T16:23:33Z",
63 | "creator": {
64 | "accountId": "12345:AAAAAAAA-AAAA-AA11-1111-111111111111"
65 | },
66 | "assignee": {
67 | "accountId": "12345:AAAA1111-BBBB-2222-CCCC-3333DDDD4444"
68 | },
69 | "summary": "Test Chore",
70 | "description": {
71 | "content": [
72 | {
73 | "type": "paragraph",
74 | "content": [
75 | {
76 | "type": "text",
77 | "text": "This is a test chore"
78 | }
79 | ]
80 | }
81 | ]
82 | },
83 | "components": [
84 | { "name": "TEST Component" },
85 | { "name": "TEST Component 2"}
86 | ],
87 | "labels": [
88 | "testlabel",
89 | "major"
90 | ],
91 | "status": {
92 | "name": "In Progress"
93 | }
94 | }
95 | },
96 | {
97 | "id": "1234568",
98 | "self": "https://thd.atlassian.net/rest/api/3/issue/fake",
99 | "key": "TEST-456",
100 | "fields": {
101 | "issuetype": {
102 | "name": "Chore"
103 | },
104 | "project": {
105 | "id": "123456",
106 | "key": "TEST"
107 | },
108 | "created": "2021-05-04T15:45:03Z",
109 | "updated": "2021-05-04T16:23:33Z",
110 | "creator": {
111 | "accountId": "12345:AAAAAAAA-AAAA-AA11-1111-111111111111"
112 | },
113 | "assignee": {
114 | "accountId": "12345:AAAA1111-BBBB-2222-CCCC-3333DDDD4444"
115 | },
116 | "summary": "Test Chore",
117 | "description": {
118 | "content": [
119 | {
120 | "type": "paragraph",
121 | "content": [
122 | {
123 | "type": "text",
124 | "text": "This is a test chore"
125 | }
126 | ]
127 | }
128 | ]
129 | },
130 | "components": [
131 | { "name": "Major" },
132 | { "name": "TEST Component 2"}
133 | ],
134 | "labels": [
135 | "testlabel"
136 | ],
137 | "status": {
138 | "name": "In Progress"
139 | }
140 | }
141 | }
142 | ]
143 | }
144 |
--------------------------------------------------------------------------------
/tests/projecttracking/jira/jira_stories_minor.json:
--------------------------------------------------------------------------------
1 | {
2 | "stories":[
3 | {
4 | "id": "123456",
5 | "self": "https://thd.atlassian.net/rest/api/3/issue/fake",
6 | "key": "TEST-123",
7 | "fields": {
8 | "issuetype": {
9 | "name": "Chore"
10 | },
11 | "project": {
12 | "id": "123456",
13 | "key": "TEST"
14 | },
15 | "created": "2021-05-04T15:45:03Z",
16 | "updated": "2021-05-04T16:23:33Z",
17 | "creator": {
18 | "accountId": "12345:AAAAAAAA-AAAA-AA11-1111-111111111111"
19 | },
20 | "assignee": {
21 | "accountId": "12345:AAAA1111-BBBB-2222-CCCC-3333DDDD4444"
22 | },
23 | "summary": "Test Chore",
24 | "description": {
25 | "content": [
26 | {
27 | "type": "paragraph",
28 | "content": [
29 | {
30 | "type": "text",
31 | "text": "This is a test chore"
32 | }
33 | ]
34 | }
35 | ]
36 | },
37 | "components": [
38 | { "name": "TEST Component" },
39 | { "name": "TEST Component 2"}
40 | ],
41 | "labels": [
42 | "TEST"
43 | ],
44 | "status": {
45 | "name": "In Progress"
46 | }
47 | }
48 | },
49 | {
50 | "id": "12345678",
51 | "self": "https://thd.atlassian.net/rest/api/3/issue/fake",
52 | "key": "TEST2-123",
53 | "fields": {
54 | "issuetype": {
55 | "name": "Bug"
56 | },
57 | "project": {
58 | "id": "1234567",
59 | "key": "TEST2"
60 | },
61 | "created": "2021-05-04T15:45:03Z",
62 | "updated": "2021-05-04T16:23:33Z",
63 | "creator": {
64 | "accountId": "12345:AAAAAAAA-AAAA-AA11-1111-111111111111"
65 | },
66 | "assignee": {
67 | "accountId": "11345:AAAA1111-BBBB-2222-CCCC-3333DDDD4444"
68 | },
69 | "summary": "Test Bug",
70 | "description": {
71 | "content": [
72 | {
73 | "type": "paragraph",
74 | "content": [
75 | {
76 | "type": "text",
77 | "text": "This is a test bug"
78 | }
79 | ]
80 | }
81 | ]
82 | },
83 | "components": [
84 | { "name": "TEST Component" },
85 | { "name": "TEST Component 2"}
86 | ],
87 | "labels": [
88 | "TEST"
89 | ],
90 | "status": {
91 | "name": "In Progress"
92 | }
93 | }
94 | }
95 | ]
96 | }
97 |
--------------------------------------------------------------------------------
/tests/projecttracking/tracker/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/tests/projecttracking/tracker/__init__.py
--------------------------------------------------------------------------------
/tests/projecttracking/tracker/tracker_stories_bug.json:
--------------------------------------------------------------------------------
1 | {
2 | "stories":[
3 | {
4 | "current_state":"started",
5 | "project_id":123456,
6 | "owner_ids":[
7 | 123456,
8 | 7890
9 | ],
10 | "created_at":"2016-11-11T19:15:03Z",
11 | "kind":"story",
12 | "updated_at":"2016-11-14T14:16:33Z",
13 | "estimate":0,
14 | "story_type":"bug",
15 | "id":123456,
16 | "owned_by_id":123456,
17 | "name":"Test Bug",
18 | "requested_by_id":123456,
19 | "description":"This is a test bug description",
20 | "labels":[
21 |
22 | ],
23 | "url":"https://www.pivotaltracker.com/story/show/fake"
24 | },
25 | {
26 | "kind":"story",
27 | "id":12345678,
28 | "created_at":"2016-11-11T19:15:03Z",
29 | "updated_at":"2016-11-14T14:16:33Z",
30 | "estimate":0,
31 | "story_type":"bug",
32 | "name":"Another test bug",
33 | "description":"Another test bug",
34 | "current_state":"started",
35 | "requested_by_id":123456,
36 | "url":"https://www.pivotaltracker.com/story/show/fake",
37 | "project_id":123456,
38 | "owner_ids":[
39 | 123456,
40 | 7890
41 | ],
42 | "labels":[
43 |
44 | ],
45 | "owned_by_id":123456
46 | }
47 | ]
48 | }
49 |
--------------------------------------------------------------------------------
/tests/projecttracking/tracker/tracker_stories_major.json:
--------------------------------------------------------------------------------
1 | {
2 | "stories":[
3 | {
4 | "current_state":"started",
5 | "project_id":123456,
6 | "owner_ids":[
7 | 123456,
8 | 7890
9 | ],
10 | "created_at":"2016-11-11T19:15:03Z",
11 | "kind":"story",
12 | "updated_at":"2016-11-14T14:16:33Z",
13 | "estimate":0,
14 | "story_type":"bug",
15 | "id":123456,
16 | "owned_by_id":123456,
17 | "name":"Test bug",
18 | "requested_by_id":123456,
19 | "description":"This is a test bug",
20 | "labels":[
21 |
22 | ],
23 | "url":"https://www.pivotaltracker.com/story/show/fake"
24 | },
25 | {
26 | "kind":"story",
27 | "id":1234567,
28 | "created_at":"2016-11-09T14:36:37Z",
29 | "updated_at":"2016-11-14T12:52:12Z",
30 | "estimate":0,
31 | "story_type":"chore",
32 | "name":"Test chore",
33 | "description":"This is a test chore",
34 | "current_state":"started",
35 | "requested_by_id":1234567,
36 | "url":"https://www.pivotaltracker.com/story/show/fake",
37 | "project_id":123456,
38 | "owner_ids":[
39 | 123456
40 | ],
41 | "labels":[
42 | {
43 | "id":111111,
44 | "project_id":123456,
45 | "kind":"label",
46 | "name":"testlabel",
47 | "created_at":"2016-11-10T16:31:21Z",
48 | "updated_at":"2016-11-10T16:31:21Z"
49 | },
50 | {
51 | "id":222222,
52 | "project_id":123456,
53 | "kind":"label",
54 | "name":"major",
55 | "created_at":"2016-11-10T20:29:53Z",
56 | "updated_at":"2016-11-10T20:29:53Z"
57 | }
58 | ],
59 | "owned_by_id":123456
60 | }
61 | ]
62 | }
63 |
--------------------------------------------------------------------------------
/tests/projecttracking/tracker/tracker_stories_minor.json:
--------------------------------------------------------------------------------
1 | {
2 | "stories":[
3 | {
4 | "current_state":"started",
5 | "project_id":123456,
6 | "owner_ids":[
7 | 123456,
8 | 7890
9 | ],
10 | "created_at":"2016-11-11T19:15:03Z",
11 | "kind":"story",
12 | "updated_at":"2016-11-14T14:16:33Z",
13 | "estimate":0,
14 | "story_type":"chore",
15 | "id":123456,
16 | "owned_by_id":123456,
17 | "name":"Test Chore",
18 | "requested_by_id":123456,
19 | "description":"This is a test chore",
20 | "labels":[
21 |
22 | ],
23 | "url":"https://www.pivotaltracker.com/story/show/fake"
24 | },
25 | {
26 | "kind":"story",
27 | "id":12345678,
28 | "created_at":"2016-11-11T19:15:03Z",
29 | "updated_at":"2016-11-14T14:16:33Z",
30 | "estimate":0,
31 | "story_type":"bug",
32 | "name":"Test bug",
33 | "description":"This is a test bug",
34 | "current_state":"started",
35 | "requested_by_id":123456,
36 | "url":"https://www.pivotaltracker.com/story/show/fake",
37 | "project_id":1234567,
38 | "owner_ids":[
39 | 1123456
40 | ],
41 | "labels":[
42 |
43 | ],
44 | "owned_by_id":1123456
45 | }
46 | ]
47 | }
48 |
--------------------------------------------------------------------------------
/tests/staticqualityanalysis/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/tests/staticqualityanalysis/__init__.py
--------------------------------------------------------------------------------
/tests/staticqualityanalysis/sonar/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/tests/staticqualityanalysis/sonar/__init__.py
--------------------------------------------------------------------------------
/tests/staticqualityanalysis/sonar/test_sonar.py:
--------------------------------------------------------------------------------
1 | import configparser
2 | import os
3 | from unittest.mock import MagicMock
4 | from unittest.mock import patch
5 |
6 | import pytest
7 | from flow.staticqualityanalysis.sonar.sonarmodule import SonarQube
8 |
9 | from flow.buildconfig import BuildConfig
10 |
11 |
def test_scan_code_single_jar_executable_path(monkeypatch):
    """scan_code should search SONAR_HOME for jar files when no runner is configured in settings."""
    monkeypatch.setenv('SONAR_HOME', 'FAKEHOME')

    with patch('flow.utils.commons.print_msg'), \
            patch('flow.utils.commons.get_files_of_type_from_directory') as mock_find_jars_fn:
        # A single scanner jar is "found" under SONAR_HOME.
        mock_find_jars_fn.return_value = ['sonar-scanner.jar']

        build_config = MagicMock(BuildConfig)
        settings = configparser.ConfigParser()
        settings.add_section('sonar')
        build_config.settings = settings

        with pytest.raises(SystemExit):
            sonar = SonarQube(config_override=build_config)
            sonar.scan_code()

        mock_find_jars_fn.assert_called_with('jar', 'FAKEHOME')
28 |
29 |
def test_scan_code_settings_executable_path(monkeypatch):
    """scan_code should fall back to the sonar_runner jar named in settings.ini."""
    monkeypatch.setenv('SONAR_HOME', 'FAKEHOME')

    with patch('flow.utils.commons.print_msg'), \
            patch('flow.utils.commons.get_files_of_type_from_directory') as mock_find_jars_fn:
        # No jars discovered under SONAR_HOME; the settings value must be used.
        mock_find_jars_fn.return_value = []

        build_config = MagicMock(BuildConfig)
        settings = configparser.ConfigParser()
        settings.add_section('sonar')
        settings.set('sonar', 'sonar_runner', 'sonar-runner-dist-2.4.jar')
        build_config.settings = settings

        with pytest.raises(SystemExit):
            sonar = SonarQube(config_override=build_config)
            sonar.scan_code()

        mock_find_jars_fn.assert_called_with('jar', 'FAKEHOME')
47 |
48 |
def test_scan_code_missing_executable_path(monkeypatch):
    """scan_code should log an error when no sonar runner jar can be located anywhere."""
    monkeypatch.setenv('SONAR_HOME', 'FAKEHOME')

    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn, \
            patch('flow.utils.commons.get_files_of_type_from_directory') as mock_find_jars_fn:
        # Nothing under SONAR_HOME and no sonar_runner entry in settings.
        mock_find_jars_fn.return_value = []

        build_config = MagicMock(BuildConfig)
        settings = configparser.ConfigParser()
        settings.add_section('sonar')
        build_config.settings = settings

        with pytest.raises(SystemExit):
            sonar = SonarQube(config_override=build_config)
            sonar.scan_code()

        mock_printmsg_fn.assert_called_with('SonarQube', '_submit_scan',
                                            'Sonar runner undefined. Please define path to sonar '
                                            'runner in settings.ini.', 'ERROR')
67 |
68 |
def test_scan_retry_logic(monkeypatch):
    """scan_code should exit with an error after exhausting its retries.

    SonarQube._submit_scan is replaced with a stub that always raises, so
    every attempt fails and the retry loop runs to exhaustion.
    """
    monkeypatch.setenv('SONAR_HOME', 'FAKEHOME')

    # Fix: the stub is installed on the class via patch(new=...), so it is
    # invoked as a bound method and must accept `self`.  Without the parameter
    # the call raised a wrong-arity TypeError instead of the plain Exception
    # this test intends to simulate (the test only passed because the retry
    # loop catches Exception broadly).
    def _submit_scan_failure(self):
        raise Exception

    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
        with patch('flow.staticqualityanalysis.sonar.sonarmodule.SonarQube._submit_scan', new=_submit_scan_failure):
            with patch('os.path.isfile', return_value=True):
                with pytest.raises(SystemExit):
                    _b = MagicMock(BuildConfig)
                    parser = configparser.ConfigParser()
                    parser.add_section('sonar')
                    parser.set('sonar', 'sonar_runner', 'sonar-runner-dist-2.4.jar')
                    parser.add_section('project')
                    # No sleeping between retries keeps the test fast.
                    parser.set('project', 'retry_sleep_interval', '0')
                    _b.settings = parser

                    _sonar = SonarQube(config_override=_b)
                    _sonar.scan_code()

                mock_printmsg_fn.assert_called_with('SonarQube', 'scan_code', 'Could not connect to Sonar. Maximum number of retries reached.', 'ERROR')
91 |
92 |
def test_scan_code_missing_sonar_home(monkeypatch):
    """scan_code should exit with an error when SONAR_HOME is not defined."""
    # delenv(raising=False) replaces the manual `if os.getenv(...)` guard: it
    # removes the variable when present and is a no-op otherwise.
    monkeypatch.delenv('SONAR_HOME', raising=False)

    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn:
        with pytest.raises(SystemExit):
            _b = MagicMock(BuildConfig)
            parser = configparser.ConfigParser()
            parser.add_section('sonar')
            parser.set('sonar', 'sonar_runner', 'sonar-runner-dist-2.4.jar')
            _b.settings = parser

            _sonar = SonarQube(config_override=_b)
            _sonar.scan_code()

        mock_printmsg_fn.assert_called_with('SonarQube', '_submit_scan', '\'SONAR_HOME\' environment variable must be '
                                                                         'defined', 'ERROR')
110 |
111 |
def test_scan_code_missing_sonar_project_properties(monkeypatch):
    """scan_code should log an error when no sonar-project.properties file exists."""
    monkeypatch.setenv('SONAR_HOME', 'FAKEHOME')

    with patch('flow.utils.commons.print_msg') as mock_printmsg_fn, \
            patch('flow.utils.commons.get_files_of_type_from_directory') as mock_find_jars_fn, \
            patch('os.path.isfile', return_value=False):
        # A scanner jar exists, but the properties file lookup fails.
        mock_find_jars_fn.return_value = ['sonar-scanner.jar']

        build_config = MagicMock(BuildConfig)
        settings = configparser.ConfigParser()
        settings.add_section('sonar')
        settings.set('sonar', 'sonar_runner', 'sonar-runner-dist-2.4.jar')
        build_config.settings = settings

        with pytest.raises(SystemExit):
            sonar = SonarQube(config_override=build_config)
            sonar.scan_code()

        mock_printmsg_fn.assert_called_with('SonarQube', '_submit_scan',
                                            'No sonar-project.properties file was found. Please include in the root '
                                            'of your project with a valid value for \'sonar.host.url\'', 'ERROR')
130 |
--------------------------------------------------------------------------------
/tests/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/homedepot/flow/c4bee88370a132b675b23e29f577b87e4c1d4c63/tests/utils/__init__.py
--------------------------------------------------------------------------------
/tests/utils/test_commons.py:
--------------------------------------------------------------------------------
1 | from unittest.mock import mock_open
2 | from unittest.mock import patch
3 |
4 | import flow.utils.commons as commons
5 |
def test_write_to_file():
    """write_to_file should open the target path in append mode and write the text once."""
    open_mock = mock_open()
    with patch('__main__.open', open_mock, create=True):
        commons.write_to_file("somefilepath", "test_write_to_file", open_func=open_mock)

    open_mock.assert_called_once_with("somefilepath", "a")
    # mock_open exposes the file handle as return_value; asserting on it avoids
    # adding a second call to open_mock.
    handle = open_mock.return_value
    handle.write.assert_called_once_with("test_write_to_file")
14 |
--------------------------------------------------------------------------------