├── .gitignore ├── Cargo.toml ├── LICENSE ├── README.md ├── changelog.md ├── provision ├── ms │ ├── Vagrantfile │ ├── ie-box-automation-plugin.rb │ └── setup_ms_box.ps1 ├── osx │ └── make_osx_release.sh └── x64 │ ├── Vagrantfile │ ├── make_x86_release.sh │ └── setup_ubuntu_box.sh ├── src ├── api.rs ├── bin │ └── veye_checker.rs ├── checker.rs ├── configs.rs ├── digest_ext_table.rs ├── lib.rs ├── product.rs └── tasks.rs ├── tests ├── acceptance │ ├── assert.sh │ ├── tests.sh │ └── tests_osx.sh ├── api_test.rs ├── checker_test.rs ├── configs_test.rs ├── digest_ext_table_test.rs ├── fixtures │ ├── files │ │ ├── npm.tgz │ │ ├── pypi.tar.gz │ │ ├── pypi.whl │ │ ├── test.jar │ │ └── test.nupkg │ ├── only_api.toml │ ├── only_csv.toml │ ├── only_file_exts.toml │ ├── only_proxy.toml │ ├── only_scan.toml │ └── veye_checker.toml ├── product_test.rs └── tasks_test.rs └── veye-checker.iml /.gitignore: -------------------------------------------------------------------------------- 1 | ### Rust ### 2 | # Generated by Cargo 3 | # will have compiled files and executables 4 | /target/ 5 | provision/**/veye-checker/ 6 | 7 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 8 | # More information here http://doc.crates.io/guide.html#cargotoml-vs-cargolock 9 | Cargo.lock 10 | 11 | # These are backup files generated by rustfmt 12 | **/*.rs.bk 13 | 14 | 15 | ### Intellij ### 16 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm 17 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 18 | 19 | .idea 20 | 21 | ### test data 22 | 23 | results.csv 24 | temp/* 25 | 26 | ### vagrant files 27 | 28 | .vagrant 29 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "veye_checker" 3 | version = "0.2.0" 4 | authors = ["timgluz ", 
"reiz@versioneye.com"] 5 | 6 | [dependencies] 7 | sha1 = "^0.2.0" 8 | sha2 = "^0.6" 9 | base64 = "^0.6" 10 | md5 = "^0.3" 11 | getopts = "^0.2" 12 | hyper = "0.13.7" 13 | csv = "^0.15" 14 | regex = "^0.2.1" 15 | serde = "^1.0" 16 | serde_derive = "^1.0" 17 | serde_json = "^1.0" 18 | toml = "^0.4" 19 | walkdir = "^1.0" 20 | hyper-proxy = "0.7.0" 21 | hyper-tls = "0.4.3" 22 | http = "0.2.1" 23 | url = "2.1.1" 24 | futures = "0.3.5" 25 | 26 | [features] 27 | api = [] 28 | proxy = [] 29 | 30 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 VersionEye 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Veye-Checker 2 | 3 | [![Dependency Status](https://www.versioneye.com/user/projects/5912d506e1638f0051a0a33b/badge.svg?style=flat-square)](https://www.versioneye.com/user/projects/5912d506e1638f0051a0a33b) 4 | [![Join the chat at Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/veye_checker/Lobby?utm_source=share-link&utm_medium=link&utm_campaign=share-link) 5 | 6 | ** THIS PROJECT IS NOT ACTIVELY MAINTAINED ANYMORE! ** 7 | 8 | It's a command-line util that scans packaged binaries and resolves their SHA digest values into the package information. 9 | The whole idea behind this utility is described in the Versioneye's blogpost ["Identifying components by SHA values"](https://blog.versioneye.com/2017/02/08/identifying-components-by-sha-values). 10 | 11 | One can use this utility to lookup a version details of the package, fetch a license ID for the binary or 12 | get vulnerability details or automate due diligence process without installing any runtime or additional dependencies. 13 | 14 | Default file extensions for package managers: 15 | 16 | * Nuget (SHA512) - *\*.nupkg* 17 | * Maven (SHA1) - *\*.jar* 18 | * PYPI (MD5) - *\*.tar.gz, \*.whl* 19 | * NPM (SHA1) - *\*.tgz* 20 | 21 | ## Usage 22 | 23 | Download binaries from the [releases](https://github.com/versioneye/veye-checker/releases) and save it into your binaries folder 24 | 25 | ```bash 26 | #NB! change version and op-sys 27 | curl -s -L -o "${HOME}/bin/veye_checker" https://github.com/versioneye/veye-checker/releases/download/v0.2.0/veye_checker_osx 28 | 29 | chmod a+x ~/bin/veye_checker 30 | ``` 31 | 32 | * **resolve** - scans the target folder recursively, translates a value of a file digest via VersionEye API into the product details and prints out results. 
33 | 34 | ```bash 35 | veye_checker resolve ../jars -a "api-key" -c "confs/veye_checker_local.toml" 36 | VERSIONEYE_API_KEY="apitoken" veye_checker resolve ../jars 37 | veye_checker resolve ../jars -o resolve.csv -a "api-key" 38 | 39 | # only resolve binaries which are bigger than 5Kb, but smaller than 10Kb 40 | veye_checker resolve tests/fixtures/files/ --max-file-size=10000 --min-file-size=5000 41 | ``` 42 | 43 | configure which digest algorithms to use 44 | commandline flags for blocking algos: `no-md5, no-sha1, no-sha512` 45 | commandline options to overwrite list of file-extensions of a digest algos: `ext-md5, ext-sha1, ext-sha512` 46 | 47 | ```bash 48 | veye_checker resolve ../jars -a "api-key" --no-md5 --ext-sha1="whl,jar,tgz" 49 | ``` 50 | 51 | 52 | 53 | * **shas** - scans the target folder recursively and outputs digests of supported packagefiles: 54 | 55 | ```bash 56 | veye_checker shas ../jars/ 57 | veye_checker shas ../jars/ -o results.csv 58 | VERSIONEYE_CSV_SEPARATOR="," veye_checker shas temp/bins/ 59 | 60 | # only generate diggest values for files which size in the range (5Kb, 10Kb) 61 | veye_checker shas tests/fixtures/files/ --max-file-size=10000 --min-file-size=5000 62 | ``` 63 | 64 | It is possible to configure which digest algorithms to use. 65 | commandline flags for blocking algos: `no-md5, no-sha1, no-sha512` 66 | commandline options to overwrite list of file-extensions of a digest algos: `ext-md5, ext-sha1, ext-sha512` 67 | commandline flag `-v` / `--verbose` prints out scan configuration 68 | 69 | ```bash 70 | # dont use MD5 for next scan and update file extensions to use for SHA1 71 | veye_checker shas ../jars -a "api-key" -v --no-md5 --ext-sha1="whl,jar,tgz" 72 | ``` 73 | 74 | 75 | 76 | * **lookup** - fetches product details from VersionEye api by the SHA/digest value. 
77 | 78 | ```bash 79 | veye_checker lookup -a 80 | 81 | VERSIONEYE_API_KEY="apikey" veye_checker lookup 82 | ``` 83 | 84 | ## API keys 85 | 86 | All the commands ( *lookup*, *resolve*, etc ) requesting data from the [VersionEye api](https://www.versioneye.com/api/v2) require the API-key, 87 | which you can obtain from [your's profile page](https://www.versioneye.com/organisations/private/apikey). 88 | 89 | It's possible to specify the api-key 3 ways: 90 | 91 | * via environment variable `VERSIONEYE_API_KEY` 92 | 93 | ``` 94 | export VERSIONEYE_API_KEY="abcdef1234" veye_checker lookup SHA_VALUE_123 95 | ``` 96 | 97 | * add `veye_checker.toml` config file: 98 | 99 | ``` 100 | [api] 101 | key = "abcdef1234" 102 | ``` 103 | 104 | * specify explicitly via command parameter 105 | 106 | ``` 107 | veye_checker lookup SHA_VALUE_123 -a abcdef1234 108 | ``` 109 | 110 | ## Configuration via ENV variable 111 | 112 | It's possible to tweak a setting of the command-line tool with environmental variables, and all the variables follow a pattern: `VERSIONEYE_GROUPID_VARIABLEID`. 
113 | 114 | 115 | | full id | default value | description | 116 | |:---------------------:|---------------|----------------------------| 117 | | VERSIONEYE\_API\_KEY | None | specifies API key for the Versioneye API| 118 | | VERSIONEYE\_API\_HOST | www.versioneye.com | specifies custom host name for VersionEye API, useful when using hosted or enterprise version.| 119 | | VERSIONEYE\_API\_PATH | api/v2 | specifies URL path between the host and REST resource | 120 | | VERSIONEYE\_API\_PORT | None | specifies port number for API | 121 | | VERSIONEYE\_API\_SCHEME | https | specifies URI scheme | 122 | | VERSIONEYE\_CSV\_SEPARATOR| ; | overrides separator in output row, can be only single character| 123 | | VERSIONEYE\_CSV\_QUOTE | \" | what character to use for quoting, can be only single character | 124 | | VERSIONEYE\_CSV\_FLEXIBLE| false | should it skip empty fields at the end, accepted values 1, T, TRUE to activate; all other values equal to FALSE | 125 | | VERSIONEYE\_PROXY\_HOST| None | specifies proxy host | 126 | | VERSIONEYE\_PROXY\_PORT| None | specifies proxy port | 127 | | VERSIONEYE\_PROXY\_SCHEME| http | specifies proxy scheme | 128 | | VERSIONEYE_SCAN_MAX_FILE_SIZE | 64MB | max file size in bytes | 129 | | VERSIONEYE_SCAN_MIN_FILE_SIZE | 0 | min file size in bytes | 130 | 131 | NB! Use cmd-line flags or config-file to configure file extensions used by a digest algo; 132 | 133 | ## Configuration via config file 134 | 135 | One can also put all the permanent configurations for the `veye_checker` tool into a `veye_checker.toml` file. 136 | By default the tool will lookup configuration file in the working directory, but you can always specify 137 | location with the `-c` flag or `--config` option after the subcommand. 138 | 139 | All the fields in the configuration file are optional, and the commandline tool will use default values for unspecified fields. 
140 | 141 | ```toml 142 | # veye_checker.toml 143 | [api] 144 | key = "Your API key" 145 | host = "www.versioneye.com" 146 | path = "api/v2" 147 | port = 80 148 | scheme = "https" 149 | 150 | [csv] 151 | separator = "," 152 | quote = "'" 153 | flexible = 0 154 | 155 | [proxy] 156 | host = "127.0.0.1" 157 | port = 3128 158 | scheme = "http" 159 | 160 | # configure file extensions 161 | [digests.md5] 162 | blocked = false 163 | exts = ["whl", "gz"] 164 | 165 | # Dont use SHA1 166 | [digests.sha1] 167 | blocked = true 168 | 169 | # specify scan options 170 | [scan] 171 | max_file_size = 67108864 172 | min_file_size = 1024 173 | ``` 174 | 175 | ## Build 176 | 177 | ```bash 178 | cargo build 179 | ./target/debug/veye_checker 180 | 181 | # or simpler command 182 | cargo run 183 | 184 | # or running tests 185 | cargo test -- --test-threads=1 186 | 187 | # test only api-calls 188 | VERSIONEYE_API_KEY="APIKEY" cargo test --features "api" 189 | 190 | # or optimized production release 191 | cargo build --release 192 | ./target/release/veye-checker 193 | 194 | ``` 195 | 196 | ### TESTING 197 | 198 | * to run all the unit tests 199 | 200 | ```bash 201 | cargo test -- --test-threads=1 202 | ``` 203 | 204 | `--test-threads=1` is required for tests that are checking does reading configuration from ENV variables work; 205 | 206 | * to run integration test against API configs 207 | 208 | ```bash 209 | VERSIONEYE_API_KEY="your_api_key" cargo test --features="api" 210 | ``` 211 | 212 | * running integration tests against proxy 213 | 214 | 1. start squid proxy 215 | 216 | ```bash 217 | docker pull sameersbn/squid:latest 218 | 219 | docker run --name squid -d --restart=always \ 220 | --publish 3128:3128 \ 221 | --volume /veye-checker/temp/cache:/var/spool/squid3 \ 222 | sameersbn/squid:latest 223 | 224 | docker stop|run squid 225 | ``` 226 | 227 | 2. 
run tests 228 | 229 | ```bash 230 | cargo test test_proxy --features=proxy 231 | ``` 232 | 233 | 234 | * to run acceptance tests 235 | 236 | ```bash 237 | cd tests/acceptance 238 | 239 | # on *nix machines 240 | VERSIONEYE_API_KEY="your_api_key" ./tests.sh 241 | 242 | # on Macs 243 | VERSIONEYE_API_KEY="your_api_key" ./tests_osx.sh 244 | ``` 245 | 246 | 247 | ## Contributing 248 | 249 | It's opensource project and any kind of contribution is more than welcome. 250 | 251 | Here's simple guideline to preferable workflow: 252 | 253 | * open a issue 254 | * implement after it lands into milestones 255 | * write tests 256 | * update docs 257 | * make PR 258 | * review 259 | 260 | and your changes makes into next release 261 | 262 | 263 | 264 | -------------------------------------------------------------------------------- /changelog.md: -------------------------------------------------------------------------------- 1 | # Change Log 2 | All notable changes to this project will be documented in this file. 3 | 4 | The format is based on [Keep a Changelog](http://keepachangelog.com/) 5 | and this project adheres to [Semantic Versioning](http://semver.org/). 
6 | 7 | ## v0.2.0 - 2017-06-29 8 | ### Added 9 | 10 | - issue #19 - add options to change the range of file size for digestable files; 11 | - issue #18 - possible to block digest algorithms and specify file extensions; 12 | - add new cmd-line options: `no-md5, no-sha1, no-sha512, ext-md5, ext-sha1, ext-sha512` 13 | 14 | ### Fixed 15 | 16 | - issue #13 - handle cases when 2 or more shas are returned from API; 17 | - issue #20 - config file requires all the top-level categories specified; 18 | - issue #17 - product URL was using host of SAAS, not from configs; 19 | 20 | ### Updated 21 | 22 | - updated `sha2` from `0.4.2` to `0.6.0` 23 | - updated `base64` from `0.3.1` to `0.6.0` 24 | 25 | 26 | ## v0.1.0 - 2017-04-24 27 | ### Added 28 | 29 | - add `-c` flag to override default location of configuration file 30 | - add `shas` command to calculate checksums of the binaries 31 | - add `lookup` command to lookup a product details by a file digest 32 | - add `resolve` command to shazam package binary into details 33 | - add configs manager to read configuration from ENV vars or from the `veye_checker.toml` 34 | - add csv output writers 35 | - add release script for MS 36 | - add release scripts for Linux 37 | - add release scripts for OSx 38 | - make output CSV configurable, #11 39 | - add support for Python PYPI files, #6 40 | - add support for NPM files, #5 41 | - add support for proxy, #14 42 | 43 | ### Fixed 44 | 45 | - issue #12, error message was missing from output 46 | - issue #7, execution raised panic when API response didnt match schema 47 | - issue #1, show API errors in final output without stopping processing 48 | - temporary fix for configs_test, which sometimes fail due the fact the manipulating ENV vars may have read/delete conflicts. 
49 | - fix nuget lookup when its base64 includes `+, /, =` which are not URL safe characters; 50 | 51 | 52 | -------------------------------------------------------------------------------- /provision/ms/Vagrantfile: -------------------------------------------------------------------------------- 1 | Vagrant.configure("2") do |config| 2 | # WIN7 - IE11: https://developer.microsoft.com/en-us/microsoft-edge/tools/vms/ 3 | config.vm.box = "microsoft.ie/ie11.win7" 4 | config.vm.box_url = "file://IE11 - Win7.box" 5 | 6 | # Windows 10 Stable - MS Edge 7 | #config.vm.box = "microsoft.ie/msedge.win10stable" 8 | #config.vm.box_url = "file:///Users/timgluz/VirtualBox VMs/dev-msedge.box" 9 | 10 | config.vm.boot_timeout = 5000 11 | 12 | config.vm.guest = :windows 13 | 14 | config.vm.communicator = :winrm if provisioned? 15 | config.winrm.username = "IEUser" if provisioned? 16 | config.winrm.password = "Passw0rd!" if provisioned? 17 | config.winrm.timeout = 50000 if provisioned? 18 | config.winrm.retry_delay = 30 if provisioned? 19 | config.winrm.retry_limit = 1000 if provisioned? 20 | 21 | 22 | config.ssh.username = "IEUser" 23 | config.ssh.password = "Passw0rd!" 24 | config.ssh.insert_key = false 25 | 26 | config.vm.box_check_update = false 27 | 28 | config.vm.synced_folder ".", "/vagrant", disabled: true if not provisioned? 29 | config.vm.synced_folder "./ExtraFolder", "c:/ExtraFolder", create: false if provisioned? 
30 | 31 | config.vm.provider "virtualbox" do |vb| 32 | # Display the VirtualBox GUI when booting the machine 33 | vb.gui = true 34 | 35 | # Customize the amount of memory on the VM: 36 | vb.memory = "2048" 37 | end 38 | 39 | config.vm.provision "file", source: "./tools", destination: "c:/users/IEUser" 40 | config.vm.provision "winrm", type: "ie_box_automation" 41 | end 42 | 43 | 44 | end -------------------------------------------------------------------------------- /provision/ms/ie-box-automation-plugin.rb: -------------------------------------------------------------------------------- 1 | # -*- mode: ruby -*- 2 | # vi: set ft=ruby : 3 | 4 | ## 5 | # If you copy this file, dont't delete this comment. 6 | # This Vagrantfile was created by Daniel Menezes: 7 | # https://github.com/danielmenezesbr/modernie-winrm 8 | # E-mail: danielmenezes at gmail dot com 9 | ## 10 | 11 | require 'rubygems' 12 | require 'net/ssh' 13 | 14 | # TODO 15 | # ==== 16 | # Uses config.ssh in Net::SSH.start 17 | # test in win8/10 18 | # add activate (view desktop information) 19 | # use logger for debug 20 | 21 | 22 | # Function to check whether VM was already provisioned 23 | def provisioned?(vm_name='default', provider='virtualbox') 24 | File.exist?(".vagrant/machines/#{vm_name}/#{provider}/action_provision") 25 | end 26 | 27 | module LocalCommand 28 | 29 | class Config < Vagrant.plugin("2", :config) 30 | #attr_accessor :command 31 | end 32 | 33 | class MyPlugin < Vagrant.plugin("2") 34 | name "ie_box_automation" 35 | 36 | config(:ie_box_automation, :provisioner) do 37 | Config 38 | end 39 | 40 | provisioner(:ie_box_automation) do 41 | Provisioner 42 | end 43 | end 44 | 45 | class Provisioner < Vagrant.plugin("2", :provisioner) 46 | def provision 47 | #result = system "#{config.command}" 48 | begin 49 | ssh = Net::SSH.start("localhost", "IEUser", :password => "Passw0rd!", :port => 2222) 50 | 51 | puts "Disabling firewall..." 
52 | res = ssh.exec!("NetSh Advfirewall set allprofiles state off") 53 | #for debug 54 | #puts res 55 | 56 | puts "Changing network location..." 57 | res = ssh.exec!("./tools/NLMtool_staticlib.exe -setcategory private") 58 | #for debug 59 | #puts res 60 | 61 | puts "Turn off User Account Control..." 62 | res = ssh.exec!("cmd /c \"reg add HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows\\CurrentVersion\\Policies\\System /v EnableLUA /d 0 /t REG_DWORD /f /reg:64\"") 63 | 64 | puts "Creating link to config WinRM on Startup..." 65 | res = ssh.exec!("mv ./tools/ConfigWinRM.lnk \"/cygdrive/c/Users/IEUser/AppData/Roaming/Microsoft/Windows/Start Menu/Programs/Startup\"") 66 | #for debug 67 | #puts res 68 | 69 | puts 'Shutting down guest machine...' 70 | ssh.exec!("shutdown -t 0 -s -f") 71 | 72 | $done = false; 73 | while !$done do 74 | begin 75 | result = Vagrant::Util::Subprocess.execute( 76 | 'vagrant', 77 | 'status', 78 | :notify => [:stdout, :stderr], 79 | #:workdir => config.cwd, 80 | :env => {PATH: ENV["VAGRANT_OLD_ENV_PATH"]}, 81 | ) do |io_name, data| 82 | #@machine.env.ui.debug "[#{io_name}] #{data}" 83 | if data.include? "The VM is running" 84 | puts 'The VM is running... Waiting shutdown...' 85 | else 86 | $done = true 87 | puts 'The VM is not running. Next command should be vagrant up...' 88 | end 89 | end 90 | sleep(50) 91 | rescue Exception => e 92 | $done = true 93 | puts 'Exception...' 
94 | end 95 | end 96 | ssh.close 97 | rescue Exception => e 98 | puts "uncaught #{e} exception while handling connection: #{e.message}" 99 | end 100 | end 101 | end 102 | end 103 | -------------------------------------------------------------------------------- /provision/ms/setup_ms_box.ps1: -------------------------------------------------------------------------------- 1 | @echo off 2 | set WINRM_EXEC=call %SYSTEMROOT%\System32\winrm 3 | %WINRM_EXEC% quickconfig -q 4 | %WINRM_EXEC% set winrm/config/winrs @{MaxMemoryPerShellMB="300"} 5 | %WINRM_EXEC% set winrm/config @{MaxTimeoutms="1800000"} 6 | %WINRM_EXEC% set winrm/config/client/auth @{Basic="true"} 7 | %WINRM_EXEC% set winrm/config/service @{AllowUnencrypted="true"} 8 | %WINRM_EXEC% set winrm/config/service/auth @{Basic="true"} -------------------------------------------------------------------------------- /provision/osx/make_osx_release.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | REPO_URL="https://github.com/versioneye/veye-checker.git" 4 | RELEASE_BRANCH="master" 5 | 6 | WORK_DIR="${HOME}/Ideaprojects/veye-checker/provision/osx/veye-checker" 7 | TEST_DIR="${WORK_DIR}/tests/acceptance" 8 | 9 | RELEASE_DIR="${HOME}/Ideaprojects/releases" 10 | TIMESTAMP=$(date +"%s") 11 | 12 | if [[ -n "$RELEASE_VERSION" ]]; then 13 | RELEASE_VERSION="_build_${TIMESTAMP}" 14 | fi 15 | 16 | RELEASE_PATH="${RELEASE_DIR}/veye_checker_osx_${RELEASE_VERSION}" 17 | 18 | if [ ! -d "$WORK_DIR" ]; then 19 | git clone --branch="${RELEASE_BRANCH}" --single-branch ${REPO_URL} 20 | fi 21 | 22 | echo "Pulling latest code from master" 23 | cd ${WORK_DIR} 24 | git pull 25 | 26 | if [ ! -d "temp" ]; then 27 | echo "Add temp folder to keep test results" 28 | mkdir -p temp 29 | fi 30 | 31 | echo "Running unit-tests" 32 | #due the configs_test it must be single threaded to avoid conflicts in ENV var 33 | cargo test -- --test-threads=1 34 | if [ $? 
-ne 0 ]; then 35 | echo "Failed to pass unit tests" 36 | exit 37 | fi 38 | 39 | echo "Compiling debug version" 40 | cd ${WORK_DIR} 41 | cargo build 42 | 43 | #ps: files are ordered differently on OSx 44 | echo "Running acceptance tests against debug release..." 45 | cd ${TEST_DIR} 46 | export VERSIONEYE_BIN_PATH="../../target/debug/veye_checker" 47 | bash tests_osx.sh 48 | if [ $? -ne 0 ]; then 49 | echo "Failed to pass acceptance tests on debug release" 50 | exit 51 | fi 52 | 53 | echo "release binary into ${RELEASE_PATH}" 54 | cd ${WORK_DIR} 55 | cargo build --release 56 | cp target/release/veye_checker ${RELEASE_PATH} 57 | -------------------------------------------------------------------------------- /provision/x64/Vagrantfile: -------------------------------------------------------------------------------- 1 | Vagrant.configure("2") do |config| 2 | 3 | #configure virtualmachine 4 | config.vm.box = "bento/ubuntu-16.10" 5 | config.vm.synced_folder "../../../releases", "/media/releases" 6 | config.vm.post_up_message = "Welcome to VeyeChecker linux build image" 7 | config.vm.provision "shell", path: "setup_ubuntu_box.sh", privileged: true 8 | 9 | #build commands 10 | # register triggers 11 | config.trigger.after [:up, :resume], :stdout => true do 12 | 13 | unless ENV.has_key? 'VERSIONEYE_API_KEY' 14 | abort "VERSIONEYE_API_KEY is unspecified - required for tests" 15 | end 16 | 17 | unless ENV.has_key? 
'RELEASE_VERSION' 18 | p "#-- WARNING --------------------------------------------------" 19 | p "" 20 | p "RELEASE_VERSION is unspecified - will use unixstamp instead;" 21 | p "" 22 | p "--------------------------------------------------------------" 23 | 24 | sleep 3 25 | end 26 | 27 | info "Setting ENV vars for build" 28 | run_remote <<-SCRIPT 29 | echo \"export VERSIONEYE_API_KEY=#{ENV['VERSIONEYE_API_KEY']}\" >> ~/.profile 30 | echo \"export RELEASE_VERSION=#{ENV['RELEASE_VERSION']}\" >> ~/.profile 31 | SCRIPT 32 | 33 | info "Executing release command" 34 | run_remote "bash /vagrant/make_x86_release.sh" 35 | end 36 | 37 | end -------------------------------------------------------------------------------- /provision/x64/make_x86_release.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | REPO_URL="https://github.com/versioneye/veye-checker.git" 4 | RELEASE_BRANCH="master" 5 | WORK_DIR="/home/vagrant/veye-checker" 6 | TEST_DIR="${WORK_DIR}/tests/acceptance" 7 | 8 | RELEASE_DIR="/media/releases" 9 | TIMESTAMP="$(date +"%s")" 10 | 11 | if [[ -n "$RELEASE_VERSION" ]]; then 12 | RELEASE_VERSION="_build_${TIMESTAMP}" 13 | fi 14 | 15 | RELEASE_PATH="${RELEASE_DIR}/veye_checker_x86_64_${RELEASE_VERSION}" 16 | 17 | if [ ! -d "$WORK_DIR" ]; then 18 | git clone --branch=${RELEASE_BRANCH} --single-branch ${REPO_URL} 19 | fi 20 | 21 | echo "Pulling latest code from master" 22 | cd ${WORK_DIR} 23 | git pull 24 | 25 | if [ ! -d "temp" ]; then 26 | echo "Add temp folder to keep test results" 27 | mkdir -p temp 28 | fi 29 | 30 | 31 | echo "Running unit-tests" 32 | #due the configs_test it must be single threaded to avoid conflicts in ENV var 33 | cargo test -- --test-threads=1 34 | if [ $? -ne 0 ]; then 35 | echo "Failed to pass unit tests" 36 | exit 37 | fi 38 | 39 | echo "Compiling debug version" 40 | cd ${WORK_DIR} 41 | cargo build 42 | 43 | echo "Running acceptance tests against debug release..." 
44 | cd ${TEST_DIR} 45 | export VERSIONEYE_BIN_PATH="../../target/debug/veye_checker" 46 | bash tests.sh 47 | if [ $? -ne 0 ]; then 48 | echo "Failed to pass acceptance tests on debug release" 49 | exit 50 | fi 51 | 52 | echo "Compiling release version" 53 | cd ${WORK_DIR} 54 | cargo build --release 55 | 56 | #echo "Running acceptance tests against production release ..." 57 | #cd ${TEST_DIR} 58 | #export VERSIONEYE_BIN_PATH="../../target/release/veye_checker" 59 | #bash tests.sh 60 | #if [ $? -ne 0 ]; then 61 | # echo "Failed to pass acceptance tests on production release" 62 | # exit 63 | #fi 64 | 65 | 66 | echo "release binary into ${RELEASE_PATH}" 67 | cd ${WORK_DIR} 68 | cargo build --release 69 | cp target/release/veye_checker ${RELEASE_PATH} 70 | -------------------------------------------------------------------------------- /provision/x64/setup_ubuntu_box.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | #update package registry 4 | sudo apt-get update 5 | sudo apt-get install -y curl g++ git openssl libssl-dev pkg-config 6 | 7 | #install rust 8 | 9 | curl https://sh.rustup.rs -sSf | sh -s -- -y 10 | source $HOME/.cargo/env 11 | rustup toolchain install nightly 12 | rustup default nightly -------------------------------------------------------------------------------- /src/api.rs: -------------------------------------------------------------------------------- 1 | use std::io::{self, Error, ErrorKind}; 2 | 3 | use hyper; 4 | 5 | use hyper_tls::HttpsConnector; 6 | use url::Url; 7 | 8 | use configs::{ApiConfigs, Configs, ProxyConfigs}; 9 | use futures::executor::block_on; 10 | use hyper::body::to_bytes; 11 | use product; 12 | use serde_json; 13 | 14 | const HOST_URL: &'static str = "https://www.versioneye.com"; 15 | 16 | //it is used to build url to the product page (SAAS or Enterprise) 17 | pub fn to_product_url(api_confs: &ApiConfigs, lang: &str, prod_key: &str, version: &str) -> String { 
18 | let scheme = match api_confs.scheme.clone() { 19 | Some(val) => val, 20 | None => "http".to_string(), 21 | }; 22 | let host = match api_confs.host.clone() { 23 | Some(val) => val, 24 | None => HOST_URL.to_string(), 25 | }; 26 | 27 | let host_url = match api_confs.port.clone() { 28 | Some(port) => format!("{}://{}:{}", scheme, host, port), 29 | None => format!("{}://{}", scheme, host), 30 | }; 31 | 32 | format!("{}/{}/{}/{}", host_url, lang, prod_key, version) 33 | } 34 | 35 | //it's used to build API url 36 | fn configs_to_url(api_confs: &ApiConfigs, resource_path: &str) -> Result { 37 | let url_str = match api_confs.port { 38 | None => format!( 39 | "{}://{}/{}/{}", 40 | api_confs.scheme.clone().unwrap(), 41 | api_confs.host.clone().unwrap(), 42 | api_confs.path.clone().unwrap(), 43 | resource_path, 44 | ), 45 | Some(port) => format!( 46 | "{}://{}:{}/{}/{}", 47 | api_confs.scheme.clone().unwrap(), 48 | api_confs.host.clone().unwrap(), 49 | port, 50 | api_confs.path.clone().unwrap(), 51 | resource_path 52 | ), 53 | }; 54 | 55 | Url::parse(url_str.as_str()) 56 | } 57 | 58 | enum Client { 59 | Proxy( 60 | hyper::Client< 61 | hyper_proxy::ProxyConnector>, 62 | >, 63 | ), 64 | NoProxy(hyper::Client>), 65 | } 66 | 67 | impl Client { 68 | pub fn get(&self, uri: http::Uri) -> hyper::client::ResponseFuture { 69 | match self { 70 | Client::Proxy(client) => client.get(uri), 71 | Client::NoProxy(client) => client.get(uri), 72 | } 73 | } 74 | } 75 | 76 | fn request_json<'a>(uri: &Url, proxy_confs: &'a ProxyConfigs) -> Option { 77 | let connector = HttpsConnector::new(); 78 | let proxy_confs = proxy_confs.clone(); 79 | 80 | let client = if proxy_confs.is_complete() { 81 | let uri = if let Some(port) = proxy_confs.port { 82 | format!( 83 | "{}://{}:{}", 84 | proxy_confs.scheme.unwrap(), 85 | proxy_confs.host.unwrap(), 86 | port 87 | ) 88 | } else { 89 | format!( 90 | "{}://{}", 91 | proxy_confs.scheme.unwrap(), 92 | proxy_confs.host.unwrap() 93 | ) 94 | }; 95 | 96 | let 
uri = uri.parse().unwrap(); 97 | 98 | let proxy = hyper_proxy::Proxy::new(hyper_proxy::Intercept::All, uri); 99 | 100 | let proxy_connector = hyper_proxy::ProxyConnector::from_proxy(connector, proxy).unwrap(); 101 | 102 | Client::Proxy(hyper::Client::builder().build(proxy_connector)) 103 | } else { 104 | Client::NoProxy(hyper::Client::builder().build(connector)) 105 | }; 106 | 107 | let uri = uri.as_str().parse().unwrap(); 108 | 109 | let res = block_on(client.get(uri)).expect("Failed to fetch results from the url"); 110 | 111 | let response_bytes = block_on(to_bytes(res.into_body())).expect("Failed to read response body"); 112 | 113 | let response_string = 114 | String::from_utf8(response_bytes.to_vec()).expect("Failed to read response body"); 115 | 116 | Some(response_string) 117 | } 118 | 119 | pub fn fetch_product_details_by_sha( 120 | confs: &Configs, 121 | file_sha: &str, 122 | ) -> Result { 123 | let sha_res = fetch_product_by_sha(&confs, file_sha); 124 | match sha_res { 125 | Ok(m) => { 126 | let sha = m.sha.expect("No product sha from SHA result"); 127 | let product = m.product.expect("No product info from SHA result"); 128 | match fetch_product( 129 | &confs, 130 | &product.language, 131 | &product.prod_key, 132 | &product.version, 133 | ) { 134 | Ok(mut m) => { 135 | m.sha = Some(sha); 136 | Ok(m) 137 | } 138 | Err(e) => { 139 | println!("Failed to fetch product details for sha: {}", file_sha); 140 | Err(e) 141 | } 142 | } 143 | } 144 | Err(e) => Err(e), 145 | } 146 | } 147 | 148 | pub fn fetch_product_by_sha( 149 | confs: &Configs, 150 | sha: &str, 151 | ) -> Result { 152 | let api_confs = confs.api.clone(); 153 | let resource_path = format!("products/sha/{}", encode_sha(sha)); 154 | let mut resource_url = match configs_to_url(&api_confs, resource_path.as_str()) { 155 | Ok(the_url) => the_url, 156 | Err(_) => { 157 | return Err(Error::new( 158 | ErrorKind::InvalidData, 159 | "The values of API configs make up non-valid URL", 160 | )) 161 | } 162 | }; 
163 | 164 | //attach query params 165 | resource_url 166 | .query_pairs_mut() 167 | .clear() 168 | .append_pair("api_key", api_confs.key.clone().unwrap().as_str()); 169 | 170 | let json_txt = request_json(&resource_url, &confs.proxy); 171 | process_sha_response(json_txt) 172 | } 173 | 174 | //replaces base64 special characters with HTML safe percentage encoding 175 | //source: https://en.wikipedia.org/wiki/Base64#URL_applications 176 | pub fn encode_sha<'a>(sha: &'a str) -> String { 177 | let encoded_sha = sha.to_string(); 178 | 179 | encoded_sha 180 | .replace("+", "%2B") 181 | .replace("/", "%2F") 182 | .replace("=", "%3D") 183 | .trim() 184 | .to_string() 185 | } 186 | 187 | pub fn encode_prod_key<'b>(prod_key: &'b str) -> String { 188 | let encoded_prod_key = prod_key.to_string(); 189 | encoded_prod_key 190 | .replace(".", "~") 191 | .replace("/", ":") 192 | .trim() 193 | .to_string() 194 | } 195 | 196 | pub fn encode_language<'b>(lang: &'b str) -> String { 197 | let encoded_lang = lang.to_string(); 198 | encoded_lang 199 | .replace(".", "") 200 | .trim() 201 | .to_lowercase() 202 | .to_string() 203 | } 204 | 205 | pub fn fetch_product<'a>( 206 | confs: &Configs, 207 | lang: &str, 208 | prod_key: &str, 209 | version: &str, 210 | ) -> Result { 211 | let api_confs = confs.api.clone(); 212 | let encoded_prod_key = encode_prod_key(&prod_key); 213 | let encoded_lang = encode_language(lang); 214 | let resource_path = format!( 215 | "products/{}/{}", 216 | encoded_lang.clone(), 217 | encoded_prod_key.clone() 218 | ); 219 | let prod_url = to_product_url(&confs.api, encoded_lang.clone().as_str(), prod_key, version); 220 | 221 | let mut resource_url = match configs_to_url(&api_confs, resource_path.as_str()) { 222 | Ok(the_url) => the_url, 223 | Err(_) => { 224 | return Err(Error::new( 225 | ErrorKind::InvalidData, 226 | "The values of API configs make up non-valid URL", 227 | )) 228 | } 229 | }; 230 | 231 | //attach query params 232 | resource_url 233 | 
.query_pairs_mut() 234 | .clear() 235 | .append_pair("prod_version", version) 236 | .append_pair("api_key", api_confs.key.clone().unwrap().as_str()); 237 | 238 | let json_txt = request_json(&resource_url, &confs.proxy); 239 | process_product_response(json_txt, Some(prod_url)) 240 | } 241 | 242 | #[derive(Serialize, Deserialize, Debug)] 243 | struct ApiError { 244 | error: String, 245 | } 246 | 247 | #[derive(Serialize, Deserialize, Debug)] 248 | struct ShaItem { 249 | language: String, 250 | prod_key: String, 251 | version: String, 252 | sha_value: String, 253 | sha_method: String, 254 | prod_type: Option, 255 | group_id: Option, 256 | artifact_id: Option, 257 | classifier: Option, 258 | packaging: Option, 259 | } 260 | 261 | //-- helper functions 262 | pub fn process_sha_response(json_text: Option) -> Result { 263 | if json_text.is_none() { 264 | return Err(Error::new(ErrorKind::Other, "No response from API")); 265 | } 266 | 267 | let res: serde_json::Value = serde_json::from_str(json_text.unwrap().as_str())?; 268 | 269 | if res.is_object() && res.get("error").is_some() { 270 | let e = Error::new( 271 | ErrorKind::Other, 272 | r#"API rate limit reached. 
Go to https://www.versioneye.com and upgrade your 273 | subscription to a higher plan."#, 274 | ); 275 | 276 | return Err(e); 277 | } 278 | 279 | if !res.is_array() { 280 | let e = Error::new( 281 | ErrorKind::Other, 282 | "Unsupported SHA response - expected array", 283 | ); 284 | return Err(e); 285 | } 286 | 287 | let shas = res.as_array().unwrap(); 288 | if shas.len() == 0 { 289 | let e = Error::new(ErrorKind::Other, "No match for the SHA"); 290 | return Err(e); 291 | } 292 | 293 | let doc: ShaItem = serde_json::from_value(shas[0].clone()).unwrap(); 294 | let the_prod = product::Product { 295 | name: "".to_string(), 296 | language: doc.language, 297 | prod_key: doc.prod_key, 298 | version: doc.version, 299 | prod_type: doc.prod_type, 300 | }; 301 | 302 | let the_sha = product::ProductSHA { 303 | packaging: doc.packaging.unwrap_or("unknown".to_string()), 304 | method: doc.sha_method, 305 | value: doc.sha_value, 306 | filepath: None, 307 | }; 308 | 309 | Ok(product::ProductMatch::new(the_prod, the_sha)) 310 | } 311 | 312 | // converts the response of product endpoint into ProductMatch struct 313 | #[derive(Serialize, Deserialize, Debug)] 314 | struct ProductItem { 315 | name: String, 316 | language: String, 317 | prod_key: String, 318 | version: String, 319 | prod_type: String, 320 | } 321 | 322 | #[derive(Serialize, Deserialize, Debug)] 323 | struct LicenseItem { 324 | name: String, 325 | url: Option, 326 | } 327 | 328 | pub fn process_product_response( 329 | json_text: Option, 330 | prod_url: Option, 331 | ) -> Result { 332 | if json_text.is_none() { 333 | return Err(Error::new(ErrorKind::Other, "No response from API")); 334 | } 335 | 336 | let res: serde_json::Value = serde_json::from_str(&json_text.unwrap().as_str())?; 337 | if !res.is_object() { 338 | return Err(Error::new(ErrorKind::Other, "No product details")); 339 | } 340 | 341 | //if response includes error field in HTTP200 response 342 | // NB! 
it may include other errors than limit, but @Rob asked to see custom Limit error message 343 | if res.is_object() && res.get("error").is_some() { 344 | let e = Error::new( 345 | ErrorKind::Other, 346 | r#"API rate limit reached. Go to https://www.versioneye.com and upgrade your 347 | subscription to a higher plan."#, 348 | ); 349 | 350 | return Err(e); 351 | } 352 | 353 | let product_doc: ProductItem = serde_json::from_value(res.clone())?; 354 | let the_prod = product::Product { 355 | name: product_doc.name, 356 | language: product_doc.language, 357 | prod_key: product_doc.prod_key, 358 | version: product_doc.version, 359 | prod_type: Some(product_doc.prod_type), 360 | }; 361 | 362 | //extract license details 363 | let licenses = match res["licenses"].as_array() { 364 | Some(arr) => arr.iter().fold(vec![], |mut acc, ref x| { 365 | let lic_doc = x.as_object().unwrap(); 366 | acc.push(product::ProductLicense { 367 | name: lic_doc["name"].as_str().unwrap_or("unknown").to_string(), 368 | url: lic_doc["url"].as_str().unwrap_or("").to_string(), 369 | }); 370 | 371 | acc 372 | }), 373 | None => vec![], 374 | }; 375 | 376 | //count number of vulnerabilities 377 | let n_vulns = match res["security_vulnerabilities"].as_array() { 378 | Some(arr) => arr.len() as u32, 379 | None => 0 as u32, 380 | }; 381 | 382 | let the_match = product::ProductMatch { 383 | sha: None, 384 | product: Some(the_prod), 385 | url: prod_url, 386 | licenses: licenses, 387 | n_vulns: n_vulns, 388 | error: None, 389 | }; 390 | 391 | Ok(the_match) 392 | } 393 | -------------------------------------------------------------------------------- /src/bin/veye_checker.rs: -------------------------------------------------------------------------------- 1 | extern crate getopts; 2 | extern crate veye_checker; 3 | 4 | use getopts::Options; 5 | use std::path::PathBuf; 6 | use std::env; 7 | 8 | use veye_checker::{product, configs, tasks, digest_ext_table}; 9 | 10 | fn show_usage(program_name: &str, opts: Options) 
-> Result { 11 | let brief = format!(r#" 12 | usage: 13 | {} resolve DIRECTORY_TO_SCAN -o OUTPUT_FILE -a API_TOKEN 14 | {} shas DIRECTORY_PATH -o OUTPUT_FILE 15 | {} lookup FILE_SHA -a API_TOKEN 16 | "#, program_name, program_name, program_name 17 | ); 18 | 19 | println!("{}", opts.usage(&brief)); 20 | Ok(true) 21 | } 22 | 23 | fn main() { 24 | let args: Vec = env::args().collect(); 25 | let program_name = args[0].clone(); 26 | let mut opts = Options::new(); 27 | 28 | // register options flags 29 | opts.optopt("o", "output", "specifies the name of output file", "FILENAME"); 30 | opts.optopt("a", "auth", "specifies the api-key for API calls", "API_TOKEN"); 31 | opts.optopt("c", "config", "specifies the filepath to lookup configfile", "FILEPATH"); 32 | opts.optflag("h", "help", "shows usage help"); 33 | opts.optflag("v", "verbose", "shows verbose config details"); 34 | 35 | // options for algo settings 36 | opts.optflag("", "no-md5", "dont use MD5"); 37 | opts.optflag("", "no-sha1", "dont use SHA1"); 38 | opts.optflag("", "no-sha512", "don use SHA512"); 39 | 40 | opts.optopt("", "ext-md5", "list of file extensions to use for MD5", "CSV_OF_EXTs"); 41 | opts.optopt("", "ext-sha1", "list of file extensions to use for SHA1", "CSV_OF_EXTs"); 42 | opts.optopt("", "ext-sha512", "list of file extenstions to use for SHA512", "CSV_OF_EXTS"); 43 | 44 | // options for scan 45 | opts.optopt("", "max-file-size", "maximum file size in bytes", "BYTES"); 46 | opts.optopt("", "min-file-size", "minimum file size in bytes", "BYTES"); 47 | 48 | //parse command-line arguments 49 | let matches = match opts.parse(&args[1..]){ 50 | Ok(m) => { m }, 51 | Err(f) => { panic!(f.to_string()) } 52 | }; 53 | 54 | //did user asked to see help menu 55 | if matches.opt_present("h") { 56 | show_usage(&program_name, opts).unwrap(); 57 | return; 58 | } 59 | 60 | if matches.free.len() < 1 { 61 | println!("Error: missing the subcommand"); 62 | show_usage(&program_name, opts).unwrap(); 63 | return; 64 | } 65 
| 66 | let command = matches.free[0].clone(); 67 | let cmd_res = match command.as_ref() { 68 | "resolve" => do_resolve_task(&matches), 69 | "shas" => do_shas_task(&matches), 70 | "lookup" => do_lookup_task(&matches), 71 | _ => show_usage(&program_name, opts) 72 | }; 73 | 74 | print_cmd_result(cmd_res); 75 | } 76 | 77 | fn do_resolve_task(matches: &getopts::Matches) -> Result { 78 | let dir_txt = if matches.free.len() != 2 { 79 | panic!("resolve tasks requires target folder".to_string()); 80 | } else { 81 | matches.free[1].clone() 82 | }; 83 | 84 | let mut global_configs = configs::read_configs(matches.opt_str("c")); 85 | 86 | //override global configs when use attached commandline key 87 | if let Some(new_api_key) = matches.opt_str("a") { 88 | global_configs.api.key = Some( new_api_key ); 89 | }; 90 | 91 | if let Some(max_size_txt) = matches.opt_str("max-file-size") { 92 | global_configs.scan.max_file_size = max_size_txt.parse::().ok(); 93 | } 94 | 95 | if let Some(min_size_txt) = matches.opt_str("min-file-size") { 96 | global_configs.scan.min_file_size = min_size_txt.parse::().ok(); 97 | } 98 | 99 | if global_configs.api.key.is_none() { 100 | panic!( 101 | "Missing API key: SET env var VERSIONEYE_API_KEY, or use -a param, or use veye_checker.toml" 102 | ); 103 | }; 104 | 105 | // execute command pipeline 106 | let mut ext_table = digest_ext_table::DigestExtTable::default(); 107 | add_matches_into_ext_table(&mut ext_table, matches); 108 | if matches.opt_present("verbose") { 109 | println!("Digest configuration:\n {:?}", &ext_table); 110 | } 111 | 112 | 113 | let dir = PathBuf::from(&dir_txt); 114 | let (sha_ch, h1) = tasks::start_path_scanner(ext_table, dir, global_configs.scan.clone()); 115 | let (product_ch, h2) = tasks::start_sha_fetcher(global_configs.clone(), sha_ch); 116 | let h3 = match matches.opt_str("o") { 117 | Some(out_path) => { 118 | let out_path = PathBuf::from(out_path); 119 | tasks::start_product_csv_writer(out_path, global_configs.csv.clone(), 
product_ch) 120 | }, 121 | None => tasks::start_product_stdio_writer(global_configs.csv.clone(), product_ch) 122 | }; 123 | 124 | h1.join().expect("resolve_task: failed to finish scan task").unwrap(); 125 | h2.join().expect("resolve_task: failed to finish SHA fetcher task").unwrap(); 126 | h3.join().expect("resolve_task: failed to dump all the products into output").unwrap(); 127 | 128 | Ok(true) 129 | 130 | } 131 | 132 | 133 | fn do_shas_task(matches: &getopts::Matches) -> Result { 134 | let mut global_configs = configs::read_configs(matches.opt_str("c")); 135 | 136 | //extract input arguments 137 | let dir_txt = if matches.free.len() != 2 { 138 | panic!("scan command misses a path to folder".to_string()); 139 | } else { 140 | matches.free[1].clone() 141 | }; 142 | 143 | if let Some(max_size_txt) = matches.opt_str("max-file-size") { 144 | global_configs.scan.max_file_size = max_size_txt.parse::().ok(); 145 | } 146 | 147 | if let Some(min_size_txt) = matches.opt_str("min-file-size") { 148 | global_configs.scan.min_file_size = min_size_txt.parse::().ok(); 149 | } 150 | 151 | let mut ext_table = digest_ext_table::DigestExtTable::default(); 152 | add_matches_into_ext_table(&mut ext_table, matches); 153 | if matches.opt_present("verbose") { 154 | println!("Digest configuration:\n {:?}", &ext_table); 155 | } 156 | 157 | // start processes 158 | let dir = PathBuf::from(&dir_txt); 159 | let (sha_ch, h1) = tasks::start_path_scanner(ext_table, dir, global_configs.scan.clone()); 160 | let h2 = match matches.opt_str("o") { 161 | Some(outfile_path) => { 162 | let outpath = PathBuf::from(&outfile_path); 163 | tasks::start_sha_csv_writer(outpath, global_configs.csv.clone(), sha_ch) 164 | }, 165 | None => tasks::start_sha_stdio_writer(global_configs.csv.clone(), sha_ch) 166 | 167 | }; 168 | 169 | h1.join().expect("shas: failed to scan file digests").unwrap(); 170 | h2.join().expect("shas: failed to print results into output").unwrap(); 171 | 172 | Ok(true) 173 | } 174 | 175 | fn 
do_lookup_task(matches: &getopts::Matches) -> Result { 176 | 177 | let file_sha = if matches.free.len() != 2 { 178 | panic!("lookup command misses SHA-code"); 179 | } else { 180 | matches.free[1].clone() 181 | }; 182 | 183 | let mut global_configs = configs::read_configs(matches.opt_str("c")); 184 | //override api key when it was specified via -a flag 185 | if global_configs.api.key.is_none() && matches.opt_str("a").is_some() { 186 | global_configs.api.key = matches.opt_str("a") 187 | }; 188 | 189 | if global_configs.api.key.is_none() { 190 | panic!( 191 | "Missing API key: SET env var VERSIONEYE_API_KEY, or use -a param, or use veye_checker.toml" 192 | ); 193 | }; 194 | 195 | let shas = vec![ 196 | product::ProductSHA::from_sha(file_sha.clone().to_string()) 197 | ]; 198 | let (sha_ch, h1) = tasks::start_sha_publisher(shas); 199 | let (prod_ch, h2) = tasks::start_sha_fetcher(global_configs.clone(), sha_ch); 200 | let h3 = match matches.opt_str("o") { 201 | Some(outfile_path) => { 202 | let outpath = PathBuf::from(&outfile_path); 203 | tasks::start_product_csv_writer(outpath, global_configs.csv.clone(), prod_ch) 204 | }, 205 | None => tasks::start_product_stdio_writer(global_configs.csv.clone(), prod_ch) 206 | }; 207 | 208 | h1.join().expect("lookup: failed to prepare sha value for request").unwrap(); 209 | h2.join().expect("lookup: failed to fetch product details by sha value").unwrap(); 210 | h3.join().expect("lookup: failed to output product details").unwrap(); 211 | 212 | Ok(true) 213 | } 214 | 215 | fn add_matches_into_ext_table ( 216 | ext_table: &mut digest_ext_table::DigestExtTable, matches: &getopts::Matches 217 | ){ 218 | // block algorithms when user attached no flags 219 | if matches.opt_present("no-md5") { 220 | ext_table.block(digest_ext_table::DigestAlgo::Md5); 221 | } 222 | 223 | if matches.opt_present("no-sha1") { 224 | ext_table.block(digest_ext_table::DigestAlgo::Sha1); 225 | } 226 | 227 | if matches.opt_present("no-sha512") { 228 | 
ext_table.block(digest_ext_table::DigestAlgo::Sha512); 229 | } 230 | 231 | // overwrite file extensions 232 | if let Some(ext_txt) = matches.opt_str("ext-md5") { 233 | let exts: Vec = ext_txt.split(',').map(|s| s.to_string() ).collect(); 234 | 235 | ext_table.clear(digest_ext_table::DigestAlgo::Md5); 236 | ext_table.add_many(digest_ext_table::DigestAlgo::Md5, exts); 237 | } 238 | 239 | if let Some(ext_txt) = matches.opt_str("ext-sha1") { 240 | let exts: Vec = ext_txt.split(',').map(|s| s.to_string() ).collect(); 241 | 242 | ext_table.clear(digest_ext_table::DigestAlgo::Sha1); 243 | ext_table.add_many(digest_ext_table::DigestAlgo::Sha1, exts); 244 | } 245 | 246 | if let Some(ext_txt) = matches.opt_str("ext-sha512") { 247 | let exts: Vec = ext_txt.split(',').map(|s| s.to_string() ).collect(); 248 | 249 | ext_table.clear(digest_ext_table::DigestAlgo::Sha512); 250 | ext_table.add_many(digest_ext_table::DigestAlgo::Sha512, exts); 251 | } 252 | } 253 | 254 | 255 | fn print_cmd_result(cmd_res: Result){ 256 | match cmd_res { 257 | Ok(_) => println!("Done!"), 258 | Err(e) => println!("Failed to finish the task: {}", e) 259 | }; 260 | } 261 | -------------------------------------------------------------------------------- /src/checker.rs: -------------------------------------------------------------------------------- 1 | use sha1::{Sha1}; 2 | use sha2::{Sha512, Digest}; 3 | use base64; 4 | use md5; 5 | 6 | use std::io::Error; 7 | use std::io::prelude::*; 8 | use std::path::Path; 9 | use std::fs::File; 10 | 11 | use product::ProductSHA; 12 | use digest_ext_table::DigestExtTable; 13 | 14 | 15 | pub fn digest_sha1(filepath: &Path) -> Result { 16 | let mut f = File::open(filepath).ok().expect("Failed to read file"); 17 | let mut buffer = Vec::new(); 18 | let mut hasher = Sha1::new(); 19 | 20 | f.read_to_end(&mut buffer).unwrap(); 21 | hasher.update(&buffer); 22 | let sha_val = hasher.digest().to_string(); 23 | Ok(sha_val) 24 | } 25 | 26 | pub fn digest_sha512b64(filepath: 
&Path) -> Result { 27 | let mut f = File::open(filepath).ok().expect("Failed to read file"); 28 | let mut buffer = Vec::new(); 29 | let mut hasher = Sha512::new(); 30 | 31 | f.read_to_end(&mut buffer).unwrap(); 32 | hasher.input(& buffer); 33 | 34 | let sha_val = base64::encode(&hasher.result()).to_string(); 35 | Ok(sha_val) 36 | } 37 | 38 | pub fn digest_md5(filepath: &Path) -> Result { 39 | let mut f = File::open(filepath).ok().expect("Failed to open python package for digest"); 40 | let mut buffer = Vec::new(); 41 | 42 | f.read_to_end(&mut buffer).expect("Failed to read python package into buffer"); 43 | let md5_val = md5::compute(buffer); 44 | Ok(format!("{:x}", md5_val)) 45 | } 46 | 47 | // founds the right encoder based matching columns in DigestExtTable 48 | // returns None when filetype is unsupported, otherwise list of all matched algos 49 | pub fn digest_file( 50 | ext_table: &DigestExtTable, filepath: &Path 51 | ) -> Option> { 52 | if filepath.is_dir(){ return None; } 53 | 54 | let opt_ext = filepath.extension(); 55 | if opt_ext.is_none() { return None; } //when hidden file or file has no extensions 56 | 57 | let file_ext = opt_ext.unwrap().to_str().unwrap_or(""); 58 | let path_txt = filepath.to_str().unwrap_or("").to_string(); 59 | let mut shas: Vec = Vec::new(); 60 | 61 | if ext_table.is_md5(file_ext.to_string()) { 62 | if let Some(md5_val) = digest_md5(filepath).ok() { 63 | shas.push(ProductSHA { 64 | packaging: "".to_string(), 65 | method: "md5".to_string(), 66 | value: md5_val, 67 | filepath: Some(path_txt.clone()) 68 | }); 69 | } 70 | } 71 | 72 | if ext_table.is_sha1(file_ext.to_string()) { 73 | if let Some(sha_val) = digest_sha1(filepath).ok() { 74 | shas.push(ProductSHA { 75 | packaging: "".to_string(), 76 | method: "sha1".to_string(), 77 | value: sha_val, 78 | filepath: Some(path_txt.clone()) 79 | }); 80 | } 81 | } 82 | 83 | if ext_table.is_sha512(file_ext.to_string()) { 84 | if let Some(sha_val) = digest_sha512b64(filepath).ok() { 85 | 
shas.push(ProductSHA { 86 | packaging: "".to_string(), 87 | method: "sha512".to_string(), 88 | value: sha_val, 89 | filepath: Some(path_txt) 90 | }); 91 | } 92 | } 93 | 94 | if shas.len() > 0 { 95 | Some(shas) 96 | } else { 97 | None 98 | } 99 | 100 | } 101 | 102 | 103 | 104 | -------------------------------------------------------------------------------- /src/configs.rs: -------------------------------------------------------------------------------- 1 | use std::env; 2 | use regex::Regex; 3 | use std::default::{Default}; 4 | use std::io::{Read, Error, ErrorKind}; 5 | use std::path::PathBuf; 6 | use std::fs::File; 7 | use toml; 8 | 9 | use digest_ext_table::{DigestAlgo, DigestExtTable}; 10 | 11 | 12 | pub static DEFAULT_MAX_SIZE:u64 = 64 * 1024 * 1024; // 64MB 13 | 14 | #[derive(Serialize, Deserialize, Debug, Clone)] 15 | pub struct ApiConfigs { 16 | pub host: Option, 17 | pub path: Option, 18 | pub key: Option, 19 | pub port: Option, 20 | pub scheme: Option 21 | } 22 | 23 | impl ApiConfigs { 24 | //copies self field values into target only it's not None value 25 | fn merge_to(&self, target: &mut ApiConfigs){ 26 | if self.host.is_some() { target.host = self.host.clone(); } 27 | if self.path.is_some() { target.path = self.path.clone(); } 28 | if self.key.is_some() { target.key = self.key.clone(); } 29 | if self.port.is_some() { target.port = self.port.clone(); } 30 | if self.scheme.is_some() { target.scheme = self.scheme.clone(); } 31 | } 32 | } 33 | 34 | impl Default for ApiConfigs { 35 | fn default() -> ApiConfigs { 36 | ApiConfigs { 37 | host: Some( "www.versioneye.com".to_string() ), 38 | path: Some("api/v2".to_string()), 39 | key: None, 40 | port: None, 41 | scheme: Some("https".to_string()) 42 | } 43 | } 44 | } 45 | 46 | //-- CSVConfigs --------------------------------------------------------------- 47 | #[derive(Serialize, Deserialize, Debug, Clone)] 48 | pub struct CSVConfigs { 49 | pub separator: Option, 50 | pub quote: Option, // which character to use 
for quoted string 51 | pub flexible: Option, //doesnt include empty fields. None = False, only Some(true) is true 52 | } 53 | 54 | impl CSVConfigs { 55 | //copies fields values into target only if it's not None value and overwrites existing value 56 | fn merge_to(&self, target: &mut CSVConfigs){ 57 | if self.separator.is_some() { target.separator = self.separator.clone(); } 58 | if self.quote.is_some() { target.quote = self.quote.clone(); } 59 | if self.flexible.is_some() { target.flexible = self.flexible.clone(); } 60 | } 61 | } 62 | 63 | impl Default for CSVConfigs { 64 | fn default() -> CSVConfigs { 65 | CSVConfigs { 66 | separator: Some(";".to_string()), 67 | quote: Some("\"".to_string()), 68 | flexible: Some(false) 69 | } 70 | } 71 | } 72 | 73 | 74 | //-- ProxyConfigs 75 | #[derive(Serialize, Deserialize, Debug, Clone)] 76 | pub struct ProxyConfigs { 77 | pub host: Option, 78 | pub port: Option, 79 | pub scheme: Option 80 | } 81 | 82 | impl ProxyConfigs { 83 | //copies fields values into target struct only if has Some value 84 | fn merge_to(&self, target: &mut ProxyConfigs){ 85 | if self.host.is_some() { target.host = self.host.clone(); } 86 | if self.port.is_some() { target.port = self.port.clone(); } 87 | if self.scheme.is_some() { target.scheme = self.scheme.clone(); } 88 | } 89 | 90 | //checks does it have all the required fields to use it 91 | pub fn is_complete(&self) -> bool { 92 | self.host.is_some() && self.port.is_some() 93 | } 94 | } 95 | 96 | impl Default for ProxyConfigs { 97 | fn default() -> ProxyConfigs { 98 | ProxyConfigs { 99 | host: None, 100 | port: None, 101 | scheme: None 102 | } 103 | } 104 | } 105 | 106 | //-- Configs for Digest -------------------------------------------------------- 107 | 108 | #[derive(Serialize, Deserialize, Debug, Clone)] 109 | pub struct DigestConfigItem { 110 | pub blocked: Option, 111 | pub exts: Option> 112 | } 113 | 114 | impl DigestConfigItem { 115 | pub fn new(blocked: bool, exts: Vec) -> DigestConfigItem { 
116 | DigestConfigItem { 117 | blocked: Some(blocked), 118 | exts: Some(exts) 119 | } 120 | } 121 | } 122 | 123 | #[derive(Serialize, Deserialize, Debug, Clone)] 124 | pub struct DigestConfigs { 125 | pub md5: Option, 126 | pub sha1: Option, 127 | pub sha512: Option 128 | } 129 | 130 | impl DigestConfigs { 131 | pub fn new( 132 | md5: Option, sha1: Option, sha512: Option 133 | ) -> DigestConfigs { 134 | DigestConfigs { 135 | md5: md5, 136 | sha1: sha1, 137 | sha512: sha512 138 | } 139 | } 140 | 141 | // turns DigestConfigs into DigestExtTable 142 | pub fn into_digest_ext_table(&self) -> DigestExtTable { 143 | let mut ext_table = DigestExtTable::default(); 144 | 145 | if let Some(md5_confs) = self.md5.to_owned() { 146 | self.insert_algo_confs(&mut ext_table, DigestAlgo::Md5, &md5_confs ) 147 | } 148 | 149 | if let Some(sha1_confs) = self.sha1.to_owned() { 150 | self.insert_algo_confs(&mut ext_table, DigestAlgo::Sha1, &sha1_confs ) 151 | } 152 | 153 | if let Some(sha512_confs) = self.sha512.to_owned() { 154 | self.insert_algo_confs(&mut ext_table, DigestAlgo::Sha512, &sha512_confs ) 155 | } 156 | 157 | ext_table 158 | } 159 | 160 | fn insert_algo_confs(&self, ext_table: &mut DigestExtTable, algo: DigestAlgo, config_item: &DigestConfigItem){ 161 | 162 | //add algorithm into blocked list only if it blocked fields is specified and equals true 163 | if let Some(is_blocked) = config_item.blocked { 164 | if is_blocked == true { 165 | ext_table.block(algo); 166 | return; //there's no point to insert extensions for blocked items 167 | } 168 | } 169 | 170 | if let Some(exts) = config_item.exts.to_owned() { 171 | ext_table.clear(algo); 172 | ext_table.add_many(algo, exts); 173 | } 174 | 175 | } 176 | } 177 | 178 | 179 | //-- Scan configs ------------------------------------------------------------- 180 | // used to limit file sizes 181 | #[derive(Serialize, Deserialize, Debug, Clone)] 182 | pub struct ScanConfigs { 183 | pub max_file_size: Option, 184 | pub min_file_size: 
Option 185 | } 186 | 187 | impl ScanConfigs { 188 | 189 | //copies fields values into target struct only if has Some value 190 | fn merge_to(&self, target: &mut ScanConfigs){ 191 | if let Some(new_max_size) = self.max_file_size { 192 | target.max_file_size = Some( new_max_size ); 193 | } 194 | 195 | if let Some(new_min_size) = self.min_file_size { 196 | target.min_file_size = Some( new_min_size ); 197 | } 198 | 199 | } 200 | 201 | } 202 | 203 | impl Default for ScanConfigs { 204 | fn default() -> ScanConfigs { 205 | ScanConfigs { 206 | max_file_size: Some(DEFAULT_MAX_SIZE), 207 | min_file_size: Some(0) 208 | } 209 | } 210 | } 211 | 212 | //-- Configs ------------------------------------------------------------------ 213 | 214 | #[derive(Serialize, Deserialize, Debug, Clone)] 215 | pub struct Configs { 216 | pub api: ApiConfigs, 217 | pub csv: CSVConfigs, 218 | pub proxy: ProxyConfigs, 219 | pub digests: DigestExtTable, 220 | pub scan: ScanConfigs 221 | } 222 | 223 | impl Configs { 224 | fn merge_to(&self, target: &mut Configs) { 225 | self.api.merge_to(&mut target.api); 226 | self.csv.merge_to(&mut target.csv); 227 | self.proxy.merge_to(&mut target.proxy); 228 | self.scan.merge_to(&mut target.scan); 229 | 230 | target.digests = self.digests.clone(); 231 | 232 | } 233 | } 234 | 235 | impl Default for Configs { 236 | fn default() -> Configs { 237 | Configs { 238 | api: ApiConfigs::default(), 239 | csv: CSVConfigs::default(), 240 | proxy: ProxyConfigs::default(), 241 | digests: DigestExtTable::default(), 242 | scan: ScanConfigs::default() 243 | } 244 | } 245 | } 246 | 247 | 248 | pub fn read_configs(filepath: Option) -> Configs { 249 | let conf_filepath = filepath.unwrap_or("veye_checker.toml".to_string()); 250 | let conf_file = PathBuf::from(conf_filepath.clone()); 251 | let mut confs = Configs::default(); 252 | 253 | //all every config reader overwrites previous values 254 | match read_configs_from_toml(&conf_file) { 255 | Ok(toml_confs) => toml_confs.merge_to(&mut 
confs), 256 | Err(_) => () 257 | }; 258 | 259 | match read_configs_from_env() { 260 | Ok(env_confs) => env_confs.merge_to(&mut confs), 261 | Err(_) => () 262 | }; 263 | 264 | confs 265 | } 266 | 267 | pub fn read_configs_from_env() -> Result { 268 | let re_api_key = Regex::new(r"\AVERSIONEYE_API_(\w+)\z").unwrap(); 269 | let re_csv_key = Regex::new(r"\AVERSIONEYE_CSV_(\w+)\z").unwrap(); 270 | let re_proxy_key = Regex::new(r"\AVERSIONEYE_PROXY_(\w+)\z").unwrap(); 271 | let re_scan_key = Regex::new(r"\AVERSIONEYE_SCAN_(\w+)\z").unwrap(); 272 | 273 | let mut configs = Configs::default(); 274 | 275 | for (key, val) in env::vars() { 276 | 277 | // read API configs 278 | if let Some(m) = re_api_key.captures(&key) { 279 | let api_val = val.clone(); 280 | 281 | match m.get(1).unwrap().as_str() { 282 | "KEY" => configs.api.key = Some(api_val), 283 | "HOST" => configs.api.host = Some(api_val), 284 | "PORT" => configs.api.port = api_val.parse::().ok(), 285 | "PATH" => configs.api.path = Some(api_val), 286 | "SCHEME" => configs.api.scheme = Some(api_val), 287 | _ => () 288 | } 289 | }; 290 | 291 | //read csv configs 292 | if let Some(m) = re_csv_key.captures(&key) { 293 | let csv_val = val.clone(); 294 | 295 | match m.get(1).unwrap().as_str() { 296 | "SEPARATOR" => configs.csv.separator = Some(csv_val), 297 | "QUOTE" => configs.csv.quote = Some(csv_val), 298 | "FLEXIBLE" => { 299 | let flex_val = csv_val.clone().to_string().to_lowercase(); 300 | let is_flexible = match flex_val.as_str() { 301 | "1" => true, 302 | "t" => true, 303 | "true" => true, 304 | _ => false 305 | }; 306 | 307 | configs.csv.flexible = Some(is_flexible) 308 | }, 309 | _ => () //ignore unsupported csv keys 310 | } 311 | }; 312 | 313 | //read proxy configs 314 | if let Some(m) = re_proxy_key.captures(&key) { 315 | let proxy_val = val.clone(); 316 | 317 | match m.get(1).unwrap().as_str() { 318 | "HOST" => configs.proxy.host = Some(proxy_val), 319 | "PORT" => configs.proxy.port = proxy_val.parse::().ok(), 320 
| "SCHEME" => configs.proxy.scheme = Some(proxy_val), 321 | _ => () 322 | } 323 | } 324 | 325 | //read scan configs 326 | if let Some(m) = re_scan_key.captures(&key){ 327 | let scan_val = val.clone(); 328 | 329 | match m.get(1).unwrap().as_str() { 330 | "MAX_FILE_SIZE" => configs.scan.max_file_size = scan_val.parse::().ok(), 331 | "MIN_FILE_SIZE" => configs.scan.min_file_size = scan_val.parse::().ok(), 332 | _ => () 333 | } 334 | } 335 | 336 | } 337 | 338 | Ok(configs) 339 | } 340 | 341 | #[derive(Deserialize, Debug)] 342 | struct TomlConfigs { 343 | api: Option, 344 | csv: Option, 345 | proxy: Option, 346 | digests: Option, 347 | scan: Option 348 | } 349 | 350 | impl TomlConfigs { 351 | 352 | //move optional values into Configs structure 353 | fn into_configs(&self) -> Configs { 354 | let mut confs = Configs::default(); 355 | 356 | //TODO: how to get rid of those clone()'s 357 | if let Some(toml_api) = self.api.clone() { 358 | confs.api = toml_api; 359 | } 360 | 361 | if let Some(toml_csv) = self.csv.clone() { 362 | confs.csv = toml_csv; 363 | } 364 | 365 | if let Some(toml_proxy) = self.proxy.clone() { 366 | confs.proxy = toml_proxy; 367 | } 368 | 369 | if let Some(toml_digests) = self.digests.clone() { 370 | confs.digests = toml_digests.into_digest_ext_table(); 371 | } 372 | 373 | if let Some(toml_scan) = self.scan.clone() { 374 | confs.scan = toml_scan; 375 | } 376 | 377 | confs.clone() 378 | } 379 | } 380 | 381 | pub fn read_configs_from_toml(file_path: &PathBuf) -> Result { 382 | let mut toml_file = File::open(file_path)?; 383 | let mut toml_txt = String::new(); 384 | toml_file.read_to_string(&mut toml_txt)?; 385 | 386 | match toml::from_str::(toml_txt.as_str()) { 387 | Ok(toml_configs) => Ok(toml_configs.into_configs()), 388 | Err(_) => { 389 | Err( 390 | Error::new( 391 | ErrorKind::InvalidData, 392 | format!("Failed to extract config data from TOML {:?}", file_path.as_os_str()) 393 | ) 394 | ) 395 | } 396 | 397 | } 398 | } 399 | 400 | 
-------------------------------------------------------------------------------- /src/digest_ext_table.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashSet; 2 | use std::fmt; 3 | 4 | #[derive(Hash, Eq, PartialEq, Debug, Copy, Clone, Serialize, Deserialize)] 5 | pub enum DigestAlgo { 6 | Md5, 7 | Sha1, 8 | Sha512 //nuget: Sha512 finalized with Base64 9 | } 10 | 11 | #[derive(Serialize, Deserialize, Clone)] 12 | pub struct DigestExtTable { 13 | md5: HashSet, 14 | sha1: HashSet, 15 | sha512: HashSet, 16 | blocked: HashSet 17 | 18 | } 19 | 20 | impl DigestExtTable { 21 | pub fn is_blocked(&self, algo: DigestAlgo) -> bool { 22 | self.blocked.contains(&algo) 23 | } 24 | 25 | pub fn is_md5(&self, file_ext: String) -> bool { 26 | !self.is_blocked(DigestAlgo::Md5) && self.md5.contains(&file_ext) 27 | } 28 | 29 | pub fn is_sha1(&self, file_ext: String) -> bool { 30 | !self.is_blocked(DigestAlgo::Sha1) && self.sha1.contains(&file_ext) 31 | } 32 | 33 | pub fn is_sha512(&self, file_ext: String) -> bool { 34 | !self.is_blocked(DigestAlgo::Sha512) && self.sha512.contains(&file_ext) 35 | } 36 | 37 | pub fn add(&mut self, algo: DigestAlgo, file_ext: String) -> bool { 38 | match algo { 39 | DigestAlgo::Md5 => self.md5.insert(file_ext), 40 | DigestAlgo::Sha1 => self.sha1.insert(file_ext), 41 | DigestAlgo::Sha512 => self.sha512.insert(file_ext) 42 | } 43 | } 44 | 45 | pub fn clear(&mut self, algo: DigestAlgo) -> bool { 46 | match algo { 47 | DigestAlgo::Md5 => { 48 | self.md5.clear(); 49 | self.md5.is_empty() 50 | }, 51 | DigestAlgo::Sha1 => { 52 | self.sha1.clear(); 53 | self.sha1.is_empty() 54 | }, 55 | DigestAlgo::Sha512 => { 56 | self.sha512.clear(); 57 | self.sha512.is_empty() 58 | } 59 | } 60 | } 61 | 62 | pub fn add_many(&mut self, algo: DigestAlgo, file_exts: Vec) { 63 | 64 | for ext in &file_exts { 65 | self.add(algo, ext.to_string()); 66 | } 67 | } 68 | 69 | pub fn block(&mut self, algo: DigestAlgo) -> bool { 
70 | self.blocked.insert(algo) 71 | } 72 | 73 | pub fn swipe(&mut self) -> bool { 74 | self.blocked.clear(); 75 | self.md5.clear(); 76 | self.sha1.clear(); 77 | self.sha512.clear(); 78 | 79 | self.blocked.is_empty() && self.md5.is_empty() && self.sha1.is_empty() 80 | } 81 | } 82 | 83 | impl Default for DigestExtTable { 84 | 85 | fn default() -> DigestExtTable { 86 | let mut md5_exts = HashSet::new(); 87 | md5_exts.insert("gz".to_string()); 88 | md5_exts.insert("whl".to_string()); 89 | 90 | let mut sha1_exts = HashSet::new(); 91 | sha1_exts.insert("jar".to_string()); 92 | sha1_exts.insert("tgz".to_string()); 93 | 94 | let mut sha512_exts = HashSet::new(); 95 | sha512_exts.insert("nupkg".to_string()); 96 | 97 | DigestExtTable{ 98 | md5: md5_exts, 99 | sha1: sha1_exts, 100 | sha512: sha512_exts, 101 | blocked: HashSet::new() 102 | } 103 | } 104 | } 105 | 106 | impl fmt::Debug for DigestExtTable { 107 | fn fmt(&self, f: &mut fmt::Formatter) ->fmt::Result { 108 | let mut blocked_algos = vec![]; 109 | let mut file_exts = vec![]; 110 | 111 | if self.is_blocked(DigestAlgo::Md5){ 112 | blocked_algos.push("md5"); 113 | } else { 114 | let exts = format!("md5: {:?}", self.md5); 115 | file_exts.push(exts); 116 | } 117 | 118 | if self.is_blocked(DigestAlgo::Sha1){ 119 | blocked_algos.push("sha1"); 120 | } else { 121 | let exts = format!("sha1: {:?}", self.sha1); 122 | file_exts.push(exts); 123 | } 124 | 125 | if self.is_blocked(DigestAlgo::Sha512){ 126 | blocked_algos.push("sha512"); 127 | } else { 128 | let exts = format!("sha512: {:?}", self.sha512); 129 | file_exts.push(exts); 130 | } 131 | 132 | if !blocked_algos.is_empty() { 133 | writeln!(f, "blocked algos: {}", blocked_algos.join(", ")).unwrap(); 134 | } 135 | 136 | if !file_exts.is_empty() { 137 | writeln!(f, "File extensions:\n {}", file_exts.join("\n")).unwrap(); 138 | } 139 | 140 | write!(f, "\n") 141 | } 142 | } 143 | -------------------------------------------------------------------------------- /src/lib.rs: 
-------------------------------------------------------------------------------- 1 | extern crate base64; 2 | extern crate csv; 3 | extern crate futures; 4 | extern crate http; 5 | extern crate hyper; 6 | extern crate hyper_proxy; 7 | extern crate hyper_tls; 8 | extern crate md5; 9 | extern crate regex; 10 | extern crate sha1; 11 | extern crate sha2; 12 | extern crate url; 13 | 14 | extern crate toml; 15 | extern crate walkdir; 16 | 17 | extern crate serde_json; 18 | 19 | #[macro_use] 20 | extern crate serde_derive; 21 | extern crate serde; 22 | 23 | pub mod api; 24 | pub mod checker; 25 | pub mod configs; 26 | pub mod digest_ext_table; 27 | pub mod product; 28 | 29 | pub mod tasks; 30 | -------------------------------------------------------------------------------- /src/product.rs: -------------------------------------------------------------------------------- 1 | extern crate csv; 2 | 3 | pub type CSVStringRow = Vec; 4 | 5 | #[derive(Serialize, Deserialize, Debug)] 6 | pub struct Product { 7 | pub language: String, 8 | pub prod_key: String, 9 | pub version: String, 10 | pub name: String, 11 | pub prod_type: Option 12 | } 13 | 14 | impl Product { 15 | pub fn empty() -> Product { 16 | Product { 17 | language: "".to_string(), 18 | prod_key: "".to_string(), 19 | version: "".to_string(), 20 | name: "".to_string(), 21 | prod_type: None 22 | } 23 | } 24 | } 25 | 26 | #[derive(Serialize, Deserialize, Debug)] 27 | pub struct ProductSHA { 28 | pub packaging: String, 29 | pub method: String, 30 | pub value: String, 31 | pub filepath: Option 32 | } 33 | 34 | impl ProductSHA { 35 | pub fn from_sha(sha_value: String) -> ProductSHA { 36 | ProductSHA { 37 | packaging: "".to_string(), 38 | method: "".to_string(), 39 | value: sha_value, 40 | filepath: None 41 | } 42 | } 43 | } 44 | 45 | #[derive(Serialize, Deserialize, Debug)] 46 | pub struct ProductLicense { 47 | pub name: String, 48 | pub url: String 49 | } 50 | 51 | #[derive(Serialize, Deserialize, Debug)] 52 | pub struct 
ProductMatch { 53 | pub sha: Option, 54 | pub product: Option, 55 | pub url: Option, 56 | pub licenses: Vec, 57 | pub n_vulns: u32, 58 | pub error: Option 59 | } 60 | 61 | impl ProductMatch { 62 | 63 | pub fn new(product: Product, sha: ProductSHA) -> ProductMatch { 64 | let url = format!( 65 | "https://www.versioneye.com/{}/{}", 66 | product.language.clone(), 67 | product.prod_key.clone() 68 | ); 69 | 70 | ProductMatch { 71 | sha: Some(sha), 72 | product: Some(product), 73 | url: Some(url), 74 | licenses: vec![], 75 | n_vulns: 0, 76 | error: None 77 | } 78 | } 79 | 80 | pub fn empty() -> ProductMatch { 81 | ProductMatch { 82 | sha: None, 83 | product: None, 84 | url: None, 85 | licenses: vec![], 86 | n_vulns: 0, 87 | error: None 88 | } 89 | } 90 | } 91 | 92 | 93 | pub trait RowSerializer { 94 | fn to_fields(&self) -> CSVStringRow; 95 | fn to_rows(&self) -> Vec; 96 | } 97 | 98 | impl RowSerializer for ProductSHA { 99 | fn to_fields(&self) -> CSVStringRow { 100 | vec![ 101 | "filepath".to_string(), "packaging".to_string(), 102 | "sha_method".to_string(), "sha_value".to_string() 103 | ] 104 | } 105 | 106 | fn to_rows(&self) -> Vec { 107 | let filepath = match self.filepath.clone() { 108 | Some(path) => path, 109 | None => "".to_string() 110 | }; 111 | 112 | let csv_row = vec![ 113 | filepath, self.packaging.clone(), 114 | self.method.clone(), self.value.clone() 115 | ]; 116 | 117 | vec![csv_row] 118 | } 119 | } 120 | 121 | impl RowSerializer for ProductMatch { 122 | 123 | fn to_fields(&self) -> CSVStringRow { 124 | vec![ 125 | "filepath".to_string(), "packaging".to_string(), "sha_method".to_string(), 126 | "sha_value".to_string(), "language".to_string(), "prod_key".to_string(), 127 | "version".to_string(), "n_vulns".to_string(), "product_url".to_string(), 128 | "license".to_string(), "error".to_string() 129 | ] 130 | } 131 | 132 | fn to_rows(&self) -> Vec { 133 | let mut csv_row: CSVStringRow = vec![]; 134 | 135 | csv_row = match self.sha { 136 | Some(ref x) => { 137 
| let mut sha_rows = x.to_rows().pop().unwrap(); 138 | csv_row.append(&mut sha_rows); 139 | csv_row 140 | }, 141 | None => { 142 | let mut emp_row = vec![ 143 | "".to_string(), "".to_string(), "".to_string(), "".to_string() 144 | ]; 145 | csv_row.append(&mut emp_row); 146 | csv_row 147 | } 148 | }; 149 | 150 | csv_row = match self.product { 151 | Some(ref x) => { 152 | csv_row.push(x.language.clone() ); 153 | csv_row.push(x.prod_key.clone() ); 154 | csv_row.push(x.version.clone()); 155 | 156 | csv_row 157 | }, 158 | None => { 159 | let mut emp_row = vec!["".to_string(), "".to_string(), "".to_string()]; 160 | csv_row.append(&mut emp_row); 161 | csv_row 162 | } 163 | }; 164 | 165 | csv_row.push( self.n_vulns.clone().to_string() ); 166 | csv_row.push( self.url.clone().unwrap_or("".to_string()) ); 167 | 168 | let mut rows = vec![]; 169 | 170 | if self.licenses.len() > 0 { 171 | // split every license into own line 172 | for lic in &self.licenses { 173 | let mut row = csv_row.to_vec(); 174 | row.push(lic.name.clone().to_string()); 175 | row.push(self.error.clone().unwrap_or("".to_string())); 176 | rows.push(row); 177 | } 178 | } else { 179 | csv_row.push("unknown".to_string()); //when response had no license information 180 | csv_row.push(self.error.clone().unwrap_or("".to_string())); 181 | rows.push(csv_row); 182 | } 183 | 184 | rows 185 | } 186 | 187 | } -------------------------------------------------------------------------------- /src/tasks.rs: -------------------------------------------------------------------------------- 1 | extern crate csv; 2 | 3 | use std::fs; 4 | use std::vec; 5 | use std::path::{Path, PathBuf}; 6 | use std::thread; 7 | use std::sync::mpsc::{channel, Receiver}; 8 | use std::io::{self, ErrorKind}; 9 | use std::error::Error; 10 | 11 | use walkdir::WalkDir; 12 | 13 | use product::{self, ProductSHA, ProductMatch}; 14 | use product::RowSerializer; 15 | use configs; 16 | use api; 17 | use checker; 18 | use digest_ext_table::DigestExtTable; 19 | 
20 | fn check_file_size(path: &Path, scan_configs: &configs::ScanConfigs) -> bool { 21 | if path.is_dir() { return false } 22 | 23 | let min_size = scan_configs.min_file_size.unwrap_or(0); 24 | let max_size = scan_configs.max_file_size.unwrap_or(configs::DEFAULT_MAX_SIZE); 25 | 26 | if let Some(metadata) = fs::metadata(path).ok() { 27 | (min_size < metadata.len() ) && ( metadata.len() <= max_size ) 28 | } else { 29 | // we failed to get read metadata, which means file doesnt exists, unreadable etc 30 | false 31 | } 32 | } 33 | 34 | 35 | pub fn start_path_scanner( 36 | ext_table: DigestExtTable, dir: PathBuf, scan_configs: configs::ScanConfigs 37 | ) -> (Receiver, thread::JoinHandle> ) { 38 | 39 | let (sender, receiver) = channel::(); 40 | let handle = thread::spawn(move || { 41 | if dir.exists() == false { 42 | return Err( 43 | io::Error::new(ErrorKind::Other, "Scannable folder doesnt exists") 44 | ); 45 | } 46 | 47 | for entry in WalkDir::new(&dir).into_iter().filter_map(|e| e.ok()){ 48 | 49 | //skip files which are either too small or too big 50 | if !check_file_size(&entry.path(), &scan_configs){ 51 | println!("path_scanner: skipping {:?}", &entry.path() ); 52 | continue 53 | } 54 | 55 | if let Some(shas) = checker::digest_file(&ext_table, &entry.path()) { 56 | for sha in shas.into_iter(){ 57 | if sender.send(sha).is_err() { 58 | println!( 59 | "start_path_scanner2: failed to send ProductSHA for {}", 60 | entry.path().display() 61 | ); 62 | break 63 | } 64 | } 65 | } 66 | } 67 | 68 | Ok(()) 69 | }); 70 | 71 | (receiver, handle) 72 | } 73 | 74 | //pumps vector of SHAs onto sha channel 75 | pub fn start_sha_publisher(shas: Vec) 76 | -> (Receiver, thread::JoinHandle>) { 77 | 78 | let (sender, receiver) = channel::(); 79 | let handle = thread::spawn(move || { 80 | for sha in shas.into_iter() { 81 | if sender.send(sha).is_err() { 82 | println!("start_sha_publisher: failed to send ProductSHAs"); 83 | break 84 | } 85 | } 86 | 87 | Ok(()) 88 | }); 89 | 90 | (receiver, 
handle) 91 | } 92 | 93 | //pumps each item of productMatch vector onto product channel 94 | //used to simplify testing 95 | pub fn start_product_match_publisher(prod_matches: Vec) 96 | -> (Receiver, thread::JoinHandle>) { 97 | 98 | let (sender, receiver) = channel::(); 99 | let handle = thread::spawn(move || { 100 | for prod_match in prod_matches.into_iter() { 101 | if sender.send(prod_match).is_err() { 102 | println!("start_product_match_publisher: failed to pipe ProductMatch onto channel"); 103 | break 104 | } 105 | } 106 | 107 | Ok(()) 108 | }); 109 | 110 | (receiver, handle) 111 | } 112 | 113 | pub fn start_sha_fetcher(configs: configs::Configs, sha_ch: Receiver) 114 | -> (Receiver, thread::JoinHandle>) { 115 | 116 | let (sender, receiver) = channel::(); 117 | let handle = thread::spawn(move || { 118 | for sha in sha_ch.into_iter() { 119 | let sha_code = sha.value.clone(); 120 | let prod = match api::fetch_product_details_by_sha(&configs, sha_code.as_str()) { 121 | Ok(mut m) => { 122 | m.sha = Some(sha); //attach original sha document to have filepath data 123 | m 124 | }, 125 | Err(e) => { 126 | //use empty product, so non-matched products will show up in output file 127 | let mut m = ProductMatch::empty(); 128 | m.sha = Some(sha); 129 | m.error = Some(e.description().to_string()); //attach error message 130 | m 131 | } 132 | }; 133 | 134 | if sender.send(prod).is_err(){ 135 | break; 136 | } 137 | } 138 | 139 | Ok(()) 140 | }); 141 | 142 | (receiver, handle) 143 | } 144 | 145 | fn init_csv_file_writer(outpath: PathBuf, csv_configs: configs::CSVConfigs) 146 | -> csv::Writer { 147 | let mut wtr = csv::Writer::from_file(outpath).expect("Failed to open output file"); 148 | 149 | if let Some(sep) = csv_configs.separator { 150 | let ch = if sep.len() > 0 { 151 | sep.as_bytes()[0] 152 | } else { 153 | ";".as_bytes()[0] 154 | }; 155 | 156 | wtr = wtr.delimiter(ch); 157 | } 158 | 159 | if let Some(quote) = csv_configs.quote { 160 | let ch2 = if quote.len() > 0 { 161 | 
quote.as_bytes()[0] 162 | } else { 163 | "\"".as_bytes()[0] 164 | }; 165 | 166 | wtr = wtr.quote(ch2); 167 | } 168 | 169 | if let Some(is_flex) = csv_configs.flexible { wtr = wtr.flexible(is_flex); }; 170 | 171 | wtr 172 | } 173 | 174 | fn init_csv_stdio_writer(csv_configs: configs::CSVConfigs) -> csv::Writer> { 175 | let mut wtr = csv::Writer::from_memory(); 176 | 177 | if let Some(sep) = csv_configs.separator { 178 | let ch = if sep.len() > 0 { 179 | sep.as_bytes()[0] 180 | } else { 181 | ";".as_bytes()[0] 182 | }; 183 | 184 | wtr = wtr.delimiter(ch); 185 | } 186 | 187 | if let Some(quote) = csv_configs.quote { 188 | let ch2 = if quote.len() > 0 { 189 | quote.as_bytes()[0] 190 | } else { 191 | "\"".as_bytes()[0] 192 | }; 193 | 194 | wtr = wtr.quote(ch2); 195 | } 196 | 197 | if let Some(is_flex) = csv_configs.flexible { wtr = wtr.flexible(is_flex); }; 198 | 199 | wtr 200 | } 201 | 202 | pub fn start_product_csv_writer( 203 | outpath: PathBuf, csv_configs: configs::CSVConfigs, product_ch: Receiver 204 | ) -> thread::JoinHandle< Result<(), csv::Error> > { 205 | 206 | thread::spawn(move || { 207 | let mut n = 0u32; 208 | let mut wtr = init_csv_file_writer(outpath, csv_configs); 209 | 210 | println!(); 211 | for product in product_ch.into_iter() { 212 | if n == 0 { 213 | wtr.encode(product.to_fields()).unwrap(); 214 | }; 215 | 216 | for row in product.to_rows().into_iter() { 217 | wtr.encode(row).unwrap(); 218 | } 219 | 220 | print!("\rrow: {}", n + 1); //to show some progress 221 | n += 1; 222 | } 223 | 224 | println!(); 225 | Ok(()) 226 | }) 227 | 228 | } 229 | 230 | pub fn start_product_stdio_writer( 231 | csv_configs: configs::CSVConfigs, product_ch: Receiver 232 | ) -> thread::JoinHandle> { 233 | 234 | thread::spawn(move || { 235 | let mut n = 0u32; 236 | 237 | for product in product_ch.into_iter() { 238 | let mut wtr = init_csv_stdio_writer(csv_configs.clone()); 239 | 240 | if n == 0 { 241 | wtr.encode(product.to_fields()).unwrap(); 242 | } 243 | 244 | for row 
in product.to_rows().into_iter() { 245 | wtr.encode(row).unwrap(); 246 | } 247 | 248 | print!("{}", wtr.as_string()); 249 | n += 1; 250 | } 251 | 252 | Ok(()) 253 | }) 254 | } 255 | 256 | 257 | pub fn start_sha_csv_writer(outpath: PathBuf, csv_configs: configs::CSVConfigs, sha_ch: Receiver) 258 | -> thread::JoinHandle< Result<(), csv::Error> > { 259 | 260 | thread::spawn(move || { 261 | let mut n = 0u32; 262 | let mut wtr = init_csv_file_writer(outpath, csv_configs); 263 | 264 | println!(); 265 | for sha in sha_ch.into_iter() { 266 | if n == 0 { 267 | wtr.encode(sha.to_fields()).unwrap(); 268 | }; 269 | 270 | if let Some(row) = sha.to_rows().pop() { 271 | wtr.encode(row).unwrap(); 272 | } 273 | 274 | print!("\rrow: {}", n + 1); //to show some progress 275 | n += 1; 276 | } 277 | 278 | println!(); 279 | Ok(()) 280 | }) 281 | 282 | } 283 | 284 | pub fn start_sha_stdio_writer(csv_configs: configs::CSVConfigs, sha_ch: Receiver) 285 | -> thread::JoinHandle> { 286 | 287 | thread::spawn(move || { 288 | let mut n = 0u32; 289 | 290 | for sha in sha_ch.into_iter() { 291 | let mut wtr = init_csv_stdio_writer(csv_configs.clone()); 292 | 293 | if n == 0 { 294 | wtr.encode(sha.to_fields()).unwrap(); 295 | } 296 | 297 | if let Some(row) = sha.to_rows().pop() { 298 | wtr.encode(row).unwrap(); 299 | } 300 | 301 | print!("{}", wtr.as_string()); 302 | n += 1; 303 | } 304 | 305 | Ok(()) 306 | }) 307 | } -------------------------------------------------------------------------------- /tests/acceptance/assert.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # assert.sh 1.1 - bash unit testing framework 3 | # Copyright (C) 2009-2015 Robert Lehmann 4 | # 5 | # http://github.com/lehmannro/assert.sh 6 | # 7 | # This program is free software: you can redistribute it and/or modify 8 | # it under the terms of the GNU Lesser General Public License as published 9 | # by the Free Software Foundation, either version 3 of the License, or 10 | 
# (at your option) any later version. 11 | # 12 | # This program is distributed in the hope that it will be useful, 13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15 | # GNU Lesser General Public License for more details. 16 | # 17 | # You should have received a copy of the GNU Lesser General Public License 18 | # along with this program. If not, see . 19 | 20 | export DISCOVERONLY=${DISCOVERONLY:-} 21 | export DEBUG=${DEBUG:-} 22 | export STOP=${STOP:-} 23 | export INVARIANT=${INVARIANT:-} 24 | export CONTINUE=${CONTINUE:-} 25 | 26 | args="$(getopt -n "$0" -l \ 27 | verbose,help,stop,discover,invariant,continue vhxdic $*)" \ 28 | || exit -1 29 | for arg in $args; do 30 | case "$arg" in 31 | -h) 32 | echo "$0 [-vxidc]" \ 33 | "[--verbose] [--stop] [--invariant] [--discover] [--continue]" 34 | echo "`sed 's/./ /g' <<< "$0"` [-h] [--help]" 35 | exit 0;; 36 | --help) 37 | cat < [stdin] 103 | (( tests_ran++ )) || : 104 | [[ -z "$DISCOVERONLY" ]] || return 105 | expected=$(echo -ne "${2:-}") 106 | result="$(eval 2>/dev/null $1 <<< ${3:-})" || true 107 | if [[ "$result" == "$expected" ]]; then 108 | [[ -z "$DEBUG" ]] || echo -n . 109 | return 110 | fi 111 | result="$(sed -e :a -e '$!N;s/\n/\\n/;ta' <<< "$result")" 112 | [[ -z "$result" ]] && result="nothing" || result="\"$result\"" 113 | [[ -z "$2" ]] && expected="nothing" || expected="\"$2\"" 114 | _assert_fail "expected $expected${_indent}got $result" "$1" "$3" 115 | } 116 | 117 | assert_raises() { 118 | # assert_raises [stdin] 119 | (( tests_ran++ )) || : 120 | [[ -z "$DISCOVERONLY" ]] || return 121 | status=0 122 | (eval $1 <<< ${3:-}) > /dev/null 2>&1 || status=$? 123 | expected=${2:-0} 124 | if [[ "$status" -eq "$expected" ]]; then 125 | [[ -z "$DEBUG" ]] || echo -n . 
126 | return 127 | fi 128 | _assert_fail "program terminated with code $status instead of $expected" "$1" "$3" 129 | } 130 | 131 | _assert_fail() { 132 | # _assert_fail 133 | [[ -n "$DEBUG" ]] && echo -n X 134 | report="test #$tests_ran \"$2${3:+ <<< $3}\" failed:${_indent}$1" 135 | if [[ -n "$STOP" ]]; then 136 | [[ -n "$DEBUG" ]] && echo 137 | echo "$report" 138 | exit 1 139 | fi 140 | tests_errors[$tests_failed]="$report" 141 | (( tests_failed++ )) || : 142 | } 143 | 144 | skip_if() { 145 | # skip_if 146 | (eval $@) > /dev/null 2>&1 && status=0 || status=$? 147 | [[ "$status" -eq 0 ]] || return 148 | skip 149 | } 150 | 151 | skip() { 152 | # skip (no arguments) 153 | shopt -q extdebug && tests_extdebug=0 || tests_extdebug=1 154 | shopt -q -o errexit && tests_errexit=0 || tests_errexit=1 155 | # enable extdebug so returning 1 in a DEBUG trap handler skips next command 156 | shopt -s extdebug 157 | # disable errexit (set -e) so we can safely return 1 without causing exit 158 | set +o errexit 159 | tests_trapped=0 160 | trap _skip DEBUG 161 | } 162 | _skip() { 163 | if [[ $tests_trapped -eq 0 ]]; then 164 | # DEBUG trap for command we want to skip. Do not remove the handler 165 | # yet because *after* the command we need to reset extdebug/errexit (in 166 | # another DEBUG trap.) 167 | tests_trapped=1 168 | [[ -z "$DEBUG" ]] || echo -n s 169 | return 1 170 | else 171 | trap - DEBUG 172 | [[ $tests_extdebug -eq 0 ]] || shopt -u extdebug 173 | [[ $tests_errexit -eq 1 ]] || set -o errexit 174 | return 0 175 | fi 176 | } 177 | 178 | 179 | _assert_reset 180 | : ${tests_suite_status:=0} # remember if any of the tests failed so far 181 | _assert_cleanup() { 182 | local status=$? 
183 | # modify exit code if it's not already non-zero 184 | [[ $status -eq 0 && -z $CONTINUE ]] && exit $tests_suite_status 185 | } 186 | trap _assert_cleanup EXIT 187 | -------------------------------------------------------------------------------- /tests/acceptance/tests.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | . assert.sh 5 | 6 | # run as VERSIONEYE_API_KEY="yourkey" ./run.sh 7 | [ -z "$VERSIONEYE_API_KEY" ] && echo "Need to set VERSIONEYE_API_KEY" && exit 1; 8 | 9 | if [ -z ${VERSIONEYE_BIN_PATH+x} ]; then 10 | VERSIONEYE_BIN_PATH="../../target/debug/veye_checker" 11 | echo "Using default binary ${VERSIONEYE_BIN_PATH}" 12 | else 13 | echo "Using specified binary at ${VERSIONEYE_BIN_PATH}" 14 | fi 15 | 16 | FIXTURES_PATH="../fixtures/files" 17 | FILE_SHA="5675fd96b29656504b86029551973d60fb41339b" 18 | 19 | 20 | echo "#-- initializing expected results" 21 | 22 | define(){ IFS='\n' read -r -d '' ${1} || true; } 23 | 24 | define EXPECTED1 < assert_eq!(correct_sha, sha_val), 14 | Err(e) => { 15 | println!("Failed to test digest_sha - {:?}", e); 16 | assert!(false); 17 | } 18 | } 19 | } 20 | 21 | #[test] 22 | fn test_checker_digest_sha512() { 23 | let nupkg_file_path = Path::new("tests/fixtures/files/test.nupkg"); 24 | let correct_sha = 25 | "U82mHQSKaIk+lpSVCbWYKNavmNH1i5xrExDEquU1i6I5pV6UMOqRnJRSlKO3cMPfcpp0RgDY+8jUXHdQ4IfXvw==" 26 | .to_string(); 27 | 28 | match checker::digest_sha512b64(&nupkg_file_path) { 29 | Ok(sha_val) => assert_eq!(correct_sha, sha_val), 30 | Err(e) => { 31 | println!("Failed to test digest512b64 - {:?}", e); 32 | assert!(false) 33 | } 34 | } 35 | } 36 | 37 | #[test] 38 | fn test_checker_digest_md5() { 39 | let file_path = Path::new("tests/fixtures/files/pypi.tar.gz"); 40 | let correct_md5 = "fe7daf822f1d36d1bd37ac41cf5817e7".to_string(); 41 | match checker::digest_md5(&file_path) { 42 | Ok(md5_val) => assert_eq!(correct_md5, md5_val), 43 | Err(e) => { 44 
| println!("Failed to test digest_md5 - {:?}", e); 45 | assert!(false); 46 | } 47 | }; 48 | } 49 | 50 | #[test] 51 | fn test_checker_digest_file_with_jar() { 52 | let ext_table = digest_ext_table::DigestExtTable::default(); 53 | let jar_file_path = Path::new("tests/fixtures/files/test.jar"); 54 | let correct_sha = "5675fd96b29656504b86029551973d60fb41339b".to_string(); 55 | 56 | match checker::digest_file(&ext_table, &jar_file_path) { 57 | Some(shas) => { 58 | assert_eq!(1, shas.len()); 59 | assert_eq!("sha1".to_string(), shas[0].method); 60 | assert_eq!(correct_sha, shas[0].value); 61 | } 62 | None => { 63 | println!("Failed to test digest_file with Jar file."); 64 | assert!(false); 65 | } 66 | } 67 | } 68 | 69 | #[test] 70 | fn test_checker_digest_file_with_pypi() { 71 | let ext_table = digest_ext_table::DigestExtTable::default(); 72 | let pypi_file_path = Path::new("tests/fixtures/files/pypi.tar.gz"); 73 | let correct_md5 = "fe7daf822f1d36d1bd37ac41cf5817e7".to_string(); 74 | 75 | match checker::digest_file(&ext_table, &pypi_file_path) { 76 | Some(shas) => { 77 | assert_eq!(1, shas.len()); 78 | assert_eq!("md5".to_string(), shas[0].method); 79 | assert_eq!(correct_md5, shas[0].value); 80 | } 81 | None => { 82 | println!("failed to test digest_file with Pypi file"); 83 | assert!(false); 84 | } 85 | } 86 | } 87 | 88 | #[test] 89 | fn test_checker_digest_file_with_nuget() { 90 | let ext_table = digest_ext_table::DigestExtTable::default(); 91 | let nupkg_file_path = Path::new("tests/fixtures/files/test.nupkg"); 92 | let correct_sha = 93 | "U82mHQSKaIk+lpSVCbWYKNavmNH1i5xrExDEquU1i6I5pV6UMOqRnJRSlKO3cMPfcpp0RgDY+8jUXHdQ4IfXvw==" 94 | .to_string(); 95 | 96 | match checker::digest_file(&ext_table, &nupkg_file_path) { 97 | Some(shas) => { 98 | assert_eq!(1, shas.len()); 99 | assert_eq!("sha512".to_string(), shas[0].method); 100 | assert_eq!(correct_sha, shas[0].value); 101 | } 102 | None => { 103 | println!("failed to test digest_file with Nuget file"); 104 | 
assert!(false); 105 | } 106 | } 107 | } 108 | 109 | #[test] 110 | fn test_checker_digest_file_block_algo() { 111 | let mut ext_table = digest_ext_table::DigestExtTable::default(); 112 | let pypi_file_path = Path::new("tests/fixtures/files/pypi.tar.gz"); 113 | let correct_md5 = "fe7daf822f1d36d1bd37ac41cf5817e7".to_string(); 114 | 115 | ext_table.block(digest_ext_table::DigestAlgo::Md5); 116 | assert_eq!(false, ext_table.is_md5("gz".to_string())); 117 | 118 | match checker::digest_file(&ext_table, &pypi_file_path) { 119 | Some(shas) => { 120 | println!("failed to block using MD5 algo for Pypi files"); 121 | assert!(false); 122 | } 123 | None => assert!(true), 124 | }; 125 | } 126 | 127 | #[test] 128 | fn test_checker_digest_file_change_algo() { 129 | let mut ext_table = digest_ext_table::DigestExtTable::default(); 130 | let jar_file_path = Path::new("tests/fixtures/files/test.jar"); 131 | let correct_md5 = "0f18acf5fa857f9959675e14d901a7ce".to_string(); 132 | 133 | ext_table.swipe(); 134 | ext_table.add(digest_ext_table::DigestAlgo::Md5, "jar".to_string()); 135 | assert_eq!(true, ext_table.is_md5("jar".to_string())); 136 | 137 | match checker::digest_file(&ext_table, &jar_file_path) { 138 | Some(shas) => { 139 | assert_eq!(1, shas.len()); 140 | assert_eq!("md5".to_string(), shas[0].method); 141 | assert_eq!(correct_md5, shas[0].value); 142 | } 143 | None => { 144 | println!("failed to test changing of algo for Jar file"); 145 | assert!(false); 146 | } 147 | } 148 | } 149 | 150 | #[test] 151 | fn test_checker_digest_file_multiple_algo() { 152 | let mut ext_table = digest_ext_table::DigestExtTable::default(); 153 | let jar_file_path = Path::new("tests/fixtures/files/test.jar"); 154 | let correct_sha = "5675fd96b29656504b86029551973d60fb41339b".to_string(); 155 | let correct_md5 = "0f18acf5fa857f9959675e14d901a7ce".to_string(); 156 | 157 | ext_table.add(digest_ext_table::DigestAlgo::Md5, "jar".to_string()); 158 | assert_eq!(true, ext_table.is_md5("jar".to_string())); 
159 | assert_eq!(true, ext_table.is_sha1("jar".to_string())); 160 | 161 | match checker::digest_file(&ext_table, &jar_file_path) { 162 | Some(shas) => { 163 | assert_eq!(2, shas.len()); 164 | 165 | assert_eq!("md5".to_string(), shas[0].method); 166 | assert_eq!(correct_md5, shas[0].value); 167 | 168 | assert_eq!("sha1".to_string(), shas[1].method); 169 | assert_eq!(correct_sha, shas[1].value); 170 | } 171 | None => { 172 | println!("failed to test usage of multiple algos for Jar file"); 173 | assert!(false); 174 | } 175 | } 176 | } 177 | -------------------------------------------------------------------------------- /tests/configs_test.rs: -------------------------------------------------------------------------------- 1 | extern crate veye_checker; 2 | 3 | use std::env; 4 | use std::path::PathBuf; 5 | use veye_checker::configs; 6 | use veye_checker::digest_ext_table::{DigestAlgo, DigestExtTable}; 7 | 8 | #[test] 9 | fn test_configs_read_api_configs_from_env(){ 10 | //set up env 11 | env::set_var("VERSIONEYE_API_KEY", "veye-123"); 12 | env::set_var("VERSIONEYE_API_HOST", "api.veye.com"); 13 | env::set_var("VERSIONEYE_API_PORT", "8080"); 14 | env::remove_var("VERSIONEYE_CSV_SEPARATOR"); 15 | 16 | //run tests 17 | let confs = configs::read_configs_from_env().expect("Failed to read configs from ENV"); 18 | 19 | assert_eq!(confs.api.key, Some("veye-123".to_string()) ); 20 | assert_eq!(confs.api.host, Some("api.veye.com".to_string()) ); 21 | assert_eq!(confs.api.path, Some("api/v2".to_string()) ); 22 | assert_eq!(confs.api.port, Some(8080)); 23 | assert_eq!(confs.api.scheme, Some("https".to_string())); 24 | 25 | //cleanup env 26 | env::remove_var("VERSIONEYE_API_KEY"); 27 | env::remove_var("VERSIONEYE_API_HOST"); 28 | env::remove_var("VERSIONEYE_API_PORT"); 29 | //cleanup some ENV vars to get values from config file 30 | env::remove_var("VERSIONEYE_CSV_SEPARATOR"); 31 | println!("IS key removed? 
{:?}", env::var("VERSIONEYE_CSV_SEPARATOR").is_err()); 32 | assert!(env::var("VERSIONEYE_CSV_SEPARATOR").is_err()); 33 | } 34 | 35 | #[test] 36 | fn test_configs_read_csv_configs_from_env(){ 37 | //set up env 38 | env::set_var("VERSIONEYE_CSV_SEPARATOR", ","); 39 | env::set_var("VERSIONEYE_CSV_QUOTE", "'"); 40 | env::set_var("VERSIONEYE_CSV_FLEXIBLE", "1"); 41 | 42 | //test correctness 43 | let confs = configs::read_configs_from_env().expect("Failed to read CSV configs from ENV"); 44 | 45 | assert_eq!(Some(",".to_string()), confs.csv.separator); 46 | assert_eq!(Some("'".to_string()), confs.csv.quote); 47 | assert_eq!(Some(true), confs.csv.flexible); 48 | 49 | //cleanup env 50 | env::set_var("VERSIONEYE_CSV_FLEXIBLE", "0"); 51 | env::remove_var("VERSIONEYE_CSV_QUOTE"); 52 | //cleanup some ENV vars to get values from config file 53 | env::remove_var("VERSIONEYE_CSV_SEPARATOR"); 54 | println!("IS key removed? {:?}", env::var("VERSIONEYE_CSV_SEPARATOR").is_err()); 55 | assert!(env::var("VERSIONEYE_CSV_SEPARATOR").is_err()); 56 | } 57 | 58 | #[test] 59 | fn test_configs_read_proxy_configs_from_env(){ 60 | //set up env variables 61 | env::set_var("VERSIONEYE_PROXY_HOST", "127.0.0.1"); 62 | env::set_var("VERSIONEYE_PROXY_PORT", "3128"); 63 | env::set_var("VERSIONEYE_PROXY_SCHEME", "socks"); 64 | 65 | //test correctness 66 | let confs = configs::read_configs_from_env().expect("Failed to read configs from ENV"); 67 | assert_eq!(Some("127.0.0.1".to_string()), confs.proxy.host); 68 | assert_eq!(Some(3128), confs.proxy.port); 69 | assert_eq!(Some("socks".to_string()), confs.proxy.scheme); 70 | 71 | //cleanup env 72 | env::remove_var("VERSIONEYE_PROXY_HOST"); 73 | env::remove_var("VERSIONEYE_PROXY_PORT"); 74 | env::remove_var("VERSIONEYE_PROXY_SCHEME"); 75 | } 76 | 77 | #[test] 78 | fn test_configs_read_scan_configs_from_env(){ 79 | env::set_var("VERSIONEYE_SCAN_MAX_FILE_SIZE", "2048"); 80 | env::set_var("VERSIONEYE_SCAN_MIN_FILE_SIZE", "64"); 81 | 82 | let confs = 
configs::read_configs_from_env().expect("Failed to read configs from ENV"); 83 | assert_eq!(Some(2048), confs.scan.max_file_size); 84 | assert_eq!(Some(64), confs.scan.min_file_size); 85 | 86 | env::remove_var("VERSIONEYE_SCAN_MAX_FILE_SIZE"); 87 | env::remove_var("VERSIONEYE_SCAN_MIN_FILE_SIZE"); 88 | } 89 | 90 | #[test] 91 | fn test_configs_read_configs_from_toml(){ 92 | 93 | let toml_path = PathBuf::from("./tests/fixtures/veye_checker.toml"); 94 | let confs = configs::read_configs_from_toml(&toml_path).expect("Failed to parse test TOML"); 95 | 96 | assert_eq!(confs.api.key, Some("def-234".to_string())); 97 | assert_eq!(confs.api.port, Some(8090)); 98 | 99 | //check correctness of CSV configs 100 | assert_eq!(Some(",".to_string()), confs.csv.separator); 101 | assert_eq!(Some("'".to_string()), confs.csv.quote); 102 | assert_eq!(Some(false), confs.csv.flexible); 103 | 104 | //check correctness of proxy settings 105 | assert_eq!(Some("192.168.0.1".to_string()), confs.proxy.host); 106 | assert_eq!(Some(9200), confs.proxy.port); 107 | assert_eq!(None, confs.proxy.scheme); 108 | 109 | //cleanup some ENV vars to get values from config file 110 | env::remove_var("VERSIONEYE_CSV_SEPARATOR"); 111 | assert!(env::var("VERSIONEYE_CSV_SEPARATOR").is_err()); 112 | 113 | } 114 | 115 | #[test] 116 | fn test_configs_read_toml_file_only_api_configs(){ 117 | let toml_path = PathBuf::from("./tests/fixtures/only_api.toml"); 118 | let confs = configs::read_configs_from_toml(&toml_path).expect("Failed to parse `only_api.toml`"); 119 | 120 | //specified fields 121 | assert_eq!(confs.api.host, Some("only.api.com".to_string())); 122 | assert_eq!(confs.api.path, Some("api/v4".to_string())); 123 | assert_eq!(confs.api.port, Some(8010)); 124 | 125 | //unspecified fields 126 | assert_eq!(confs.api.key, None); 127 | assert_eq!(confs.api.scheme, None); 128 | 129 | } 130 | 131 | #[test] 132 | fn test_configs_read_toml_file_only_csv_configs(){ 133 | let toml_path = 
// --- tests/configs_test.rs (continued from previous chunk) ---

// Tail of `test_configs_read_toml_file_only_csv_configs` (the function header
// starts before this chunk): the fixture sets only `csv.separator`, so every
// other CSV option must remain `None`.
    PathBuf::from("./tests/fixtures/only_csv.toml");
    let confs = configs::read_configs_from_toml(&toml_path)
        .expect("Failed to parse `only_csv.toml`");

    // specified fields
    assert_eq!(confs.csv.separator, Some(",".to_string()));

    // unspecified fields
    assert_eq!(confs.csv.flexible, None);
    assert_eq!(confs.csv.quote, None);
}

/// `only_proxy.toml` configures only the proxy host; the port must stay `None`.
#[test]
fn test_configs_read_toml_file_only_proxy_configs() {
    let toml_path = PathBuf::from("./tests/fixtures/only_proxy.toml");
    let confs = configs::read_configs_from_toml(&toml_path)
        .expect("Failed to parse `only_proxy.toml`");

    // specified fields
    assert_eq!(confs.proxy.host, Some("192.168.2.1".to_string()));

    // unspecified fields
    assert_eq!(confs.proxy.port, None);
}

/// Converting `DigestConfigs` into a `DigestExtTable` must honour each
/// algorithm's `blocked` flag and extension list, and keep the built-in
/// defaults for algorithms that were not configured (Sha512 here).
#[test]
fn test_configs_digest_into_table() {
    let md5_confs = configs::DigestConfigItem::new(
        false,
        vec!["jar".to_string(), "pkg".to_string()],
    );
    let sha1_confs = configs::DigestConfigItem::new(true, vec!["py".to_string()]);
    let digest_configs = configs::DigestConfigs::new(
        Some(md5_confs),
        Some(sha1_confs),
        None,
    );

    let ext_table = digest_configs.into_digest_ext_table();
    assert_eq!(false, ext_table.is_blocked(DigestAlgo::Md5));
    assert_eq!(true, ext_table.is_md5("jar".to_string()));
    assert_eq!(true, ext_table.is_md5("pkg".to_string()));

    // sha1 was marked blocked, so its extensions must not match
    assert_eq!(true, ext_table.is_blocked(DigestAlgo::Sha1));
    assert_eq!(false, ext_table.is_sha1("py".to_string()));

    // keeps default settings for Sha512
    assert_eq!(false, ext_table.is_blocked(DigestAlgo::Sha512));
    assert_eq!(true, ext_table.is_sha512("nupkg".to_string()));
}

/// Reading `only_file_exts.toml` replaces the md5 extension list and blocks
/// sha1 entirely.
/// FIX: renamed from `test_configs_read_toml_file_extenstions` (typo in
/// "extensions"); test names are not referenced by callers.
#[test]
fn test_configs_read_toml_file_extensions() {
    let toml_path = PathBuf::from("./tests/fixtures/only_file_exts.toml");
    let confs = configs::read_configs_from_toml(&toml_path)
        .expect("Failed to parse `only_file_exts`");

    let ext_table = confs.digests;

    assert_eq!(false, ext_table.is_blocked(DigestAlgo::Md5));
    assert_eq!(true, ext_table.is_md5("whl".to_string()));
    assert_eq!(true, ext_table.is_md5("gz".to_string()));
    // "jar" is not in the configured md5 list, so it must no longer match
    assert_eq!(false, ext_table.is_md5("jar".to_string()));

    assert_eq!(true, ext_table.is_blocked(DigestAlgo::Sha1));
    assert_eq!(false, ext_table.is_sha1("jar".to_string()));
}

/// `only_scan.toml` sets both file-size limits for the scanner.
#[test]
fn test_configs_read_toml_only_scan_configs() {
    let toml_path = PathBuf::from("./tests/fixtures/only_scan.toml");
    let confs = configs::read_configs_from_toml(&toml_path)
        .expect("Failed to parse `only_scan.toml`");

    assert_eq!(Some(1024), confs.scan.max_file_size);
    assert_eq!(Some(512), confs.scan.min_file_size);
}

/// ENV variables override values from the TOML file; removed variables fall
/// back to the file or the built-in defaults.
/// NOTE(review): mutates process-global env vars, so this can race with other
/// env-touching tests when the harness runs tests on multiple threads.
#[test]
fn test_configs_read_toml_file() {
    // set up env
    env::set_var("VERSIONEYE_API_KEY", "veye-123");
    env::set_var("VERSIONEYE_API_HOST", "api.veye.com");
    env::set_var("VERSIONEYE_API_PORT", "8080");
    env::set_var("VERSIONEYE_CSV_FLEXIBLE", "T");

    // cleanup some ENV vars to get values from config file
    env::remove_var("VERSIONEYE_CSV_QUOTE");
    env::remove_var("VERSIONEYE_CSV_SEPARATOR");
    env::remove_var("VERSIONEYE_CSV_FLEXIBLE");
    println!("IS key removed? {:?}", env::var("VERSIONEYE_CSV_SEPARATOR").is_err());
    assert!(env::var("VERSIONEYE_CSV_SEPARATOR").is_err());

    // execute tests
    let conf_filepath = "./tests/fixtures/veye_checker.toml";
    let confs = configs::read_configs(Some(conf_filepath.to_string()));

    assert_eq!(confs.api.key, Some("veye-123".to_string()));
    assert_eq!(confs.api.host, Some("api.veye.com".to_string()));
    // NOTE(review): the fixture sets path = "api/v3", yet "api/v2" is expected
    // here — presumably a built-in default wins; confirm against configs.rs.
    assert_eq!(confs.api.path, Some("api/v2".to_string()));
    assert_eq!(confs.api.port, Some(8080));
    assert_eq!(confs.api.scheme, Some("https".to_string()));

    // cleanup env
    env::remove_var("VERSIONEYE_API_KEY");
    env::remove_var("VERSIONEYE_API_HOST");
    env::remove_var("VERSIONEYE_API_PORT");
    env::remove_var("VERSIONEYE_CSV_QUOTE");
    env::remove_var("VERSIONEYE_CSV_SEPARATOR");
    env::remove_var("VERSIONEYE_CSV_FLEXIBLE");
}

// --- tests/digest_ext_table_test.rs ---

extern crate veye_checker;

use veye_checker::digest_ext_table::{DigestExtTable, DigestAlgo};

/// The default table maps the common packaging extensions to digest algos.
#[test]
fn test_digest_ext_table_init_with_default_values() {
    let ext_tbl = DigestExtTable::default();

    assert!(ext_tbl.is_md5("whl".to_string()));
    assert!(ext_tbl.is_sha1("jar".to_string()));
    assert!(ext_tbl.is_sha512("nupkg".to_string()));
}

/// An extension added for Md5 is reported back by `is_md5`.
#[test]
fn test_digest_ext_table_adding_new_extension_into_md5() {
    let mut ext_tbl = DigestExtTable::default();
    let file_ext = "tjar".to_string();

    assert!(ext_tbl.add(DigestAlgo::Md5, file_ext.clone()));
    assert!(ext_tbl.is_md5(file_ext))
}

/// An extension added for Sha1 is reported back by `is_sha1`.
#[test]
fn test_digest_ext_table_adding_new_extension_into_sha1() {
    let mut ext_tbl = DigestExtTable::default();
    let file_ext = "twar".to_string();

    assert!(ext_tbl.add(DigestAlgo::Sha1, file_ext.clone()));
    assert!(ext_tbl.is_sha1(file_ext))
}

/// An extension added for Sha512 is reported back by `is_sha512`.
#[test]
fn test_digest_ext_table_adding_new_extension_into_sha512() {
    let mut ext_tbl = DigestExtTable::default();
    let file_ext = "tnupkg".to_string();

    assert!(ext_tbl.add(DigestAlgo::Sha512, file_ext.clone()));
    assert!(ext_tbl.is_sha512(file_ext))
}

/// `swipe` resets the default table and returns true.
/// NOTE(review): presumably it clears all mappings — confirm in
/// src/digest_ext_table.rs; the test only checks the return value.
#[test]
fn test_digest_ext_table_swipes_default_table() {
    let mut ext_tbl = DigestExtTable::default();
    assert!(ext_tbl.swipe())
}

// (binary fixtures referenced by the suite live under tests/fixtures/files/:
//  npm.tgz, pypi.tar.gz, pypi.whl, test.jar, test.nupkg)
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/versioneye/veye-checker/37fcd3de68cb0b70e13f54af1e1d84a5904b6e52/tests/fixtures/files/test.nupkg -------------------------------------------------------------------------------- /tests/fixtures/only_api.toml: -------------------------------------------------------------------------------- 1 | [api] 2 | host = "only.api.com" 3 | path = "api/v4" 4 | port = 8010 -------------------------------------------------------------------------------- /tests/fixtures/only_csv.toml: -------------------------------------------------------------------------------- 1 | [csv] 2 | separator = "," 3 | -------------------------------------------------------------------------------- /tests/fixtures/only_file_exts.toml: -------------------------------------------------------------------------------- 1 | [digests.md5] 2 | blocked = false 3 | exts = ["whl", "gz"] 4 | 5 | [digests.sha1] 6 | blocked = true -------------------------------------------------------------------------------- /tests/fixtures/only_proxy.toml: -------------------------------------------------------------------------------- 1 | [proxy] 2 | host = "192.168.2.1" -------------------------------------------------------------------------------- /tests/fixtures/only_scan.toml: -------------------------------------------------------------------------------- 1 | [scan] 2 | max_file_size = 1024 3 | min_file_size = 512 -------------------------------------------------------------------------------- /tests/fixtures/veye_checker.toml: -------------------------------------------------------------------------------- 1 | [api] 2 | host = "api.toml.com" 3 | path = "api/v3" 4 | key = "def-234" 5 | port = 8090 6 | 7 | [csv] 8 | separator = "," 9 | quote = "'" 10 | flexible = false 11 | 12 | [proxy] 13 | host = "192.168.0.1" 14 | port = 9200 15 | 
-------------------------------------------------------------------------------- /tests/product_test.rs: -------------------------------------------------------------------------------- 1 | extern crate veye_checker; 2 | 3 | use veye_checker::product; 4 | use veye_checker::product::RowSerializer; 5 | 6 | #[test] 7 | fn test_creating_product_sha(){ 8 | let prod_sha = product::ProductSHA::from_sha("abc-123".to_string()); 9 | assert_eq!("".to_string(), prod_sha.packaging); 10 | assert_eq!("".to_string(), prod_sha.method); 11 | assert_eq!("abc-123".to_string(), prod_sha.value); 12 | assert_eq!(true, prod_sha.filepath.is_none()); 13 | } 14 | 15 | #[test] 16 | fn test_creating_new_product_match(){ 17 | let product = product::Product { 18 | language: "rust".to_string(), 19 | prod_key: "serde".to_string(), 20 | version: "1.0".to_string(), 21 | name: "serde".to_string(), 22 | prod_type: Some("cargo".to_string()) 23 | }; 24 | 25 | let prod_sha = product::ProductSHA::from_sha("abc-124".to_string()); 26 | let prod_match = product::ProductMatch::new(product, prod_sha); 27 | let prod_url = "https://www.versioneye.com/rust/serde".to_string(); 28 | 29 | assert_eq!(prod_url, prod_match.url.unwrap()); 30 | assert_eq!(0, prod_match.licenses.len()); 31 | assert_eq!(0, prod_match.n_vulns); 32 | assert_eq!(true, prod_match.error.is_none()); 33 | 34 | let sha = prod_match.sha.expect("Sha value wasnt initialized for ProductMatch"); 35 | assert_eq!("abc-124".to_string(), sha.value); 36 | assert_eq!("".to_string(), sha.packaging); 37 | assert_eq!(true, sha.filepath.is_none()); 38 | 39 | let prod = prod_match.product.expect("product document wasnt initialized correctly"); 40 | assert_eq!("rust".to_string(), prod.language); 41 | assert_eq!("serde".to_string(), prod.prod_key); 42 | assert_eq!("1.0".to_string(), prod.version); 43 | assert_eq!("serde".to_string(), prod.name); 44 | assert_eq!("cargo".to_string(), prod.prod_type.unwrap()); 45 | } 46 | 47 | //it should returns correct order of 
field headers 48 | #[test] 49 | fn test_product_sha_to_fields(){ 50 | let prod_sha = product::ProductSHA::from_sha("abc-125".to_string()); 51 | let fields = prod_sha.to_fields(); 52 | 53 | //it didnt change prod_sha itself 54 | assert_eq!("".to_string(), prod_sha.packaging); 55 | assert_eq!("".to_string(), prod_sha.method); 56 | assert_eq!("abc-125".to_string(), prod_sha.value); 57 | assert_eq!(true, prod_sha.filepath.is_none()); 58 | 59 | //it returns correct headers 60 | assert_eq!(4, fields.len()); 61 | assert_eq!("filepath".to_string(), fields[0]); 62 | assert_eq!("packaging".to_string(), fields[1]); 63 | assert_eq!("sha_method".to_string(), fields[2]); 64 | assert_eq!("sha_value".to_string(), fields[3]); 65 | 66 | } 67 | 68 | //it should returns correct list of list of values 69 | #[test] 70 | fn test_product_sha_to_rows(){ 71 | let prod_sha = product::ProductSHA::from_sha("abc-126".to_string()); 72 | let mut rows = prod_sha.to_rows(); 73 | assert_eq!(1, rows.len()); 74 | 75 | let row = rows.pop().expect("Failed to fetch product sha row"); 76 | assert_eq!(4, row.len()); 77 | assert_eq!("".to_string(), row[0]); 78 | assert_eq!("".to_string(), row[1]); 79 | assert_eq!("".to_string(), row[2]); 80 | assert_eq!("abc-126".to_string(), row[3]); 81 | } 82 | 83 | 84 | //it should returns correct list of productMatch fields 85 | #[test] 86 | fn test_product_match_to_fields(){ 87 | let prod_match = product::ProductMatch::empty(); 88 | let fields = prod_match.to_fields(); 89 | 90 | //test values stay unaffected 91 | assert_eq!(true, prod_match.sha.is_none()); 92 | assert_eq!(0, prod_match.licenses.len()); 93 | assert_eq!(0, prod_match.n_vulns); 94 | 95 | //test correct order and values of fieldnames 96 | assert_eq!(11, fields.len()); 97 | assert_eq!("filepath".to_string(), fields[0]); 98 | assert_eq!("packaging".to_string(), fields[1]); 99 | assert_eq!("sha_method".to_string(), fields[2]); 100 | assert_eq!("sha_value".to_string(), fields[3]); 101 | 
assert_eq!("language".to_string(), fields[4]); 102 | assert_eq!("prod_key".to_string(), fields[5]); 103 | assert_eq!("version".to_string(), fields[6]); 104 | assert_eq!("n_vulns".to_string(), fields[7]); 105 | assert_eq!("product_url".to_string(), fields[8]); 106 | assert_eq!("license".to_string(), fields[9]); 107 | assert_eq!("error".to_string(), fields[10]); 108 | } 109 | 110 | //it should return correct list of productMatch values 111 | #[test] 112 | fn test_product_match_to_rows(){ 113 | let product = product::Product { 114 | language: "rust".to_string(), 115 | prod_key: "serde".to_string(), 116 | version: "1.0".to_string(), 117 | name: "serde".to_string(), 118 | prod_type: Some("cargo".to_string()) 119 | }; 120 | 121 | let prod_sha = product::ProductSHA::from_sha("abc-124".to_string()); 122 | let prod_match = product::ProductMatch::new(product, prod_sha); 123 | let mut rows = prod_match.to_rows(); 124 | 125 | //test that values stay unaffected 126 | assert_eq!(0, prod_match.n_vulns); 127 | assert_eq!(0, prod_match.licenses.len()); 128 | 129 | let sha = prod_match.sha.expect("Sha value wasnt initialized for ProductMatch"); 130 | assert_eq!("abc-124".to_string(), sha.value); 131 | assert_eq!("".to_string(), sha.packaging); 132 | assert_eq!(true, sha.filepath.is_none()); 133 | 134 | //test does it returns correct row of values 135 | assert_eq!(1, rows.len()); 136 | let row = rows.pop().expect("It should return= first row with values"); 137 | let url = "https://www.versioneye.com/rust/serde".to_string(); 138 | 139 | assert_eq!("".to_string(), row[0]); 140 | assert_eq!("".to_string(), row[1]); 141 | assert_eq!("".to_string(), row[2]); 142 | assert_eq!("abc-124".to_string(), row[3]); 143 | assert_eq!("rust".to_string(), row[4]); 144 | assert_eq!("serde".to_string(), row[5]); 145 | assert_eq!("1.0".to_string(), row[6]); 146 | assert_eq!("0".to_string(), row[7]); 147 | assert_eq!(url, row[8]); 148 | assert_eq!("unknown".to_string(), row[9]); 149 | 
assert_eq!("".to_string(), row[10]); 150 | } 151 | 152 | //TODO: test that to_rows() puts each license on the different line 153 | #[test] 154 | fn test_product_match_to_rows_licenses_separated_rows(){ 155 | let mut prod_match = product::ProductMatch::empty(); 156 | let lic1 = product::ProductLicense { 157 | name: "MIT".to_string(), 158 | url: "http://mit.edu".to_string() 159 | }; 160 | let lic2 = product::ProductLicense { 161 | name: "EPL-1.0".to_string(), 162 | url: "http://epl.org".to_string() 163 | }; 164 | 165 | prod_match.licenses.push(lic1); 166 | prod_match.licenses.push(lic2); 167 | 168 | let rows = prod_match.to_rows(); 169 | //test that values stay unaffected 170 | assert_eq!(0, prod_match.n_vulns); 171 | assert_eq!(2, prod_match.licenses.len()); 172 | 173 | //test does it return 2rows with correct license value 174 | assert_eq!(2, rows.len()); 175 | assert_eq!("MIT".to_string(), rows[0][9]); 176 | assert_eq!("EPL-1.0".to_string(), rows[1][9]); 177 | } -------------------------------------------------------------------------------- /tests/tasks_test.rs: -------------------------------------------------------------------------------- 1 | extern crate veye_checker; 2 | 3 | use std::path::PathBuf; 4 | use std::fs::{self, File}; 5 | use std::io::Read; 6 | use veye_checker::{tasks, product, configs, digest_ext_table}; 7 | 8 | #[test] 9 | fn test_task_start_path_scanner(){ 10 | let test_dir = PathBuf::from("test/fixtures/files"); 11 | let ext_table = digest_ext_table::DigestExtTable::default(); 12 | let scan_configs = configs::ScanConfigs::default(); 13 | 14 | let (sha_ch, h1) = tasks::start_path_scanner(ext_table, test_dir, scan_configs); 15 | 16 | for sha in sha_ch.into_iter() { 17 | assert_eq!(true, sha.value.len() > 0); 18 | assert_eq!(true, sha.filepath.is_some()); 19 | } 20 | 21 | h1.join().unwrap(); 22 | 23 | } 24 | 25 | #[test] 26 | fn test_task_start_path_scanner_folder_dont_exist(){ 27 | let test_dir = PathBuf::from("test/fixtures/dont_exists"); 28 | 
assert_eq!(false, test_dir.exists()); 29 | 30 | let scan_configs = configs::ScanConfigs::default(); 31 | let ext_table = digest_ext_table::DigestExtTable::default(); 32 | let (_, h1) = tasks::start_path_scanner(ext_table, test_dir, scan_configs); 33 | 34 | let res = h1.join().unwrap(); 35 | assert_eq!(true, res.is_err()) 36 | 37 | } 38 | 39 | #[test] 40 | fn test_task_start_path_scanner_limit_file_size(){ 41 | let test_dir = PathBuf::from("test/fixtures/files"); 42 | let ext_table = digest_ext_table::DigestExtTable::default(); 43 | let scan_configs = configs::ScanConfigs { 44 | max_file_size: Some(10000), 45 | min_file_size: Some(5000) 46 | }; 47 | 48 | let (sha_ch, h1) = tasks::start_path_scanner(ext_table, test_dir, scan_configs); 49 | 50 | let mut n = 1; 51 | for sha in sha_ch.into_iter() { 52 | assert_eq!(1, n); //only one item can be on stream 53 | assert_eq!(true, sha.value.len() > 0); 54 | assert_eq!(true, sha.filepath.is_some()); 55 | n += 1; 56 | } 57 | 58 | h1.join().unwrap(); 59 | } 60 | 61 | #[test] 62 | fn test_task_start_sha_publisher(){ 63 | let test_shas = vec![product::ProductSHA::from_sha("abc-123".to_string())]; 64 | 65 | let (sha_ch, h1) = tasks::start_sha_publisher(test_shas); 66 | 67 | for sha in sha_ch.into_iter(){ 68 | assert_eq!("abc-123".to_string(), sha.value); 69 | } 70 | 71 | let res = h1.join().unwrap(); 72 | assert_eq!(true, res.is_ok()); 73 | } 74 | 75 | #[test] 76 | fn test_task_start_sha_publisher_with_empty_array(){ 77 | let test_shas = vec![]; 78 | let (_ , h1) = tasks::start_sha_publisher(test_shas); 79 | 80 | let res = h1.join().unwrap(); 81 | assert_eq!(true, res.is_ok()); 82 | } 83 | 84 | #[test] 85 | #[cfg(feature="api")] 86 | fn test_api_task_start_sha_fetcher(){ 87 | let file_sha = "5675fd96b29656504b86029551973d60fb41339b"; 88 | let confs = configs::read_configs(None); //dont forget to specify API_KEY 89 | let test_shas = vec![ 90 | product::ProductSHA::from_sha(file_sha.to_string()) 91 | ]; 92 | 93 | let (sha_ch, h1) = 
tasks::start_sha_publisher(test_shas); 94 | let (prod_ch, h2) = tasks::start_sha_fetcher(confs, sha_ch); 95 | 96 | for res in prod_ch.into_iter() { 97 | assert_eq!(true, res.sha.is_some()); 98 | 99 | let sha = res.sha.unwrap(); 100 | assert_eq!("".to_string(), sha.packaging); //it keeps original sha doc 101 | assert_eq!("".to_string(), sha.method); 102 | assert_eq!(file_sha.to_string(), sha.value); 103 | assert_eq!(None, sha.filepath); 104 | 105 | assert_eq!(true, res.product.is_some()); 106 | let prod = res.product.unwrap(); 107 | assert_eq!("java".to_string(), prod.language); 108 | assert_eq!("Maven2".to_string(), prod.prod_type.unwrap()); 109 | assert_eq!("commons-beanutils/commons-beanutils".to_string(), prod.prod_key); 110 | assert_eq!("1.7.0".to_string(), prod.version); 111 | assert_eq!("commons-beanutils".to_string(), prod.name); 112 | 113 | } 114 | 115 | let res1 = h1.join().unwrap(); 116 | assert_eq!(true, res1.is_ok()); 117 | let res2 = h2.join().unwrap(); 118 | assert_eq!(true, res2.is_ok()); 119 | } 120 | 121 | #[test] 122 | #[cfg(feature="api")] 123 | fn test_api_task_start_sha_fetcher_sha_dont_exists(){ 124 | let file_sha = "abc-123-dont-exists"; 125 | let confs = configs::read_configs(None); 126 | let test_shas = vec![ 127 | product::ProductSHA::from_sha(file_sha.to_string()) 128 | ]; 129 | 130 | let (sha_ch, h1) = tasks::start_sha_publisher(test_shas); 131 | let (prod_ch, h2) = tasks::start_sha_fetcher(confs, sha_ch); 132 | 133 | //it should return ProductMatch with original sha and empty prod info 134 | for res in prod_ch.into_iter() { 135 | assert_eq!(true, res.sha.is_some()); 136 | let sha = res.sha.unwrap(); 137 | assert_eq!("".to_string(), sha.packaging); //it keeps original sha doc 138 | assert_eq!("".to_string(), sha.method); 139 | assert_eq!(file_sha.to_string(), sha.value); 140 | assert_eq!(None, sha.filepath); 141 | 142 | assert_eq!(true, res.product.is_none()); 143 | } 144 | 145 | let res1 = h1.join().unwrap(); 146 | assert_eq!(true, 
res1.is_ok()); 147 | let res2 = h2.join().unwrap(); 148 | assert_eq!(true, res2.is_ok()); 149 | } 150 | 151 | #[test] 152 | fn test_task_start_sha_csv_writer(){ 153 | let confs = configs::read_configs(None); 154 | let file_sha = "5675fd96b29656504b86029551973d60fb41339b"; 155 | let test_shas = vec![ 156 | product::ProductSHA::from_sha(file_sha.to_string()) 157 | ]; 158 | let outpath = PathBuf::from("temp/test_task_sha_writer.csv"); 159 | let expected_content = "filepath;packaging;sha_method;sha_value\n;;;5675fd96b29656504b86029551973d60fb41339b\n"; 160 | 161 | let (sha_ch, h1) = tasks::start_sha_publisher(test_shas); 162 | let h2 = tasks::start_sha_csv_writer(outpath.clone(), confs.csv, sha_ch); 163 | 164 | let res1 = h1.join().unwrap(); 165 | assert_eq!(true, res1.is_ok()); 166 | let res2 = h2.join().unwrap(); 167 | assert_eq!(true, res2.is_ok()); 168 | 169 | let f_res = File::open(outpath.clone().as_path()); 170 | assert_eq!(true, f_res.is_ok()); 171 | let mut fd = f_res.unwrap(); 172 | let mut content = String::new(); 173 | fd.read_to_string(&mut content).unwrap(); 174 | assert_eq!(expected_content.to_string(), content); 175 | 176 | fs::remove_file(outpath.as_path()).expect("Failed to delete test_task_start_sha file"); 177 | } 178 | 179 | #[test] 180 | fn test_task_start_sha_csv_writer_empty_input(){ 181 | let confs = configs::read_configs(None); 182 | let test_shas = vec![]; 183 | let outpath = PathBuf::from("temp/test_task_sha_writer_empty.csv"); 184 | let expected_content = ""; 185 | 186 | let (sha_ch, h1) = tasks::start_sha_publisher(test_shas); 187 | let h2 = tasks::start_sha_csv_writer(outpath.clone(), confs.csv, sha_ch); 188 | 189 | let res1 = h1.join().unwrap(); 190 | assert_eq!(true, res1.is_ok()); 191 | let res2 = h2.join().unwrap(); 192 | assert_eq!(true, res2.is_ok()); 193 | 194 | let f_res = File::open(outpath.clone().as_path()); 195 | assert_eq!(true, f_res.is_ok()); 196 | let mut fd = f_res.unwrap(); 197 | let mut content = String::new(); 198 | 
fd.read_to_string(&mut content).unwrap(); 199 | assert_eq!(expected_content.to_string(), content); 200 | 201 | fs::remove_file(outpath.as_path()).expect("Failed to delete test_task_start_sha file"); 202 | } 203 | 204 | #[test] 205 | fn test_task_start_product_csv_writer(){ 206 | let outpath = PathBuf::from("temp/test_task_product_writer.csv"); 207 | let confs = configs::read_configs(None); 208 | 209 | let test_prods = vec![ product::ProductMatch::empty() ]; 210 | let expected_content = "filepath;packaging;sha_method;sha_value;language;prod_key;version;n_vulns;product_url;license;error\n;;;;;;;0;;unknown;\n"; 211 | 212 | let (prod_ch, h1) = tasks::start_product_match_publisher(test_prods); 213 | let h2 = tasks::start_product_csv_writer(outpath.clone(), confs.csv, prod_ch); 214 | 215 | let res1 = h1.join().unwrap(); 216 | assert_eq!(true, res1.is_ok()); 217 | let res2 = h2.join().unwrap(); 218 | assert_eq!(true, res2.is_ok()); 219 | 220 | let f_res = File::open(outpath.clone().as_path()); 221 | assert_eq!(true, f_res.is_ok()); 222 | let mut fd = f_res.unwrap(); 223 | let mut content = String::new(); 224 | fd.read_to_string(&mut content).unwrap(); 225 | assert_eq!(expected_content.to_string(), content); 226 | 227 | fs::remove_file(outpath.as_path()).expect("Failed to delete test_task_start_sha file"); 228 | } 229 | 230 | #[test] 231 | fn test_task_start_product_csv_writer_empty_rows(){ 232 | let outpath = PathBuf::from("temp/test_task_product_writer_empty.csv"); 233 | let test_prods = vec![]; 234 | let expected_content = ""; 235 | let confs = configs::read_configs(None); 236 | 237 | let (prod_ch, h1) = tasks::start_product_match_publisher(test_prods); 238 | let h2 = tasks::start_product_csv_writer(outpath.clone(), confs.csv, prod_ch); 239 | 240 | let res1 = h1.join().unwrap(); 241 | assert_eq!(true, res1.is_ok()); 242 | let res2 = h2.join().unwrap(); 243 | assert_eq!(true, res2.is_ok()); 244 | 245 | let f_res = File::open(outpath.clone().as_path()); 246 | 
assert_eq!(true, f_res.is_ok()); 247 | let mut fd = f_res.unwrap(); 248 | let mut content = String::new(); 249 | fd.read_to_string(&mut content).unwrap(); 250 | assert_eq!(expected_content.to_string(), content); 251 | 252 | fs::remove_file(outpath.as_path()).expect("Failed to delete test_task_start_sha file"); 253 | } -------------------------------------------------------------------------------- /veye-checker.iml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | --------------------------------------------------------------------------------