├── .circleci └── config.yml ├── .codeclimate.yml ├── .dockerignore ├── .eslintrc ├── .gitignore ├── .rspec ├── .rubocop.yml ├── DEVELOPERS.md ├── Dockerfile ├── Gemfile ├── Gemfile.lock ├── LICENSE ├── Makefile ├── README.md ├── Rakefile ├── bin └── duplication ├── config └── contents │ └── duplicated_code.md.erb ├── entrypoint ├── lib ├── cc │ ├── engine │ │ ├── analyzers │ │ │ ├── analyzer_base.rb │ │ │ ├── command_line_runner.rb │ │ │ ├── csharp │ │ │ │ └── main.rb │ │ │ ├── engine_config.rb │ │ │ ├── file_list.rb │ │ │ ├── file_thread_pool.rb │ │ │ ├── go │ │ │ │ └── main.rb │ │ │ ├── java │ │ │ │ └── main.rb │ │ │ ├── javascript │ │ │ │ └── main.rb │ │ │ ├── kotlin │ │ │ │ └── main.rb │ │ │ ├── node.rb │ │ │ ├── parser_base.rb │ │ │ ├── parser_error.rb │ │ │ ├── php │ │ │ │ └── main.rb │ │ │ ├── python │ │ │ │ ├── main.rb │ │ │ │ ├── node.rb │ │ │ │ ├── parser.py │ │ │ │ └── parser.rb │ │ │ ├── reporter.rb │ │ │ ├── ruby │ │ │ │ └── main.rb │ │ │ ├── scala │ │ │ │ └── main.rb │ │ │ ├── sexp.rb │ │ │ ├── sexp_lines.rb │ │ │ ├── swift │ │ │ │ └── main.rb │ │ │ ├── typescript │ │ │ │ └── main.rb │ │ │ ├── violation.rb │ │ │ ├── violation_read_up.rb │ │ │ └── violations.rb │ │ ├── duplication.rb │ │ ├── parse_metrics.rb │ │ ├── processed_source.rb │ │ └── sexp_builder.rb │ └── logger.rb └── ccflay.rb ├── spec ├── cc │ ├── ccflay_spec.rb │ └── engine │ │ ├── analyzers │ │ ├── analyzer_base_spec.rb │ │ ├── command_line_runner_spec.rb │ │ ├── csharp │ │ │ └── csharp_spec.rb │ │ ├── engine_config_spec.rb │ │ ├── file_list_spec.rb │ │ ├── file_thread_pool_spec.rb │ │ ├── go │ │ │ └── main_spec.rb │ │ ├── java │ │ │ └── java_spec.rb │ │ ├── javascript │ │ │ └── main_spec.rb │ │ ├── kotlin │ │ │ └── kotlin_spec.rb │ │ ├── php │ │ │ └── main_spec.rb │ │ ├── python │ │ │ └── main_spec.rb │ │ ├── ruby │ │ │ └── main_spec.rb │ │ ├── scala │ │ │ └── scala_spec.rb │ │ ├── sexp_lines_spec.rb │ │ ├── swift │ │ │ └── main_spec.rb │ │ ├── typescript │ │ │ └── main_spec.rb │ │ ├── 
violation_spec.rb │ │ └── violations_spec.rb │ │ ├── duplication_spec.rb │ │ ├── parse_metrics_spec.rb │ │ ├── processed_source_spec.rb │ │ └── sexp_builder_spec.rb ├── fixtures │ ├── empty_file.js │ ├── from_phan_php7.php │ ├── huge_js_file.js │ ├── issue_6609_1.go │ ├── issue_6609_2.go │ ├── normal_js_file.js │ ├── php_71_sample.php │ └── symfony_configuration.php ├── spec_helper.rb └── support │ └── helpers │ └── analyzer_spec_helpers.rb └── vendor └── php-parser ├── .gitignore ├── README.md ├── composer.json ├── composer.lock ├── lib └── PhpParser │ └── Serializer │ └── JSON.php ├── parser.php └── test ├── run └── test.php /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | jobs: 3 | build_and_test: 4 | docker: 5 | - image: circleci/python:latest 6 | steps: 7 | - checkout 8 | - setup_remote_docker: 9 | docker_layer_caching: true 10 | 11 | - run: make image 12 | - run: make citest 13 | 14 | release_images: 15 | docker: 16 | - image: circleci/python:latest 17 | steps: 18 | - checkout 19 | - setup_remote_docker: 20 | docker_layer_caching: true 21 | - run: echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin 22 | - run: 23 | name: Push image to Dockerhub 24 | command: | 25 | make release RELEASE_TAG="b$CIRCLE_BUILD_NUM" 26 | make release RELEASE_TAG="$(echo $CIRCLE_BRANCH | grep -oP 'channel/\K[\w\-]+')" 27 | 28 | workflows: 29 | version: 2 30 | build_deploy: 31 | jobs: 32 | - build_and_test 33 | - release_images: 34 | context: Quality 35 | requires: 36 | - build_and_test 37 | filters: 38 | branches: 39 | only: /master|channel\/[\w-]+/ 40 | notify: 41 | webhooks: 42 | - url: https://cc-slack-proxy.herokuapp.com/circle 43 | -------------------------------------------------------------------------------- /.codeclimate.yml: -------------------------------------------------------------------------------- 1 | --- 2 | prepare: 3 | fetch: 4 | - url: 
https://raw.githubusercontent.com/codeclimate/styleguide/master/ruby/rubocop.yml 5 | path: base_rubocop.yml 6 | engines: 7 | rubocop: 8 | enabled: true 9 | exclude_fingerprints: 10 | - 15684e77a6036a56ca2db7cbb958ab08 # long method 11 | - 8f5f9fae9dac04fe85d0529021092b01 # long method 12 | eslint: 13 | enabled: true 14 | bundler-audit: 15 | enabled: true 16 | duplication: 17 | enabled: true 18 | config: 19 | languages: 20 | - ruby 21 | ratings: 22 | paths: 23 | - "**.rb" 24 | - Gemfile.lock 25 | exclude_paths: 26 | - config/**/* 27 | - spec/**/* 28 | - vendor/**/* 29 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | .git/ 2 | -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | ecmaFeatures: {} 2 | env: 3 | node: true 4 | rules: 5 | no-alert: 0 6 | no-array-constructor: 0 7 | no-bitwise: 0 8 | no-caller: 0 9 | no-catch-shadow: 0 10 | no-class-assign: 0 11 | no-cond-assign: 2 12 | no-console: 2 13 | no-const-assign: 0 14 | no-constant-condition: 2 15 | no-continue: 0 16 | no-control-regex: 2 17 | no-debugger: 2 18 | no-delete-var: 2 19 | no-div-regex: 0 20 | no-dupe-keys: 2 21 | no-dupe-args: 2 22 | no-duplicate-case: 2 23 | no-else-return: 0 24 | no-empty: 2 25 | no-empty-character-class: 2 26 | no-empty-label: 0 27 | no-eq-null: 0 28 | no-eval: 0 29 | no-ex-assign: 2 30 | no-extend-native: 0 31 | no-extra-bind: 0 32 | no-extra-boolean-cast: 2 33 | no-extra-parens: 0 34 | no-extra-semi: 2 35 | no-fallthrough: 2 36 | no-floating-decimal: 0 37 | no-func-assign: 2 38 | no-implicit-coercion: 0 39 | no-implied-eval: 0 40 | no-inline-comments: 0 41 | no-inner-declarations: 42 | - 2 43 | - functions 44 | no-invalid-regexp: 2 45 | no-invalid-this: 0 46 | no-irregular-whitespace: 2 47 | no-iterator: 0 48 | no-label-var: 
0 49 | no-labels: 0 50 | no-lone-blocks: 0 51 | no-lonely-if: 0 52 | no-loop-func: 0 53 | no-mixed-requires: 54 | - 0 55 | - false 56 | no-mixed-spaces-and-tabs: 57 | - 2 58 | - false 59 | linebreak-style: 60 | - 0 61 | - unix 62 | no-multi-spaces: 0 63 | no-multi-str: 0 64 | no-multiple-empty-lines: 65 | - 0 66 | - max: 2 67 | no-native-reassign: 0 68 | no-negated-in-lhs: 2 69 | no-nested-ternary: 0 70 | no-new: 0 71 | no-new-func: 0 72 | no-new-object: 0 73 | no-new-require: 0 74 | no-new-wrappers: 0 75 | no-obj-calls: 2 76 | no-octal: 2 77 | no-octal-escape: 0 78 | no-param-reassign: 0 79 | no-path-concat: 0 80 | no-plusplus: 0 81 | no-process-env: 0 82 | no-process-exit: 0 83 | no-proto: 0 84 | no-redeclare: 2 85 | no-regex-spaces: 2 86 | no-reserved-keys: 0 87 | no-restricted-modules: 0 88 | no-return-assign: 0 89 | no-script-url: 0 90 | no-self-compare: 0 91 | no-sequences: 0 92 | no-shadow: 0 93 | no-shadow-restricted-names: 0 94 | no-spaced-func: 0 95 | no-sparse-arrays: 2 96 | no-sync: 0 97 | no-ternary: 0 98 | no-trailing-spaces: 0 99 | no-this-before-super: 0 100 | no-throw-literal: 0 101 | no-undef: 2 102 | no-undef-init: 0 103 | no-undefined: 0 104 | no-unexpected-multiline: 0 105 | no-underscore-dangle: 0 106 | no-unneeded-ternary: 0 107 | no-unreachable: 2 108 | no-unused-expressions: 0 109 | no-unused-vars: 110 | - 2 111 | - vars: all 112 | args: after-used 113 | no-use-before-define: 0 114 | no-useless-call: 0 115 | no-void: 0 116 | no-var: 0 117 | no-warning-comments: 118 | - 0 119 | - terms: 120 | - todo 121 | - fixme 122 | - xxx 123 | location: start 124 | no-with: 0 125 | array-bracket-spacing: 126 | - 0 127 | - never 128 | arrow-parens: 0 129 | arrow-spacing: 0 130 | accessor-pairs: 0 131 | block-scoped-var: 0 132 | brace-style: 133 | - 0 134 | - 1tbs 135 | callback-return: 0 136 | camelcase: 0 137 | comma-dangle: 138 | - 2 139 | - never 140 | comma-spacing: 0 141 | comma-style: 0 142 | complexity: 143 | - 0 144 | - 11 145 | 
computed-property-spacing: 146 | - 0 147 | - never 148 | consistent-return: 0 149 | consistent-this: 150 | - 0 151 | - that 152 | constructor-super: 0 153 | curly: 154 | - 0 155 | - all 156 | default-case: 0 157 | dot-location: 0 158 | dot-notation: 159 | - 0 160 | - allowKeywords: true 161 | eol-last: 0 162 | eqeqeq: 0 163 | func-names: 0 164 | func-style: 165 | - 0 166 | - declaration 167 | generator-star-spacing: 0 168 | guard-for-in: 0 169 | handle-callback-err: 0 170 | indent: 0 171 | init-declarations: 0 172 | key-spacing: 173 | - 0 174 | - beforeColon: false 175 | afterColon: true 176 | lines-around-comment: 0 177 | max-depth: 178 | - 0 179 | - 4 180 | max-len: 181 | - 0 182 | - 80 183 | - 4 184 | max-nested-callbacks: 185 | - 0 186 | - 2 187 | max-params: 188 | - 0 189 | - 3 190 | max-statements: 191 | - 0 192 | - 10 193 | new-cap: 0 194 | new-parens: 0 195 | newline-after-var: 0 196 | object-curly-spacing: 197 | - 0 198 | - never 199 | object-shorthand: 0 200 | one-var: 0 201 | operator-assignment: 202 | - 0 203 | - always 204 | operator-linebreak: 0 205 | padded-blocks: 0 206 | prefer-const: 0 207 | prefer-spread: 0 208 | prefer-reflect: 0 209 | quote-props: 0 210 | quotes: 211 | - 0 212 | - double 213 | radix: 0 214 | require-yield: 0 215 | semi: 0 216 | semi-spacing: 217 | - 0 218 | - before: false 219 | after: true 220 | sort-vars: 0 221 | space-after-keywords: 222 | - 0 223 | - always 224 | space-before-blocks: 225 | - 0 226 | - always 227 | space-before-function-paren: 228 | - 0 229 | - always 230 | space-in-parens: 231 | - 0 232 | - never 233 | space-infix-ops: 0 234 | space-return-throw-case: 0 235 | space-unary-ops: 236 | - 0 237 | - words: true 238 | nonwords: false 239 | spaced-comment: 0 240 | strict: 0 241 | use-isnan: 2 242 | valid-jsdoc: 0 243 | valid-typeof: 2 244 | vars-on-top: 0 245 | wrap-iife: 0 246 | wrap-regex: 0 247 | yoda: 248 | - 0 249 | - never 250 | -------------------------------------------------------------------------------- 
/.gitignore: -------------------------------------------------------------------------------- 1 | base_rubocop.yml 2 | node_modules 3 | -------------------------------------------------------------------------------- /.rspec: -------------------------------------------------------------------------------- 1 | --require spec_helper 2 | --color 3 | -------------------------------------------------------------------------------- /.rubocop.yml: -------------------------------------------------------------------------------- 1 | inherit_from: base_rubocop.yml 2 | 3 | AllCops: 4 | TargetRubyVersion: 2.3 5 | -------------------------------------------------------------------------------- /DEVELOPERS.md: -------------------------------------------------------------------------------- 1 | # Developer notes 2 | 3 | ## Upgrading php parser 4 | 5 | * install php 6 | * `cd vendor/php-parser` 7 | * edit composer.json to use the newer version 8 | * install composer: `curl "https://getcomposer.org/installer" | php` 9 | * update `composer.lock`: `php composer.phar update` 10 | * `rm composer.phar` 11 | 12 | ## QA 13 | 14 | There is an automated [QA tool](https://github.com/codeclimate/qm_qa) that can 15 | be used to run the engine against a popular set of OSS repos across supported 16 | languages. If you are adding a new language here, please add that language to 17 | the list of languages scanned by the QA tool, and run it! 
18 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM codeclimate/codeclimate-parser:b962 2 | LABEL maintainer="Code Climate " 3 | 4 | # Reset from base image 5 | USER root 6 | 7 | WORKDIR /usr/src/app/ 8 | 9 | # 3x normal default 10 | ENV RUBY_THREAD_MACHINE_STACK_SIZE=3145728 11 | 12 | RUN apt-get update && \ 13 | RUNLEVEL=1 apt-get install --yes --no-install-recommends \ 14 | python2.7 15 | 16 | COPY Gemfile* ./ 17 | COPY vendor/php-parser/composer* ./vendor/php-parser/ 18 | 19 | RUN bundle install --jobs 4 --quiet && \ 20 | composer install --no-interaction --quiet --working-dir ./vendor/php-parser 21 | 22 | COPY . ./ 23 | RUN chown -R app:app ./ 24 | 25 | USER app 26 | 27 | # Hide deprecation warnings 28 | ENV RUBYOPT="-W0" 29 | ENTRYPOINT ["/usr/src/app/entrypoint"] 30 | CMD ["/usr/src/app/bin/duplication", "/code", "/config.json"] 31 | -------------------------------------------------------------------------------- /Gemfile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | source "https://rubygems.org" 3 | 4 | gem "concurrent-ruby", "~> 1.0.0" 5 | gem "flay", "~> 2.13.1" 6 | gem "sexp_processor", "~> 4.17" 7 | 8 | gem "codeclimate-parser-client", 9 | path: "/home/app/codeclimate-parser-client" 10 | 11 | group :test do 12 | gem "pry" 13 | gem "rake" 14 | gem "rspec" 15 | end 16 | -------------------------------------------------------------------------------- /Gemfile.lock: -------------------------------------------------------------------------------- 1 | PATH 2 | remote: /home/app/codeclimate-parser-client 3 | specs: 4 | codeclimate-parser-client (0.0.0) 5 | ast (~> 2.3) 6 | excon (~> 0.99.0) 7 | 8 | GEM 9 | remote: https://rubygems.org/ 10 | specs: 11 | ast (2.4.2) 12 | coderay (1.1.3) 13 | concurrent-ruby (1.0.5) 14 | diff-lcs (1.5.0) 15 | erubi 
(1.12.0) 16 | excon (0.99.0) 17 | flay (2.13.1) 18 | erubi (~> 1.10) 19 | path_expander (~> 1.0) 20 | ruby_parser (~> 3.0) 21 | sexp_processor (~> 4.0) 22 | method_source (1.0.0) 23 | path_expander (1.1.1) 24 | pry (0.14.2) 25 | coderay (~> 1.1) 26 | method_source (~> 1.0) 27 | rake (13.1.0) 28 | rspec (3.12.0) 29 | rspec-core (~> 3.12.0) 30 | rspec-expectations (~> 3.12.0) 31 | rspec-mocks (~> 3.12.0) 32 | rspec-core (3.12.2) 33 | rspec-support (~> 3.12.0) 34 | rspec-expectations (3.12.3) 35 | diff-lcs (>= 1.2.0, < 2.0) 36 | rspec-support (~> 3.12.0) 37 | rspec-mocks (3.12.6) 38 | diff-lcs (>= 1.2.0, < 2.0) 39 | rspec-support (~> 3.12.0) 40 | rspec-support (3.12.1) 41 | ruby_parser (3.20.3) 42 | sexp_processor (~> 4.16) 43 | sexp_processor (4.17.0) 44 | 45 | PLATFORMS 46 | ruby 47 | 48 | DEPENDENCIES 49 | codeclimate-parser-client! 50 | concurrent-ruby (~> 1.0.0) 51 | flay (~> 2.13.1) 52 | pry 53 | rake 54 | rspec 55 | sexp_processor (~> 4.17) 56 | 57 | BUNDLED WITH 58 | 2.4.22 59 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2015 Code Climate, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in 11 | all copies or substantial portions of the Software. 
12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 19 | THE SOFTWARE. 20 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: image test citest release 2 | 3 | IMAGE_NAME ?= codeclimate/codeclimate-duplication 4 | RELEASE_REGISTRY ?= codeclimate 5 | 6 | ifndef RELEASE_TAG 7 | override RELEASE_TAG = latest 8 | endif 9 | 10 | image: 11 | docker build --rm -t $(IMAGE_NAME) . 12 | 13 | test: image 14 | docker run --tty --interactive --rm $(IMAGE_NAME) bundle exec rspec $(RSPEC_ARGS) 15 | 16 | citest: 17 | docker run --rm $(IMAGE_NAME) bundle exec rake 18 | 19 | bundle: 20 | docker run --rm \ 21 | --entrypoint /bin/sh \ 22 | --volume $(PWD):/usr/src/app \ 23 | $(IMAGE_NAME) -c "bundle $(BUNDLE_ARGS)" 24 | 25 | release: 26 | docker tag $(IMAGE_NAME) $(RELEASE_REGISTRY)/codeclimate-duplication:$(RELEASE_TAG) 27 | docker push $(RELEASE_REGISTRY)/codeclimate-duplication:$(RELEASE_TAG) 28 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # This repository is deprecated and archived 2 | This is a repository for a Code Climate Quality plugin which is packaged as a Docker image. 3 | 4 | Code Climate Quality is being replaced with the new [Qlty](qlty.sh) code quality platform. Qlty uses a new plugin system which does not require packaging plugins as Docker images. 
5 | 6 | As a result, this repository is no longer maintained and has been archived. 7 | 8 | ## Advantages of Qlty plugins 9 | The new Qlty plugins system provides key advantages over the older, Docker-based plugin system: 10 | 11 | - Linting runs much faster without the overhead of virtualization 12 | - New versions of linters are available immediately without needing to wait for a re-packaged release 13 | - Plugins can be run with any arbitrary extensions (like extra rules and configs) without requiring pre-packaging 14 | - Eliminates security issues associated with exposing a Docker daemon 15 | 16 | ## Try out Qlty today free 17 | 18 | [Qlty CLI](https://docs.qlty.sh/cli/quickstart) is the fastest linter and auto-formatter for polyglot teams. It is completely free and available for Mac, Windows, and Linux. 19 | 20 | - Install Qlty CLI: 21 | ` 22 | curl https://qlty.sh | sh # Mac or Linux 23 | ` 24 | or ` powershell -c "iwr https://qlty.sh | iex" # Windows` 25 | 26 | [Qlty Cloud](https://docs.qlty.sh/cloud/quickstart) is a full code health platform for integrating code quality into development team workflows. It is free for unlimited private contributors. 27 | - [Try Qlty Cloud today](https://docs.qlty.sh/cloud/quickstart) 28 | 29 | **Note**: For existing customers of Quality, please see our [Migration Guide](https://docs.qlty.sh/migration/guide) for more information and resources. 
30 | -------------------------------------------------------------------------------- /Rakefile: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | begin 3 | require "rspec/core/rake_task" 4 | RSpec::Core::RakeTask.new(:spec) 5 | 6 | task default: :spec 7 | rescue LoadError 8 | end 9 | -------------------------------------------------------------------------------- /bin/duplication: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | # frozen_string_literal: true 3 | 4 | $:.unshift(File.expand_path(File.join(File.dirname(__FILE__), "../lib"))) 5 | require "cc/logger" 6 | require "cc/engine/duplication" 7 | 8 | config_path = ARGV[1] || "/config.json" 9 | 10 | config = 11 | if File.exist?(config_path) 12 | JSON.parse(File.read(config_path)) 13 | else 14 | {} 15 | end 16 | 17 | CC.logger.level = 18 | if config["debug"] || config.fetch("config", {})["debug"] 19 | ::Logger::DEBUG 20 | else 21 | ::Logger::INFO 22 | end 23 | 24 | directory = ARGV[0] || "/code" 25 | 26 | CC::Engine::Duplication.new( 27 | directory: directory, engine_config: config, io: STDOUT, 28 | ).run 29 | -------------------------------------------------------------------------------- /config/contents/duplicated_code.md.erb: -------------------------------------------------------------------------------- 1 | ## Duplicated Code 2 | 3 | Duplicated code can lead to software that is hard to understand and difficult to change. The Don't Repeat Yourself (DRY) principle states: 4 | 5 | > Every piece of knowledge must have a single, unambiguous, authoritative representation within a system. 6 | 7 | When you violate DRY, bugs and maintenance problems are sure to follow. Duplicated code has a tendency to both continue to replicate and also to diverge (leaving bugs as two similar implementations differ in subtle ways). 8 | 9 | ## Tuning 10 | 11 | **This issue has a mass of <%= mass %>**. 
12 | 13 | We set useful threshold defaults for the languages we support but you may want to adjust these settings based on your project guidelines. 14 | 15 | The threshold configuration represents the minimum [mass](https://docs.codeclimate.com/docs/duplication#mass) a code block must have to be analyzed for duplication. The lower the threshold, the more fine-grained the comparison. 16 | 17 | If the engine is too easily reporting duplication, try raising the threshold. If you suspect that the engine isn't catching enough duplication, try lowering the threshold. The best setting tends to differ from language to language. 18 | 19 | See [`codeclimate-duplication`'s documentation](https://docs.codeclimate.com/docs/duplication) for more information about tuning the mass threshold in your `.codeclimate.yml`. 20 | 21 | ## Refactorings 22 | 23 | * [Extract Method](http://sourcemaking.com/refactoring/extract-method) 24 | * [Extract Class](http://sourcemaking.com/refactoring/extract-class) 25 | * [Form Template Method](http://sourcemaking.com/refactoring/form-template-method) 26 | * [Introduce Null Object](http://sourcemaking.com/refactoring/introduce-null-object) 27 | * [Pull Up Method](http://sourcemaking.com/refactoring/pull-up-method) 28 | * [Pull Up Field](http://sourcemaking.com/refactoring/pull-up-field) 29 | * [Substitute Algorithm](http://sourcemaking.com/refactoring/substitute-algorithm) 30 | 31 | ## Further Reading 32 | 33 | * [Don't Repeat Yourself](http://c2.com/cgi/wiki?DontRepeatYourself) on the C2 Wiki 34 | * [Duplicated Code](http://sourcemaking.com/refactoring/duplicated-code) on SourceMaking 35 | * [Refactoring: Improving the Design of Existing Code](http://www.amazon.com/Refactoring-Improving-Design-Existing-Code/dp/0201485672) by Martin Fowler. 
_Duplicated Code_, p76 36 | -------------------------------------------------------------------------------- /entrypoint: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | ~/codeclimate-parser/bin/boot-retries >&2 && exec "$@" 3 | -------------------------------------------------------------------------------- /lib/cc/engine/analyzers/analyzer_base.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | # Monkey patch for Parser class 4 | # used in language analyzers via Sexp::Matcher.parse 5 | # https://github.com/seattlerb/sexp_processor/blob/master/lib/sexp_matcher.rb 6 | class Sexp 7 | class Matcher < Sexp 8 | class Parser 9 | def parse_sexp 10 | token = next_token 11 | 12 | case token 13 | when "(" then 14 | parse_list 15 | when "[" then 16 | parse_cmd 17 | when "nil" then 18 | nil 19 | when /^\d+$/ then 20 | token.to_i 21 | when "___" then 22 | Sexp.___ 23 | when "_" then 24 | Sexp._ 25 | when %r%^/(.*)/$% then 26 | re = $1 27 | raise SyntaxError, "Not allowed: /%p/" % [re] unless 28 | re =~ /\A([\w()|.*+^$]+)\z/ 29 | Regexp.new re 30 | when /^"(.*)"$/ then 31 | $1 32 | when /^([A-Z]\w*)$/ then 33 | if Object.const_defined?($1) 34 | Object.const_get $1 35 | else 36 | # Handle as a symbol or string 37 | $1.to_sym # or return $1 as a string 38 | end 39 | when /^:?([\w?!=~-]+)$/ then 40 | $1.to_sym 41 | else 42 | raise SyntaxError, "unhandled token: %p" % [token] 43 | end 44 | end 45 | end 46 | end 47 | end 48 | 49 | require "cc/engine/analyzers/parser_error" 50 | require "cc/engine/analyzers/parser_base" 51 | require "cc/engine/analyzers/file_list" 52 | require "cc/engine/processed_source" 53 | require "cc/engine/sexp_builder" 54 | 55 | module CC 56 | module Engine 57 | module Analyzers 58 | class Base 59 | RESCUABLE_ERRORS = [ 60 | ::CC::Engine::Analyzers::ParserError, 61 | ::Errno::ENOENT, 62 | ::Racc::ParseError, 63 | 
::RubyParser::SyntaxError, 64 | ::RuntimeError, 65 | ].freeze 66 | 67 | POINTS_PER_MINUTE = 10_000 # Points represent engineering time to resolve issue 68 | BASE_POINTS = 30 * POINTS_PER_MINUTE 69 | 70 | SEVERITIES = [ 71 | MAJOR = "major".freeze, 72 | MINOR = "minor".freeze, 73 | ].freeze 74 | 75 | MAJOR_SEVERITY_THRESHOLD = 120 * POINTS_PER_MINUTE 76 | 77 | def initialize(engine_config:, parse_metrics:) 78 | @engine_config = engine_config 79 | @parse_metrics = parse_metrics 80 | end 81 | 82 | def run(file) 83 | if (skip_reason = skip?(file)) 84 | CC.logger.info("Skipping file #{file} because #{skip_reason}") 85 | nil 86 | else 87 | process_file(file) 88 | end 89 | rescue => ex 90 | if RESCUABLE_ERRORS.map { |klass| ex.instance_of?(klass) }.include?(true) 91 | CC.logger.info("Skipping file #{file} due to exception (#{ex.class}): #{ex.message}\n") 92 | nil 93 | else 94 | CC.logger.info("#{ex.class} error occurred processing file #{file}: aborting.") 95 | raise ex 96 | end 97 | end 98 | 99 | def files 100 | file_list.files 101 | end 102 | 103 | def filters 104 | engine_config.filters_for(language) | default_filters 105 | end 106 | 107 | def post_filters 108 | engine_config.post_filters_for(language) | default_post_filters 109 | end 110 | 111 | def language 112 | self.class::LANGUAGE 113 | end 114 | 115 | def check_mass_threshold(check) 116 | engine_config.mass_threshold_for(language, check) || self.class::DEFAULT_MASS_THRESHOLD 117 | end 118 | 119 | def mass_threshold 120 | engine_config.minimum_mass_threshold_for(language) || self.class::DEFAULT_MASS_THRESHOLD 121 | end 122 | 123 | def count_threshold 124 | engine_config.count_threshold_for(language) 125 | end 126 | 127 | def calculate_points(violation) 128 | overage = violation.mass - check_mass_threshold(violation.check_name) 129 | base_points + (overage * points_per_overage) 130 | end 131 | 132 | def calculate_severity(points) 133 | if points >= MAJOR_SEVERITY_THRESHOLD 134 | MAJOR 135 | else 136 | MINOR 137 | 
end 138 | end 139 | 140 | def transform_sexp(sexp) 141 | sexp 142 | end 143 | 144 | # Please see: codeclimate/app#6227 145 | def use_sexp_lines? 146 | true 147 | end 148 | 149 | private 150 | 151 | attr_reader :engine_config, :parse_metrics 152 | 153 | def base_points 154 | self.class::BASE_POINTS 155 | end 156 | 157 | def default_filters 158 | [] 159 | end 160 | 161 | def default_post_filters 162 | [] 163 | end 164 | 165 | def points_per_overage 166 | self.class::POINTS_PER_OVERAGE 167 | end 168 | 169 | def process_file(_path) 170 | raise NoMethodError, "Subclass must implement `process_file`" 171 | end 172 | 173 | def file_list 174 | @_file_list ||= ::CC::Engine::Analyzers::FileList.new( 175 | engine_config: engine_config, 176 | patterns: engine_config.patterns_for( 177 | language, 178 | patterns, 179 | ), 180 | ) 181 | end 182 | 183 | def skip?(_path) 184 | nil 185 | end 186 | 187 | def parse(file, request_path) 188 | processed_source = ProcessedSource.new(file, request_path) 189 | parse_metrics.incr(:succeeded) 190 | SexpBuilder.new(processed_source.ast, file).build 191 | rescue => ex 192 | handle_exception(processed_source, ex) 193 | end 194 | 195 | def handle_exception(processed_source, ex) 196 | CC.logger.debug { "Contents:\n#{processed_source.raw_source}" } 197 | 198 | case 199 | when ex.is_a?(CC::Parser::Client::HTTPError) && ex.response_status.to_s.start_with?("4") 200 | CC.logger.warn("Skipping #{processed_source.path} due to #{ex.class}") 201 | CC.logger.warn("Response status: #{ex.response_status}") 202 | CC.logger.debug { "Response:\n#{ex.response_body}" } 203 | parse_metrics.incr(ex.code.to_sym) 204 | when ex.is_a?(CC::Parser::Client::EncodingError) 205 | CC.logger.warn("Skipping #{processed_source.path} due to #{ex.class}: #{ex.message}") 206 | parse_metrics.incr(:encoding_error) 207 | when ex.is_a?(CC::Parser::Client::NestingDepthError) 208 | CC.logger.warn("Skipping #{processed_source.path} due to #{ex.class}") 209 | CC.logger.warn(ex.message) 210 
| parse_metrics.incr(:client_nesting_depth_error) 211 | else 212 | CC.logger.error("Error processing file: #{processed_source.path}") 213 | CC.logger.error(ex.message) 214 | raise ex 215 | end 216 | nil 217 | end 218 | 219 | def patterns 220 | self.class::PATTERNS 221 | end 222 | end 223 | end 224 | end 225 | end 226 | -------------------------------------------------------------------------------- /lib/cc/engine/analyzers/command_line_runner.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "open3" 4 | require "timeout" 5 | 6 | module CC 7 | module Engine 8 | module Analyzers 9 | class CommandLineRunner 10 | DEFAULT_TIMEOUT = 300 11 | 12 | def initialize(command, timeout = DEFAULT_TIMEOUT) 13 | @command = command 14 | @timeout = timeout 15 | end 16 | 17 | def run(input) 18 | Timeout.timeout(timeout) do 19 | out, err, status = Open3.capture3(command, stdin_data: input) 20 | 21 | status ||= handle_open3_race_condition(out) 22 | 23 | if status.success? 24 | yield out 25 | else 26 | raise ::CC::Engine::Analyzers::ParserError, "`#{command}` exited with code #{status.exitstatus}:\n#{err}" 27 | end 28 | end 29 | end 30 | 31 | private 32 | 33 | attr_reader :command, :timeout 34 | 35 | # Work around a race condition in JRuby's Open3.capture3 that can lead 36 | # to a nil status returned. We'll consider the process successful if it 37 | # produced output that can be parsed as JSON. 38 | # 39 | # https://github.com/jruby/jruby/blob/master/lib/ruby/stdlib/open3.rb#L200-L201 40 | # 41 | def handle_open3_race_condition(out) 42 | JSON.parse(out) 43 | NullStatus.new(true, 0) 44 | rescue JSON::ParserError 45 | NullStatus.new(false, 1) 46 | end 47 | 48 | NullStatus = Struct.new(:success, :exitstatus) do 49 | def success? 
50 | success 51 | end 52 | end 53 | end 54 | end 55 | end 56 | end 57 | -------------------------------------------------------------------------------- /lib/cc/engine/analyzers/csharp/main.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "flay" 4 | require "json" 5 | require "cc/engine/analyzers/reporter" 6 | require "cc/engine/analyzers/analyzer_base" 7 | 8 | module CC 9 | module Engine 10 | module Analyzers 11 | module Csharp 12 | class Main < CC::Engine::Analyzers::Base 13 | LANGUAGE = "csharp".freeze 14 | PATTERNS = ["**/*.cs"].freeze 15 | DEFAULT_MASS_THRESHOLD = 60 16 | DEFAULT_FILTERS = [ 17 | "(UsingDirective ___)".freeze 18 | ].freeze 19 | POINTS_PER_OVERAGE = 10_000 20 | REQUEST_PATH = "/csharp".freeze 21 | 22 | def use_sexp_lines? 23 | false 24 | end 25 | 26 | private 27 | 28 | def process_file(file) 29 | parse(file, REQUEST_PATH) 30 | end 31 | 32 | def default_filters 33 | DEFAULT_FILTERS.map { |filter| Sexp::Matcher.parse filter } 34 | end 35 | end 36 | end 37 | end 38 | end 39 | end 40 | -------------------------------------------------------------------------------- /lib/cc/engine/analyzers/engine_config.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module CC 4 | module Engine 5 | module Analyzers 6 | class EngineConfig 7 | DEFAULT_COUNT_THRESHOLD = 2 8 | IDENTICAL_CODE_CHECK = "identical-code".freeze 9 | IDENTICAL_CODE_CHECK_LEGACY_NAME = "Identical code".freeze 10 | SIMILAR_CODE_CHECK = "similar-code".freeze 11 | SIMILAR_CODE_CHECK_LEGACY_NAME = "Similar code".freeze 12 | CHECK_MAPPINGS = { 13 | IDENTICAL_CODE_CHECK_LEGACY_NAME => IDENTICAL_CODE_CHECK, 14 | SIMILAR_CODE_CHECK_LEGACY_NAME => SIMILAR_CODE_CHECK, 15 | }.freeze 16 | 17 | InvalidConfigError = Class.new(StandardError) 18 | 19 | def initialize(hash) 20 | @config = normalize(hash) 21 | end 22 | 23 | def include_paths 24 | 
config.fetch("include_paths", ["./"]) 25 | end 26 | 27 | def languages 28 | config.fetch("languages", {}) 29 | end 30 | 31 | def concurrency 32 | config.fetch("config", {}).fetch("concurrency", 2).to_i 33 | end 34 | 35 | def dump_ast? 36 | config.fetch("config", {}).fetch("dump_ast", false) 37 | end 38 | 39 | def filters_for(language) 40 | fetch_language(language).fetch("filters", []).map do |filter| 41 | Sexp::Matcher.parse filter 42 | end 43 | end 44 | 45 | def post_filters_for(language) 46 | fetch_language(language).fetch("post_filters", []).map do |filter| 47 | Sexp::Matcher.parse filter 48 | end 49 | end 50 | 51 | def minimum_mass_threshold_for(language) 52 | [ 53 | mass_threshold_for(language, IDENTICAL_CODE_CHECK), 54 | mass_threshold_for(language, SIMILAR_CODE_CHECK), 55 | ].compact.min 56 | end 57 | 58 | def mass_threshold_for(language, check) 59 | qm_threshold = qm_checks.fetch(check, {}).fetch("config", {})["threshold"] 60 | 61 | if qm_threshold 62 | qm_threshold.to_i 63 | else 64 | threshold = fetch_language(language).fetch("mass_threshold", nil) 65 | 66 | if threshold 67 | threshold.to_i 68 | end 69 | end 70 | end 71 | 72 | def count_threshold_for(language) 73 | threshold = fetch_language(language)["count_threshold"] || 74 | config.fetch("config", {}).fetch("count_threshold", nil) || 75 | DEFAULT_COUNT_THRESHOLD 76 | 77 | if threshold 78 | threshold.to_i 79 | end 80 | end 81 | 82 | def fetch_language(language) 83 | language = config. 84 | fetch("languages", {}). 85 | fetch(language, {}) 86 | 87 | if language.is_a? 
Hash 88 | language 89 | else 90 | {} 91 | end 92 | end 93 | 94 | def patterns_for(language, fallbacks) 95 | Array(fetch_language(language).fetch("patterns", fallbacks)) 96 | end 97 | 98 | def check_enabled?(legacy_check_name, check_name) 99 | legacy_config = legacy_checks.fetch(legacy_check_name, { 100 | "enabled" => true 101 | }) 102 | 103 | qm_checks.fetch(check_name, legacy_config).fetch("enabled", true) 104 | end 105 | 106 | def all_checks_disabled? 107 | CHECK_MAPPINGS.none? do |legacy_check_name, check_name| 108 | check_enabled?(legacy_check_name, check_name) 109 | end 110 | end 111 | 112 | private 113 | 114 | attr_reader :config 115 | 116 | def normalize(hash) 117 | hash.tap do |config| 118 | languages = config.fetch("config", {}).fetch("languages") do 119 | default_languages 120 | end 121 | config["languages"] = build_language_config(languages) 122 | end 123 | end 124 | 125 | def default_languages 126 | tuples = Duplication::LANGUAGES.map do |language, _| 127 | [language, {}] 128 | end 129 | Hash[tuples] 130 | end 131 | 132 | def build_language_config(languages) 133 | if languages.is_a?(Array) 134 | languages.each_with_object({}) do |language, map| 135 | language, config = coerce_array_entry(language) 136 | map[language.downcase] = config 137 | end 138 | elsif languages.is_a?(Hash) 139 | languages.each_with_object({}) do |(key, value), map| 140 | map[key.downcase] = value 141 | end 142 | else 143 | raise InvalidConfigError, "languages config entry is invalid: please check documentation for details of configuring languages" 144 | end 145 | end 146 | 147 | def coerce_array_entry(entry) 148 | if entry.is_a?(String) 149 | [entry.downcase, {}] 150 | elsif entry.is_a?(Hash) && entry.keys.count == 1 151 | [entry.keys.first, entry[entry.keys.first]] 152 | else 153 | raise InvalidConfigError, "#{entry.inspect} is not a valid language entry" 154 | end 155 | end 156 | 157 | def legacy_checks 158 | config.fetch("checks", {}) 159 | end 160 | 161 | def qm_checks 162 | 
config.fetch("config", {}).fetch("checks", {}) 163 | end 164 | end 165 | end 166 | end 167 | end 168 | -------------------------------------------------------------------------------- /lib/cc/engine/analyzers/file_list.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "pathname" 4 | 5 | module CC 6 | module Engine 7 | module Analyzers 8 | class FileList 9 | def initialize(engine_config:, patterns:) 10 | @engine_config = engine_config 11 | @patterns = patterns 12 | end 13 | 14 | def files 15 | engine_config.include_paths.flat_map do |path| 16 | pathname = Pathname.new(path) 17 | if pathname.directory? && !pathname.cleanpath.symlink? 18 | expand(path) 19 | elsif pathname.file? && !pathname.symlink? && matches?(path) 20 | [path] 21 | else 22 | [] 23 | end 24 | end 25 | end 26 | 27 | private 28 | 29 | attr_reader :engine_config, :patterns 30 | 31 | def expand(path) 32 | globs = patterns.map { |p| File.join(relativize(path), p) } 33 | 34 | Dir.glob(globs).select { |f| File.file?(f) && !File.symlink?(f) } 35 | end 36 | 37 | def matches?(path) 38 | patterns.any? 
do |p| 39 | File.fnmatch?( 40 | relativize(p), 41 | relativize(path), 42 | File::FNM_PATHNAME | File::FNM_EXTGLOB, 43 | ) 44 | end 45 | end 46 | 47 | # Ensure all paths (and patterns) are ./-prefixed 48 | def relativize(path) 49 | "./#{path.sub(%r{^\./}, "")}" 50 | end 51 | end 52 | end 53 | end 54 | end 55 | -------------------------------------------------------------------------------- /lib/cc/engine/analyzers/file_thread_pool.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "thread" 4 | 5 | module CC 6 | module Engine 7 | module Analyzers 8 | class FileThreadPool 9 | DEFAULT_CONCURRENCY = 2 10 | MAX_CONCURRENCY = 2 11 | 12 | def initialize(files, concurrency: DEFAULT_CONCURRENCY) 13 | @files = files 14 | @concurrency = concurrency 15 | end 16 | 17 | def run 18 | queue = build_queue 19 | lock = Mutex.new 20 | 21 | @workers = Array.new(thread_count) do 22 | with_thread_abort_on_exceptions do 23 | while (item = next_item(queue, lock)) 24 | yield item 25 | end 26 | end 27 | end 28 | end 29 | 30 | def join 31 | workers.map(&:join) 32 | end 33 | 34 | private 35 | 36 | attr_reader :files, :concurrency, :workers 37 | 38 | def next_item(queue, lock) 39 | lock.synchronize { queue.pop(true) unless queue.empty? 
} 40 | end 41 | 42 | def build_queue 43 | Queue.new.tap do |queue| 44 | files.each do |file| 45 | queue.push(file) 46 | end 47 | end 48 | end 49 | 50 | def thread_count 51 | if (1..MAX_CONCURRENCY).cover?(concurrency) 52 | concurrency 53 | elsif concurrency < 1 54 | DEFAULT_CONCURRENCY 55 | else 56 | DEFAULT_CONCURRENCY 57 | end 58 | end 59 | 60 | def with_thread_abort_on_exceptions(&block) 61 | thread = Thread.new(&block) 62 | thread.abort_on_exception = true 63 | thread 64 | end 65 | end 66 | end 67 | end 68 | end 69 | -------------------------------------------------------------------------------- /lib/cc/engine/analyzers/go/main.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "flay" 4 | require "json" 5 | require "cc/engine/analyzers/reporter" 6 | require "cc/engine/analyzers/analyzer_base" 7 | 8 | module CC 9 | module Engine 10 | module Analyzers 11 | module Go 12 | class Main < CC::Engine::Analyzers::Base 13 | LANGUAGE = "go" 14 | PATTERNS = ["**/*.go"].freeze 15 | DEFAULT_MASS_THRESHOLD = 100 16 | DEFAULT_FILTERS = [ 17 | "(GenDecl _ (specs (ImportSpec ___)) _)", 18 | "(comments ___)", 19 | ].freeze 20 | POINTS_PER_OVERAGE = 10_000 21 | REQUEST_PATH = "/go" 22 | 23 | def transform_sexp(sexp) 24 | sexp.delete_if { |node| node[0] == :name } 25 | end 26 | 27 | def use_sexp_lines? 
28 | false 29 | end 30 | 31 | private 32 | 33 | def process_file(file) 34 | parse(file, REQUEST_PATH) 35 | end 36 | 37 | def default_filters 38 | DEFAULT_FILTERS.map { |filter| Sexp::Matcher.parse filter } 39 | end 40 | end 41 | end 42 | end 43 | end 44 | end 45 | -------------------------------------------------------------------------------- /lib/cc/engine/analyzers/java/main.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "flay" 4 | require "json" 5 | require "cc/engine/analyzers/reporter" 6 | require "cc/engine/analyzers/analyzer_base" 7 | 8 | module CC 9 | module Engine 10 | module Analyzers 11 | module Java 12 | class Main < CC::Engine::Analyzers::Base 13 | LANGUAGE = "java".freeze 14 | PATTERNS = ["**/*.java"].freeze 15 | DEFAULT_MASS_THRESHOLD = 40 16 | DEFAULT_FILTERS = [ 17 | "(ImportDeclaration ___)".freeze, 18 | "(PackageDeclaration ___)".freeze, 19 | ].freeze 20 | POINTS_PER_OVERAGE = 10_000 21 | REQUEST_PATH = "/java".freeze 22 | 23 | def use_sexp_lines? 
24 | false 25 | end 26 | 27 | private 28 | 29 | def process_file(file) 30 | parse(file, REQUEST_PATH) 31 | end 32 | 33 | def default_filters 34 | DEFAULT_FILTERS.map { |filter| Sexp::Matcher.parse filter } 35 | end 36 | end 37 | end 38 | end 39 | end 40 | end 41 | -------------------------------------------------------------------------------- /lib/cc/engine/analyzers/javascript/main.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "cc/engine/analyzers/analyzer_base" 4 | 5 | module CC 6 | module Engine 7 | module Analyzers 8 | module Javascript 9 | class Main < CC::Engine::Analyzers::Base 10 | PATTERNS = [ 11 | "**/*.js", 12 | "**/*.jsx", 13 | ].freeze 14 | LANGUAGE = "javascript" 15 | DEFAULT_MASS_THRESHOLD = 45 16 | DEFAULT_FILTERS = [ 17 | "(directives (Directive (value (DirectiveLiteral ___))))".freeze, 18 | "(ImportDeclaration ___)".freeze, 19 | "(VariableDeclarator _ (init (CallExpression (_ (Identifier require)) ___)))".freeze, 20 | ].freeze 21 | DEFAULT_POST_FILTERS = [ 22 | "(NUKE ___)".freeze, 23 | "(Program _ ___)".freeze, 24 | ].freeze 25 | POINTS_PER_OVERAGE = 30_000 26 | REQUEST_PATH = "/javascript".freeze 27 | 28 | def use_sexp_lines? 29 | false 30 | end 31 | 32 | ## 33 | # Transform sexp as such: 34 | # 35 | # s(:Program, :module, s(:body, ... )) 36 | # => s(:NUKE, s(:Program, :module, s(:NUKE, ... 
))) 37 | 38 | def transform_sexp(sexp) 39 | return sexp unless sexp.body 40 | 41 | sexp.body.sexp_type = :NUKE # negate top level body 42 | sexp = s(:NUKE, sexp) # wrap with extra node to force full process 43 | 44 | sexp 45 | end 46 | 47 | protected 48 | 49 | def process_file(file) 50 | parse(file, self.class::REQUEST_PATH) 51 | end 52 | 53 | def default_filters 54 | self.class::DEFAULT_FILTERS.map { |filter| Sexp::Matcher.parse filter } 55 | end 56 | 57 | def default_post_filters 58 | self.class::DEFAULT_POST_FILTERS.map { |filter| Sexp::Matcher.parse filter } 59 | end 60 | end 61 | end 62 | end 63 | end 64 | end 65 | -------------------------------------------------------------------------------- /lib/cc/engine/analyzers/kotlin/main.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "flay" 4 | require "json" 5 | require "cc/engine/analyzers/reporter" 6 | require "cc/engine/analyzers/analyzer_base" 7 | 8 | module CC 9 | module Engine 10 | module Analyzers 11 | module Kotlin 12 | class Main < CC::Engine::Analyzers::Base 13 | LANGUAGE = "kotlin".freeze 14 | PATTERNS = ["**/*.kt"].freeze 15 | DEFAULT_MASS_THRESHOLD = 60 16 | DEFAULT_FILTERS = [ 17 | "(IMPORT_LIST ___)".freeze, 18 | "(PACKAGE_DIRECTIVE ___)".freeze, 19 | "(KDoc ___)".freeze, 20 | "(EOL_COMMENT ___)".freeze, 21 | ].freeze 22 | POINTS_PER_OVERAGE = 10_000 23 | REQUEST_PATH = "/kotlin".freeze 24 | 25 | def use_sexp_lines? 
26 | false 27 | end 28 | 29 | private 30 | 31 | def process_file(file) 32 | parse(file, REQUEST_PATH) 33 | end 34 | 35 | def default_filters 36 | DEFAULT_FILTERS.map { |filter| Sexp::Matcher.parse filter } 37 | end 38 | end 39 | end 40 | end 41 | end 42 | end 43 | -------------------------------------------------------------------------------- /lib/cc/engine/analyzers/node.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module CC 4 | module Engine 5 | module Analyzers 6 | class Node 7 | SCRUB_PROPERTIES = [].freeze 8 | 9 | def initialize(node, file, default_line = 0) 10 | @node = node 11 | @file = file 12 | 13 | set_default_line(default_line) 14 | end 15 | 16 | def format 17 | if @node.is_a?(Hash) 18 | format_hash 19 | elsif @node.is_a?(Array) 20 | @node.map do |n| 21 | self.class.new(n, @file, @line).format 22 | end 23 | end 24 | end 25 | 26 | private 27 | 28 | def format_hash 29 | raise "Subclass must implement format_hash" 30 | end 31 | 32 | def create_sexp(*args) 33 | Sexp.new(*args).tap do |sexp| 34 | sexp.file = @file 35 | set_sexp_line(sexp) 36 | end 37 | end 38 | 39 | def set_sexp_line(sexp) 40 | sexp.line = @line 41 | end 42 | 43 | def properties_to_sexps 44 | valid_properties.map do |key, value| 45 | if value.is_a?(Array) 46 | create_sexp(key.to_sym, *self.class.new(value, @file, @line).format) 47 | elsif value.is_a?(Hash) 48 | create_sexp(key.to_sym, self.class.new(value, @file, @line).format) 49 | else 50 | value.to_s.to_sym 51 | end 52 | end 53 | end 54 | 55 | def valid_properties 56 | @node.reject do |key, value| 57 | value_empty = [nil, {}, []].include?(value) 58 | self.class::SCRUB_PROPERTIES.include?(key) || value_empty 59 | end 60 | end 61 | 62 | def set_default_line(default) 63 | @line = line_number || default 64 | end 65 | 66 | def line_number 67 | raise "Subclass must implement `line_number`" 68 | end 69 | end 70 | end 71 | end 72 | end 73 | 
-------------------------------------------------------------------------------- /lib/cc/engine/analyzers/parser_base.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module CC 4 | module Engine 5 | module Analyzers 6 | class ParserBase 7 | private 8 | 9 | def parse_json(text) 10 | JSON.parse(text, max_nesting: false) 11 | end 12 | end 13 | end 14 | end 15 | end 16 | -------------------------------------------------------------------------------- /lib/cc/engine/analyzers/parser_error.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module CC 4 | module Engine 5 | module Analyzers 6 | ParserError = Class.new(StandardError) 7 | end 8 | end 9 | end 10 | -------------------------------------------------------------------------------- /lib/cc/engine/analyzers/php/main.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "cc/engine/analyzers/analyzer_base" 4 | 5 | module CC 6 | module Engine 7 | module Analyzers 8 | module Php 9 | class Main < CC::Engine::Analyzers::Base 10 | LANGUAGE = "php" 11 | PATTERNS = [ 12 | "**/*.php", 13 | ].freeze 14 | DEFAULT_MASS_THRESHOLD = 90 15 | DEFAULT_FILTERS = [ 16 | "(Stmt_Use ___)", 17 | "(comments ___)", 18 | ].freeze 19 | POINTS_PER_OVERAGE = 29_000 20 | REQUEST_PATH = "/php" 21 | 22 | def use_sexp_lines? 
23 | false 24 | end 25 | 26 | private 27 | 28 | def process_file(file) 29 | parse(file, REQUEST_PATH) 30 | end 31 | 32 | def default_filters 33 | DEFAULT_FILTERS.map { |filter| Sexp::Matcher.parse filter } 34 | end 35 | end 36 | end 37 | end 38 | end 39 | end 40 | -------------------------------------------------------------------------------- /lib/cc/engine/analyzers/python/main.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "cc/engine/analyzers/python/parser" 4 | require "cc/engine/analyzers/python/node" 5 | require "cc/engine/analyzers/reporter" 6 | require "cc/engine/analyzers/analyzer_base" 7 | require "flay" 8 | 9 | module CC 10 | module Engine 11 | module Analyzers 12 | module Python 13 | class Main < CC::Engine::Analyzers::Base 14 | LANGUAGE = "python" 15 | DEFAULT_MASS_THRESHOLD = 32 16 | DEFAULT_PYTHON_VERSION = 2 17 | POINTS_PER_OVERAGE = 50_000 18 | 19 | def transform_sexp(sexp) 20 | sexp.flatter 21 | end 22 | 23 | private 24 | 25 | def process_file(path) 26 | Node.new(parser(path).parse.syntax_tree, path).format 27 | end 28 | 29 | def parser(path) 30 | ::CC::Engine::Analyzers::Python::Parser.new(python_version, File.binread(path), path) 31 | end 32 | 33 | def python_version 34 | engine_config.fetch_language(LANGUAGE).fetch("python_version", DEFAULT_PYTHON_VERSION) 35 | end 36 | 37 | def patterns 38 | case python_version 39 | when 2, "2" 40 | ["**/*.py"] 41 | when 3, "3" 42 | ["**/*.py", "**/*.py3"] 43 | else 44 | raise ArgumentError, "Supported python versions are 2 and 3. 
import json, sys, ast

PY3 = sys.version_info[0] == 3

def string_type():
    # Python 3 unified strings into str; Python 2 needs basestring to
    # cover both str and unicode.
    return str if PY3 else basestring

def num_types():
    # Python 2 has a separate long type; Python 3 folded it into int.
    if PY3:
        return (int, float)
    else:
        return (int, float, long)

def to_json(node):
    """Serialise an ast node into a plain dict of its fields plus an
    'attributes' dict (lineno etc.) and its class name under '_type'."""
    json_ast = {'attributes': {}}
    json_ast['_type'] = node.__class__.__name__
    for key, value in ast.iter_fields(node):
        json_ast[key] = cast_value(value)
    for attr in node._attributes:
        json_ast['attributes'][attr] = cast_value(getattr(node, attr))
    return json_ast

def cast_infinity(value):
    # JSON has no representation for float infinity; use sentinel strings.
    if value > 0:
        return "Infinity"
    else:
        return "-Infinity"

def cast_value(value):
    """Convert an AST field value into a JSON-serialisable equivalent."""
    if value is None or isinstance(value, (bool, string_type())):
        return value
    elif PY3 and isinstance(value, bytes):
        return value.decode()
    elif isinstance(value, complex):
        # Complex numbers cannot be serialised directly. Ruby's to_json
        # handles this by string-ifying the numbers, so we do similarly here.
        # BUG FIX: this previously returned str(complex) — the string of the
        # *type object* ("<class 'complex'>") — instead of the value itself.
        return str(value)
    elif isinstance(value, num_types()):
        if abs(value) == 1e3000:
            return cast_infinity(value)
        return value
    elif isinstance(value, list):
        return [cast_value(v) for v in value]
    else:
        return to_json(value)

if __name__ == '__main__':
    source = ""
    for line in sys.stdin.readlines():
        source += line
    print(json.dumps(to_json(ast.parse(source))))
You configured: #{python_version.inspect}" 49 | end 50 | end 51 | end 52 | end 53 | end 54 | end 55 | end 56 | -------------------------------------------------------------------------------- /lib/cc/engine/analyzers/reporter.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "cc/engine/analyzers/violations" 4 | require "cc/engine/analyzers/file_thread_pool" 5 | require "thread" 6 | require "concurrent" 7 | require "ccflay" 8 | 9 | module CC 10 | module Engine 11 | module Analyzers 12 | class Reporter 13 | def initialize(engine_config, language_strategy, io) 14 | @engine_config = engine_config 15 | @language_strategy = language_strategy 16 | @io = io 17 | @reports = Set.new 18 | end 19 | 20 | def run 21 | CC.logger.debug("Processing #{language_strategy.files.count} #{lang} files concurrency=#{engine_config.concurrency}") 22 | 23 | process_files 24 | 25 | if engine_config.dump_ast? 26 | dump_ast 27 | else 28 | report 29 | CC.logger.debug("Reported #{reports.size} violations...") 30 | end 31 | end 32 | 33 | def dump_ast 34 | require "pp" 35 | 36 | issues = flay.analyze 37 | 38 | return if issues.empty? 
39 | 40 | CC.logger.debug("Sexps for issues:") 41 | 42 | issues.each_with_index do |issue, idx1| 43 | CC.logger.debug( 44 | format( 45 | "#%2d) %s#%d mass=%d:", 46 | idx1 + 1, 47 | issue.name, 48 | issue.structural_hash, 49 | issue.mass, 50 | ), 51 | ) 52 | 53 | locs = issue.locations.map.with_index do |loc, idx2| 54 | format("# %d.%d) %s:%s", idx1 + 1, idx2 + 1, loc.file, loc.line) 55 | end 56 | 57 | locs.zip(flay.hashes[issue.structural_hash]).each do |loc, sexp| 58 | CC.logger.debug(loc) 59 | CC.logger.debug(sexp.pretty_inspect) 60 | end 61 | end 62 | end 63 | 64 | def process_files 65 | pool = FileThreadPool.new( 66 | language_strategy.files, 67 | concurrency: engine_config.concurrency, 68 | ) 69 | 70 | processed_files_count = Concurrent::AtomicFixnum.new 71 | 72 | pool.run do |file| 73 | begin 74 | CC.logger.debug("Processing #{lang} file: #{file}") 75 | 76 | sexp = language_strategy.run(file) 77 | 78 | process_sexp(sexp) 79 | 80 | processed_files_count.increment 81 | rescue Exception => ex 82 | CC.logger.warn("Error processing file: #{file}") 83 | raise ex 84 | end 85 | end 86 | 87 | pool.join 88 | 89 | CC.logger.debug("Processed #{processed_files_count.value} #{lang} files") 90 | end 91 | 92 | def lang 93 | CC::Engine::Duplication::LANGUAGES.invert[language_strategy.class] 94 | end 95 | 96 | def report 97 | flay.analyze.each do |issue| 98 | violations = new_violations(issue) 99 | 100 | violations.each do |violation| 101 | next if skip?(violation) 102 | CC.logger.debug("Violation name=#{violation.report_name} mass=#{violation.mass}") 103 | 104 | unless reports.include?(violation.report_name) 105 | reports.add(violation.report_name) 106 | io.puts "#{violation.format.to_json}\0" 107 | end 108 | end 109 | end 110 | end 111 | 112 | def process_sexp(sexp) 113 | return unless sexp 114 | flay.process_sexp(language_strategy.transform_sexp(sexp)) 115 | end 116 | 117 | private 118 | 119 | attr_reader :reports 120 | 121 | def flay 122 | @flay ||= 
CCFlay.new(flay_options) 123 | end 124 | 125 | attr_reader :engine_config, :language_strategy, :io 126 | 127 | def new_violations(issue) 128 | hashes = flay.hashes[issue.structural_hash] 129 | Violations.new(language_strategy, issue, hashes) 130 | end 131 | 132 | def flay_options 133 | changes = { 134 | mass: language_strategy.mass_threshold, 135 | filters: language_strategy.filters, 136 | post_filters: language_strategy.post_filters, 137 | } 138 | 139 | CCFlay.default_options.merge changes 140 | end 141 | 142 | def skip?(violation) 143 | below_threshold?(violation) || 144 | insufficient_occurrence?(violation) || 145 | check_disabled?(violation) 146 | end 147 | 148 | def below_threshold?(violation) 149 | violation.mass < language_strategy.check_mass_threshold(violation.check_name) 150 | end 151 | 152 | def insufficient_occurrence?(violation) 153 | (violation.occurrences + 1) < language_strategy.count_threshold 154 | end 155 | 156 | def check_disabled?(violation) 157 | !engine_config.check_enabled?( 158 | violation.fingerprint_check_name, 159 | violation.check_name, 160 | ) 161 | end 162 | end 163 | end 164 | end 165 | end 166 | -------------------------------------------------------------------------------- /lib/cc/engine/analyzers/ruby/main.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "flay" 4 | require "json" 5 | require "cc/engine/analyzers/reporter" 6 | require "cc/engine/analyzers/analyzer_base" 7 | 8 | module CC 9 | module Engine 10 | module Analyzers 11 | module Ruby 12 | class Main < CC::Engine::Analyzers::Base 13 | LANGUAGE = "ruby" 14 | PATTERNS = [ 15 | "**/*.rb", 16 | ].freeze 17 | DEFAULT_MASS_THRESHOLD = 25 18 | BASE_POINTS = 150_000 19 | POINTS_PER_OVERAGE = 20_000 20 | TIMEOUT = 300 21 | 22 | private 23 | 24 | def process_file(file) 25 | RubyParser.new.process(File.binread(file), file, TIMEOUT) 26 | rescue Timeout::Error 27 | CC.logger.warn("TIMEOUT parsing 
#{file}. Skipping.") 28 | end 29 | end 30 | end 31 | end 32 | end 33 | end 34 | -------------------------------------------------------------------------------- /lib/cc/engine/analyzers/scala/main.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "flay" 4 | require "json" 5 | require "cc/engine/analyzers/reporter" 6 | require "cc/engine/analyzers/analyzer_base" 7 | 8 | module CC 9 | module Engine 10 | module Analyzers 11 | module Scala 12 | class Main < CC::Engine::Analyzers::Base 13 | LANGUAGE = "scala".freeze 14 | PATTERNS = ["**/*.sc", "**/*.scala"].freeze 15 | DEFAULT_MASS_THRESHOLD = 60 16 | DEFAULT_FILTERS = [ 17 | "(Import ___)".freeze, 18 | ].freeze 19 | POINTS_PER_OVERAGE = 10_000 20 | REQUEST_PATH = "/scala".freeze 21 | 22 | def use_sexp_lines? 23 | false 24 | end 25 | 26 | private 27 | 28 | def process_file(file) 29 | parse(file, REQUEST_PATH) 30 | end 31 | 32 | def default_filters 33 | DEFAULT_FILTERS.map { |filter| Sexp::Matcher.parse filter } 34 | end 35 | end 36 | end 37 | end 38 | end 39 | end 40 | -------------------------------------------------------------------------------- /lib/cc/engine/analyzers/sexp.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | class Sexp 4 | attr_accessor :end_line 5 | end 6 | -------------------------------------------------------------------------------- /lib/cc/engine/analyzers/sexp_lines.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | module CC 4 | module Engine 5 | module Analyzers 6 | class SexpLines 7 | attr_reader :begin_line, :end_line 8 | 9 | def initialize(root_sexp) 10 | @root_sexp = root_sexp 11 | calculate 12 | end 13 | 14 | private 15 | 16 | attr_reader :root_sexp 17 | 18 | def calculate 19 | @begin_line = [root_sexp.line, root_sexp.line_min].compact.min 20 | @end_line 
= [root_sexp.end_line, root_sexp.line_max].compact.max 21 | end 22 | end 23 | end 24 | end 25 | end 26 | 27 | class Sexp 28 | # override to cache... TODO: add back to sexp_processor, then remove this 29 | def line_min 30 | @line_min ||= deep_each.map(&:line).min 31 | end 32 | 33 | # override to cache... TODO: add back to sexp_processor, then remove this 34 | def line_max 35 | @line_max ||= deep_each.map(&:line).max 36 | end 37 | end 38 | -------------------------------------------------------------------------------- /lib/cc/engine/analyzers/swift/main.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "cc/engine/analyzers/analyzer_base" 4 | 5 | module CC 6 | module Engine 7 | module Analyzers 8 | module Swift 9 | class Main < CC::Engine::Analyzers::Base 10 | PATTERNS = [ 11 | "**/*.swift", 12 | ].freeze 13 | LANGUAGE = "swift" 14 | DEFAULT_MASS_THRESHOLD = 40 15 | DEFAULT_FILTERS = [ 16 | "(ImportDeclaration ___)".freeze, 17 | ] 18 | POINTS_PER_OVERAGE = 10_000 19 | REQUEST_PATH = "/swift" 20 | 21 | def use_sexp_lines? 
22 | false 23 | end 24 | 25 | private 26 | 27 | def process_file(file) 28 | parse(file, REQUEST_PATH) 29 | end 30 | 31 | def default_filters 32 | DEFAULT_FILTERS.map { |filter| Sexp::Matcher.parse filter } 33 | end 34 | end 35 | end 36 | end 37 | end 38 | end 39 | -------------------------------------------------------------------------------- /lib/cc/engine/analyzers/typescript/main.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "cc/engine/analyzers/javascript/main" 4 | 5 | module CC 6 | module Engine 7 | module Analyzers 8 | module TypeScript # TODO: inconsistent naming w/ Javascript 9 | class Main < CC::Engine::Analyzers::Javascript::Main 10 | PATTERNS = [ 11 | "**/*.ts", 12 | "**/*.tsx", 13 | ].freeze 14 | 15 | LANGUAGE = "typescript" 16 | 17 | REQUEST_PATH = "/typescript".freeze 18 | end 19 | end 20 | end 21 | end 22 | end 23 | -------------------------------------------------------------------------------- /lib/cc/engine/analyzers/violation.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "cc/engine/analyzers/sexp_lines" 4 | require "cc/engine/analyzers/violation_read_up" 5 | require "digest" 6 | 7 | module CC 8 | module Engine 9 | module Analyzers 10 | class Violation 11 | def initialize(language_strategy:, identical:, current_sexp:, other_sexps:) 12 | @language_strategy = language_strategy 13 | @identical = identical 14 | @current_sexp = current_sexp 15 | @other_sexps = other_sexps 16 | end 17 | 18 | def format 19 | { 20 | "type": "issue", 21 | "check_name": check_name, 22 | "description": description, 23 | "categories": ["Duplication"], 24 | "location": format_location, 25 | "remediation_points": calculate_points, 26 | "other_locations": format_other_locations, 27 | "content": content_body, 28 | "fingerprint": fingerprint, 29 | "severity": calculate_severity, 30 | } 31 | end 32 | 33 | 
def report_name 34 | "#{current_sexp.file}-#{current_sexp.line}" 35 | end 36 | 37 | def mass 38 | current_sexp.mass 39 | end 40 | 41 | def occurrences 42 | other_sexps.count 43 | end 44 | 45 | def total_occurrences 46 | occurrences + 1 47 | end 48 | 49 | def identical? 50 | @identical 51 | end 52 | 53 | def check_name 54 | "#{duplication_type}-code" 55 | end 56 | 57 | def fingerprint_check_name 58 | "#{duplication_type.capitalize} code" 59 | end 60 | 61 | private 62 | 63 | attr_reader :language_strategy, :other_sexps, :current_sexp 64 | 65 | def calculate_points 66 | @calculate_points ||= language_strategy.calculate_points(self) 67 | end 68 | 69 | def points_across_occurrences 70 | calculate_points * total_occurrences 71 | end 72 | 73 | def calculate_severity 74 | language_strategy.calculate_severity(points_across_occurrences) 75 | end 76 | 77 | def format_location 78 | format_sexp(current_sexp) 79 | end 80 | 81 | def format_other_locations 82 | other_sexps.map do |sexp| 83 | format_sexp(sexp) 84 | end 85 | end 86 | 87 | def format_sexp(sexp) 88 | if language_strategy.use_sexp_lines? 89 | lines = SexpLines.new(sexp) 90 | { 91 | "path": sexp.file.gsub(%r{^\./}, ""), 92 | "lines": { 93 | "begin": lines.begin_line, 94 | "end": lines.end_line, 95 | }, 96 | } 97 | else 98 | { 99 | "path": sexp.file.gsub(%r{^\./}, ""), 100 | "lines": { 101 | "begin": sexp.line, 102 | "end": sexp.end_line, 103 | }, 104 | } 105 | end 106 | end 107 | 108 | def content_body 109 | @_content_body ||= { "body": ViolationReadUp.new(mass).contents } 110 | end 111 | 112 | def fingerprint 113 | digest = Digest::MD5.new 114 | digest << current_sexp.file 115 | digest << "-" 116 | digest << current_sexp.mass.to_s 117 | digest << "-" 118 | digest << fingerprint_check_name 119 | digest.to_s 120 | end 121 | 122 | def duplication_type 123 | if identical? 
124 | "identical" 125 | else 126 | "similar" 127 | end 128 | end 129 | 130 | def description 131 | "#{duplication_type.capitalize} blocks of code found in #{total_occurrences} locations. Consider refactoring." 132 | end 133 | end 134 | end 135 | end 136 | end 137 | -------------------------------------------------------------------------------- /lib/cc/engine/analyzers/violation_read_up.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "erb" 4 | 5 | module CC 6 | module Engine 7 | module Analyzers 8 | class ViolationReadUp 9 | def initialize(mass) 10 | @mass = mass 11 | end 12 | 13 | def contents 14 | ERB.new(File.read(template_path)).result(binding) 15 | end 16 | 17 | private 18 | 19 | attr_reader :mass 20 | 21 | TEMPLATE_REL_PATH = "../../../../config/contents/duplicated_code.md.erb" 22 | 23 | def template_path 24 | File.expand_path( 25 | File.join(File.dirname(__FILE__), TEMPLATE_REL_PATH), 26 | ) 27 | end 28 | end 29 | end 30 | end 31 | end 32 | -------------------------------------------------------------------------------- /lib/cc/engine/analyzers/violations.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "cc/engine/analyzers/violation" 4 | 5 | module CC 6 | module Engine 7 | module Analyzers 8 | class Violations 9 | def initialize(language_strategy, issue, hashes) 10 | @language_strategy = language_strategy 11 | @issue = issue 12 | @hashes = hashes 13 | end 14 | 15 | def each 16 | hashes.each_with_index do |sexp, i| 17 | yield Violation.new( 18 | current_sexp: sexp, 19 | other_sexps: other_sexps(hashes.dup, i), 20 | identical: identical?, 21 | language_strategy: language_strategy, 22 | ) 23 | end 24 | end 25 | 26 | private 27 | 28 | attr_reader :language_strategy, :issue, :hashes 29 | 30 | def other_sexps(members, i) 31 | members.delete_at(i) 32 | members.sort_by(&:file) 33 | end 34 | 35 | 
def identical? 36 | issue.identical? 37 | end 38 | end 39 | end 40 | end 41 | end 42 | -------------------------------------------------------------------------------- /lib/cc/engine/duplication.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "bundler/setup" 4 | require "cc/engine/parse_metrics" 5 | require "cc/engine/analyzers/csharp/main" 6 | require "cc/engine/analyzers/go/main" 7 | require "cc/engine/analyzers/java/main" 8 | require "cc/engine/analyzers/javascript/main" 9 | require "cc/engine/analyzers/kotlin/main" 10 | require "cc/engine/analyzers/php/main" 11 | require "cc/engine/analyzers/python/main" 12 | require "cc/engine/analyzers/reporter" 13 | require "cc/engine/analyzers/ruby/main" 14 | require "cc/engine/analyzers/scala/main" 15 | require "cc/engine/analyzers/swift/main" 16 | require "cc/engine/analyzers/typescript/main" 17 | require "cc/engine/analyzers/engine_config" 18 | require "cc/engine/analyzers/sexp" 19 | require "flay" 20 | require "json" 21 | 22 | module CC 23 | module Engine 24 | class Duplication 25 | LANGUAGES = { 26 | "csharp" => ::CC::Engine::Analyzers::Csharp::Main, 27 | "go" => ::CC::Engine::Analyzers::Go::Main, 28 | "java" => ::CC::Engine::Analyzers::Java::Main, 29 | "javascript" => ::CC::Engine::Analyzers::Javascript::Main, 30 | "kotlin" => ::CC::Engine::Analyzers::Kotlin::Main, 31 | "php" => ::CC::Engine::Analyzers::Php::Main, 32 | "python" => ::CC::Engine::Analyzers::Python::Main, 33 | "ruby" => ::CC::Engine::Analyzers::Ruby::Main, 34 | "typescript" => ::CC::Engine::Analyzers::TypeScript::Main, 35 | "scala" => ::CC::Engine::Analyzers::Scala::Main, 36 | "swift" => ::CC::Engine::Analyzers::Swift::Main, 37 | }.freeze 38 | 39 | def initialize(directory:, engine_config:, io:) 40 | @directory = directory 41 | @engine_config = CC::Engine::Analyzers::EngineConfig.new(engine_config || {}) 42 | @io = io 43 | end 44 | 45 | def run 46 | return if 
engine_config.all_checks_disabled? 47 | 48 | Dir.chdir(directory) do 49 | languages_to_analyze.each do |language| 50 | parse_metrics = ParseMetrics.new( 51 | language: language, 52 | io: io, 53 | ) 54 | engine = LANGUAGES[language].new( 55 | engine_config: engine_config, 56 | parse_metrics: parse_metrics, 57 | ) 58 | reporter = CC::Engine::Analyzers::Reporter.new(engine_config, engine, io) 59 | reporter.run 60 | parse_metrics.report 61 | end 62 | end 63 | end 64 | 65 | private 66 | 67 | attr_reader :directory, :engine_config, :io 68 | 69 | def languages_to_analyze 70 | engine_config.languages.keys.select do |language| 71 | LANGUAGES.keys.include?(language) 72 | end 73 | end 74 | end 75 | end 76 | end 77 | -------------------------------------------------------------------------------- /lib/cc/engine/parse_metrics.rb: -------------------------------------------------------------------------------- 1 | module CC 2 | module Engine 3 | class ParseMetrics 4 | def initialize(language:, io:) 5 | @language = language 6 | @io = io 7 | @counts = Hash.new(0) 8 | end 9 | 10 | def incr(result_type) 11 | counts[result_type] += 1 12 | end 13 | 14 | def report 15 | counts.each do |result_type, count| 16 | doc = metric_doc(result_type, count) 17 | # puts allows a race between content newline, use print 18 | io.print("#{JSON.generate(doc)}\0\n") 19 | end 20 | end 21 | 22 | private 23 | 24 | attr_reader :counts, :io, :language 25 | 26 | def metric_doc(result_type, count) 27 | { 28 | name: "#{language}.parse.#{result_type}", 29 | type: "measurement", 30 | value: count, 31 | } 32 | end 33 | end 34 | end 35 | end 36 | -------------------------------------------------------------------------------- /lib/cc/engine/processed_source.rb: -------------------------------------------------------------------------------- 1 | require "cc/parser" 2 | 3 | module CC 4 | module Engine 5 | class ProcessedSource 6 | attr_reader :path 7 | 8 | def initialize(path, request_path) 9 | @path = path 10 | 
@request_path = request_path 11 | end 12 | 13 | def raw_source 14 | @raw_source ||= File.read(path) 15 | end 16 | 17 | def ast 18 | @ast ||= CC::Parser.parse(raw_source, request_path, filename: path) 19 | end 20 | 21 | private 22 | 23 | attr_reader :request_path 24 | end 25 | end 26 | end 27 | -------------------------------------------------------------------------------- /lib/cc/engine/sexp_builder.rb: -------------------------------------------------------------------------------- 1 | module CC 2 | module Engine 3 | class SexpBuilder 4 | def initialize(input, path) 5 | @input = input 6 | @path = path 7 | end 8 | 9 | def build 10 | if input.is_a?(CC::Parser::Node) 11 | sexp(input.type.to_sym, *build_properties(input)) 12 | elsif input.is_a?(Array) 13 | input.map do |node| 14 | self.class.new(node, path).build 15 | end 16 | end 17 | end 18 | 19 | private 20 | 21 | attr_reader :input, :path 22 | 23 | def build_properties(node) 24 | node.properties.map do |key, property| 25 | if property.is_a?(CC::Parser::Node) 26 | sexp(key.to_sym, self.class.new(property, path).build) 27 | elsif property.is_a?(Array) 28 | sexp(key.to_sym, *self.class.new(property, path).build) 29 | else 30 | property.to_s.to_sym 31 | end 32 | end 33 | end 34 | 35 | def sexp(*args) 36 | Sexp.new(*args).tap do |sexp| 37 | sexp.file = path 38 | sexp.line = input.location.first_line 39 | sexp.end_line = input.location.last_line 40 | end 41 | end 42 | end 43 | end 44 | end 45 | -------------------------------------------------------------------------------- /lib/cc/logger.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require "logger" 4 | 5 | module CC 6 | def self.logger 7 | @logger ||= ::Logger.new(STDERR) 8 | end 9 | end 10 | -------------------------------------------------------------------------------- /lib/ccflay.rb: -------------------------------------------------------------------------------- 1 | # 
frozen_string_literal: true 2 | 3 | require "flay" 4 | require "concurrent" 5 | require "digest" 6 | 7 | ## 8 | # A thread-safe and stable hash subclass of Flay. 9 | 10 | class CCFlay < Flay 11 | def initialize(option = nil) 12 | super 13 | 14 | @hashes = Concurrent::Hash.new do |hash, key| 15 | hash[key] = Concurrent::Array.new 16 | end 17 | 18 | self.identical = Concurrent::Hash.new 19 | self.masses = Concurrent::Hash.new 20 | end 21 | 22 | def post_filter *patterns 23 | return if patterns.empty? 24 | 25 | self.hashes.delete_if { |_, sexps| 26 | sexps.any? { |sexp| 27 | patterns.any? { |pattern| 28 | # pattern =~ sexp 29 | pattern.satisfy? sexp 30 | } 31 | } 32 | } 33 | end 34 | 35 | def prune 36 | post_filter(*option[:post_filters]) 37 | 38 | super 39 | end 40 | end 41 | 42 | class Sexp 43 | attr_writer :mass 44 | 45 | def flatter 46 | result = dup.clear 47 | result.mass = mass 48 | 49 | each_with_object(result) do |s, r| 50 | if s.is_a?(Sexp) 51 | ss = s.flatter 52 | 53 | # s(:a, s(:b, s(:c, 42))) => s(:a, :b, s(:c, 42)) 54 | if ss.size == 2 && ss[1].is_a?(Sexp) 55 | r.concat ss 56 | else 57 | r << ss 58 | end 59 | else 60 | r << s 61 | end 62 | end 63 | end 64 | end 65 | -------------------------------------------------------------------------------- /spec/cc/ccflay_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | require "ccflay" 3 | 4 | RSpec.describe CCFlay do 5 | describe "#flatter" do 6 | it "should be isomorphic" do 7 | inn = s(:a, s(:b, s(:c, 42)), :d, s(:e, s(:f, s(:g, s(:h, 42))), s(:i))) 8 | exp = s(:a, :b, s(:c, 42), :d, s(:e, s(:f, :g, s(:h, 42)), s(:i))) 9 | 10 | expect(inn.flatter).to eq(exp) 11 | end 12 | 13 | it "should cache the original size" do 14 | inn = s(:a, s(:b, s(:c, 42)), :d, s(:e, s(:f, s(:g, s(:h, 42))), s(:i))) 15 | exp = s(:a, :b, s(:c, 42), :d, s(:e, s(:f, :g, s(:h, 42)), s(:i))) 16 | 17 | expect(inn.mass).to eq(8) 18 | expect(exp.mass).to eq(6) 19 | 20 | 
expect(inn.flatter.mass).to eq(8) 21 | end 22 | end 23 | 24 | describe Sexp::NODE_NAMES do 25 | describe ".default_proc" do 26 | it "should consistently hash node names on-demand with a CRC checksum" do 27 | node1 = Sexp::NODE_NAMES["some_node1"] 28 | node2 = Sexp::NODE_NAMES["some_node2"] 29 | 30 | expect(node1).to eq(1_364_960_975) 31 | expect(node2).to eq(3_360_880_501) 32 | end 33 | 34 | context "'couldn't find node type' errors (bug #206)" do 35 | it "should suppress them" do 36 | expect { Sexp::NODE_NAMES["bug_206_node"] }.to_not output.to_stderr 37 | end 38 | end 39 | end 40 | end 41 | end 42 | -------------------------------------------------------------------------------- /spec/cc/engine/analyzers/analyzer_base_spec.rb: -------------------------------------------------------------------------------- 1 | require 'spec_helper' 2 | require 'cc/engine/analyzers/analyzer_base' 3 | require 'cc/engine/analyzers/engine_config' 4 | 5 | module CC::Engine::Analyzers 6 | RSpec.describe Base, in_tmpdir: true do 7 | class DummyAnalyzer < Base 8 | LANGUAGE = "dummy" 9 | PATTERNS = [ 10 | '**/*.a', 11 | '**/*.b' 12 | ] 13 | end 14 | 15 | include AnalyzerSpecHelpers 16 | 17 | let(:engine_config) { EngineConfig.new({}) } 18 | let(:analyzer) do 19 | DummyAnalyzer.new( 20 | engine_config: engine_config, 21 | parse_metrics: CC::Engine::ParseMetrics.new( 22 | language: "dummy", 23 | io: StringIO.new, 24 | ), 25 | ) 26 | end 27 | 28 | before(:each) do 29 | create_source_file("foo.a", "") 30 | create_source_file("foo.b", "") 31 | create_source_file("foo.c", "") 32 | end 33 | 34 | it "lists files according to the default patterns" do 35 | expect(analyzer.files).to match_array(['./foo.a', './foo.b']) 36 | end 37 | 38 | it "knows what language it is analyzing" do 39 | expect(analyzer.language).to eq("dummy") 40 | end 41 | 42 | context "with custom patterns" do 43 | let(:engine_config) do 44 | EngineConfig.new({ 45 | "config" => { 46 | "languages" => { 47 | "dummy" => { 48 | 
"patterns" => [ 49 | "**/*.c" 50 | ], 51 | }, 52 | }, 53 | }, 54 | }) 55 | end 56 | 57 | it "lists files according to the config patterns" do 58 | expect(analyzer.files).to match_array(['./foo.c']) 59 | end 60 | end 61 | 62 | context "exact pattern" do 63 | let(:engine_config) do 64 | EngineConfig.new({ 65 | "config" => { 66 | "languages" => { 67 | "dummy" => { 68 | "patterns" => [ 69 | "*.c" 70 | ], 71 | }, 72 | }, 73 | }, 74 | }) 75 | end 76 | 77 | before(:each) do 78 | Dir.mkdir("nested") 79 | create_source_file("nested/foo.c", "") 80 | end 81 | 82 | it "lists files exactly according to the config patterns" do 83 | expect(analyzer.files).to match_array(['./foo.c']) 84 | end 85 | end 86 | end 87 | end 88 | -------------------------------------------------------------------------------- /spec/cc/engine/analyzers/command_line_runner_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | module CC::Engine::Analyzers 4 | RSpec.describe CommandLineRunner do 5 | describe "#run" do 6 | it "runs the command on the input and yields the output" do 7 | runner = CommandLineRunner.new("cat; echo hi") 8 | 9 | output = runner.run("oh ") { |o| o } 10 | 11 | expect(output).to eq "oh hi\n" 12 | end 13 | 14 | 15 | it "raises on errors" do 16 | runner = CommandLineRunner.new("echo error output >&2; false") 17 | 18 | expect { runner.run("") }.to raise_error( 19 | ParserError, /code 1:\nerror output/ 20 | ) 21 | end 22 | 23 | it "times out commands" do 24 | runner = CommandLineRunner.new("sleep 3", 0.01) 25 | 26 | expect { runner.run("") }.to raise_error(Timeout::Error) 27 | end 28 | 29 | context "when Open3 returns a nil status" do 30 | it "accepts it if the output parses as JSON" do 31 | runner = CommandLineRunner.new("") 32 | 33 | allow(Open3).to receive(:capture3).and_return(["{\"type\":\"issue\"}", "", nil]) 34 | 35 | output = runner.run("") { |o| o } 36 | expect(output).to eq "{\"type\":\"issue\"}" 37 | end 38 
| 39 | it "raises if the output was not valid JSON" do 40 | runner = CommandLineRunner.new("") 41 | 42 | allow(Open3).to receive(:capture3).and_return(["", "error output", nil]) 43 | 44 | expect { runner.run("") }.to raise_error( 45 | ParserError, /code 1:\nerror output/ 46 | ) 47 | end 48 | end 49 | end 50 | end 51 | end 52 | -------------------------------------------------------------------------------- /spec/cc/engine/analyzers/csharp/csharp_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | require "cc/engine/analyzers/csharp/main" 3 | require "cc/engine/analyzers/engine_config" 4 | 5 | module CC::Engine::Analyzers 6 | RSpec.describe Csharp::Main, in_tmpdir: true do 7 | include AnalyzerSpecHelpers 8 | 9 | describe "#run" do 10 | let(:engine_conf) { EngineConfig.new({}) } 11 | 12 | it "prints an issue for similar code" do 13 | create_source_file("foo.cs", <<-EOCSHARP) 14 | class ArrayDemo 15 | { 16 | void Foo() 17 | { 18 | int[] anArray = new int[10]; 19 | 20 | foreach (int i in new int[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }) 21 | { 22 | anArray[i] = i; 23 | } 24 | 25 | foreach (int i in anArray) 26 | { 27 | Console.WriteLine(i); 28 | } 29 | 30 | Console.WriteLine(""); 31 | } 32 | 33 | void Bar() 34 | { 35 | int[] anArray = new int[10]; 36 | 37 | foreach (int i in new int[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }) 38 | { 39 | anArray[i] = i; 40 | } 41 | 42 | foreach (int i in anArray) 43 | { 44 | Console.WriteLine(i); 45 | } 46 | 47 | Console.WriteLine(""); 48 | } 49 | } 50 | EOCSHARP 51 | 52 | issues = run_engine(engine_conf).strip.split("\0") 53 | result = issues.first.strip 54 | json = JSON.parse(result) 55 | 56 | expect(json["type"]).to eq("issue") 57 | expect(json["check_name"]).to eq("similar-code") 58 | expect(json["description"]).to eq("Similar blocks of code found in 2 locations. 
Consider refactoring.") 59 | expect(json["categories"]).to eq(["Duplication"]) 60 | expect(json["location"]).to eq({ 61 | "path" => "foo.cs", 62 | "lines" => { "begin" => 3, "end" => 18 }, 63 | }) 64 | expect(json["other_locations"]).to eq([ 65 | {"path" => "foo.cs", "lines" => { "begin" => 20, "end" => 35 } }, 66 | ]) 67 | expect(json["severity"]).to eq(CC::Engine::Analyzers::Base::MAJOR) 68 | end 69 | 70 | it "ignores using declarations" do 71 | create_source_file("foo.cs", <<-EOF) 72 | using System; 73 | EOF 74 | 75 | create_source_file("bar.cs", <<-EOF) 76 | using System; 77 | EOF 78 | 79 | issues = run_engine(engine_conf).strip.split("\0") 80 | expect(issues).to be_empty 81 | end 82 | 83 | it "prints an issue for similar code when the only difference is the value of a literal" do 84 | create_source_file("foo.cs", <<-EOCSHARP) 85 | class ArrayDemo 86 | { 87 | void Foo() 88 | { 89 | var scott = new int[] { 90 | 0x00000000, 0x77073096, 0xEE0E612C, 0x990951BA, 0x076DC419, 0x706AF48F 91 | }; 92 | 93 | var anArray = new int[10]; 94 | 95 | for (int i = 0; i < 10; i++) 96 | { 97 | anArray[i] = i; 98 | } 99 | 100 | foreach (i in anArray) 101 | { 102 | Console.WriteLine(i + " "); 103 | } 104 | 105 | Console.WriteLine(); 106 | } 107 | 108 | void Bar() 109 | { 110 | var scott = new int[] { 111 | 0xF3B97148, 0x84BE41DE, 0x1ADAD47D, 0x6DDDE4EB, 0xF4D4B551, 0x83D385C7 112 | }; 113 | 114 | var anArray = new int[10]; 115 | 116 | for (int i = 0; i < 10; i++) 117 | { 118 | anArray[i] = i; 119 | } 120 | 121 | foreach (i in anArray) 122 | { 123 | Console.WriteLine(i + " "); 124 | } 125 | 126 | Console.WriteLine(); 127 | } 128 | } 129 | EOCSHARP 130 | 131 | issues = run_engine(engine_conf).strip.split("\0") 132 | expect(issues.length).to be > 0 133 | result = issues.first.strip 134 | json = JSON.parse(result) 135 | 136 | expect(json["type"]).to eq("issue") 137 | expect(json["check_name"]).to eq("similar-code") 138 | 139 | expect(json["description"]).to eq("Similar blocks of code 
found in 2 locations. Consider refactoring.") 140 | expect(json["categories"]).to eq(["Duplication"]) 141 | expect(json["location"]).to eq({ 142 | "path" => "foo.cs", 143 | "lines" => { "begin" => 3, "end" => 22 }, 144 | }) 145 | expect(json["other_locations"]).to eq([ 146 | {"path" => "foo.cs", "lines" => { "begin" => 24, "end" => 43 } }, 147 | ]) 148 | expect(json["severity"]).to eq(CC::Engine::Analyzers::Base::MAJOR) 149 | end 150 | 151 | it "ignores comment docs and comments" do 152 | create_source_file("foo.cs", <<-EOCSHARP) 153 | /******************************************************************** 154 | * A comment! 155 | *******************************************************************/ 156 | 157 | using System; 158 | 159 | class Foo 160 | { 161 | void Bar() 162 | { 163 | Console.WriteLine("Hello"); 164 | } 165 | } 166 | EOCSHARP 167 | 168 | create_source_file("bar.cs", <<-EOCSHARP) 169 | /******************************************************************** 170 | * A comment! 171 | *******************************************************************/ 172 | 173 | using System; 174 | 175 | class Bar 176 | { 177 | void Baz() 178 | { 179 | Console.WriteLine("Qux"); 180 | } 181 | } 182 | EOCSHARP 183 | 184 | issues = run_engine(engine_conf).strip.split("\0") 185 | expect(issues).to be_empty 186 | end 187 | 188 | end 189 | end 190 | end 191 | -------------------------------------------------------------------------------- /spec/cc/engine/analyzers/file_list_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | require "cc/engine/analyzers/file_list" 3 | require "cc/engine/analyzers/engine_config" 4 | 5 | RSpec.describe CC::Engine::Analyzers::FileList do 6 | around do |example| 7 | Dir.mktmpdir do |directory| 8 | @tmp_dir = directory 9 | 10 | Dir.chdir(@tmp_dir) do 11 | Dir.mkdir("nested") 12 | File.write(File.join(@tmp_dir, "nested", "nest.hs"), "") 13 | 14 | File.write(File.join(@tmp_dir, 
"foo.js"), "") 15 | File.write(File.join(@tmp_dir, "foo.jsx"), "") 16 | File.write(File.join(@tmp_dir, "foo.ex"), "") 17 | 18 | File.write("/tmp/bar.js", "") 19 | FileUtils.ln_s("/tmp/bar.js", File.join(@tmp_dir, "bar.js")) 20 | Dir.mkdir("/tmp/baz") 21 | File.write("/tmp/baz/baz.js", "") 22 | FileUtils.ln_s("/tmp/baz/", File.join(@tmp_dir, "baz")) 23 | 24 | begin 25 | example.run 26 | ensure 27 | FileUtils.rm_rf(["/tmp/bar.js", "/tmp/baz"]) 28 | end 29 | end 30 | end 31 | end 32 | 33 | describe "#files" do 34 | it "expands patterns for directory includes, and ignores symlinks" do 35 | file_list = ::CC::Engine::Analyzers::FileList.new( 36 | engine_config: CC::Engine::Analyzers::EngineConfig.new( 37 | "include_paths" => ["./"], 38 | ), 39 | patterns: ["**/*.js", "**/*.jsx"], 40 | ) 41 | 42 | expect(file_list.files).to eq(["./foo.js", "./foo.jsx"]) 43 | end 44 | 45 | it "filters file includes by patterns" do 46 | file_list = ::CC::Engine::Analyzers::FileList.new( 47 | engine_config: CC::Engine::Analyzers::EngineConfig.new( 48 | "include_paths" => ["./foo.ex", "./foo.js"], 49 | ), 50 | patterns: ["**/*.js", "**/*.jsx"], 51 | ) 52 | 53 | expect(file_list.files).to eq(["./foo.js"]) 54 | end 55 | 56 | it "does not emit directories even if they match the patterns" do 57 | file_list = ::CC::Engine::Analyzers::FileList.new( 58 | engine_config: CC::Engine::Analyzers::EngineConfig.new( 59 | "include_paths" => ["./"], 60 | ), 61 | patterns: ["**/*.js"], 62 | ) 63 | 64 | Dir.mkdir("vendor.js") 65 | File.write(File.join(@tmp_dir, "vendor.js", "vendor.src.js"), "") 66 | 67 | expect(file_list.files).to include("./vendor.js/vendor.src.js") 68 | expect(file_list.files).not_to include("./vendor.js") 69 | end 70 | end 71 | end 72 | -------------------------------------------------------------------------------- /spec/cc/engine/analyzers/file_thread_pool_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | require 
"cc/engine/analyzers/file_thread_pool" 3 | 4 | RSpec.describe CC::Engine::Analyzers::FileThreadPool do 5 | describe "#run" do 6 | let(:thread) { Thread.new {} } 7 | it "uses default count of threads when concurrency is not specified" do 8 | allow(Thread).to receive(:new).and_return(thread) 9 | 10 | pool = CC::Engine::Analyzers::FileThreadPool.new([]) 11 | pool.run {} 12 | 13 | expect(Thread).to have_received(:new).exactly( 14 | CC::Engine::Analyzers::FileThreadPool::DEFAULT_CONCURRENCY, 15 | ).times 16 | end 17 | 18 | it "uses default concurrency when concurrency is over max" do 19 | allow(Thread).to receive(:new).and_return(thread) 20 | 21 | run_pool_with_concurrency( 22 | CC::Engine::Analyzers::FileThreadPool::DEFAULT_CONCURRENCY + 2, 23 | ) 24 | 25 | expect(Thread).to have_received(:new).exactly( 26 | CC::Engine::Analyzers::FileThreadPool::DEFAULT_CONCURRENCY, 27 | ).times 28 | end 29 | 30 | it "uses default concucurrency when concucurrency is under 1" do 31 | allow(Thread).to receive(:new).and_return(thread) 32 | 33 | run_pool_with_concurrency(-2) 34 | 35 | expect(Thread).to have_received(:new).exactly( 36 | CC::Engine::Analyzers::FileThreadPool::DEFAULT_CONCURRENCY, 37 | ).times 38 | end 39 | 40 | it "uses supplied concurrency when valid" do 41 | allow(Thread).to receive(:new).and_return(thread) 42 | 43 | run_pool_with_concurrency(1) 44 | 45 | expect(Thread).to have_received(:new).exactly(1).times 46 | end 47 | 48 | it "calls block for each file" do 49 | pool = CC::Engine::Analyzers::FileThreadPool.new(["abc", "123", "xyz"]) 50 | 51 | results = [] 52 | pool.run do |f| 53 | results.push f.reverse 54 | end 55 | pool.join 56 | 57 | expect(results).to include("cba") 58 | expect(results).to include("321") 59 | expect(results).to include("zyx") 60 | end 61 | 62 | it "aborts on a thread exception" do 63 | allow(Thread).to receive(:new).and_return(thread) 64 | 65 | run_pool_with_concurrency(1) 66 | 67 | expect(thread.abort_on_exception).to eq(true) 68 | end 69 | end 
70 | 71 | def run_pool_with_concurrency(concurrency) 72 | pool = CC::Engine::Analyzers::FileThreadPool.new( 73 | [], 74 | concurrency: concurrency, 75 | ) 76 | pool.run {} 77 | end 78 | end 79 | -------------------------------------------------------------------------------- /spec/cc/engine/analyzers/go/main_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | require "cc/engine/analyzers/go/main" 3 | require 'cc/engine/analyzers/reporter' 4 | require "cc/engine/analyzers/engine_config" 5 | 6 | module CC::Engine::Analyzers 7 | RSpec.describe Go::Main, in_tmpdir: true do 8 | include AnalyzerSpecHelpers 9 | 10 | describe "#run" do 11 | it "prints an issue for identical code" do 12 | create_source_file("foo.go", <<-EOGO) 13 | package main 14 | 15 | import "fmt" 16 | 17 | func main() { 18 | fmt.Println(add(24, 24)) 19 | fmt.Println(add(24, 24)) 20 | } 21 | 22 | func add(x int, y int) int { 23 | return x + y 24 | } 25 | EOGO 26 | 27 | issues = run_engine(engine_conf).strip.split("\0") 28 | result = issues.first.strip 29 | json = JSON.parse(result) 30 | 31 | expect(json["type"]).to eq("issue") 32 | expect(json["check_name"]).to eq("identical-code") 33 | expect(json["description"]).to eq("Identical blocks of code found in 2 locations. 
Consider refactoring.") 34 | expect(json["categories"]).to eq(["Duplication"]) 35 | expect(json["location"]).to eq({ 36 | "path" => "foo.go", 37 | "lines" => { "begin" => 6, "end" => 6 }, 38 | }) 39 | expect(json["remediation_points"]).to eq(360_000) 40 | expect(json["other_locations"]).to eq([ 41 | {"path" => "foo.go", "lines" => { "begin" => 7, "end" => 7} }, 42 | ]) 43 | expect(json["content"]["body"]).to match(/This issue has a mass of 16/) 44 | expect(json["fingerprint"]).to eq("484ee5799eb0e6c933751cfa85ba33c3") 45 | expect(json["severity"]).to eq(CC::Engine::Analyzers::Base::MINOR) 46 | end 47 | 48 | it "prints an issue for similar code" do 49 | create_source_file("foo.go", <<-EOGO) 50 | package main 51 | 52 | import "fmt" 53 | 54 | func add(x int, y int) int { 55 | return x + y 56 | } 57 | 58 | func add_something(x int, y int) int { 59 | return x + y 60 | } 61 | 62 | func add_something_else(x int, y int) int { 63 | return x + y 64 | } 65 | 66 | func main() { 67 | fmt.Println(add(44, 15)) 68 | fmt.Println(add_something(44, 15)) 69 | fmt.Println(add_something_else(44, 15)) 70 | } 71 | EOGO 72 | 73 | issues = run_engine(engine_conf).strip.split("\0") 74 | result = issues.first.strip 75 | json = JSON.parse(result) 76 | 77 | expect(json["type"]).to eq("issue") 78 | expect(json["check_name"]).to eq("similar-code") 79 | expect(json["description"]).to eq("Similar blocks of code found in 3 locations. 
Consider refactoring.") 80 | expect(json["categories"]).to eq(["Duplication"]) 81 | expect(json["location"]).to eq({ 82 | "path" => "foo.go", 83 | "lines" => { "begin" => 5, "end" => 7 }, 84 | }) 85 | expect(json["remediation_points"]).to eq(540_000) 86 | expect(json["other_locations"]).to eq([ 87 | {"path" => "foo.go", "lines" => { "begin" => 9, "end" => 11} }, 88 | {"path" => "foo.go", "lines" => { "begin" => 13, "end" => 15} }, 89 | ]) 90 | expect(json["content"]["body"]).to match /This issue has a mass of 34/ 91 | expect(json["fingerprint"]).to eq("ed3f2dbc039a394ad03d16e4d9f342fe") 92 | expect(json["severity"]).to eq(CC::Engine::Analyzers::Base::MAJOR) 93 | end 94 | 95 | it "outputs a warning for unprocessable errors" do 96 | create_source_file("foo.go", <<-EOGO) 97 | --- 98 | EOGO 99 | 100 | expect(CC.logger).to receive(:warn).with(/Response status: 422/) 101 | expect(CC.logger).to receive(:warn).with(/Skipping/) 102 | run_engine(engine_conf) 103 | end 104 | 105 | it "ignores import declarations" do 106 | create_source_file("foo.go", <<-EOGO) 107 | package main 108 | 109 | import "fmt" 110 | 111 | func main() { 112 | fmt.Println("This is a thing") 113 | } 114 | EOGO 115 | 116 | create_source_file("bar.go", <<-EOGO) 117 | package main 118 | 119 | import "fmt" 120 | 121 | func main() { 122 | fmt.Println("This is something else!") 123 | } 124 | EOGO 125 | 126 | issues = run_engine(engine_conf 25).strip.split("\0") 127 | expect(issues).to be_empty 128 | end 129 | 130 | it "does not flag entire file as issue" do 131 | create_source_file("foo.go", File.read(fixture_path("issue_6609_1.go"))) 132 | create_source_file("bar.go", File.read(fixture_path("issue_6609_2.go"))) 133 | issues = run_engine(engine_conf).strip.split("\0") 134 | issues.map! 
{|issue| JSON.parse issue} 135 | invalid_issues = issues.find_all{|issue| issue["location"]["lines"]["begin"] == 1} 136 | expect(invalid_issues).to be_empty, invalid_issues.map {|issue| issue["location"]}.join("\n") 137 | end 138 | 139 | it "does not flag duplicate comments" do 140 | create_source_file("foo.go", <<-EOGO) 141 | // This is a comment. 142 | // This is a comment. 143 | // This is a comment. 144 | // This is also a comment. 145 | // This is also a comment. 146 | 147 | package main 148 | 149 | // import "fmt" 150 | 151 | func main() { 152 | fmt.Println("This is a duplicate!") 153 | } 154 | 155 | /* This is a multiline comment */ 156 | /* This is a multiline comment */ 157 | /* This is a also multiline comment */ 158 | /* This is a also multiline comment */ 159 | 160 | // func add(x int, y int) int { 161 | // return x + y 162 | // } 163 | 164 | // func add(x int, y int) int { 165 | // return x + y 166 | // } 167 | 168 | // func add(x int, y int) int { 169 | // return x + y 170 | // } 171 | 172 | // func add(x int, y int) int { 173 | // return x + y 174 | // } 175 | EOGO 176 | 177 | create_source_file("bar.go", <<-EOGO) 178 | // This is a comment. 179 | // This is a comment. 180 | // This is a comment. 181 | // This is also a comment. 182 | // This is also a comment. 183 | 184 | package main 185 | 186 | // import "fmt" 187 | 188 | func main() { 189 | // This is a comment. 190 | // This is a comment. 191 | // This is a comment. 192 | // This is also a comment. 193 | // This is also a comment. 
194 | } 195 | 196 | /* This is a multiline comment */ 197 | /* This is a multiline comment */ 198 | /* This is a also multiline comment */ 199 | /* This is a also multiline comment */ 200 | 201 | // func add(x int, y int) int { 202 | // return x + y 203 | // } 204 | 205 | // func add(x int, y int) int { 206 | // return x + y 207 | // } 208 | 209 | // func add(x int, y int) int { 210 | // return x + y 211 | // } 212 | 213 | // func add(x int, y int) int { 214 | // return x + y 215 | // } 216 | EOGO 217 | 218 | expect(run_engine(engine_conf)).to be_empty 219 | end 220 | 221 | def engine_conf mass = 10 222 | CC::Engine::Analyzers::EngineConfig.new({ 223 | 'config' => { 224 | 'checks' => { 225 | 'similar-code' => { 226 | 'enabled' => true, 227 | }, 228 | 'identical-code' => { 229 | 'enabled' => true, 230 | }, 231 | }, 232 | 'languages' => { 233 | 'go' => { 234 | 'mass_threshold' => mass, 235 | }, 236 | }, 237 | }, 238 | }) 239 | end 240 | end 241 | end 242 | end 243 | -------------------------------------------------------------------------------- /spec/cc/engine/analyzers/java/java_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | require "cc/engine/analyzers/java/main" 3 | require "cc/engine/analyzers/engine_config" 4 | 5 | module CC::Engine::Analyzers 6 | RSpec.describe Java::Main, in_tmpdir: true do 7 | include AnalyzerSpecHelpers 8 | 9 | describe "#run" do 10 | let(:engine_conf) { EngineConfig.new({}) } 11 | 12 | it "prints an issue for similar code" do 13 | create_source_file("foo.java", <<-EOF) 14 | public class ArrayDemo { 15 | public static void foo() { 16 | int[] anArray; 17 | 18 | anArray = new int[10]; 19 | 20 | for (int i = 0; i < anArray.length; i++) { 21 | anArray[i] = i; 22 | } 23 | 24 | for (int i = 0; i < anArray.length; i++) { 25 | System.out.print(anArray[i] + " "); 26 | } 27 | 28 | System.out.println(); 29 | } 30 | 31 | public static void bar() { 32 | int[] anArray; 33 | 34 | 
anArray = new int[10]; 35 | 36 | for (int i = 0; i < anArray.length; i++) { 37 | anArray[i] = i; 38 | } 39 | 40 | for (int i = 0; i < anArray.length; i++) { 41 | System.out.print(anArray[i] + " "); 42 | } 43 | 44 | System.out.println(); 45 | } 46 | } 47 | EOF 48 | 49 | issues = run_engine(engine_conf).strip.split("\0") 50 | result = issues.first.strip 51 | json = JSON.parse(result) 52 | 53 | expect(json["type"]).to eq("issue") 54 | expect(json["check_name"]).to eq("similar-code") 55 | expect(json["description"]).to eq("Similar blocks of code found in 2 locations. Consider refactoring.") 56 | expect(json["categories"]).to eq(["Duplication"]) 57 | expect(json["location"]).to eq({ 58 | "path" => "foo.java", 59 | "lines" => { "begin" => 2, "end" => 16 }, 60 | }) 61 | expect(json["remediation_points"]).to eq(930_000) 62 | expect(json["other_locations"]).to eq([ 63 | {"path" => "foo.java", "lines" => { "begin" => 18, "end" => 32 } }, 64 | ]) 65 | expect(json["content"]["body"]).to match /This issue has a mass of 103/ 66 | expect(json["fingerprint"]).to eq("48eb151dc29634f90a86ffabf9d3c4b5") 67 | expect(json["severity"]).to eq(CC::Engine::Analyzers::Base::MAJOR) 68 | end 69 | 70 | it "prints an issue for identical code" do 71 | create_source_file("foo.java", <<-EOF) 72 | public class ArrayDemo { 73 | public static void foo(int[] anArray) { 74 | for (int i = 0; i < anArray.length; i++) { 75 | System.out.print(anArray[i] + " "); 76 | } 77 | 78 | System.out.println(); 79 | } 80 | 81 | public static void foo(int[] anArray) { 82 | for (int i = 0; i < anArray.length; i++) { 83 | System.out.print(anArray[i] + " "); 84 | } 85 | 86 | System.out.println(); 87 | } 88 | } 89 | EOF 90 | 91 | issues = run_engine(engine_conf).strip.split("\0") 92 | result = issues.first.strip 93 | json = JSON.parse(result) 94 | 95 | expect(json["type"]).to eq("issue") 96 | expect(json["check_name"]).to eq("identical-code") 97 | expect(json["description"]).to eq("Identical blocks of code found in 2 
locations. Consider refactoring.") 98 | expect(json["categories"]).to eq(["Duplication"]) 99 | expect(json["location"]).to eq({ 100 | "path" => "foo.java", 101 | "lines" => { "begin" => 2, "end" => 8 }, 102 | }) 103 | expect(json["remediation_points"]).to eq(420_000) 104 | expect(json["other_locations"]).to eq([ 105 | {"path" => "foo.java", "lines" => { "begin" => 10, "end" => 16 } }, 106 | ]) 107 | expect(json["content"]["body"]).to match /This issue has a mass of 52/ 108 | expect(json["fingerprint"]).to eq("dbb957b34f7b5312538235c0aa3f52a0") 109 | expect(json["severity"]).to eq(CC::Engine::Analyzers::Base::MINOR) 110 | end 111 | 112 | it "outputs a warning for unprocessable errors" do 113 | create_source_file("foo.java", <<-EOF) 114 | --- 115 | EOF 116 | 117 | expect(CC.logger).to receive(:warn).with(/Response status: 422/) 118 | expect(CC.logger).to receive(:warn).with(/Skipping/) 119 | run_engine(engine_conf) 120 | end 121 | 122 | it "ignores import and package declarations" do 123 | create_source_file("foo.java", <<-EOF) 124 | package org.springframework.rules.constraint; 125 | 126 | import java.util.Comparator; 127 | 128 | import org.springframework.rules.constraint.Constraint; 129 | import org.springframework.rules.closure.BinaryConstraint; 130 | EOF 131 | 132 | create_source_file("bar.java", <<-EOF) 133 | package org.springframework.rules.constraint; 134 | 135 | import java.util.Comparator; 136 | 137 | import org.springframework.rules.constraint.Constraint; 138 | import org.springframework.rules.closure.BinaryConstraint; 139 | EOF 140 | 141 | issues = run_engine(engine_conf).strip.split("\0") 142 | expect(issues).to be_empty 143 | end 144 | 145 | it "prints an issue for similar code when the only difference is the value of a literal" do 146 | create_source_file("foo.java", <<-EOF) 147 | public class ArrayDemo { 148 | public static void foo() { 149 | int[] scott; 150 | scott = new int[] { 151 | 0x00000000, 0x77073096, 0xEE0E612C, 0x990951BA, 0x076DC419, 
0x706AF48F 152 | }; 153 | 154 | int[] anArray; 155 | 156 | anArray = new int[10]; 157 | 158 | for (int i = 0; i < anArray.length; i++) { 159 | anArray[i] = i; 160 | } 161 | 162 | for (int i = 0; i < anArray.length; i++) { 163 | System.out.print(anArray[i] + " "); 164 | } 165 | 166 | System.out.println(); 167 | } 168 | 169 | public static void foo() { 170 | int[] scott; 171 | scott = new int[] { 172 | 0xF3B97148, 0x84BE41DE, 0x1ADAD47D, 0x6DDDE4EB, 0xF4D4B551, 0x83D385C7 173 | }; 174 | 175 | int[] anArray; 176 | 177 | anArray = new int[10]; 178 | 179 | for (int i = 0; i < anArray.length; i++) { 180 | anArray[i] = i; 181 | } 182 | 183 | for (int i = 0; i < anArray.length; i++) { 184 | System.out.print(anArray[i] + " "); 185 | } 186 | 187 | System.out.println(); 188 | } 189 | } 190 | EOF 191 | 192 | issues = run_engine(engine_conf).strip.split("\0") 193 | expect(issues.length).to be > 0 194 | result = issues.first.strip 195 | json = JSON.parse(result) 196 | 197 | expect(json["type"]).to eq("issue") 198 | expect(json["check_name"]).to eq("similar-code") 199 | 200 | expect(json["description"]).to eq("Similar blocks of code found in 2 locations. 
Consider refactoring.") 201 | expect(json["categories"]).to eq(["Duplication"]) 202 | expect(json["location"]).to eq({ 203 | "path" => "foo.java", 204 | "lines" => { "begin" => 2, "end" => 21 }, 205 | }) 206 | expect(json["remediation_points"]).to eq(1_230_000) 207 | expect(json["other_locations"]).to eq([ 208 | {"path" => "foo.java", "lines" => { "begin" => 23, "end" => 42 } }, 209 | ]) 210 | expect(json["content"]["body"]).to match /This issue has a mass of 133/ 211 | expect(json["fingerprint"]).to eq("9abf88bac3a56bf708a5c4ceaf251d98") 212 | expect(json["severity"]).to eq(CC::Engine::Analyzers::Base::MAJOR) 213 | end 214 | end 215 | end 216 | end 217 | -------------------------------------------------------------------------------- /spec/cc/engine/analyzers/javascript/main_spec.rb: -------------------------------------------------------------------------------- 1 | require 'spec_helper' 2 | require 'cc/engine/analyzers/javascript/main' 3 | require 'cc/engine/analyzers/reporter' 4 | require 'cc/engine/analyzers/engine_config' 5 | 6 | RSpec.describe CC::Engine::Analyzers::Javascript::Main, in_tmpdir: true do 7 | include AnalyzerSpecHelpers 8 | 9 | describe "#run" do 10 | it "prints an issue for identical code" do 11 | create_source_file("foo.js", <<-EOJS) 12 | console.log("hello JS!"); 13 | console.log("hello JS!"); 14 | console.log("hello JS!"); 15 | EOJS 16 | 17 | issues = run_engine(engine_conf).strip.split("\0") 18 | result = issues.first.strip 19 | json = JSON.parse(result) 20 | 21 | expect(json["type"]).to eq("issue") 22 | expect(json["check_name"]).to eq("identical-code") 23 | expect(json["description"]).to eq("Identical blocks of code found in 3 locations. 
Consider refactoring.") 24 | expect(json["categories"]).to eq(["Duplication"]) 25 | expect(json["location"]).to eq({ 26 | "path" => "foo.js", 27 | "lines" => { "begin" => 1, "end" => 1 }, 28 | }) 29 | expect(json["remediation_points"]).to eq(600_000) 30 | expect(json["other_locations"]).to eq([ 31 | {"path" => "foo.js", "lines" => { "begin" => 2, "end" => 2} }, 32 | {"path" => "foo.js", "lines" => { "begin" => 3, "end" => 3} }, 33 | ]) 34 | expect(json["content"]["body"]).to match(/This issue has a mass of 11/) 35 | expect(json["fingerprint"]).to eq("c4d29200c20d02297c6f550ad2c87c15") 36 | expect(json["severity"]).to eq(CC::Engine::Analyzers::Base::MAJOR) 37 | end 38 | 39 | it "prints an issue for similar code" do 40 | create_source_file("foo.js", <<-EOJS) 41 | console.log("hello JS!"); 42 | console.log("hellllllo JS!"); 43 | console.log("helllllllllllllllllo JS!"); 44 | EOJS 45 | 46 | issues = run_engine(engine_conf).strip.split("\0") 47 | result = issues.first.strip 48 | json = JSON.parse(result) 49 | 50 | expect(json["type"]).to eq("issue") 51 | expect(json["check_name"]).to eq("similar-code") 52 | expect(json["description"]).to eq("Similar blocks of code found in 3 locations. 
Consider refactoring.") 53 | expect(json["categories"]).to eq(["Duplication"]) 54 | expect(json["location"]).to eq({ 55 | "path" => "foo.js", 56 | "lines" => { "begin" => 1, "end" => 1 }, 57 | }) 58 | expect(json["remediation_points"]).to eq(600_000) 59 | expect(json["other_locations"]).to eq([ 60 | {"path" => "foo.js", "lines" => { "begin" => 2, "end" => 2} }, 61 | {"path" => "foo.js", "lines" => { "begin" => 3, "end" => 3} }, 62 | ]) 63 | expect(json["content"]["body"]).to match(/This issue has a mass of 11/) 64 | expect(json["fingerprint"]).to eq("d9dab8e4607e2a74da3b9eefb49eacec") 65 | expect(json["severity"]).to eq(CC::Engine::Analyzers::Base::MAJOR) 66 | end 67 | 68 | it "handles ES6 spread params" do 69 | create_source_file("foo.jsx", <<-EOJS) 70 | const ThingClass = React.createClass({ 71 | propTypes: { 72 | ...OtherThing.propTypes, 73 | otherProp: "someVal" 74 | } 75 | }); 76 | EOJS 77 | 78 | expect(CC.logger).not_to receive(:info).with(/Skipping file/) 79 | run_engine(engine_conf) 80 | end 81 | 82 | it "skips unparsable files" do 83 | create_source_file("foo.js", <<-EOJS) 84 | function () { do(); // missing closing brace 85 | EOJS 86 | 87 | expect(CC.logger).to receive(:warn).with(/Skipping \.\/foo\.js/) 88 | expect(CC.logger).to receive(:warn).with("Response status: 422") 89 | expect(run_engine(engine_conf)).to eq("") 90 | end 91 | 92 | it "skips minified files" do 93 | path = fixture_path("huge_js_file.js") 94 | create_source_file("foo.js", File.read(path)) 95 | 96 | expect(CC.logger).to receive(:warn).with(/Skipping \.\/foo\.js/) 97 | expect(CC.logger).to receive(:warn).with("Response status: 422") 98 | expect(run_engine(engine_conf)).to eq("") 99 | end 100 | 101 | it "handles parser 500s" do 102 | create_source_file("foo.js", <<-EOJS) 103 | EOJS 104 | 105 | error = CC::Parser::Client::HTTPError.new(500, "Error processing file: ./foo.js") 106 | allow(CC::Parser).to receive(:parse).with("", "/javascript", filename: "./foo.js").and_raise(error) 107 | 108 
| expect(CC.logger).to receive(:error).with("Error processing file: ./foo.js") 109 | expect(CC.logger).to receive(:error).with(error.message) 110 | 111 | expect { run_engine(engine_conf) }.to raise_error(error) 112 | end 113 | end 114 | 115 | it "does not flag duplicate comments" do 116 | create_source_file("foo.js", <<-EOJS) 117 | // A comment. 118 | // A comment. 119 | 120 | /* A comment. */ 121 | /* A comment. */ 122 | EOJS 123 | 124 | expect(run_engine(engine_conf)).to be_empty 125 | end 126 | 127 | it "does not report the same line for multiple issues" do 128 | create_source_file("dup.jsx", <<-EOJSX) 129 | Login 130 | EOJSX 131 | 132 | issues = run_engine(engine_conf).strip.split("\0") 133 | 134 | expect(issues.length).to eq 1 135 | end 136 | 137 | it "ignores imports" do 138 | create_source_file("foo.js", <<~EOJS) 139 | import React, { Component, PropTypes } from 'react' 140 | import { Table, TableBody, TableHeader, TableHeaderColumn, TableRow } from 'material-ui/Table' 141 | import values from 'lodash/values' 142 | import { v4 } from 'uuid' 143 | EOJS 144 | 145 | create_source_file("bar.js", <<~EOJS) 146 | import React, { Component, PropTypes } from 'react' 147 | import { Table, TableBody, TableHeader, TableHeaderColumn, TableRow } from 'material-ui/Table' 148 | import values from 'lodash/values' 149 | import { v4 } from 'uuid' 150 | EOJS 151 | 152 | issues = run_engine(engine_conf).strip.split("\0") 153 | expect(issues).to be_empty 154 | end 155 | 156 | it "ignores requires" do 157 | create_source_file("foo.js", <<~EOJS) 158 | const a = require('foo'), 159 | b = require('bar'), 160 | c = require('baz'), 161 | d = require('bam'); 162 | a + b + c + d; 163 | EOJS 164 | 165 | create_source_file("bar.js", <<~EOJS) 166 | const a = require('foo'), 167 | b = require('bar'), 168 | c = require('baz'), 169 | d = require('bam'); 170 | print(a); 171 | EOJS 172 | 173 | issues = run_engine(engine_conf 3).strip.split("\0") 174 | expect(issues).to be_empty 175 | end 176 | 
177 | it "ignores imports and reports proper line number boundaries" do 178 | create_source_file("foo.js", <<~EOJS) 179 | 'use strict'; 180 | 181 | import React from 'react'; 182 | 183 | import Translator from '../../../i18n/translator-tag.jsx'; 184 | import { gettingSeniorID } from '../../../helpers/data/senior'; 185 | import { choosingReducedFee } from '../../../helpers/data/reduced-fee'; 186 | import { correctID } from '../../../helpers/data/card-type'; 187 | 188 | 189 | const Senior = (props) => { 190 | if (!gettingSeniorID(props.IDApp)) { return null; } 191 | return 192 | }; 193 | 194 | const Reduced = (props) => { 195 | if (!choosingReducedFee(props.IDApp)) { return null; } 196 | return 197 | }; 198 | 199 | const Regular = (props) => { 200 | if (gettingSeniorID(props.IDApp) || choosingReducedFee(props.IDApp)) { return null; } 201 | return 202 | }; 203 | 204 | 205 | const CorrectingIDInfo = (props) => { 206 | if(!correctID(props)) { return null; } 207 | return ( 208 |
209 | 210 | 211 | 212 |
213 | ); 214 | }; 215 | 216 | export default CorrectingIDInfo; 217 | EOJS 218 | 219 | create_source_file("bar.js", <<~EOJS) 220 | 'use strict'; 221 | 222 | import React from 'react'; 223 | import { updateID } from '../../../helpers/data/card-type'; 224 | import { gettingSeniorID } from '../../../helpers/data/senior'; 225 | import { choosingReducedFee } from '../../../helpers/data/reduced-fee'; 226 | import Translator from '../../../i18n/translator-tag.jsx'; 227 | 228 | const Senior = (props) => { 229 | if (!gettingSeniorID(props.IDApp)) { return null; } 230 | return 231 | }; 232 | 233 | const Reduced = (props) => { 234 | if (!choosingReducedFee(props.IDApp)) { return null; } 235 | return 236 | }; 237 | 238 | const Regular = (props) => { 239 | if (gettingSeniorID(props.IDApp) || choosingReducedFee(props.IDApp)) { return null; } 240 | return 241 | }; 242 | 243 | const UpdatingIDInfo = (props) => { 244 | if (!updateID(props)) { return null; } 245 | return ( 246 |
247 | 248 | 249 | 250 |
251 | ); 252 | }; 253 | 254 | export default UpdatingIDInfo; 255 | EOJS 256 | 257 | issues = run_engine(engine_conf).strip.split("\0") 258 | issues = issues.map { |issue| JSON.parse issue } 259 | 260 | infected = issues.any? { |i| i.dig("location", "lines", "begin") == 1 } 261 | 262 | expect(infected).to be false 263 | end 264 | 265 | it "outputs the correct line numbers for ASTs missing line details (codeclimate/app#6227)" do 266 | create_source_file("foo.js", <<~EOJS) 267 | `/movie?${getQueryString({ movie_id: movieId })}` 268 | EOJS 269 | 270 | create_source_file("bar.js", <<~EOJS) 271 | var greeting = "hello"; 272 | 273 | `/movie?${getQueryString({ movie_id: movieId })}` 274 | EOJS 275 | 276 | issues = run_engine(engine_conf).strip.split("\0") 277 | expect(issues).to_not be_empty 278 | 279 | issues.map! { |issue| JSON.parse(issue) } 280 | 281 | foo_issue = issues.detect { |issue| issue.fetch("location").fetch("path") == "foo.js" } 282 | expect(foo_issue["location"]).to eq({ 283 | "path" => "foo.js", 284 | "lines" => { "begin" => 1, "end" => 1 }, 285 | }) 286 | 287 | bar_issue = issues.detect { |issue| issue.fetch("location").fetch("path") == "bar.js" } 288 | expect(bar_issue["location"]).to eq({ 289 | "path" => "bar.js", 290 | "lines" => { "begin" => 3, "end" => 3 }, 291 | }) 292 | end 293 | 294 | def engine_conf mass = 1 295 | CC::Engine::Analyzers::EngineConfig.new({ 296 | 'config' => { 297 | 'checks' => { 298 | 'similar-code' => { 299 | 'enabled' => true, 300 | }, 301 | 'identical-code' => { 302 | 'enabled' => true, 303 | }, 304 | }, 305 | 'languages' => { 306 | 'javascript' => { 307 | 'mass_threshold' => mass, 308 | }, 309 | }, 310 | }, 311 | }) 312 | end 313 | end 314 | -------------------------------------------------------------------------------- /spec/cc/engine/analyzers/kotlin/kotlin_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | require "cc/engine/analyzers/kotlin/main" 3 | 
require "cc/engine/analyzers/engine_config" 4 | 5 | module CC::Engine::Analyzers 6 | RSpec.describe Kotlin::Main, in_tmpdir: true do 7 | include AnalyzerSpecHelpers 8 | 9 | describe "#run" do 10 | let(:engine_conf) { EngineConfig.new({}) } 11 | 12 | it "prints an issue for similar code" do 13 | create_source_file("foo.kt", <<-EOF) 14 | class ArrayDemo { 15 | fun foo() { 16 | val anArray: Array = Array(10) 17 | 18 | for (i in 0..10) { 19 | anArray[i] = i 20 | } 21 | 22 | for (i in 0..10) { 23 | println(anArray[i]) 24 | } 25 | 26 | println("") 27 | } 28 | 29 | fun bar() { 30 | val anArray: Array = Array(10) 31 | 32 | for (i in 0..10) { 33 | anArray[i] = i 34 | } 35 | 36 | for (i in 0..10) { 37 | println(anArray[i]) 38 | } 39 | 40 | println("") 41 | } 42 | } 43 | EOF 44 | 45 | issues = run_engine(engine_conf).strip.split("\0") 46 | result = issues.first.strip 47 | json = JSON.parse(result) 48 | 49 | expect(json["type"]).to eq("issue") 50 | expect(json["check_name"]).to eq("similar-code") 51 | expect(json["description"]).to eq("Similar blocks of code found in 2 locations. 
Consider refactoring.") 52 | expect(json["categories"]).to eq(["Duplication"]) 53 | expect(json["location"]).to eq({ 54 | "path" => "foo.kt", 55 | "lines" => { "begin" => 2, "end" => 14 }, 56 | }) 57 | expect(json["other_locations"]).to eq([ 58 | {"path" => "foo.kt", "lines" => { "begin" => 16, "end" => 28 } }, 59 | ]) 60 | expect(json["severity"]).to eq(CC::Engine::Analyzers::Base::MAJOR) 61 | end 62 | 63 | it "prints an issue for identical code" do 64 | create_source_file("foo.kt", <<-EOF) 65 | class ArrayDemo { 66 | fun foo(anArray: Array) { 67 | for (i in anArray.indices) { 68 | println(anArray[i] + " ") 69 | } 70 | 71 | println("") 72 | } 73 | 74 | fun foo(anArray: Array) { 75 | for (i in anArray.indices) { 76 | println(anArray[i] + " ") 77 | } 78 | 79 | println("") 80 | } 81 | } 82 | EOF 83 | 84 | issues = run_engine(engine_conf).strip.split("\0") 85 | result = issues.first.strip 86 | json = JSON.parse(result) 87 | 88 | expect(json["type"]).to eq("issue") 89 | expect(json["check_name"]).to eq("identical-code") 90 | expect(json["description"]).to eq("Identical blocks of code found in 2 locations. 
Consider refactoring.") 91 | expect(json["categories"]).to eq(["Duplication"]) 92 | expect(json["location"]).to eq({ 93 | "path" => "foo.kt", 94 | "lines" => { "begin" => 2, "end" => 8 }, 95 | }) 96 | expect(json["other_locations"]).to eq([ 97 | {"path" => "foo.kt", "lines" => { "begin" => 10, "end" => 16 } }, 98 | ]) 99 | expect(json["severity"]).to eq(CC::Engine::Analyzers::Base::MAJOR) 100 | end 101 | 102 | it "outputs a warning for unprocessable errors" do 103 | create_source_file("foo.kt", <<-EOF) 104 | --- 105 | EOF 106 | 107 | expect(CC.logger).to receive(:warn).with(/Response status: 422/) 108 | expect(CC.logger).to receive(:warn).with(/Skipping/) 109 | run_engine(engine_conf) 110 | end 111 | 112 | it "ignores import and package declarations" do 113 | create_source_file("foo.kt", <<-EOF) 114 | package org.springframework.rules.constraint; 115 | 116 | import java.util.Comparator; 117 | 118 | import org.springframework.rules.constraint.Constraint; 119 | import org.springframework.rules.closure.BinaryConstraint; 120 | EOF 121 | 122 | create_source_file("bar.kt", <<-EOF) 123 | package org.springframework.rules.constraint; 124 | 125 | import java.util.Comparator; 126 | 127 | import org.springframework.rules.constraint.Constraint; 128 | import org.springframework.rules.closure.BinaryConstraint; 129 | EOF 130 | 131 | issues = run_engine(engine_conf).strip.split("\0") 132 | expect(issues).to be_empty 133 | end 134 | 135 | it "prints an issue for similar code when the only difference is the value of a literal" do 136 | create_source_file("foo.kt", <<-EOF) 137 | class ArrayDemo { 138 | fun foo() { 139 | val scott = arrayOfInt( 140 | 0x00000000, 0x77073096, 0xEE0E612C, 0x990951BA, 0x076DC419, 0x706AF48F 141 | ) 142 | 143 | val anArray: Array = Array(10) 144 | 145 | for (i in 0..10) { 146 | anArray[i] = i 147 | } 148 | 149 | for (i in 0..10) { 150 | println(anArray[i] + " ") 151 | } 152 | 153 | println() 154 | } 155 | 156 | fun foo() { 157 | val scott = arrayOfInt( 158 
| 0xF3B97148, 0x84BE41DE, 0x1ADAD47D, 0x6DDDE4EB, 0xF4D4B551, 0x83D385C7 159 | ) 160 | 161 | val anArray: Array = Array(10) 162 | 163 | for (i in 0..10) { 164 | anArray[i] = i 165 | } 166 | 167 | for (i in 0..10) { 168 | println(anArray[i] + " ") 169 | } 170 | 171 | println() 172 | } 173 | } 174 | EOF 175 | 176 | issues = run_engine(engine_conf).strip.split("\0") 177 | expect(issues.length).to be > 0 178 | result = issues.first.strip 179 | json = JSON.parse(result) 180 | 181 | expect(json["type"]).to eq("issue") 182 | expect(json["check_name"]).to eq("similar-code") 183 | 184 | expect(json["description"]).to eq("Similar blocks of code found in 2 locations. Consider refactoring.") 185 | expect(json["categories"]).to eq(["Duplication"]) 186 | expect(json["location"]).to eq({ 187 | "path" => "foo.kt", 188 | "lines" => { "begin" => 2, "end" => 18 }, 189 | }) 190 | expect(json["other_locations"]).to eq([ 191 | {"path" => "foo.kt", "lines" => { "begin" => 20, "end" => 36 } }, 192 | ]) 193 | expect(json["severity"]).to eq(CC::Engine::Analyzers::Base::MAJOR) 194 | end 195 | 196 | it "ignores comment docs and comments" do 197 | create_source_file("foo.kt", <<-EOF) 198 | /******************************************************************** 199 | * Copyright (C) 2017 by Max Lv 200 | *******************************************************************/ 201 | 202 | package com.github.shadowsocks.acl 203 | // Comment here 204 | 205 | import org.junit.Assert 206 | import org.junit.Test 207 | 208 | class AclTest { 209 | // Comment here 210 | companion object { 211 | private const val INPUT1 = """[proxy_all] 212 | [bypass_list] 213 | 1.0.1.0/24 214 | (^|\.)4tern\.com${'$'} 215 | """ 216 | } 217 | 218 | @Test 219 | fun parse() { 220 | Assert.assertEquals(INPUT1, Acl().fromReader(INPUT1.reader()).toString()); 221 | } 222 | } 223 | EOF 224 | 225 | create_source_file("bar.kt", <<-EOF) 226 | /********************************************************************* 227 | * Copyright (C) 2017 
by Max Lv 228 | ********************************************************************/ 229 | 230 | package com.evernote.android.job 231 | // Comment here 232 | 233 | object JobConstants { 234 | // Comment here 235 | const val DATABASE_NAME = JobStorage.DATABASE_NAME 236 | const val PREF_FILE_NAME = JobStorage.PREF_FILE_NAME 237 | } 238 | EOF 239 | 240 | issues = run_engine(engine_conf).strip.split("\0") 241 | expect(issues).to be_empty 242 | end 243 | 244 | end 245 | end 246 | end 247 | -------------------------------------------------------------------------------- /spec/cc/engine/analyzers/php/main_spec.rb: -------------------------------------------------------------------------------- 1 | require 'spec_helper' 2 | require 'cc/engine/analyzers/php/main' 3 | require 'cc/engine/analyzers/reporter' 4 | require 'cc/engine/analyzers/engine_config' 5 | 6 | RSpec.describe CC::Engine::Analyzers::Php::Main, in_tmpdir: true do 7 | include AnalyzerSpecHelpers 8 | 9 | describe "#run" do 10 | it "prints an issue for identical code" do 11 | create_source_file("foo.php", <<-EOPHP) 12 | 0 32 | result = issues.first.strip 33 | json = JSON.parse(result) 34 | 35 | expect(json["type"]).to eq("issue") 36 | expect(json["check_name"]).to eq("identical-code") 37 | expect(json["description"]).to eq("Identical blocks of code found in 2 locations. 
Consider refactoring.") 38 | expect(json["categories"]).to eq(["Duplication"]) 39 | expect(json["location"]).to eq({ 40 | "path" => "foo.php", 41 | "lines" => { "begin" => 2, "end" => 8 }, 42 | }) 43 | expect(json["remediation_points"]).to eq(967000) 44 | expect(json["other_locations"]).to eq([ 45 | {"path" => "foo.php", "lines" => { "begin" => 10, "end" => 16} }, 46 | ]) 47 | expect(json["content"]["body"]).to match(/This issue has a mass of 28/) 48 | expect(json["fingerprint"]).to eq("b41447552cff977d3d98dff4cd282ec2") 49 | expect(json["severity"]).to eq(CC::Engine::Analyzers::Base::MAJOR) 50 | end 51 | 52 | it "prints an issue for similar code" do 53 | create_source_file("foo.php", <<-EOPHP) 54 | 0 74 | result = issues.first.strip 75 | json = JSON.parse(result) 76 | 77 | expect(json["type"]).to eq("issue") 78 | expect(json["check_name"]).to eq("similar-code") 79 | expect(json["description"]).to eq("Similar blocks of code found in 2 locations. Consider refactoring.") 80 | expect(json["categories"]).to eq(["Duplication"]) 81 | expect(json["location"]).to eq({ 82 | "path" => "foo.php", 83 | "lines" => { "begin" => 2, "end" => 8 }, 84 | }) 85 | expect(json["remediation_points"]).to eq(967000) 86 | expect(json["other_locations"]).to eq([ 87 | {"path" => "foo.php", "lines" => { "begin" => 10, "end" => 16} }, 88 | ]) 89 | expect(json["content"]["body"]).to match(/This issue has a mass of 28/) 90 | expect(json["fingerprint"]).to eq("c4c0b456f59f109d0a5cfce7d4807935") 91 | end 92 | 93 | it "runs against complex files" do 94 | FileUtils.cp(fixture_path("symfony_configuration.php"), File.join(@code, "configuration.php")) 95 | issues = run_engine(engine_conf).strip.split("\0") 96 | expect(issues.length).to be > 0 97 | result = issues.first.strip 98 | 99 | expect(result).to match "\"type\":\"issue\"" 100 | end 101 | 102 | it "handles INF & NAN constants" do 103 | create_source_file("foo.php", <<-EOPHP) 104 | 0 142 | result = issues.first.strip 143 | json = JSON.parse(result) 
144 | expect(json["location"]).to eq({ 145 | "path" => "foo.php", 146 | "lines" => { "begin" => 190, "end" => 190 }, 147 | }) 148 | end 149 | 150 | it "can parse php 7.1 code" do 151 | create_source_file("foo.php", File.read(fixture_path("php_71_sample.php"))) 152 | 153 | issues = run_engine(engine_conf).strip.split("\0") 154 | 155 | expect(issues.length).to eq(2) 156 | 157 | expect(JSON.parse(issues.first.strip)["location"]).to eq({ 158 | "path" => "foo.php", 159 | "lines" => { "begin" => 2, "end" => 9 }, 160 | }) 161 | 162 | expect(JSON.parse(issues.last.strip)["location"]).to eq({ 163 | "path" => "foo.php", 164 | "lines" => { "begin" => 11, "end" => 18 }, 165 | }) 166 | end 167 | 168 | it "ignores namespace and use declarations" do 169 | create_source_file("foo.php", <<~EOPHP) 170 | 0 235 | 236 | issue = JSON.parse(issues.first.strip) 237 | expect(issue["location"]).to eq( 238 | "path" => "foo.php", 239 | "lines" => { "begin" => 8, "end" => 14 }, 240 | ) 241 | expect(issue["content"]["body"]).to match(/This issue has a mass of 28/) 242 | end 243 | 244 | it "ignores one-line comments" do 245 | create_source_file("foo.php", <<-EOPHP) 246 | 0 269 | 270 | issue = JSON.parse(issues.first.strip) 271 | expect(issue["location"]).to eq( 272 | "path" => "foo.php", 273 | "lines" => { "begin" => 4, "end" => 10 }, 274 | ) 275 | expect(issue["content"]["body"]).to match(/This issue has a mass of 28/) 276 | end 277 | end 278 | end 279 | 280 | def engine_conf mass = 5 281 | CC::Engine::Analyzers::EngineConfig.new({ 282 | 'config' => { 283 | 'languages' => { 284 | 'php' => { 285 | 'mass_threshold' => mass, 286 | }, 287 | }, 288 | }, 289 | }) 290 | end 291 | end 292 | -------------------------------------------------------------------------------- /spec/cc/engine/analyzers/python/main_spec.rb: -------------------------------------------------------------------------------- 1 | require 'spec_helper' 2 | require 'cc/engine/analyzers/python/main' 3 | require 
'cc/engine/analyzers/engine_config' 4 | 5 | RSpec.describe CC::Engine::Analyzers::Python::Main, in_tmpdir: true do 6 | include AnalyzerSpecHelpers 7 | 8 | describe "#run" do 9 | it "prints an issue for identical code" do 10 | create_source_file("foo.py", <<-EOJS) 11 | print("Hello", "python") 12 | print("Hello", "python") 13 | print("Hello", "python") 14 | EOJS 15 | 16 | issues = run_engine(engine_conf).strip.split("\0") 17 | result = issues.first.strip 18 | json = JSON.parse(result) 19 | 20 | expect(json["type"]).to eq("issue") 21 | expect(json["check_name"]).to eq("identical-code") 22 | expect(json["description"]).to eq("Identical blocks of code found in 3 locations. Consider refactoring.") 23 | expect(json["categories"]).to eq(["Duplication"]) 24 | expect(json["location"]).to eq({ 25 | "path" => "foo.py", 26 | "lines" => { "begin" => 1, "end" => 1 }, 27 | }) 28 | expect(json["remediation_points"]).to eq(350_000) 29 | expect(json["other_locations"]).to eq([ 30 | {"path" => "foo.py", "lines" => { "begin" => 2, "end" => 2} }, 31 | {"path" => "foo.py", "lines" => { "begin" => 3, "end" => 3} }, 32 | ]) 33 | expect(json["content"]["body"]).to match(/This issue has a mass of 6/) 34 | expect(json["fingerprint"]).to eq("3f3d34361bcaef98839d9da6ca9fcee4") 35 | expect(json["severity"]).to eq(CC::Engine::Analyzers::Base::MINOR) 36 | end 37 | 38 | it "prints an issue for similar code" do 39 | create_source_file("foo.py", <<-EOJS) 40 | print("Hello", "python") 41 | print("Hello It's me", "python") 42 | print("Hello from the other side", "python") 43 | EOJS 44 | 45 | issues = run_engine(engine_conf).strip.split("\0") 46 | result = issues.first.strip 47 | json = JSON.parse(result) 48 | 49 | expect(json["type"]).to eq("issue") 50 | expect(json["check_name"]).to eq("similar-code") 51 | expect(json["description"]).to eq("Similar blocks of code found in 3 locations. 
Consider refactoring.") 52 | expect(json["categories"]).to eq(["Duplication"]) 53 | expect(json["location"]).to eq({ 54 | "path" => "foo.py", 55 | "lines" => { "begin" => 1, "end" => 1 }, 56 | }) 57 | expect(json["remediation_points"]).to eq(350_000) 58 | expect(json["other_locations"]).to eq([ 59 | {"path" => "foo.py", "lines" => { "begin" => 2, "end" => 2} }, 60 | {"path" => "foo.py", "lines" => { "begin" => 3, "end" => 3} }, 61 | ]) 62 | expect(json["content"]["body"]).to match(/This issue has a mass of 6/) 63 | expect(json["fingerprint"]).to eq("019118ceed60bf40b35aad581aae1b02") 64 | expect(json["severity"]).to eq(CC::Engine::Analyzers::Base::MINOR) 65 | end 66 | 67 | it "finds duplication in python3 code" do 68 | create_source_file("foo.py", <<-EOJS) 69 | def a(thing: str): 70 | print("Hello", str) 71 | 72 | def b(thing: str): 73 | print("Hello", str) 74 | 75 | def c(thing: str): 76 | print("Hello", str) 77 | 78 | def b(thing: str): 79 | bytes_literal = b'asdf' 80 | EOJS 81 | 82 | conf = CC::Engine::Analyzers::EngineConfig.new({ 83 | "config" => { 84 | "languages" => { 85 | "python" => { 86 | "mass_threshold" => 4, 87 | "python_version" => 3, 88 | }, 89 | }, 90 | }, 91 | }) 92 | issues = run_engine(conf).strip.split("\0") 93 | result = issues.first.strip 94 | json = JSON.parse(result) 95 | 96 | expect(json["type"]).to eq("issue") 97 | expect(json["check_name"]).to eq("similar-code") 98 | expect(json["description"]).to eq("Similar blocks of code found in 3 locations. 
Consider refactoring.") 99 | expect(json["categories"]).to eq(["Duplication"]) 100 | expect(json["location"]).to eq({ 101 | "path" => "foo.py", 102 | "lines" => { "begin" => 1, "end" => 2 }, 103 | }) 104 | expect(json["remediation_points"]).to eq(900_000) 105 | expect(json["other_locations"]).to eq([ 106 | {"path" => "foo.py", "lines" => { "begin" => 4, "end" => 5 } }, 107 | {"path" => "foo.py", "lines" => { "begin" => 7, "end" => 8 } }, 108 | ]) 109 | expect(json["content"]["body"]).to match(/This issue has a mass of 16/) 110 | expect(json["fingerprint"]).to eq("607cf2d16d829e667c5f34534197d14c") 111 | expect(json["severity"]).to eq(CC::Engine::Analyzers::Base::MAJOR) 112 | end 113 | 114 | it "finds duplication with complex-number literals" do 115 | create_source_file("complex.py", <<-EOJS) 116 | def a(): 117 | return 1+1j 118 | 119 | def b(): 120 | return 1 + 1J 121 | 122 | def c(): 123 | return (1 + 1j) 124 | 125 | def d(): 126 | return 1 127 | EOJS 128 | 129 | conf = CC::Engine::Analyzers::EngineConfig.new({ 130 | "config" => { 131 | "languages" => { 132 | "python" => { 133 | "mass_threshold" => 4, 134 | "python_version" => 3, 135 | }, 136 | }, 137 | }, 138 | }) 139 | issues = run_engine(conf).strip.split("\0") 140 | result = issues.first.strip 141 | json = JSON.parse(result) 142 | 143 | expect(json["type"]).to eq("issue") 144 | expect(json["check_name"]).to eq("similar-code") 145 | expect(json["description"]).to eq("Similar blocks of code found in 3 locations. 
Consider refactoring.") 146 | expect(json["categories"]).to eq(["Duplication"]) 147 | expect(json["location"]).to eq({ 148 | "path" => "complex.py", 149 | "lines" => { "begin" => 1, "end" => 2 }, 150 | }) 151 | expect(json["remediation_points"]).to eq(750_000) 152 | expect(json["other_locations"]).to eq([ 153 | {"path" => "complex.py", "lines" => { "begin" => 4, "end" => 5 } }, 154 | {"path" => "complex.py", "lines" => { "begin" => 7, "end" => 8 } }, 155 | ]) 156 | expect(json["content"]["body"]).to match(/This issue has a mass of 13/) 157 | expect(json["fingerprint"]).to eq("f867cd91cfb73d925510a79a58619d1a") 158 | expect(json["severity"]).to eq(CC::Engine::Analyzers::Base::MAJOR) 159 | end 160 | 161 | it "skips unparsable files" do 162 | create_source_file("foo.py", <<-EOPY) 163 | --- 164 | EOPY 165 | 166 | expect(CC.logger).to receive(:info).with(/Skipping file/) 167 | expect(run_engine(engine_conf)).to eq("") 168 | end 169 | end 170 | 171 | it "handles an empty yml key in config" do 172 | create_source_file("foo.py", <<-EOPY) 173 | def a(thing): 174 | print("Hello", thing) 175 | EOPY 176 | 177 | conf = CC::Engine::Analyzers::EngineConfig.new({ 178 | "config" => { 179 | "languages" => { 180 | "python" => "", 181 | }, 182 | }, 183 | }) 184 | 185 | expect(run_engine(engine_conf)).to eq("") 186 | end 187 | 188 | def engine_conf 189 | CC::Engine::Analyzers::EngineConfig.new({ 190 | "config" => { 191 | "languages" => { 192 | "python" => { 193 | "mass_threshold" => 5, 194 | }, 195 | }, 196 | }, 197 | }) 198 | end 199 | end 200 | -------------------------------------------------------------------------------- /spec/cc/engine/analyzers/scala/scala_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | require "cc/engine/analyzers/scala/main" 3 | require "cc/engine/analyzers/engine_config" 4 | 5 | module CC::Engine::Analyzers 6 | RSpec.describe Scala::Main, in_tmpdir: true do 7 | include AnalyzerSpecHelpers 
8 | 9 | describe "#run" do 10 | let(:engine_conf) { EngineConfig.new({}) } 11 | 12 | it "prints an issue for similar code" do 13 | create_source_file("foo.scala", <<-EOF) 14 | class Foo { 15 | def foo() { 16 | val anArray = new Array[Int](10) 17 | for (i <- 0 to 10) { 18 | anArray(i) = i 19 | } 20 | 21 | for (i <- 0 to 10) { 22 | println(anArray(i)) 23 | } 24 | 25 | println("") 26 | } 27 | 28 | def bar() { 29 | val anArray = new Array[Int](10) 30 | 31 | for (i <- 0 to 10) { 32 | anArray(i) = i 33 | } 34 | 35 | for (i <- 0 to 10) { 36 | println(anArray(i)) 37 | } 38 | 39 | println("") 40 | } 41 | } 42 | EOF 43 | 44 | issues = run_engine(engine_conf).strip.split("\0") 45 | 46 | result = issues.first.strip 47 | json = JSON.parse(result) 48 | 49 | expect(json["type"]).to eq("issue") 50 | expect(json["check_name"]).to eq("similar-code") 51 | expect(json["description"]).to eq("Similar blocks of code found in 2 locations. Consider refactoring.") 52 | expect(json["categories"]).to eq(["Duplication"]) 53 | expect(json["location"]).to eq({ 54 | "path" => "foo.scala", 55 | "lines" => { "begin" => 2, "end" => 13 }, 56 | }) 57 | expect(json["other_locations"]).to eq([ 58 | {"path" => "foo.scala", "lines" => { "begin" => 15, "end" => 27 } }, 59 | ]) 60 | expect(json["severity"]).to eq(CC::Engine::Analyzers::Base::MAJOR) 61 | end 62 | 63 | it "prints an issue for identical code" do 64 | create_source_file("foo.scala", <<-EOF) 65 | class Foo { 66 | def foo() { 67 | val anArray = new Array[Int](10) 68 | for (i <- 0 to 10) { 69 | anArray(i) = i 70 | } 71 | 72 | for (i <- 0 to 10) { 73 | println(anArray(i)) 74 | } 75 | 76 | println("") 77 | } 78 | 79 | def foo() { 80 | val anArray = new Array[Int](10) 81 | for (i <- 0 to 10) { 82 | anArray(i) = i 83 | } 84 | 85 | for (i <- 0 to 10) { 86 | println(anArray(i)) 87 | } 88 | 89 | println("") 90 | } 91 | } 92 | EOF 93 | 94 | issues = run_engine(engine_conf).strip.split("\0") 95 | result = issues.first.strip 96 | json = JSON.parse(result) 
97 | 98 | expect(json["type"]).to eq("issue") 99 | expect(json["check_name"]).to eq("identical-code") 100 | expect(json["description"]).to eq("Identical blocks of code found in 2 locations. Consider refactoring.") 101 | expect(json["categories"]).to eq(["Duplication"]) 102 | expect(json["location"]).to eq({ 103 | "path" => "foo.scala", 104 | "lines" => { "begin" => 2, "end" => 13 }, 105 | }) 106 | expect(json["other_locations"]).to eq([ 107 | {"path" => "foo.scala", "lines" => { "begin" => 15, "end" => 26 } }, 108 | ]) 109 | expect(json["severity"]).to eq(CC::Engine::Analyzers::Base::MAJOR) 110 | end 111 | 112 | it "outputs a warning for unprocessable errors" do 113 | create_source_file("foo.scala", <<-EOF) 114 | --- 115 | EOF 116 | 117 | expect(CC.logger).to receive(:warn).with(/Response status: 422/) 118 | expect(CC.logger).to receive(:warn).with(/Skipping/) 119 | run_engine(engine_conf) 120 | end 121 | 122 | it "ignores import and package declarations" do 123 | create_source_file("foo.scala", <<-EOF) 124 | package org.springframework.rules.constraint; 125 | 126 | import java.util.Comparator; 127 | 128 | import org.springframework.rules.constraint.Constraint; 129 | import org.springframework.rules.closure.BinaryConstraint; 130 | EOF 131 | 132 | create_source_file("bar.scala", <<-EOF) 133 | package org.springframework.rules.constraint; 134 | 135 | import java.util.Comparator; 136 | 137 | import org.springframework.rules.constraint.Constraint; 138 | import org.springframework.rules.closure.BinaryConstraint; 139 | EOF 140 | 141 | conf = CC::Engine::Analyzers::EngineConfig.new({ 142 | 'config' => { 143 | 'checks' => { 144 | 'similar-code' => { 'enabled' => true }, 145 | 'identical-code' => { 'enabled' => true }, 146 | }, 147 | 'languages' => { 148 | 'scala' => { 'mass_threshold' => 9 }, 149 | }, 150 | }, 151 | }) 152 | 153 | issues = run_engine(conf).strip.split("\0") 154 | expect(issues).to be_empty 155 | end 156 | 157 | it "prints an issue for similar code when the 
only difference is the value of a literal" do 158 | create_source_file("foo.sc", <<-EOF) 159 | class ArrayDemo { 160 | def foo() { 161 | val scott = arrayOfInt( 162 | 0x00000000, 0x77073096, 0xEE0E612C, 0x990951BA, 0x076DC419, 0x706AF48F 163 | ) 164 | 165 | val anArray: Array[Int] = Array(10) 166 | 167 | for (i <- 0 to 10) { 168 | anArray(i) = i 169 | } 170 | 171 | for (i <- 0 to 10) { 172 | println(anArray(i) + " ") 173 | } 174 | 175 | println() 176 | } 177 | 178 | def foo() { 179 | val scott = arrayOfInt( 180 | 0xF3B97148, 0x84BE41DE, 0x1ADAD47D, 0x6DDDE4EB, 0xF4D4B551, 0x83D385C7 181 | ) 182 | 183 | val anArray: Array[Int] = Array(10) 184 | 185 | for (i <- 0 to 10) { 186 | anArray(i) = i 187 | } 188 | 189 | for (i <- 0 to 10) { 190 | println(anArray(i) + " ") 191 | } 192 | 193 | println() 194 | } 195 | } 196 | EOF 197 | 198 | issues = run_engine(engine_conf).strip.split("\0") 199 | expect(issues.length).to be > 0 200 | result = issues.first.strip 201 | json = JSON.parse(result) 202 | 203 | expect(json["type"]).to eq("issue") 204 | expect(json["check_name"]).to eq("similar-code") 205 | 206 | expect(json["description"]).to eq("Similar blocks of code found in 2 locations. 
Consider refactoring.") 207 | expect(json["categories"]).to eq(["Duplication"]) 208 | expect(json["location"]).to eq({ 209 | "path" => "foo.sc", 210 | "lines" => { "begin" => 2, "end" => 18 }, 211 | }) 212 | expect(json["other_locations"]).to eq([ 213 | {"path" => "foo.sc", "lines" => { "begin" => 20, "end" => 36 } }, 214 | ]) 215 | expect(json["severity"]).to eq(CC::Engine::Analyzers::Base::MAJOR) 216 | end 217 | 218 | it "ignores comment docs and comments" do 219 | create_source_file("foo.sc", <<-EOF) 220 | /******************************************************************** 221 | * Copyright (C) 2017 by Max Lv 222 | *******************************************************************/ 223 | 224 | package com.github.shadowsocks.acl 225 | // Comment here 226 | 227 | import org.junit.Assert 228 | import org.junit.Test 229 | 230 | class AclTest { 231 | // Comment here 232 | companion object { 233 | private const val INPUT1 = """[proxy_all] 234 | [bypass_list] 235 | 1.0.1.0/24 236 | (^|\.)4tern\.com${'$'} 237 | """ 238 | } 239 | 240 | def parse() { 241 | Assert.assertEquals(INPUT1, Acl().fromReader(INPUT1.reader()).toString()); 242 | } 243 | } 244 | EOF 245 | 246 | create_source_file("bar.sc", <<-EOF) 247 | /********************************************************************* 248 | * Copyright (C) 2017 by Max Lv 249 | ********************************************************************/ 250 | 251 | package com.evernote.android.job 252 | // Comment here 253 | 254 | object JobConstants { 255 | // Comment here 256 | const val DATABASE_NAME = JobStorage.DATABASE_NAME 257 | const val PREF_FILE_NAME = JobStorage.PREF_FILE_NAME 258 | } 259 | EOF 260 | 261 | issues = run_engine(engine_conf).strip.split("\0") 262 | expect(issues).to be_empty 263 | end 264 | 265 | end 266 | end 267 | end 268 | -------------------------------------------------------------------------------- /spec/cc/engine/analyzers/sexp_lines_spec.rb: 
-------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | module CC::Engine::Analyzers 4 | RSpec.describe SexpLines do 5 | describe "violation location" do 6 | it "gets appropriate locations for rescue blocks" do 7 | source = <<-SOURCE 8 | begin 9 | foo 10 | rescue SyntaxError => e 11 | Jekyll.logger.warn "YAML Exception reading \#{File.join(base, name)}: \#{e.message}" 12 | rescue Exception => e 13 | Jekyll.logger.warn "Error reading file \#{File.join(base, name)}: \#{e.message}" 14 | end 15 | SOURCE 16 | 17 | locations = locations_from_source(source) 18 | 19 | expect(locations.count).to eq 2 20 | expect(locations[0].begin_line).to eq(3) 21 | expect(locations[0].end_line).to eq(4) 22 | expect(locations[1].begin_line).to eq(5) 23 | expect(locations[1].end_line).to eq(6) 24 | end 25 | 26 | it "gets appropriate locations for hashes" do 27 | source = <<-SOURCE 28 | { 29 | name: "Bear Vs. Shark", 30 | greatest: true, 31 | city: "Ferndale", 32 | state: "Michigan", 33 | email: "shark@bear.com", 34 | phone: "9145551234", 35 | } 36 | 37 | { 38 | name: "Bars of Gold", 39 | greatest: true, 40 | city: "Ferndale", 41 | state: "Michigan", 42 | email: "barsofgold@gmail.com", 43 | phone: "9145551234", 44 | } 45 | SOURCE 46 | 47 | locations = locations_from_source(source, mass: 1) 48 | 49 | expect(locations.count).to eq 2 50 | 51 | expect([locations[0].begin_line, locations[0].end_line]).to eq([1, 7]) 52 | expect([locations[1].begin_line, locations[1].end_line]).to eq([10, 16]) 53 | end 54 | end 55 | 56 | def locations_from_source(source, flay_opts = {}) 57 | flay = CCFlay.new({ 58 | diff: false, 59 | mass: 18, 60 | summary: false, 61 | verbose: false, 62 | number: true, 63 | timeout: 10, 64 | liberal: false, 65 | fuzzy: false, 66 | only: nil, 67 | }.merge(flay_opts)) 68 | 69 | sexp = RubyParser.new.process(source, "file.rb") 70 | flay.process_sexp(sexp) 71 | report = flay.analyze[0] or raise "No analysis" 72 | sexps = 
flay.hashes[report.structural_hash] 73 | sexps.map { |sexp| SexpLines.new(sexp) } 74 | end 75 | end 76 | end 77 | -------------------------------------------------------------------------------- /spec/cc/engine/analyzers/swift/main_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | require "cc/engine/analyzers/swift/main" 3 | require "cc/engine/analyzers/engine_config" 4 | 5 | RSpec.describe CC::Engine::Analyzers::Swift::Main, in_tmpdir: true do 6 | include AnalyzerSpecHelpers 7 | 8 | describe "#run" do 9 | it "prints an issue for identical code" do 10 | create_source_file("foo.swift", <<-EOSWIFT) 11 | if (x < 10 && false || true && false || true) { 12 | print("complex") 13 | } 14 | 15 | if (x < 10 && false || true && false || true) { 16 | print("complex") 17 | } 18 | 19 | if (x < 10 && false || true && false || true) { 20 | print("complex") 21 | } 22 | EOSWIFT 23 | 24 | issues = run_engine(engine_conf).strip.split("\0") 25 | result = issues.first.strip 26 | json = JSON.parse(result) 27 | 28 | expect(json["type"]).to eq("issue") 29 | expect(json["check_name"]).to eq("identical-code") 30 | expect(json["description"]).to eq("Identical blocks of code found in 3 locations. 
Consider refactoring.") 31 | expect(json["categories"]).to eq(["Duplication"]) 32 | expect(json["location"]).to eq({ 33 | "path" => "foo.swift", 34 | "lines" => { "begin" => 1, "end" => 3 }, 35 | }) 36 | expect(json["remediation_points"]).to eq(700_000) 37 | expect(json["other_locations"]).to eq([ 38 | { 39 | "path" => "foo.swift", 40 | "lines" => { "begin" => 5, "end" => 7 } 41 | }, 42 | { 43 | "path" => "foo.swift", 44 | "lines" => { "begin" => 9, "end" => 11 } 45 | }, 46 | ]) 47 | expect(json["content"]["body"]).to match(/This issue has a mass of 41/) 48 | expect(json.key?("fingerprint")).to eq(true) 49 | expect(json["severity"]).to eq(CC::Engine::Analyzers::Base::MAJOR) 50 | end 51 | 52 | it "prints an issue for similar code" do 53 | create_source_file("foo.swift", <<-EOSWIFT) 54 | if (x < 15 || false || true && false) { 55 | print("also complex") 56 | } 57 | 58 | if (x < 10 && false || true && false) { 59 | print("complex") 60 | } 61 | EOSWIFT 62 | 63 | issues = run_engine(engine_conf).strip.split("\0") 64 | result = issues.first.strip 65 | json = JSON.parse(result) 66 | 67 | expect(json["type"]).to eq("issue") 68 | expect(json["check_name"]).to eq("similar-code") 69 | expect(json["description"]).to eq("Similar blocks of code found in 2 locations. 
Consider refactoring.") 70 | expect(json["categories"]).to eq(["Duplication"]) 71 | expect(json["location"]).to eq({ 72 | "path" => "foo.swift", 73 | "lines" => { "begin" => 1, "end" => 3 }, 74 | }) 75 | expect(json["remediation_points"]).to eq(660_000) 76 | expect(json["other_locations"]).to eq([ 77 | { 78 | "path" => "foo.swift", 79 | "lines" => { "begin" => 5, "end" => 7 } 80 | }, 81 | ]) 82 | expect(json["content"]["body"]).to match(/This issue has a mass of 37/) 83 | expect(json.key?("fingerprint")).to eq(true) 84 | expect(json["severity"]).to eq(CC::Engine::Analyzers::Base::MAJOR) 85 | end 86 | 87 | it "skips unparsable files" do 88 | create_source_file("foo.swift", <<-EOTS) 89 | func() { // missing closing brace 90 | EOTS 91 | 92 | expect(CC.logger).to receive(:warn).with(/Skipping \.\/foo\.swift/) 93 | expect(CC.logger).to receive(:warn).with("Response status: 422") 94 | expect(run_engine(engine_conf)).to eq("") 95 | end 96 | 97 | it "does not flag duplicate comments" do 98 | create_source_file("foo.swift", <<-EOSWIFT) 99 | // A comment. 100 | // A comment. 101 | 102 | /* A comment. */ 103 | /* A comment. 
*/ 104 | EOSWIFT 105 | 106 | expect(run_engine(engine_conf)).to be_empty 107 | end 108 | 109 | it "ignores imports" do 110 | create_source_file("foo.swift", <<~EOTS) 111 | import Foundation 112 | import UIKit 113 | EOTS 114 | 115 | create_source_file("bar.swift", <<~EOTS) 116 | import Foundation 117 | import UIKit 118 | EOTS 119 | 120 | issues = run_engine(engine_conf).strip.split("\0") 121 | expect(issues).to be_empty 122 | end 123 | end 124 | 125 | def engine_conf 126 | CC::Engine::Analyzers::EngineConfig.new({ 127 | 'config' => { 128 | 'checks' => { 129 | 'similar-code' => { 130 | 'enabled' => true, 131 | }, 132 | 'identical-code' => { 133 | 'enabled' => true, 134 | }, 135 | }, 136 | 'languages' => { 137 | 'swift' => { 138 | 'mass_threshold' => 1, 139 | }, 140 | }, 141 | }, 142 | }) 143 | end 144 | end 145 | -------------------------------------------------------------------------------- /spec/cc/engine/analyzers/typescript/main_spec.rb: -------------------------------------------------------------------------------- 1 | require 'spec_helper' 2 | require 'cc/engine/analyzers/typescript/main' 3 | require 'cc/engine/analyzers/reporter' 4 | require 'cc/engine/analyzers/engine_config' 5 | 6 | RSpec.describe CC::Engine::Analyzers::TypeScript::Main, in_tmpdir: true do 7 | include AnalyzerSpecHelpers 8 | 9 | describe "#run" do 10 | it "prints an issue for identical code" do 11 | create_source_file("foo.ts", <<-EOTS) 12 | enum Direction { Up = "UP", Down = "DOWN", Left = "LEFT", Right = "RIGHT" } 13 | enum Direction { Up = "UP", Down = "DOWN", Left = "LEFT", Right = "RIGHT" } 14 | enum Direction { Up = "UP", Down = "DOWN", Left = "LEFT", Right = "RIGHT" } 15 | EOTS 16 | 17 | issues = run_engine(engine_conf).strip.split("\0") 18 | result = issues.first.strip 19 | json = JSON.parse(result) 20 | 21 | expect(json["type"]).to eq("issue") 22 | expect(json["check_name"]).to eq("identical-code") 23 | expect(json["description"]).to eq("Identical blocks of code found in 3 
locations. Consider refactoring.") 24 | expect(json["categories"]).to eq(["Duplication"]) 25 | expect(json["location"]).to eq({ 26 | "path" => "foo.ts", 27 | "lines" => { "begin" => 1, "end" => 1 }, 28 | }) 29 | expect(json["remediation_points"]).to eq(990_000) 30 | expect(json["other_locations"]).to eq([ 31 | {"path" => "foo.ts", "lines" => { "begin" => 2, "end" => 2} }, 32 | {"path" => "foo.ts", "lines" => { "begin" => 3, "end" => 3} }, 33 | ]) 34 | expect(json["content"]["body"]).to match(/This issue has a mass of 24/) 35 | expect(json["fingerprint"]).to eq("a53b767d2f602f832540ef667ca0618f") 36 | expect(json["severity"]).to eq(CC::Engine::Analyzers::Base::MAJOR) 37 | end 38 | 39 | it "prints an issue for similar code" do 40 | create_source_file("foo.ts", <<-EOTS) 41 | enum Direction { Up = "UP", Down = "DOWN", Left = "LEFT", Right = "RIGHT" } 42 | enum Direction { Up = "up", Down = "down", Left = "left", Right = "right" } 43 | enum Direction { up = "UP", down = "DOWN", left = "LEFT", right = "RIGHT" } 44 | EOTS 45 | 46 | issues = run_engine(engine_conf).strip.split("\0") 47 | result = issues.first.strip 48 | json = JSON.parse(result) 49 | 50 | expect(json["type"]).to eq("issue") 51 | expect(json["check_name"]).to eq("similar-code") 52 | expect(json["description"]).to eq("Similar blocks of code found in 3 locations. 
Consider refactoring.") 53 | expect(json["categories"]).to eq(["Duplication"]) 54 | expect(json["location"]).to eq({ 55 | "path" => "foo.ts", 56 | "lines" => { "begin" => 1, "end" => 1 }, 57 | }) 58 | expect(json["remediation_points"]).to eq(990_000) 59 | expect(json["other_locations"]).to eq([ 60 | {"path" => "foo.ts", "lines" => { "begin" => 2, "end" => 2} }, 61 | {"path" => "foo.ts", "lines" => { "begin" => 3, "end" => 3} }, 62 | ]) 63 | expect(json["content"]["body"]).to match(/This issue has a mass of 24/) 64 | expect(json["fingerprint"]).to eq("ede3452b637e0bc021541e6369b9362e") 65 | expect(json["severity"]).to eq(CC::Engine::Analyzers::Base::MAJOR) 66 | end 67 | 68 | it "handles ES6 spread params" do 69 | create_source_file("foo.tsx", <<-EOTS) 70 | const ThingClass = React.createClass({ 71 | propTypes: { 72 | ...OtherThing.propTypes, 73 | otherProp: "someVal" 74 | } 75 | }); 76 | EOTS 77 | 78 | expect(CC.logger).not_to receive(:info).with(/Skipping file/) 79 | run_engine(engine_conf) 80 | end 81 | 82 | it "skips unparsable files" do 83 | create_source_file("foo.ts", <<-EOTS) 84 | function () { do(); // missing closing brace 85 | EOTS 86 | 87 | expect(CC.logger).to receive(:warn).with(/Skipping \.\/foo\.ts/) 88 | expect(CC.logger).to receive(:warn).with("Response status: 422") 89 | expect(run_engine(engine_conf)).to eq("") 90 | end 91 | 92 | it "handles parser 500s" do 93 | create_source_file("foo.ts", <<-EOTS) 94 | EOTS 95 | 96 | error = CC::Parser::Client::HTTPError.new(500, "Error processing file: ./foo.ts") 97 | allow(CC::Parser).to receive(:parse).with("", "/typescript", filename: "./foo.ts").and_raise(error) 98 | 99 | expect(CC.logger).to receive(:error).with("Error processing file: ./foo.ts") 100 | expect(CC.logger).to receive(:error).with(error.message) 101 | 102 | expect { run_engine(engine_conf) }.to raise_error(error) 103 | end 104 | end 105 | 106 | it "does not flag duplicate comments" do 107 | create_source_file("foo.ts", <<-EOTS) 108 | // A 
comment. 109 | // A comment. 110 | 111 | /* A comment. */ 112 | /* A comment. */ 113 | EOTS 114 | 115 | expect(run_engine(engine_conf)).to be_empty 116 | end 117 | 118 | it "ignores imports" do 119 | create_source_file("foo.ts", <<~EOTS) 120 | import React, { Component, PropTypes } from 'react' 121 | import { Table, TableBody, TableHeader, TableHeaderColumn, TableRow } from 'material-ui/Table' 122 | import values from 'lodash/values' 123 | import { v4 } from 'uuid' 124 | EOTS 125 | 126 | create_source_file("bar.ts", <<~EOTS) 127 | import React, { Component, PropTypes } from 'react' 128 | import { Table, TableBody, TableHeader, TableHeaderColumn, TableRow } from 'material-ui/Table' 129 | import values from 'lodash/values' 130 | import { v4 } from 'uuid' 131 | EOTS 132 | 133 | issues = run_engine(engine_conf).strip.split("\0") 134 | expect(issues).to be_empty 135 | end 136 | 137 | it "ignores requires" do 138 | create_source_file("foo.ts", <<~EOTS) 139 | const a = require('foo'), 140 | b = require('bar'), 141 | c = require('baz'), 142 | d = require('bam'); 143 | print(a); 144 | EOTS 145 | 146 | create_source_file("bar.ts", <<~EOTS) 147 | const a = require('foo'), 148 | b = require('bar'), 149 | c = require('baz'), 150 | d = require('bam'); 151 | c * d; 152 | EOTS 153 | 154 | issues = run_engine(engine_conf 3).strip.split("\0") 155 | expect(issues).to be_empty 156 | end 157 | 158 | it "outputs the correct line numbers for ASTs missing line details (codeclimate/app#6227)" do 159 | create_source_file("foo.ts", <<~EOTS) 160 | `/movie?${getQueryString({ movie_id: movieId })}` 161 | EOTS 162 | 163 | create_source_file("bar.ts", <<~EOTS) 164 | var greeting = "hello"; 165 | 166 | `/movie?${getQueryString({ movie_id: movieId })}` 167 | EOTS 168 | 169 | issues = run_engine(engine_conf).strip.split("\0") 170 | expect(issues).to_not be_empty 171 | 172 | issues.map! 
{ |issue| JSON.parse(issue) } 173 | 174 | foo_issue = issues.detect { |issue| issue.fetch("location").fetch("path") == "foo.ts" } 175 | expect(foo_issue["location"]).to eq({ 176 | "path" => "foo.ts", 177 | "lines" => { "begin" => 1, "end" => 1 }, 178 | }) 179 | 180 | bar_issue = issues.detect { |issue| issue.fetch("location").fetch("path") == "bar.ts" } 181 | expect(bar_issue["location"]).to eq({ 182 | "path" => "bar.ts", 183 | "lines" => { "begin" => 3, "end" => 3 }, 184 | }) 185 | end 186 | 187 | it "supports TypeScript+React files" do 188 | create_source_file("foo.tsx", <<~EOTS) 189 | function ComponentFoo(prop: FooProp) { 190 | return ; 191 | } 192 | 193 | function ComponentFoo(prop: FooProp) { 194 | return ; 195 | } 196 | EOTS 197 | 198 | issues = run_engine(engine_conf).strip.split("\0") 199 | result = issues.first.strip 200 | json = JSON.parse(result) 201 | 202 | expect(json["type"]).to eq("issue") 203 | expect(json["check_name"]).to eq("similar-code") 204 | expect(json["location"]).to eq({ 205 | "path" => "foo.tsx", 206 | "lines" => { "begin" => 1, "end" => 3 }, 207 | }) 208 | expect(json["other_locations"]).to eq([ 209 | {"path" => "foo.tsx", "lines" => { "begin" => 5, "end" => 7 } } 210 | ]) 211 | expect(json["fingerprint"]).to eq("d8f0315c3c4e9ba81003a7ec6c823fb0") 212 | end 213 | 214 | def engine_conf mass = 1 215 | CC::Engine::Analyzers::EngineConfig.new({ 216 | 'config' => { 217 | 'checks' => { 218 | 'similar-code' => { 219 | 'enabled' => true, 220 | }, 221 | 'identical-code' => { 222 | 'enabled' => true, 223 | }, 224 | }, 225 | 'languages' => { 226 | 'typescript' => { 227 | 'mass_threshold' => mass, 228 | }, 229 | }, 230 | }, 231 | }) 232 | end 233 | end 234 | -------------------------------------------------------------------------------- /spec/cc/engine/analyzers/violation_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | require "cc/engine/analyzers/violation" 4 | 5 | module 
CC::Engine::Analyzers 6 | RSpec.describe Violation do 7 | describe "#format" do 8 | it "gives the correct path for paths with leading single char dir" do 9 | sexp1 = Sexp.new([:foo, :a]).tap do |s| 10 | s.line = 42 11 | s.file = "_/a.rb" 12 | end 13 | sexp2 = Sexp.new([:foo, :a]).tap do |s| 14 | s.line = 13 15 | s.file = "T/b.rb" 16 | end 17 | engine_config = EngineConfig.new({}) 18 | language_strategy = Ruby::Main.new( 19 | engine_config: engine_config, 20 | parse_metrics: nil, 21 | ) 22 | issue = described_class.new( 23 | language_strategy: language_strategy, 24 | identical: true, 25 | current_sexp: sexp1, 26 | other_sexps: [sexp2], 27 | ).format 28 | 29 | expect(issue[:location][:path]).to eq("_/a.rb") 30 | expect(issue[:other_locations][0][:path]).to eq("T/b.rb") 31 | end 32 | end 33 | end 34 | end 35 | -------------------------------------------------------------------------------- /spec/cc/engine/analyzers/violations_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | module CC::Engine::Analyzers 4 | RSpec.describe Violations do 5 | describe "#each" do 6 | let(:issue) { double(:issue, mass: 10, identical?: true) } 7 | let(:hashes) { sexps } 8 | let(:language_strategy) { double(:language_strategy, calculate_points: 30, calculate_severity: CC::Engine::Analyzers::Base::MINOR, use_sexp_lines?: true) } 9 | let(:violations) { [] } 10 | 11 | before do 12 | Violations.new(language_strategy, issue, hashes).each do |v| 13 | violations << v 14 | end 15 | end 16 | 17 | it "yields correct number of violations" do 18 | expect(violations.length).to eq(3) 19 | end 20 | 21 | it "yields violation objects with correct information" do 22 | first_formatted = violations[0].format 23 | second_formatted = violations[1].format 24 | third_formatted = violations[2].format 25 | 26 | expect(first_formatted[:type]).to eq("issue") 27 | expect(first_formatted[:check_name]).to eq("identical-code") 28 | 
expect(first_formatted[:description]).to eq("Identical blocks of code found in 3 locations. Consider refactoring.") 29 | expect(first_formatted[:categories]).to eq(["Duplication"]) 30 | expect(first_formatted[:remediation_points]).to eq(30) 31 | expect(first_formatted[:location]).to eq({:path=>"file.rb", :lines=>{:begin=>1, :end=>5}}) 32 | expect(first_formatted[:other_locations]).to eq([ 33 | { :path => "file.rb", :lines => { :begin => 9, :end => 13} }, 34 | { :path => "file.rb", :lines => { :begin => 17, :end => 21} }, 35 | ]) 36 | expect(first_formatted[:fingerprint]).to eq("64d2fe721009691194926b5534f2eaea") 37 | expect(first_formatted[:severity]).to eq(CC::Engine::Analyzers::Base::MINOR) 38 | 39 | expect(second_formatted[:location]).to eq({:path=>"file.rb", :lines=>{:begin=>9, :end=>13}}) 40 | expect(second_formatted[:other_locations]).to eq([ 41 | { :path => "file.rb", :lines => { :begin => 1, :end => 5} }, 42 | { :path => "file.rb", :lines => { :begin => 17, :end => 21} }, 43 | ]) 44 | 45 | expect(third_formatted[:location]).to eq({:path=>"file.rb", :lines=>{:begin=>17, :end=>21}}) 46 | expect(third_formatted[:other_locations]).to eq([ 47 | { :path => "file.rb", :lines => { :begin => 1, :end => 5} }, 48 | { :path => "file.rb", :lines => { :begin => 9, :end => 13} }, 49 | ]) 50 | end 51 | 52 | def sexps 53 | source = <<-SOURCE 54 | describe '#ruby?' do 55 | before { subject.type = 'ruby' } 56 | 57 | it 'returns true' do 58 | 10.times { |i| if i < 5; if i % 2 == 0; subject.increase_mass!; end; end }; expect(subject.ruby?).to be true 59 | end 60 | end 61 | 62 | describe '#js?' do 63 | before { subject.type = 'js' } 64 | 65 | it 'returns true' do 66 | 10.times { |i| if i < 5; if i % 2 == 0; subject.increase_mass!; end; end }; expect(subject.js?).to be true 67 | end 68 | end 69 | 70 | describe '#whaddup?' 
do 71 | before { subject.type = 'js' } 72 | 73 | it 'returns true' do 74 | 10.times { |i| if i < 5; if i % 2 == 0; subject.increase_mass!; end; end }; expect(subject.js?).to be true 75 | end 76 | end 77 | SOURCE 78 | 79 | flay = CCFlay.new({ 80 | diff: false, 81 | mass: CC::Engine::Analyzers::Ruby::Main::DEFAULT_MASS_THRESHOLD, 82 | summary: false, 83 | verbose: false, 84 | number: true, 85 | timeout: 10, 86 | liberal: false, 87 | fuzzy: false, 88 | only: nil, 89 | }) 90 | 91 | sexp = RubyParser.new.process(source, "file.rb") 92 | flay.process_sexp(sexp) 93 | report = flay.analyze[0] 94 | sexps = flay.hashes[report.structural_hash] 95 | end 96 | end 97 | end 98 | end 99 | -------------------------------------------------------------------------------- /spec/cc/engine/duplication_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | require "cc/engine/duplication" 3 | 4 | RSpec.describe(CC::Engine::Duplication) do 5 | include AnalyzerSpecHelpers 6 | 7 | describe "#run" do 8 | it "skips analysis when all duplication checks are disabled" do 9 | dir = "foo" 10 | config = { 11 | "config" => { 12 | "checks" => { 13 | "similar-code" => { 14 | "enabled" => false, 15 | }, 16 | "identical-code" => { 17 | "enabled" => false, 18 | }, 19 | }, 20 | }, 21 | } 22 | expect(Dir).to_not receive(:chdir) 23 | expect(CC::Engine::Analyzers::Reporter).to_not receive(:new) 24 | 25 | CC::Engine::Duplication.new( 26 | directory: dir, engine_config: config, io: double, 27 | ).run 28 | end 29 | 30 | it "emits parse metrics for HTTP parsed languages", in_tmpdir: true do 31 | create_source_file("foo.js", <<-EOJS) 32 | console.log("hello JS!"); 33 | EOJS 34 | 35 | stdout = StringIO.new 36 | 37 | CC::Engine::Duplication.new( 38 | directory: @code, engine_config: {}, io: stdout, 39 | ).run 40 | 41 | expect(stdout.string).not_to be_empty 42 | measurement = JSON.parse(stdout.string.strip) 43 | expect(measurement).to eq( 44 | "name" => 
"javascript.parse.succeeded", 45 | "type" => "measurement", 46 | "value" => 1, 47 | ) 48 | end 49 | end 50 | end 51 | -------------------------------------------------------------------------------- /spec/cc/engine/parse_metrics_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | 3 | require "cc/engine/parse_metrics" 4 | 5 | RSpec.describe CC::Engine::ParseMetrics do 6 | it "sends issues to stdout" do 7 | stdout = StringIO.new 8 | metrics = CC::Engine::ParseMetrics.new( 9 | language: "intercal", 10 | io: stdout, 11 | ) 12 | 13 | metrics.incr(:source_minified) 14 | metrics.incr(:parse_error) 15 | metrics.incr(:source_minified) 16 | 17 | metrics.report 18 | 19 | out_pieces = stdout.string.split("\0\n").map(&:strip) 20 | expect(out_pieces.count).to eq(2) 21 | 22 | expect(JSON.parse(out_pieces[0])).to eq({ 23 | "name" => "intercal.parse.source_minified", 24 | "type" => "measurement", 25 | "value" => 2, 26 | }) 27 | 28 | expect(JSON.parse(out_pieces[1])).to eq({ 29 | "name" => "intercal.parse.parse_error", 30 | "type" => "measurement", 31 | "value" => 1, 32 | }) 33 | end 34 | end 35 | -------------------------------------------------------------------------------- /spec/cc/engine/processed_source_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | require "cc/engine/processed_source" 3 | 4 | RSpec.describe CC::Engine::ProcessedSource, in_tmpdir: true do 5 | include AnalyzerSpecHelpers 6 | 7 | describe "#ast" do 8 | it "returns an AST" do 9 | 10 | create_source_file("foo.java", <<-EOF) 11 | public class Carousel { 12 | public int fav_num = 3; 13 | public int least_fav_num = 0x0000000; 14 | } 15 | EOF 16 | 17 | path = "foo.java" 18 | request_path = "/java" 19 | processed_source = described_class.new(path, request_path) 20 | ast = processed_source.ast 21 | 22 | expect(ast).to be_a CC::Parser::Node 23 | expect(ast.type).to 
eq("CompilationUnit") 24 | numbers = with_type("IntegerLiteralExpr", ast) 25 | expect(numbers.length).to eq(2) 26 | expect(numbers.first.properties.fetch("value")).to eq("3") 27 | expect(numbers.last.properties.fetch("value")).to eq("0x0000000") 28 | end 29 | 30 | def with_type(type, node) 31 | flattened = flatten(node) 32 | flattened.select { |child| child.type == type } 33 | end 34 | 35 | def flatten(node) 36 | ([node] + node.properties.fetch("body").map { |child| flatten(child) }).flatten 37 | end 38 | end 39 | end 40 | -------------------------------------------------------------------------------- /spec/cc/engine/sexp_builder_spec.rb: -------------------------------------------------------------------------------- 1 | require "spec_helper" 2 | require "cc/engine/sexp_builder" 3 | require "cc/parser" 4 | 5 | RSpec.describe(CC::Engine::SexpBuilder) do 6 | include AnalyzerSpecHelpers 7 | 8 | describe "#build" do 9 | it "converts a node to sexp with accurate location information" do 10 | node = CC::Parser.parse(<<-EOPHP, "/php") 11 | = 4 45 | new("A") 46 | elsif level >= 2 47 | new("E") 48 | elsif level >= 1 49 | new("I") 50 | elsif level >= 0 51 | new("O") 52 | else 53 | new("U") 54 | end 55 | end 56 | EORUBY 57 | 58 | node1 = CC::Parser.parse(<<-EORUBY, "/ruby") 59 | def self.from_foo(foo) 60 | if foo <= 20 61 | new("A") 62 | elsif foo <= 40 63 | new("E") 64 | elsif foo <= 80 65 | new("I") 66 | elsif foo <= 160 67 | new("O") 68 | else 69 | new("U") 70 | end 71 | end 72 | EORUBY 73 | 74 | sexp0 = described_class.new(node0, "foo0.rb").build 75 | sexp1 = described_class.new(node1, "foo1.rb").build 76 | expect(sexp0.deep_each.map(&:first)).to eq(sexp1.deep_each.map(&:first)) 77 | end 78 | 79 | it "correctly builds sexps with conditionals" do 80 | node = CC::Parser.parse(<<-EORUBY, "/ruby") 81 | def self.from_level(level) 82 | if level >= 4 83 | new("A") 84 | elsif level >= 2 85 | new("E") 86 | elsif level >= 1 87 | new("I") 88 | elsif level >= 0 89 | new("O") 90 | 
else 91 | new("U") 92 | end 93 | end 94 | EORUBY 95 | 96 | sexp = described_class.new(node, "file.rb").build 97 | 98 | defs, _, _, args, condition_body = *sexp 99 | _, if_condition = *condition_body 100 | 101 | expect(sexp.line).to eq(1) 102 | expect(sexp.end_line).to eq(13) 103 | expect(if_condition.line).to eq(2) 104 | expect(if_condition.end_line).to eq(12) 105 | expect([*if_condition].map {|sexp| (sexp.is_a? Symbol) ? sexp : sexp.first }). 106 | to eq([:if, :condition, :then, :else]) 107 | end 108 | end 109 | end 110 | -------------------------------------------------------------------------------- /spec/fixtures/empty_file.js: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/codeclimate/codeclimate-duplication/bc2c85cd3c10c2c4b99b186e5a5a4ad53ce840e9/spec/fixtures/empty_file.js -------------------------------------------------------------------------------- /spec/fixtures/from_phan_php7.php: -------------------------------------------------------------------------------- 1 | kind); 76 | } 77 | 78 | /** 79 | * @param string|Node|null $node 80 | * An AST node 81 | * 82 | * @param int $indent 83 | * The indentation level for the string 84 | * 85 | * @return string 86 | * A string representation of an AST node 87 | */ 88 | public static function nodeToString( 89 | $node, 90 | $name = null, 91 | int $indent = 0 92 | ) : string { 93 | $string = str_repeat("\t", $indent); 94 | 95 | if ($name !== null) { 96 | $string .= "$name => "; 97 | } 98 | 99 | if (is_string($node)) { 100 | return $string . $node . "\n"; 101 | } 102 | 103 | if (!$node) { 104 | return $string . 'null' . "\n"; 105 | } 106 | 107 | if (!is_object($node)) { 108 | return $string . $node . "\n"; 109 | } 110 | 111 | $string .= \ast\get_kind_name($node->kind); 112 | 113 | $string .= ' [' 114 | . self::astFlagDescription($node->flags ?? 0) 115 | . ']'; 116 | 117 | if (isset($node->lineno)) { 118 | $string .= ' #' . 
$node->lineno; 119 | } 120 | 121 | if ($node instanceof Decl) { 122 | if (isset($node->endLineno)) { 123 | $string .= ':' . $node->endLineno; 124 | } 125 | } 126 | 127 | if (isset($node->name)) { 128 | $string .= ' name:' . $node->name; 129 | } 130 | 131 | $string .= "\n"; 132 | 133 | foreach ($node->children ?? [] as $name => $child_node) { 134 | $string .= self::nodeToString( 135 | $child_node, 136 | $name, 137 | $indent + 1 138 | ); 139 | } 140 | 141 | return $string; 142 | } 143 | 144 | /** 145 | * @return string 146 | * Get a string representation of AST node flags such as 147 | * 'ASSIGN_DIV|TYPE_ARRAY' 148 | */ 149 | public static function astFlagDescription(int $flag) : string 150 | { 151 | $flag_names = []; 152 | foreach (self::$AST_FLAG_ID_NAME_MAP as $id => $name) { 153 | if ($flag == $id) { 154 | $flag_names[] = $name; 155 | } 156 | } 157 | 158 | return implode('|', $flag_names); 159 | } 160 | 161 | /** 162 | * @return string 163 | * Pretty-printer for debug_backtrace 164 | * 165 | * @suppress PhanUnreferencedMethod 166 | */ 167 | public static function backtrace(int $levels = 0) 168 | { 169 | $bt = debug_backtrace(DEBUG_BACKTRACE_IGNORE_ARGS, $levels+1); 170 | foreach ($bt as $level => $context) { 171 | if (!$level) { 172 | continue; 173 | } 174 | echo "#".($level-1)." {$context['file']}:{$context['line']} {$context['class']} "; 175 | if (!empty($context['type'])) { 176 | echo $context['class'].$context['type']; 177 | } 178 | echo $context['function']; 179 | echo "\n"; 180 | } 181 | } 182 | 183 | /** 184 | * Note that flag IDs are not unique. You're likely going to get 185 | * an incorrect name back from this. So sorry. 
186 | * 187 | * @suppress PhanUnreferencedProperty 188 | */ 189 | private static $AST_FLAG_ID_NAME_MAP = [ 190 | \ast\flags\ASSIGN_ADD => 'ASSIGN_ADD', 191 | \ast\flags\ASSIGN_BITWISE_AND => 'ASSIGN_BITWISE_AND', 192 | \ast\flags\ASSIGN_BITWISE_OR => 'ASSIGN_BITWISE_OR', 193 | \ast\flags\ASSIGN_BITWISE_XOR => 'ASSIGN_BITWISE_XOR', 194 | \ast\flags\ASSIGN_CONCAT => 'ASSIGN_CONCAT', 195 | \ast\flags\ASSIGN_DIV => 'ASSIGN_DIV', 196 | \ast\flags\ASSIGN_MOD => 'ASSIGN_MOD', 197 | \ast\flags\ASSIGN_MUL => 'ASSIGN_MUL', 198 | \ast\flags\ASSIGN_POW => 'ASSIGN_POW', 199 | \ast\flags\ASSIGN_SHIFT_LEFT => 'ASSIGN_SHIFT_LEFT', 200 | \ast\flags\ASSIGN_SHIFT_RIGHT => 'ASSIGN_SHIFT_RIGHT', 201 | \ast\flags\ASSIGN_SUB => 'ASSIGN_SUB', 202 | \ast\flags\BINARY_ADD => 'BINARY_ADD', 203 | \ast\flags\BINARY_BITWISE_AND => 'BINARY_BITWISE_AND', 204 | \ast\flags\BINARY_BITWISE_OR => 'BINARY_BITWISE_OR', 205 | \ast\flags\BINARY_BITWISE_XOR => 'BINARY_BITWISE_XOR', 206 | \ast\flags\BINARY_BOOL_XOR => 'BINARY_BOOL_XOR', 207 | \ast\flags\BINARY_CONCAT => 'BINARY_CONCAT', 208 | \ast\flags\BINARY_DIV => 'BINARY_DIV', 209 | \ast\flags\BINARY_IS_EQUAL => 'BINARY_IS_EQUAL', 210 | \ast\flags\BINARY_IS_IDENTICAL => 'BINARY_IS_IDENTICAL', 211 | \ast\flags\BINARY_IS_NOT_EQUAL => 'BINARY_IS_NOT_EQUAL', 212 | \ast\flags\BINARY_IS_NOT_IDENTICAL => 'BINARY_IS_NOT_IDENTICAL', 213 | \ast\flags\BINARY_IS_SMALLER => 'BINARY_IS_SMALLER', 214 | \ast\flags\BINARY_IS_SMALLER_OR_EQUAL => 'BINARY_IS_SMALLER_OR_EQUAL', 215 | \ast\flags\BINARY_MOD => 'BINARY_MOD', 216 | \ast\flags\BINARY_MUL => 'BINARY_MUL', 217 | \ast\flags\BINARY_POW => 'BINARY_POW', 218 | \ast\flags\BINARY_SHIFT_LEFT => 'BINARY_SHIFT_LEFT', 219 | \ast\flags\BINARY_SHIFT_RIGHT => 'BINARY_SHIFT_RIGHT', 220 | \ast\flags\BINARY_SPACESHIP => 'BINARY_SPACESHIP', 221 | \ast\flags\BINARY_SUB => 'BINARY_SUB', 222 | \ast\flags\CLASS_ABSTRACT => 'CLASS_ABSTRACT', 223 | \ast\flags\CLASS_FINAL => 'CLASS_FINAL', 224 | \ast\flags\CLASS_INTERFACE => 
'CLASS_INTERFACE', 225 | \ast\flags\CLASS_TRAIT => 'CLASS_TRAIT', 226 | \ast\flags\MODIFIER_ABSTRACT => 'MODIFIER_ABSTRACT', 227 | \ast\flags\MODIFIER_FINAL => 'MODIFIER_FINAL', 228 | \ast\flags\MODIFIER_PRIVATE => 'MODIFIER_PRIVATE', 229 | \ast\flags\MODIFIER_PROTECTED => 'MODIFIER_PROTECTED', 230 | \ast\flags\MODIFIER_PUBLIC => 'MODIFIER_PUBLIC', 231 | \ast\flags\MODIFIER_STATIC => 'MODIFIER_STATIC', 232 | \ast\flags\NAME_FQ => 'NAME_FQ', 233 | \ast\flags\NAME_NOT_FQ => 'NAME_NOT_FQ', 234 | \ast\flags\NAME_RELATIVE => 'NAME_RELATIVE', 235 | \ast\flags\PARAM_REF => 'PARAM_REF', 236 | \ast\flags\PARAM_VARIADIC => 'PARAM_VARIADIC', 237 | \ast\flags\RETURNS_REF => 'RETURNS_REF', 238 | \ast\flags\TYPE_ARRAY => 'TYPE_ARRAY', 239 | \ast\flags\TYPE_BOOL => 'TYPE_BOOL', 240 | \ast\flags\TYPE_CALLABLE => 'TYPE_CALLABLE', 241 | \ast\flags\TYPE_DOUBLE => 'TYPE_DOUBLE', 242 | \ast\flags\TYPE_LONG => 'TYPE_LONG', 243 | \ast\flags\TYPE_NULL => 'TYPE_NULL', 244 | \ast\flags\TYPE_OBJECT => 'TYPE_OBJECT', 245 | \ast\flags\TYPE_STRING => 'TYPE_STRING', 246 | \ast\flags\UNARY_BITWISE_NOT => 'UNARY_BITWISE_NOT', 247 | \ast\flags\UNARY_BOOL_NOT => 'UNARY_BOOL_NOT', 248 | \ast\flags\BINARY_BOOL_AND => 'BINARY_BOOL_AND', 249 | \ast\flags\BINARY_BOOL_OR => 'BINARY_BOOL_OR', 250 | \ast\flags\BINARY_IS_GREATER => 'BINARY_IS_GREATER', 251 | \ast\flags\BINARY_IS_GREATER_OR_EQUAL => 'BINARY_IS_GREATER_OR_EQUAL', 252 | \ast\flags\CLASS_ANONYMOUS => 'CLASS_ANONYMOUS', 253 | \ast\flags\EXEC_EVAL => 'EXEC_EVAL', 254 | \ast\flags\EXEC_INCLUDE => 'EXEC_INCLUDE', 255 | \ast\flags\EXEC_INCLUDE_ONCE => 'EXEC_INCLUDE_ONCE', 256 | \ast\flags\EXEC_REQUIRE => 'EXEC_REQUIRE', 257 | \ast\flags\EXEC_REQUIRE_ONCE => 'EXEC_REQUIRE_ONCE', 258 | \ast\flags\MAGIC_CLASS => 'MAGIC_CLASS', 259 | \ast\flags\MAGIC_DIR => 'MAGIC_DIR', 260 | \ast\flags\MAGIC_FILE => 'MAGIC_FILE', 261 | \ast\flags\MAGIC_FUNCTION => 'MAGIC_FUNCTION', 262 | \ast\flags\MAGIC_LINE => 'MAGIC_LINE', 263 | \ast\flags\MAGIC_METHOD => 
'MAGIC_METHOD', 264 | \ast\flags\MAGIC_NAMESPACE => 'MAGIC_NAMESPACE', 265 | \ast\flags\MAGIC_TRAIT => 'MAGIC_TRAIT', 266 | \ast\flags\UNARY_MINUS => 'UNARY_MINUS', 267 | \ast\flags\UNARY_PLUS => 'UNARY_PLUS', 268 | \ast\flags\UNARY_SILENCE => 'UNARY_SILENCE', 269 | \ast\flags\USE_CONST => 'USE_CONST', 270 | \ast\flags\USE_FUNCTION => 'USE_FUNCTION', 271 | \ast\flags\USE_NORMAL => 'USE_NORMAL', 272 | ]; 273 | } 274 | -------------------------------------------------------------------------------- /spec/fixtures/issue_6609_1.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "encoding/json" 5 | fmtlog "log" 6 | "net/http" 7 | "os" 8 | "path/filepath" 9 | "reflect" 10 | "strings" 11 | "time" 12 | 13 | "github.com/Sirupsen/logrus" 14 | "github.com/cenk/backoff" 15 | "github.com/containous/flaeg" 16 | "github.com/containous/staert" 17 | "github.com/containous/traefik/acme" 18 | "github.com/containous/traefik/collector" 19 | "github.com/containous/traefik/configuration" 20 | "github.com/containous/traefik/job" 21 | "github.com/containous/traefik/log" 22 | "github.com/containous/traefik/provider/ecs" 23 | "github.com/containous/traefik/provider/kubernetes" 24 | "github.com/containous/traefik/safe" 25 | "github.com/containous/traefik/server" 26 | "github.com/containous/traefik/server/uuid" 27 | traefikTls "github.com/containous/traefik/tls" 28 | "github.com/containous/traefik/types" 29 | "github.com/containous/traefik/version" 30 | "github.com/coreos/go-systemd/daemon" 31 | ) 32 | 33 | func main() { 34 | //traefik config inits 35 | traefikConfiguration := NewTraefikConfiguration() 36 | traefikPointersConfiguration := NewTraefikDefaultPointersConfiguration() 37 | //traefik Command init 38 | traefikCmd := &flaeg.Command{ 39 | Name: "traefik", 40 | Description: `traefik is a modern HTTP reverse proxy and load balancer made to deploy microservices with ease. 
41 | Complete documentation is available at https://traefik.io`, 42 | Config: traefikConfiguration, 43 | DefaultPointersConfig: traefikPointersConfiguration, 44 | Run: func() error { 45 | run(&traefikConfiguration.GlobalConfiguration, traefikConfiguration.ConfigFile) 46 | return nil 47 | }, 48 | } 49 | 50 | //storeconfig Command init 51 | storeConfigCmd := newStoreConfigCmd(traefikConfiguration, traefikPointersConfiguration) 52 | 53 | //init flaeg source 54 | f := flaeg.New(traefikCmd, os.Args[1:]) 55 | //add custom parsers 56 | f.AddParser(reflect.TypeOf(configuration.EntryPoints{}), &configuration.EntryPoints{}) 57 | f.AddParser(reflect.TypeOf(configuration.DefaultEntryPoints{}), &configuration.DefaultEntryPoints{}) 58 | f.AddParser(reflect.TypeOf(traefikTls.RootCAs{}), &traefikTls.RootCAs{}) 59 | f.AddParser(reflect.TypeOf(types.Constraints{}), &types.Constraints{}) 60 | f.AddParser(reflect.TypeOf(kubernetes.Namespaces{}), &kubernetes.Namespaces{}) 61 | f.AddParser(reflect.TypeOf(ecs.Clusters{}), &ecs.Clusters{}) 62 | f.AddParser(reflect.TypeOf([]acme.Domain{}), &acme.Domains{}) 63 | f.AddParser(reflect.TypeOf(types.Buckets{}), &types.Buckets{}) 64 | 65 | //add commands 66 | f.AddCommand(newVersionCmd()) 67 | f.AddCommand(newBugCmd(traefikConfiguration, traefikPointersConfiguration)) 68 | f.AddCommand(storeConfigCmd) 69 | f.AddCommand(newHealthCheckCmd(traefikConfiguration, traefikPointersConfiguration)) 70 | 71 | usedCmd, err := f.GetCommand() 72 | if err != nil { 73 | fmtlog.Println(err) 74 | os.Exit(-1) 75 | } 76 | 77 | if _, err := f.Parse(usedCmd); err != nil { 78 | fmtlog.Printf("Error parsing command: %s\n", err) 79 | os.Exit(-1) 80 | } 81 | 82 | //staert init 83 | s := staert.NewStaert(traefikCmd) 84 | //init toml source 85 | toml := staert.NewTomlSource("traefik", []string{traefikConfiguration.ConfigFile, "/etc/traefik/", "$HOME/.traefik/", "."}) 86 | 87 | //add sources to staert 88 | s.AddSource(toml) 89 | s.AddSource(f) 90 | if _, err := 
s.LoadConfig(); err != nil { 91 | fmtlog.Printf("Error reading TOML config file %s : %s\n", toml.ConfigFileUsed(), err) 92 | os.Exit(-1) 93 | } 94 | 95 | traefikConfiguration.ConfigFile = toml.ConfigFileUsed() 96 | 97 | kv, err := createKvSource(traefikConfiguration) 98 | if err != nil { 99 | fmtlog.Printf("Error creating kv store: %s\n", err) 100 | os.Exit(-1) 101 | } 102 | storeConfigCmd.Run = runStoreConfig(kv, traefikConfiguration) 103 | 104 | // IF a KV Store is enable and no sub-command called in args 105 | if kv != nil && usedCmd == traefikCmd { 106 | if traefikConfiguration.Cluster == nil { 107 | traefikConfiguration.Cluster = &types.Cluster{Node: uuid.Get()} 108 | } 109 | if traefikConfiguration.Cluster.Store == nil { 110 | traefikConfiguration.Cluster.Store = &types.Store{Prefix: kv.Prefix, Store: kv.Store} 111 | } 112 | s.AddSource(kv) 113 | operation := func() error { 114 | _, err := s.LoadConfig() 115 | return err 116 | } 117 | notify := func(err error, time time.Duration) { 118 | log.Errorf("Load config error: %+v, retrying in %s", err, time) 119 | } 120 | err := backoff.RetryNotify(safe.OperationWithRecover(operation), job.NewBackOff(backoff.NewExponentialBackOff()), notify) 121 | if err != nil { 122 | fmtlog.Printf("Error loading configuration: %s\n", err) 123 | os.Exit(-1) 124 | } 125 | } 126 | 127 | if err := s.Run(); err != nil { 128 | fmtlog.Printf("Error running traefik: %s\n", err) 129 | os.Exit(-1) 130 | } 131 | 132 | os.Exit(0) 133 | } 134 | 135 | func run(globalConfiguration *configuration.GlobalConfiguration, configFile string) { 136 | configureLogging(globalConfiguration) 137 | 138 | if len(configFile) > 0 { 139 | log.Infof("Using TOML configuration file %s", configFile) 140 | } 141 | 142 | http.DefaultTransport.(*http.Transport).Proxy = http.ProxyFromEnvironment 143 | 144 | globalConfiguration.SetEffectiveConfiguration(configFile) 145 | 146 | jsonConf, _ := json.Marshal(globalConfiguration) 147 | log.Infof("Traefik version %s built on 
%s", version.Version, version.BuildDate) 148 | 149 | if globalConfiguration.CheckNewVersion { 150 | checkNewVersion() 151 | } 152 | 153 | stats(globalConfiguration) 154 | 155 | log.Debugf("Global configuration loaded %s", string(jsonConf)) 156 | svr := server.NewServer(*globalConfiguration) 157 | svr.Start() 158 | defer svr.Close() 159 | 160 | sent, err := daemon.SdNotify(false, "READY=1") 161 | if !sent && err != nil { 162 | log.Error("Fail to notify", err) 163 | } 164 | 165 | t, err := daemon.SdWatchdogEnabled(false) 166 | if err != nil { 167 | log.Error("Problem with watchdog", err) 168 | } else if t != 0 { 169 | // Send a ping each half time given 170 | t = t / 2 171 | log.Info("Watchdog activated with timer each ", t) 172 | safe.Go(func() { 173 | tick := time.Tick(t) 174 | for range tick { 175 | _, errHealthCheck := healthCheck(*globalConfiguration) 176 | if globalConfiguration.Ping == nil || errHealthCheck == nil { 177 | if ok, _ := daemon.SdNotify(false, "WATCHDOG=1"); !ok { 178 | log.Error("Fail to tick watchdog") 179 | } 180 | } else { 181 | log.Error(errHealthCheck) 182 | } 183 | } 184 | }) 185 | } 186 | 187 | svr.Wait() 188 | log.Info("Shutting down") 189 | logrus.Exit(0) 190 | } 191 | 192 | func configureLogging(globalConfiguration *configuration.GlobalConfiguration) { 193 | // configure default log flags 194 | fmtlog.SetFlags(fmtlog.Lshortfile | fmtlog.LstdFlags) 195 | 196 | if globalConfiguration.Debug { 197 | globalConfiguration.LogLevel = "DEBUG" 198 | } 199 | 200 | // configure log level 201 | level, err := logrus.ParseLevel(strings.ToLower(globalConfiguration.LogLevel)) 202 | if err != nil { 203 | log.Error("Error getting level", err) 204 | } 205 | log.SetLevel(level) 206 | 207 | // configure log output file 208 | logFile := globalConfiguration.TraefikLogsFile 209 | if len(logFile) > 0 { 210 | log.Warn("top-level traefikLogsFile has been deprecated -- please use traefiklog.filepath") 211 | } 212 | if globalConfiguration.TraefikLog != nil && 
len(globalConfiguration.TraefikLog.FilePath) > 0 { 213 | logFile = globalConfiguration.TraefikLog.FilePath 214 | } 215 | 216 | // configure log format 217 | var formatter logrus.Formatter 218 | if globalConfiguration.TraefikLog != nil && globalConfiguration.TraefikLog.Format == "json" { 219 | formatter = &logrus.JSONFormatter{} 220 | } else { 221 | disableColors := false 222 | if len(logFile) > 0 { 223 | disableColors = true 224 | } 225 | formatter = &logrus.TextFormatter{DisableColors: disableColors, FullTimestamp: true, DisableSorting: true} 226 | } 227 | log.SetFormatter(formatter) 228 | 229 | if len(logFile) > 0 { 230 | dir := filepath.Dir(logFile) 231 | 232 | err := os.MkdirAll(dir, 0755) 233 | if err != nil { 234 | log.Errorf("Failed to create log path %s: %s", dir, err) 235 | } 236 | 237 | err = log.OpenFile(logFile) 238 | logrus.RegisterExitHandler(func() { 239 | if err := log.CloseFile(); err != nil { 240 | log.Error("Error closing log", err) 241 | } 242 | }) 243 | if err != nil { 244 | log.Error("Error opening file", err) 245 | } 246 | } 247 | } 248 | 249 | func checkNewVersion() { 250 | ticker := time.Tick(24 * time.Hour) 251 | safe.Go(func() { 252 | for time.Sleep(10 * time.Minute); ; <-ticker { 253 | version.CheckNewVersion() 254 | } 255 | }) 256 | } 257 | 258 | func stats(globalConfiguration *configuration.GlobalConfiguration) { 259 | if globalConfiguration.SendAnonymousUsage { 260 | log.Info(` 261 | Stats collection is enabled. 262 | Many thanks for contributing to Traefik's improvement by allowing us to receive anonymous information from your configuration. 263 | Help us improve Traefik by leaving this feature on :) 264 | More details on: https://docs.traefik.io/basic/#collected-data 265 | `) 266 | collect(globalConfiguration) 267 | } else { 268 | log.Info(` 269 | Stats collection is disabled. 
270 | Help us improve Traefik by turning this feature on :) 271 | More details on: https://docs.traefik.io/basic/#collected-data 272 | `) 273 | } 274 | } 275 | 276 | func collect(globalConfiguration *configuration.GlobalConfiguration) { 277 | ticker := time.Tick(24 * time.Hour) 278 | safe.Go(func() { 279 | for time.Sleep(10 * time.Minute); ; <-ticker { 280 | if err := collector.Collect(globalConfiguration); err != nil { 281 | log.Debug(err) 282 | } 283 | } 284 | }) 285 | } 286 | -------------------------------------------------------------------------------- /spec/fixtures/issue_6609_2.go: -------------------------------------------------------------------------------- 1 | package rancher 2 | 3 | import ( 4 | "context" 5 | "os" 6 | "time" 7 | 8 | "github.com/cenk/backoff" 9 | "github.com/containous/traefik/job" 10 | "github.com/containous/traefik/log" 11 | "github.com/containous/traefik/safe" 12 | "github.com/containous/traefik/types" 13 | "github.com/mitchellh/mapstructure" 14 | rancher "github.com/rancher/go-rancher/v2" 15 | ) 16 | 17 | const ( 18 | labelRancherStackServiceName = "io.rancher.stack_service.name" 19 | hostNetwork = "host" 20 | ) 21 | 22 | var withoutPagination *rancher.ListOpts 23 | 24 | // APIConfiguration contains configuration properties specific to the Rancher 25 | // API provider. 
26 | type APIConfiguration struct { 27 | Endpoint string `description:"Rancher server API HTTP(S) endpoint"` 28 | AccessKey string `description:"Rancher server API access key"` 29 | SecretKey string `description:"Rancher server API secret key"` 30 | } 31 | 32 | func init() { 33 | withoutPagination = &rancher.ListOpts{ 34 | Filters: map[string]interface{}{"limit": 0}, 35 | } 36 | } 37 | 38 | func (p *Provider) createClient() (*rancher.RancherClient, error) { 39 | rancherURL := getenv("CATTLE_URL", p.API.Endpoint) 40 | accessKey := getenv("CATTLE_ACCESS_KEY", p.API.AccessKey) 41 | secretKey := getenv("CATTLE_SECRET_KEY", p.API.SecretKey) 42 | 43 | return rancher.NewRancherClient(&rancher.ClientOpts{ 44 | Url: rancherURL, 45 | AccessKey: accessKey, 46 | SecretKey: secretKey, 47 | }) 48 | } 49 | 50 | func getenv(key, fallback string) string { 51 | value := os.Getenv(key) 52 | if len(value) == 0 { 53 | return fallback 54 | } 55 | return value 56 | } 57 | 58 | func (p *Provider) apiProvide(configurationChan chan<- types.ConfigMessage, pool *safe.Pool, constraints types.Constraints) error { 59 | p.Constraints = append(p.Constraints, constraints...) 
60 | 61 | if p.API == nil { 62 | p.API = &APIConfiguration{} 63 | } 64 | 65 | safe.Go(func() { 66 | operation := func() error { 67 | rancherClient, err := p.createClient() 68 | 69 | if err != nil { 70 | log.Errorf("Failed to create a client for rancher, error: %s", err) 71 | return err 72 | } 73 | 74 | ctx := context.Background() 75 | var stacks = listRancherStacks(rancherClient) 76 | var services = listRancherServices(rancherClient) 77 | var container = listRancherContainer(rancherClient) 78 | 79 | var rancherData = parseAPISourcedRancherData(stacks, services, container) 80 | 81 | configuration := p.buildConfiguration(rancherData) 82 | configurationChan <- types.ConfigMessage{ 83 | ProviderName: "rancher", 84 | Configuration: configuration, 85 | } 86 | 87 | if p.Watch { 88 | _, cancel := context.WithCancel(ctx) 89 | ticker := time.NewTicker(time.Second * time.Duration(p.RefreshSeconds)) 90 | pool.Go(func(stop chan bool) { 91 | for { 92 | select { 93 | case <-ticker.C: 94 | 95 | log.Debugf("Refreshing new Data from Provider API") 96 | var stacks = listRancherStacks(rancherClient) 97 | var services = listRancherServices(rancherClient) 98 | var container = listRancherContainer(rancherClient) 99 | 100 | rancherData := parseAPISourcedRancherData(stacks, services, container) 101 | 102 | configuration := p.buildConfiguration(rancherData) 103 | if configuration != nil { 104 | configurationChan <- types.ConfigMessage{ 105 | ProviderName: "rancher", 106 | Configuration: configuration, 107 | } 108 | } 109 | case <-stop: 110 | ticker.Stop() 111 | cancel() 112 | return 113 | } 114 | } 115 | }) 116 | } 117 | 118 | return nil 119 | } 120 | notify := func(err error, time time.Duration) { 121 | log.Errorf("Provider connection error %+v, retrying in %s", err, time) 122 | } 123 | err := backoff.RetryNotify(operation, job.NewBackOff(backoff.NewExponentialBackOff()), notify) 124 | if err != nil { 125 | log.Errorf("Cannot connect to Provider Endpoint %+v", err) 126 | } 127 | }) 128 | 
129 | return nil 130 | } 131 | 132 | func listRancherStacks(client *rancher.RancherClient) []*rancher.Stack { 133 | 134 | var stackList []*rancher.Stack 135 | 136 | stacks, err := client.Stack.List(withoutPagination) 137 | 138 | if err != nil { 139 | log.Errorf("Cannot get Provider Stacks %+v", err) 140 | } 141 | 142 | for k := range stacks.Data { 143 | stackList = append(stackList, &stacks.Data[k]) 144 | } 145 | 146 | return stackList 147 | } 148 | 149 | func listRancherServices(client *rancher.RancherClient) []*rancher.Service { 150 | 151 | var servicesList []*rancher.Service 152 | 153 | services, err := client.Service.List(withoutPagination) 154 | 155 | if err != nil { 156 | log.Errorf("Cannot get Provider Services %+v", err) 157 | } 158 | 159 | for k := range services.Data { 160 | servicesList = append(servicesList, &services.Data[k]) 161 | } 162 | 163 | return servicesList 164 | } 165 | 166 | func listRancherContainer(client *rancher.RancherClient) []*rancher.Container { 167 | 168 | var containerList []*rancher.Container 169 | 170 | container, err := client.Container.List(withoutPagination) 171 | 172 | if err != nil { 173 | log.Errorf("Cannot get Provider Services %+v", err) 174 | } 175 | 176 | valid := true 177 | 178 | for valid { 179 | for k := range container.Data { 180 | containerList = append(containerList, &container.Data[k]) 181 | } 182 | 183 | container, err = container.Next() 184 | 185 | if err != nil { 186 | break 187 | } 188 | 189 | if container == nil || len(container.Data) == 0 { 190 | valid = false 191 | } 192 | } 193 | 194 | return containerList 195 | } 196 | 197 | func parseAPISourcedRancherData(stacks []*rancher.Stack, services []*rancher.Service, containers []*rancher.Container) []rancherData { 198 | var rancherDataList []rancherData 199 | 200 | for _, stack := range stacks { 201 | 202 | for _, service := range services { 203 | 204 | if service.StackId != stack.Id { 205 | continue 206 | } 207 | 208 | rData := rancherData{ 209 | Name: 
service.Name + "/" + stack.Name, 210 | Health: service.HealthState, 211 | State: service.State, 212 | Labels: make(map[string]string), 213 | Containers: []string{}, 214 | } 215 | 216 | if service.LaunchConfig == nil || service.LaunchConfig.Labels == nil { 217 | log.Warnf("Rancher Service Labels are missing. Stack: %s, service: %s", stack.Name, service.Name) 218 | } else { 219 | for key, value := range service.LaunchConfig.Labels { 220 | rData.Labels[key] = value.(string) 221 | } 222 | } 223 | 224 | for _, container := range containers { 225 | if container.Labels[labelRancherStackServiceName] == stack.Name+"/"+service.Name && 226 | containerFilter(container.Name, container.HealthState, container.State) { 227 | 228 | if container.NetworkMode == hostNetwork { 229 | var endpoints []*rancher.PublicEndpoint 230 | err := mapstructure.Decode(service.PublicEndpoints, &endpoints) 231 | 232 | if err != nil { 233 | log.Errorf("Failed to decode PublicEndpoint: %v", err) 234 | continue 235 | } 236 | 237 | if len(endpoints) > 0 { 238 | rData.Containers = append(rData.Containers, endpoints[0].IpAddress) 239 | } 240 | } else { 241 | rData.Containers = append(rData.Containers, container.PrimaryIpAddress) 242 | } 243 | } 244 | } 245 | rancherDataList = append(rancherDataList, rData) 246 | } 247 | } 248 | 249 | return rancherDataList 250 | } 251 | -------------------------------------------------------------------------------- /spec/fixtures/normal_js_file.js: -------------------------------------------------------------------------------- 1 | /* This is a JS file that should not be minified */ 2 | function foo() { 3 | var i = 1; 4 | for (var j = 0; j < 500; j++) { 5 | i += j % 3; 6 | } 7 | return i; 8 | } 9 | 10 | function bar() { 11 | var i = 1; 12 | for (var j = 0; j < 500; j++) { 13 | i += j % 3; 14 | } 15 | console.log("This one very long line should not result in the file being considered minified because it is not minified, it is, as they say, unminified. 
You could call it maxified. If you wanted to. Maybe you don't. I don't know your life, man."); 16 | return i; 17 | } 18 | -------------------------------------------------------------------------------- /spec/fixtures/php_71_sample.php: -------------------------------------------------------------------------------- 1 | =5.5" 26 | }, 27 | "require-dev": { 28 | "phpunit/phpunit": "~4.0|~5.0" 29 | }, 30 | "bin": [ 31 | "bin/php-parse" 32 | ], 33 | "type": "library", 34 | "extra": { 35 | "branch-alias": { 36 | "dev-master": "3.0-dev" 37 | } 38 | }, 39 | "autoload": { 40 | "psr-4": { 41 | "PhpParser\\": "lib/PhpParser" 42 | } 43 | }, 44 | "notification-url": "https://packagist.org/downloads/", 45 | "license": [ 46 | "BSD-3-Clause" 47 | ], 48 | "authors": [ 49 | { 50 | "name": "Nikita Popov" 51 | } 52 | ], 53 | "description": "A PHP parser written in PHP", 54 | "keywords": [ 55 | "parser", 56 | "php" 57 | ], 58 | "time": "2017-03-05T18:23:57+00:00" 59 | } 60 | ], 61 | "packages-dev": [], 62 | "aliases": [], 63 | "minimum-stability": "stable", 64 | "stability-flags": [], 65 | "prefer-stable": false, 66 | "prefer-lowest": false, 67 | "platform": [], 68 | "platform-dev": [] 69 | } 70 | -------------------------------------------------------------------------------- /vendor/php-parser/lib/PhpParser/Serializer/JSON.php: -------------------------------------------------------------------------------- 1 | _serialize($nodes); 20 | } 21 | 22 | protected function _serialize($node) { 23 | if ($node instanceof Node) { 24 | $doc = array(); 25 | $doc['nodeType'] = $node->getType(); 26 | 27 | foreach ($node->getAttributes() as $name => $value) { 28 | $doc[$name] = $value; 29 | } 30 | 31 | foreach ($node as $name => $subNode) { 32 | if (INF === $subNode) { 33 | $doc[$name] = "_PHP:CONST:INF"; 34 | } elseif (NAN === $subNode) { 35 | $doc[$name] = "_PHP:CONST:NaN"; 36 | } elseif (is_string($subNode)) { 37 | $doc[$name] = utf8_encode($subNode); 38 | } elseif (is_int($subNode)) { 39 | 
$doc[$name] = $subNode; 40 | } elseif (is_float($subNode)) { 41 | $doc[$name] = $subNode; 42 | } elseif (true === $subNode) { 43 | $doc[$name] = $subNode; 44 | } elseif (false === $subNode) { 45 | $doc[$name] = $subNode; 46 | } elseif (null === $subNode) { 47 | $doc[$name] = $subNode; 48 | } elseif (null !== $subNode) { 49 | $doc[$name] = $this->_serialize($subNode); 50 | } 51 | } 52 | 53 | return $doc; 54 | } elseif ($node instanceof Comment) { 55 | $doc = array(); 56 | $doc['nodeType'] = 'comment'; 57 | $doc['isDocComment'] = $node instanceof Comment\Doc ? true : false; 58 | $doc['line'] = $node->getLine(); 59 | $doc['text'] = $node->getText(); 60 | return $doc; 61 | } elseif (is_array($node)) { 62 | $doc = array(); 63 | 64 | foreach ($node as $subNode) { 65 | $doc[] = $this->_serialize($subNode); 66 | } 67 | 68 | return $doc; 69 | } elseif (is_string($node)) { 70 | return utf8_encode($node); 71 | } elseif (is_int($node)) { 72 | return $node; 73 | } elseif (is_float($node)) { 74 | return $node; 75 | } elseif (true === $node) { 76 | return $node; 77 | } elseif (false === $node) { 78 | return $node; 79 | } elseif (null === $node) { 80 | return $node; 81 | } else { 82 | throw new \InvalidArgumentException('Unexpected node type'); 83 | } 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /vendor/php-parser/parser.php: -------------------------------------------------------------------------------- 1 | create(PhpParser\ParserFactory::PREFER_PHP7); 11 | $code = file_get_contents("php://stdin"); 12 | $stmts = $parser->parse($code); 13 | 14 | $serializer = new PhpParser\Serializer\JSON; 15 | $nodes = $serializer->serialize($stmts); 16 | $json = json_encode($nodes); 17 | if (false === $json) { 18 | fwrite(STDERR, "Parse Error: JSON encoding failed: ".json_last_error_msg()."\n"); 19 | exit(1); 20 | } else { 21 | echo $json; 22 | } 23 | } catch (PHPParser\Error $e) { 24 | fwrite(STDERR, "Parse Error: ".$e->getMessage()."\n"); 
25 | exit(1); 26 | } 27 | -------------------------------------------------------------------------------- /vendor/php-parser/test/run: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Simple smoke test of ./parser.php 4 | # 5 | ### 6 | input() { 7 | cat <