├── .gitignore ├── .travis.yml ├── CHANGELOG.md ├── CONTRIBUTING.md ├── LICENSE ├── OSSMETADATA ├── README.md ├── build.gradle ├── buildViaTravis.sh ├── docker-compose.yml ├── gradle.properties ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── installViaTravis.sh ├── kibana-dashboards ├── dashboard.json └── visualizations.json ├── osstracker-console ├── .gitignore ├── Dockerfile ├── app.js ├── package.json ├── public │ ├── css │ │ └── osstracker.css │ ├── favicon.ico │ ├── images │ │ └── netflix-oss-logo-new.png │ ├── index.html │ ├── js │ │ ├── overall.js │ │ ├── ownership.js │ │ ├── perrepo.js │ │ └── settings.json │ ├── overall.html │ ├── ownership.html │ ├── perrepoprivate.html │ └── perrepopublic.html ├── routes │ ├── fakeEmployeeDirectoryService.js │ └── index.js ├── settings.json └── views │ ├── error.jade │ └── layout.jade ├── osstracker-ddl ├── elasticsearch-mappings.json ├── elasticsearch.txt └── osstracker.cql ├── osstracker-scraper ├── build.gradle └── src │ ├── main │ └── scala │ │ └── com │ │ └── netflix │ │ └── oss │ │ └── tools │ │ └── osstrackerscraper │ │ ├── CassandraAccess.scala │ │ ├── Conf.scala │ │ ├── ElasticSearchAccess.scala │ │ ├── GithubAccess.scala │ │ ├── GithubScraper.scala │ │ ├── OssLifecycle.scala │ │ └── ReportWriter.scala │ └── test │ ├── resources │ ├── archiaus-issues.json │ ├── hollow-issues.json │ ├── log4j.properties │ └── security_monkey-issues.json │ └── scala │ └── com │ └── netflix │ └── oss │ └── tools │ └── osstrackerscraper │ └── GitHubAccessTest.scala ├── osstracker-scraperapp ├── Dockerfile ├── build.gradle ├── build.sh └── src │ └── main │ ├── resources │ └── log4j.properties │ └── scala │ └── com │ └── netflix │ └── oss │ └── tools │ └── osstrackerscraper │ └── app │ ├── ConsoleReportWriter.scala │ └── RunGithubScraper.scala └── settings.gradle /.gitignore: -------------------------------------------------------------------------------- 1 | build/ 2 | .gradle/ 3 | 4 | *.iml 5 | *.iws 6 | *.ipr 7 | .idea/ 8 | */out 9 | osstracker-scrapernetflixapp 10 | cassandra_data 11 | es_data 12 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: java 2 | jdk: 3 | - oraclejdk8 4 | sudo: true 5 | install: ./installViaTravis.sh 6 | script: ./buildViaTravis.sh 7 | services: 8 | - docker 9 | cache: 10 | directories: 11 | - $HOME/.gradle/caches 12 | - $HOME/.gradle/wrapper/dists 13 | env: 14 | global: 15 | - secure: wFD85Q4emqQje3ZccWtWTxKHZs07cu6l5bQAZr+RUAR2czHyCqm+gFBVsNftHpwUK+g0oNCEpGDCpS8gtLdPZoql4w5isLEcYCCTdKj1ywK2tW4Nu+VXudmLcoEd6TLo7MrU0B55wWiUbkFhDC/0VeAmb3MMU6bqQI7VY9XSXLEid6yCkCQwv5q40iIyMTuIedR+2ynDZhpHuYfkxnlPtGErTQtm8Y5V1g1PPd5kiaY9qqpMF3oJG53wtuCU1hNArZpPz67PruS/1UjdOyR3Py8BpALLeW0QEw4T5lf/ORbL8NaTEymytgmCZlHUKzqQvOY850bHyeujNBca7+Z0hiPpDy44imikam5CSkMjp0qAYziq4HzBaKr7tEDdnWZs7JwLdhHoFEDicPX36pp1nbWVznmnZBETuVfnTT1Fchk9imdah57M5G7EvvTVFwHUy4fOT4nX/ye/ttMOKiI2arA6EewENKFkIcCdyqDa3ql9tkAwupH+TEXATAiCh7vqiXBBFCiYSE2RzFjlDZ8aJAhTb5zH1raLsrOo+eQpLUJdHLx/D+Mo4S7AJLIYFAVRTssyRDvGCTu+3g6nsXTdD8KONdNFitoAnY6FIVfLGUnKdIfLI5gNKLFa6PWpRt6YYUA6Z9D2JI3l1aj41WOeeBUNHiwoNBI3jBA6bGQ80Ec= 16 | - secure: 
N8bhBBwgPB4b1lswj0qATgpxpTw0kNO26Ir08YM4fF9xk50Kdha9dYARWGsJ9qEI2ogjXXCevnDAT8YpmNLA+3o+AHx+mfKJVtaOadQZ/Q4k2WyzmB/LqiY1klDkU38vBfReAAXmDttdZQGn0lg1swweg5o2AtWC+TYe1CCOf+tkfga4EUiDio04/1dqH5i/ey0k4aGLZ0AlKsSeElIyO/Kngy5kYYzbBHLq+9FLcEAVZ/8iQudfRQ3IeChP8xwOU+Tka4e/78KTq1gkF0TrrGRUijbhAAQX49ajQ1ivnEPOiWdQz6+PnBlbIiMS9WtySEEWH+3Kf+ZFQeDRKhlUhWg8VLJt76bZc4O1D1553N8BTat7pvEkeFa+7z66pcddX5BPDpxGx2nt1Hg02km0zAZ4SoBOVLTz4xGgZiuokHl1ifo/2oy+aJSAvePEy7VZ6lE4QsHIFgr5Y7IoAJN5adWviCFQBWeUIC8GTI9p4jEMXyCjG20e5wVaQmnP0IndbMlODC+tK14SMbYhaqqc7qFXbE9Ra/RHcKtXU9JBzEo9DIauO9CxauJ6z9/CBFg2u6oOJ78ISaqh6xFav42iOrlYQe7uFsOjQHSvn9CiHUgdvqohZPM2o1p83dn0FHV1leFRXPZJ7WxL2AsEBbTf3NYgwCxaqawIex4zQOWN55Y= 17 | - secure: cGw6crPYh6vKNUHADJ400scVrvwhgoBPF0YiFWYivPU+BAI9tdl+SPOEHn+jQmsFKCJJVIBnHeh+wuC1zTrH3pziUghcZpucMQoOwjIAXo6c4ExgX8Fygr6o+zofcDemKekBQabWivACzOxtuLodZKl21TKNC4SmFwoRc9twnlb7uDDh5Tb3s29LJ9KGg7UaPQuUVis0VYgOevOSzRV8N2tbHy9YAquhXr2cYb3LGGDqC3MS/ynBqqbuhkpk6/pthQc5B8gCGhwiKlyxn2dv8wFJhxmqGvFkiTEJ8IPb5XO6kubF3lPaJ81UOZXwlxp0ml10lYNKHMP/GvWvq2LatACY8Eem7o1CvJ+oRU0pDhag5VFo/DenrDWJv9/DdZRXkHqdb8hzIzOrrCCyxeYME9+9TUMoNQpSNU07wC+ArwlNKOV+Ka+qLuPQeSj4WR+osowlraxnRKzs1hDiA+aALbcb5vSw57wf36bULT+bPmDonexyxKnB3AvtDwidUTFgPhNKQT4DFPepMjefQ6m0Q8FAlG/5VGkbPL8YV2U9ryMModhxz8FQDnIUiRZ+7E/We8DWdDdA2Sl89JbSS3EJFh+MTigorIsTP9qk/jqNP2jzFQP92BGbgeZu0jEn6ele3j5REpWldIRxAKJdvXsLyCsgDEuegvKbjxTpXLzD+tQ= 18 | - secure: wsjI4nMfXy9FWEzeEz90mv/RQ+PpddJzTOtwkEu/UzNiUkGh+Zcknvp3FVcZkfWpHotErCrkIaAtNOlxdguUZQiPj0JWWHubppM/Vqy1vyCVm8IVQBz4IE6MGA0bU411dVW3RW5TPfL1pOFbt5G9uD/OYdzbT2i+MqIGxKghNSXL/sfrFjvNw62+6k/jWnHzWVpX9qvoEqOSijiDpe8zguKUoJxckYLOhYFNoK4U9quX/qI0Qu+EJP7xGTWsQS+k/dqNtZ/82AXgBSwKhm9eA2nZdYQPJdHw5DRWgesYZjbAxid+TwF4fKufX+vYOGwTWarrf4cFAGlcuf7G0aW1bhWTnXvLhHySMrTJRrC+h2uMUXgVFdXiw3UtzfdCJUhAHCMyqM/eBTLsPLiAERt0fAauBM41lhX3QSrLpTDuMMfcru3hr+L8HVaaYaRmGw0QDDkws+PA2O9sf9truG0juKNSKupRqccunfs9ZSf0yGoFCNOojrHvxQd/fx+Lw3HDthUtVO8F+Li4VSJnEX2TAE6r7cCK4CjhV+b1GhgAd7Rs8XuY2eylrgZBtVXY+9BtQnnWgcHciHr6DiVPj8ob4LicZHHHLqikDDOm6LgpGMFtzDUblAn2/uejJG5TxzmfvqtgEauCJpYym9uoET9m5LoHyHcdTqgjiTJ0jNDqJKg= 19 | - secure: "y7E7CCz7PcK43vf+sNG/wLgguH/aoGwmuVuXnOePnxdZYsD2C9ijvFlRj7yEJJ2F2vtOXnZw4puuh6kzDynN0f7vbd/PcQPAK846ttMAyuZUzUEzu2T8KrBOKV2O8kbfpE/0LwxafRISbAKlj29ZHhMHCnIIDEi29GmfQ/ijcgZRDw/P2qWQbxbkYADBZggDAPaN9krjCWrU1tlei8QZ+qEqRghCINf+FcXG0Zgzipyde0Iwa+3iX45vfBaliL0IrpEsuZYPlswyEc6oNkPU8FqHxv3uvG2mkVllFiBVCToelR7fn1IwKGjEfeI9S9gvYzpdLxwlnMDM2DETeJFdU5TyJlqpljAq8bqt6AVGluIN8ML7tBFXFkhzITIAery7Upru0H4d2BLDFNLa2OjzeT4cA+NxhwaVXtdHJ/AudCKlSJUxvWN9sCwtldtNLyai345ULvfhAW7a8Tnd6AQ+381LXfSPRGVjLjMxqJuGtVfspM8Qrz0s1ID0V5YAkLlVhAOmkgH9iqTCBVNfnxN9y6/FoSI0KQAo9YgnUcH6afiEc4sjuBIBSPLvjC3/IXDGvGM3rf4vmtSiPs+TkV4kWTnkqSyxQvcY9HA22CM7OdgQBAHOPMgc0diV0I3eyaU7mC9x7MlWQmcgUvucw2z5kGGkIdOlbFHSC20jW8nNZeo=" 20 | - secure: "Nb71SzKYVwEW2Ya8toFLfjyl2j26bR41lamp7u75hL/KccUwL6LAbWneKrnbvNCdHhC2ao8pcAG77AnuYCOq+dGKI2WWQlreQ4I6JYVUaSAdpJnX8WMSMsKUvKNLXtcgOtUzj2PtgHcKhPOboz2DGGkrD1Er/LYWGU92/f8+XSjMFZidDdHwoMCU0Nu4jCXB5xr9k/t7A2hIZlD/OqVoUz0skFE6GM9oPUa5ai8pktLUXC6s6upcAybP0UQKW46+xkstcQYDOLHgir/m7hkkngMlHeMCkgxXFUCavbhqxlG5SUmXS/Q4Qu8iGrpSXsTzwl/khsn0JtuaXsKK7jKMlY+Y4YrcxnvYMIIQTa08tOZ77qh0Emfsyw0HcndE0kqWkT+4s/ZRRYUd67N+KMT1zUsfkWaDGeQc8E3aobrLumxTX2HI1tPeCb4DcQ5SborkOV6d555cCHtdUusrGncTe+QO/xzvbwrnXDBuquto054WSi3pru+xmBjX96FtqvtDQMuUAwOvpNkNa/69YfzHqWQ25qGRUovhOBN/7sQrjIst5f8vonJEt+3sqI2CyD4ZK1JCjJXTTiKyPbUELXZVetWKvLIaZQb7GhNp/1Mpon+DPumanwyQs+ZoCHYFrkdPWbC7wQ+QhS1BWCMlV3Wtj6gF5Dd56ZVBb6TEiL6D4Dk=" 21 | 
-------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Netflix/osstracker/ccbe617cab05e432a1736e4560a042f30a56f2bb/CHANGELOG.md -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to osstracker 2 | 3 | If you would like to contribute code you can do so through GitHub by forking the repository and sending a pull request. 4 | 5 | When submitting code, please make every effort to follow existing conventions and style in order to keep the code as readable as possible. 6 | 7 | ## License 8 | 9 | By contributing your code, you agree to license your contribution under the terms of the APLv2: https://github.com/Netflix/osstracker/blob/master/LICENSE 10 | 11 | All files are released with the Apache 2.0 license. 12 | 13 | If you are adding a new file it should have a header like this: 14 | 15 | ``` 16 | /** 17 | * Copyright 2016 Netflix, Inc. 18 | * 19 | * Licensed under the Apache License, Version 2.0 (the "License"); 20 | * you may not use this file except in compliance with the License. 21 | * You may obtain a copy of the License at 22 | * 23 | * http://www.apache.org/licenses/LICENSE-2.0 24 | * 25 | * Unless required by applicable law or agreed to in writing, software 26 | * distributed under the License is distributed on an "AS IS" BASIS, 27 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 28 | * See the License for the specific language governing permissions and 29 | * limitations under the License. 30 | */ 31 | ``` 32 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 
35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright 2016 Netflix, Inc. 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 
203 | -------------------------------------------------------------------------------- /OSSMETADATA: -------------------------------------------------------------------------------- 1 | osslifecycle=active 2 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | OSS Tracker 2 | ========== 3 | 4 | [![Build Status](https://travis-ci.org/Netflix/osstracker.svg?branch=master)](https://travis-ci.org/Netflix/osstracker) 5 | [![NetflixOSS Lifecycle](https://img.shields.io/osslifecycle/Netflix/osstracker.svg)]() 6 | 7 | OSS Tracker is an application that collects information about a GitHub organization and aggregates the data across 8 | all projects within that organization into a single user interface to be used by various roles within the owning 9 | organization. 10 | 11 | For the community manager, all repositories are listed and metrics are combined for the organization as a whole. A 12 | community manager can also organize projects into functional areas and appoint shepherds of these areas to assign 13 | management and engineering leads. 14 | 15 | The shepherds of each functional area can not only assign and maintain leads for each project, but also view 16 | aggregated metrics for their area. 17 | 18 | For individual owners, the OSS Tracker gives a daily summary as well as historical information on key repository 19 | metrics such as open issues and pull requests, days since last commit, and average time to resolve issues and pull 20 | requests. 21 | 22 | OSS Tracker works by periodically running multiple analysis jobs as part of osstracker-scraper. These jobs populate 23 | a project ownership database as well as a time series project statistics database. OSS Tracker then exposes a web 24 | application (osstracker-console) that gives visibility into these databases as well as access to control ownership 25 | and categorization of each project. To reduce the need for custom visualization work, much of the time series 26 | data graphing leverages Kibana on top of Elasticsearch. 27 | 28 | More Info 29 | ========= 30 | You can see more about OSS Tracker from our meetup [video](https://www.youtube.com/watch?v=5s-SS_aXoi0) and [slides](http://www.slideshare.net/aspyker/netflix-open-source-meetup-season-4-episode-1). 31 | 32 | Deployment 33 | ========== 34 | For a sample deployment of the OSS Tracker using Terraform + Ansible, you can refer to [this project](https://github.com/RestComm/netflix-oss-tracker-infra). 35 | 36 | [![Apache 2.0](https://img.shields.io/github/license/Netflix/osstracker.svg)](http://www.apache.org/licenses/LICENSE-2.0) 37 | 38 | LICENSE 39 | ======= 40 | 41 | Copyright 2016 Netflix, Inc. 42 | 43 | Licensed under the Apache License, Version 2.0 (the "License"); 44 | you may not use this file except in compliance with the License. 45 | You may obtain a copy of the License at 46 | 47 | http://www.apache.org/licenses/LICENSE-2.0 48 | 49 | Unless required by applicable law or agreed to in writing, software 50 | distributed under the License is distributed on an "AS IS" BASIS, 51 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 52 | See the License for the specific language governing permissions and 53 | limitations under the License.
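The stack described above can be exercised locally with the docker-compose.yml later in this repository. A minimal bring-up sketch, assuming Docker and Docker Compose are installed and using the two environment variables the compose file documents (the values below are placeholders):

```bash
# Read by the scraper containers defined in docker-compose.yml; values are placeholders.
export github_oauth=YOUR_KEY_HERE   # GitHub OAuth token used by the scraper
export github_org=YOUR_ORG_HERE     # GitHub organization to track

# Start Cassandra, Elasticsearch, Kibana, the console, and the scraper jobs.
docker-compose up -d

# Console UI: http://localhost:3000
# Kibana:     http://localhost:5601
```

Once the containers are up, the scraper containers wait for Cassandra and Elasticsearch and then run the RunGithubScraper job with the updatecassandra and updateelasticsearch actions against the configured organization.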
54 | -------------------------------------------------------------------------------- /build.gradle: -------------------------------------------------------------------------------- 1 | buildscript { 2 | repositories { 3 | jcenter() 4 | mavenCentral() 5 | } 6 | dependencies { 7 | classpath 'com.github.jengelman.gradle.plugins:shadow:1.2.3' 8 | classpath 'org.apache.ant:ant:1.9.7' // to deal with https://github.com/johnrengelman/shadow/issues/188 9 | } 10 | } 11 | 12 | plugins { 13 | id 'nebula.netflixoss' version '3.3.0' 14 | } 15 | 16 | ext.githubProjectName = rootProject.name 17 | 18 | subprojects { 19 | apply plugin: 'nebula.netflixoss' 20 | group = 'com.netflix.oss.tools' 21 | 22 | repositories { 23 | jcenter() 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /buildViaTravis.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # This script will build the project. 3 | 4 | # Evaluating a pull request 5 | if [ "$TRAVIS_PULL_REQUEST" != "false" ]; then 6 | echo -e "Build Pull Request #$TRAVIS_PULL_REQUEST => Branch [$TRAVIS_BRANCH]" 7 | ./gradlew build 8 | 9 | # Building a code commit, but not a release tag 10 | elif [ "$TRAVIS_PULL_REQUEST" == "false" ] && [ "$TRAVIS_TAG" == "" ]; then 11 | echo -e 'Build Branch with Snapshot => Branch ['$TRAVIS_BRANCH']' 12 | ./gradlew -Prelease.travisci=true -PbintrayUser="${bintrayUser}" -PbintrayKey="${bintrayKey}" -PsonatypeUsername="${sonatypeUsername}" -PsonatypePassword="${sonatypePassword}" build snapshot 13 | 14 | # Building a release tag 15 | elif [ "$TRAVIS_PULL_REQUEST" == "false" ] && [ "$TRAVIS_TAG" != "" ]; then 16 | echo -e 'Build Branch for Release => Branch ['$TRAVIS_BRANCH'] Tag ['$TRAVIS_TAG']' 17 | case "$TRAVIS_TAG" in 18 | *-rc\.*) 19 | ./gradlew -Prelease.travisci=true -Prelease.useLastTag=true -PbintrayUser="${bintrayUser}" -PbintrayKey="${bintrayKey}" -PsonatypeUsername="${sonatypeUsername}" -PsonatypePassword="${sonatypePassword}" candidate 20 | ;; 21 | *) 22 | ./gradlew -Prelease.travisci=true -Prelease.useLastTag=true -PbintrayUser="${bintrayUser}" -PbintrayKey="${bintrayKey}" -PsonatypeUsername="${sonatypeUsername}" -PsonatypePassword="${sonatypePassword}" final 23 | ;; 24 | esac 25 | 26 | if [[ $? -ne 0 ]]; then 27 | exit 1 28 | fi 29 | ./gradlew :osstracker-scraperapp:shadowJar 30 | cd osstracker-scraperapp 31 | docker build -t netflixoss/osstracker-scraper:$TRAVIS_TAG . 32 | docker tag netflixoss/osstracker-scraper:$TRAVIS_TAG netflixoss/osstracker-scraper:latest 33 | docker images 34 | docker login -u=${dockerhubUsername} -p=${dockerhubPassword} 35 | docker push netflixoss/osstracker-scraper:$TRAVIS_TAG 36 | docker push netflixoss/osstracker-scraper:latest 37 | cd .. 38 | cd osstracker-console 39 | docker build -t netflixoss/osstracker-console:$TRAVIS_TAG . 40 | docker tag netflixoss/osstracker-console:$TRAVIS_TAG netflixoss/osstracker-console:latest 41 | docker images 42 | docker login -u=${dockerhubUsername} -p=${dockerhubPassword} 43 | docker push netflixoss/osstracker-console:$TRAVIS_TAG 44 | docker push netflixoss/osstracker-console:latest 45 | cd .. 
46 | 47 | # Not a valid build 48 | else 49 | echo -e 'WARN: Should not be here => Branch ['$TRAVIS_BRANCH'] Tag ['$TRAVIS_TAG'] Pull Request ['$TRAVIS_PULL_REQUEST']' 50 | ./gradlew build 51 | fi 52 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | # This compose file will start up Netflix OSS Tracker and load it with a day's worth of data 2 | # 3 | # You need to export two variables before running it: your GitHub OAuth key and your GitHub org 4 | # export github_oauth=YOUR_KEY_HERE 5 | # export github_org=YOUR_ORG_HERE 6 | # 7 | version: '3' 8 | services: 9 | cassandra: 10 | container_name: osstracker-cassandra 11 | image: cassandra:latest 12 | ports: 13 | - "9042:9042" 14 | environment: 15 | - "MAX_HEAP_SIZE=256M" 16 | - "HEAP_NEWSIZE=128M" 17 | restart: always 18 | volumes: 19 | - ./cassandra_data:/var/lib/cassandra 20 | 21 | elasticsearch: 22 | container_name: osstracker-elasticsearch 23 | image: elasticsearch:5.6.4 24 | environment: 25 | ES_JAVA_OPTS: "-Xms512m -Xmx512m" 26 | ports: 27 | - "9200:9200" 28 | 29 | 30 | restart: always 31 | ulimits: 32 | memlock: 33 | soft: -1 34 | hard: -1 35 | volumes: 36 | - ./es_data:/usr/share/elasticsearch/data 37 | 38 | kibana: 39 | container_name: osstracker-kibana 40 | image: kibana:5.6.4 41 | ports: 42 | - "5601:5601" 43 | restart: always 44 | 45 | osstracker-console: 46 | container_name: osstracker-console 47 | image: netflixoss/osstracker-console:latest 48 | ports: 49 | - "3000:3000" 50 | environment: 51 | CASS_HOST: cassandra 52 | CASS_PORT: 9042 53 | ES_HOST: elasticsearch 54 | ES_PORT: 9200 55 | restart: always 56 | depends_on: 57 | - cassandra 58 | - elasticsearch 59 | 60 | cassandra-load-keyspace: 61 | container_name: osstracker-cassandra-load-keyspace 62 | image: cassandra:latest 63 | depends_on: 64 | - cassandra 65 | volumes: 66 | - ./osstracker-ddl/osstracker.cql:/osstracker.cql 67 | command: /bin/bash -c "sleep 60 && echo loading cassandra keyspace && cqlsh cassandra -f /osstracker.cql" 68 | deploy: 69 | restart_policy: 70 | condition: on-failure 71 | delay: 5s 72 | max_attempts: 3 73 | window: 120s 74 | 75 | elasticsearch-configure-index-1: 76 | container_name: osstracker-elasticsearch-configure-index-1 77 | image: tutum/curl 78 | depends_on: 79 | - elasticsearch 80 | command: /bin/bash -c "sleep 60 && echo adding index && curl -X PUT elasticsearch:9200/osstracker" 81 | deploy: 82 | restart_policy: 83 | condition: on-failure 84 | delay: 5s 85 | max_attempts: 3 86 | window: 120s 87 | 88 | elasticsearch-configure-index-2: 89 | container_name: osstracker-elasticsearch-configure-index-2 90 | image: tutum/curl 91 | depends_on: 92 | - elasticsearch 93 | volumes: 94 | - ./osstracker-ddl/elasticsearch-mappings.json:/elasticsearch-mappings.json 95 | command: /bin/bash -c "sleep 80 && echo adding index && curl -X PUT -d @/elasticsearch-mappings.json elasticsearch:9200/osstracker/repo_stats/_mapping" 96 | deploy: 97 | restart_policy: 98 | condition: on-failure 99 | delay: 5s 100 | max_attempts: 3 101 | window: 120s 102 | 103 | osstracker-scraper-cassandra: 104 | container_name: osstracker-scraper-cassandra 105 | image: netflixoss/osstracker-scraper:latest 106 | depends_on: 107 | - cassandra 108 | - elasticsearch 109 | environment: 110 | CASS_HOST: cassandra 111 | CASS_PORT: 9042 112 | ES_HOST: elasticsearch 113 | ES_PORT: 9200 114 | github_oauth: ${github_oauth} 115 | github_org:
${github_org} 116 | restart: always 117 | command: /bin/sh -c "sleep 100 && java -cp /osstracker-scraperapp-all.jar com.netflix.oss.tools.osstrackerscraper.app.RunGithubScraper --action updatecassandra" 118 | 119 | osstracker-scraper-elasticsearch: 120 | container_name: osstracker-scraper-elasticsearch 121 | image: netflixoss/osstracker-scraper:latest 122 | depends_on: 123 | - cassandra 124 | - elasticsearch 125 | environment: 126 | CASS_HOST: cassandra 127 | CASS_PORT: 9042 128 | ES_HOST: elasticsearch 129 | ES_PORT: 9200 130 | github_oauth: ${github_oauth} 131 | github_org: ${github_org} 132 | restart: always 133 | command: /bin/sh -c "sleep 160 && java -cp /osstracker-scraperapp-all.jar com.netflix.oss.tools.osstrackerscraper.app.RunGithubScraper --action updateelasticsearch" -------------------------------------------------------------------------------- /gradle.properties: -------------------------------------------------------------------------------- 1 | CASS_HOST=127.0.0.1 2 | ES_HOST=127.0.0.1 3 | release.scope=patch 4 | -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Netflix/osstracker/ccbe617cab05e432a1736e4560a042f30a56f2bb/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | #Mon Feb 08 13:18:27 PST 2016 2 | distributionBase=GRADLE_USER_HOME 3 | distributionPath=wrapper/dists 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | distributionUrl=https\://services.gradle.org/distributions/gradle-2.10-bin.zip 7 | -------------------------------------------------------------------------------- /gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ############################################################################## 4 | ## 5 | ## Gradle start up script for UN*X 6 | ## 7 | ############################################################################## 8 | 9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 10 | DEFAULT_JVM_OPTS="" 11 | 12 | APP_NAME="Gradle" 13 | APP_BASE_NAME=`basename "$0"` 14 | 15 | # Use the maximum available, or set MAX_FD != -1 to use that value. 16 | MAX_FD="maximum" 17 | 18 | warn ( ) { 19 | echo "$*" 20 | } 21 | 22 | die ( ) { 23 | echo 24 | echo "$*" 25 | echo 26 | exit 1 27 | } 28 | 29 | # OS specific support (must be 'true' or 'false'). 30 | cygwin=false 31 | msys=false 32 | darwin=false 33 | case "`uname`" in 34 | CYGWIN* ) 35 | cygwin=true 36 | ;; 37 | Darwin* ) 38 | darwin=true 39 | ;; 40 | MINGW* ) 41 | msys=true 42 | ;; 43 | esac 44 | 45 | # Attempt to set APP_HOME 46 | # Resolve links: $0 may be a link 47 | PRG="$0" 48 | # Need this for relative symlinks. 49 | while [ -h "$PRG" ] ; do 50 | ls=`ls -ld "$PRG"` 51 | link=`expr "$ls" : '.*-> \(.*\)$'` 52 | if expr "$link" : '/.*' > /dev/null; then 53 | PRG="$link" 54 | else 55 | PRG=`dirname "$PRG"`"/$link" 56 | fi 57 | done 58 | SAVED="`pwd`" 59 | cd "`dirname \"$PRG\"`/" >/dev/null 60 | APP_HOME="`pwd -P`" 61 | cd "$SAVED" >/dev/null 62 | 63 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 64 | 65 | # Determine the Java command to use to start the JVM. 
66 | if [ -n "$JAVA_HOME" ] ; then 67 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 68 | # IBM's JDK on AIX uses strange locations for the executables 69 | JAVACMD="$JAVA_HOME/jre/sh/java" 70 | else 71 | JAVACMD="$JAVA_HOME/bin/java" 72 | fi 73 | if [ ! -x "$JAVACMD" ] ; then 74 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 75 | 76 | Please set the JAVA_HOME variable in your environment to match the 77 | location of your Java installation." 78 | fi 79 | else 80 | JAVACMD="java" 81 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 82 | 83 | Please set the JAVA_HOME variable in your environment to match the 84 | location of your Java installation." 85 | fi 86 | 87 | # Increase the maximum file descriptors if we can. 88 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then 89 | MAX_FD_LIMIT=`ulimit -H -n` 90 | if [ $? -eq 0 ] ; then 91 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 92 | MAX_FD="$MAX_FD_LIMIT" 93 | fi 94 | ulimit -n $MAX_FD 95 | if [ $? -ne 0 ] ; then 96 | warn "Could not set maximum file descriptor limit: $MAX_FD" 97 | fi 98 | else 99 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 100 | fi 101 | fi 102 | 103 | # For Darwin, add options to specify how the application appears in the dock 104 | if $darwin; then 105 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 106 | fi 107 | 108 | # For Cygwin, switch paths to Windows format before running java 109 | if $cygwin ; then 110 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 111 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 112 | JAVACMD=`cygpath --unix "$JAVACMD"` 113 | 114 | # We build the pattern for arguments to be converted via cygpath 115 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 116 | SEP="" 117 | for dir in $ROOTDIRSRAW ; do 118 | ROOTDIRS="$ROOTDIRS$SEP$dir" 119 | SEP="|" 120 | done 121 | OURCYGPATTERN="(^($ROOTDIRS))" 122 | # Add a user-defined pattern to the cygpath arguments 123 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 124 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 125 | fi 126 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 127 | i=0 128 | for arg in "$@" ; do 129 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 130 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 131 | 132 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 133 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 134 | else 135 | eval `echo args$i`="\"$arg\"" 136 | fi 137 | i=$((i+1)) 138 | done 139 | case $i in 140 | (0) set -- ;; 141 | (1) set -- "$args0" ;; 142 | (2) set -- "$args0" "$args1" ;; 143 | (3) set -- "$args0" "$args1" "$args2" ;; 144 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;; 145 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 146 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 147 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 148 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 149 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 150 | esac 151 | fi 152 | 153 | # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules 154 | function splitJvmOpts() { 155 | JVM_OPTS=("$@") 156 | } 157 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS 
$GRADLE_OPTS 158 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME" 159 | 160 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@" 161 | -------------------------------------------------------------------------------- /gradlew.bat: -------------------------------------------------------------------------------- 1 | @if "%DEBUG%" == "" @echo off 2 | @rem ########################################################################## 3 | @rem 4 | @rem Gradle startup script for Windows 5 | @rem 6 | @rem ########################################################################## 7 | 8 | @rem Set local scope for the variables with windows NT shell 9 | if "%OS%"=="Windows_NT" setlocal 10 | 11 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 12 | set DEFAULT_JVM_OPTS= 13 | 14 | set DIRNAME=%~dp0 15 | if "%DIRNAME%" == "" set DIRNAME=. 16 | set APP_BASE_NAME=%~n0 17 | set APP_HOME=%DIRNAME% 18 | 19 | @rem Find java.exe 20 | if defined JAVA_HOME goto findJavaFromJavaHome 21 | 22 | set JAVA_EXE=java.exe 23 | %JAVA_EXE% -version >NUL 2>&1 24 | if "%ERRORLEVEL%" == "0" goto init 25 | 26 | echo. 27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 28 | echo. 29 | echo Please set the JAVA_HOME variable in your environment to match the 30 | echo location of your Java installation. 31 | 32 | goto fail 33 | 34 | :findJavaFromJavaHome 35 | set JAVA_HOME=%JAVA_HOME:"=% 36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 37 | 38 | if exist "%JAVA_EXE%" goto init 39 | 40 | echo. 41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 42 | echo. 43 | echo Please set the JAVA_HOME variable in your environment to match the 44 | echo location of your Java installation. 45 | 46 | goto fail 47 | 48 | :init 49 | @rem Get command-line arguments, handling Windowz variants 50 | 51 | if not "%OS%" == "Windows_NT" goto win9xME_args 52 | if "%@eval[2+2]" == "4" goto 4NT_args 53 | 54 | :win9xME_args 55 | @rem Slurp the command line arguments. 56 | set CMD_LINE_ARGS= 57 | set _SKIP=2 58 | 59 | :win9xME_args_slurp 60 | if "x%~1" == "x" goto execute 61 | 62 | set CMD_LINE_ARGS=%* 63 | goto execute 64 | 65 | :4NT_args 66 | @rem Get arguments from the 4NT Shell from JP Software 67 | set CMD_LINE_ARGS=%$ 68 | 69 | :execute 70 | @rem Setup the command line 71 | 72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 73 | 74 | @rem Execute Gradle 75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% 76 | 77 | :end 78 | @rem End local scope for the variables with windows NT shell 79 | if "%ERRORLEVEL%"=="0" goto mainEnd 80 | 81 | :fail 82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 83 | rem the _cmd.exe /c_ return code! 84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 85 | exit /b 1 86 | 87 | :mainEnd 88 | if "%OS%"=="Windows_NT" endlocal 89 | 90 | :omega 91 | -------------------------------------------------------------------------------- /installViaTravis.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # This script will build the project. 
3 | 4 | if [ "$TRAVIS_PULL_REQUEST" != "false" ]; then 5 | echo -e "Assemble Pull Request #$TRAVIS_PULL_REQUEST => Branch [$TRAVIS_BRANCH]" 6 | ./gradlew assemble 7 | elif [ "$TRAVIS_PULL_REQUEST" == "false" ] && [ "$TRAVIS_TAG" == "" ]; then 8 | echo -e 'Assemble Branch with Snapshot => Branch ['$TRAVIS_BRANCH']' 9 | ./gradlew -Prelease.travisci=true assemble 10 | elif [ "$TRAVIS_PULL_REQUEST" == "false" ] && [ "$TRAVIS_TAG" != "" ]; then 11 | echo -e 'Assemble Branch for Release => Branch ['$TRAVIS_BRANCH'] Tag ['$TRAVIS_TAG']' 12 | ./gradlew -Prelease.travisci=true -Prelease.useLastTag=true assemble 13 | else 14 | echo -e 'WARN: Should not be here => Branch ['$TRAVIS_BRANCH'] Tag ['$TRAVIS_TAG'] Pull Request ['$TRAVIS_PULL_REQUEST']' 15 | ./gradlew assemble 16 | fi 17 | -------------------------------------------------------------------------------- /kibana-dashboards/dashboard.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "_id": "osstracker-all-repos-stats", 4 | "_type": "dashboard", 5 | "_source": { 6 | "title": "OSS Tracker _ All Repos _ Overview", 7 | "hits": 0, 8 | "description": "", 9 | "panelsJSON": "[{\"col\":1,\"id\":\"osstracker-per-repo-open-issues-bars\",\"panelIndex\":1,\"row\":1,\"size_x\":6,\"size_y\":4,\"type\":\"visualization\"},{\"col\":7,\"id\":\"osstracker-per-repo-open-pullrequests-bars\",\"panelIndex\":2,\"row\":1,\"size_x\":6,\"size_y\":4,\"type\":\"visualization\"},{\"col\":1,\"id\":\"osstracker-per-repo-open-issues-stacked\",\"panelIndex\":3,\"row\":8,\"size_x\":6,\"size_y\":4,\"type\":\"visualization\"},{\"col\":7,\"id\":\"osstracker-per-repo-open-pullrequests-stacked\",\"panelIndex\":4,\"row\":8,\"size_x\":6,\"size_y\":4,\"type\":\"visualization\"},{\"col\":1,\"id\":\"AWACTqn9rcBqLUquJtNm\",\"panelIndex\":5,\"row\":12,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"AWACRuOTrcBqLUquJtNj\",\"panelIndex\":6,\"row\":12,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":1,\"id\":\"AWACSeg4rcBqLUquJtNl\",\"panelIndex\":7,\"row\":5,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"},{\"col\":7,\"id\":\"AWACSSTdrcBqLUquJtNk\",\"panelIndex\":8,\"row\":5,\"size_x\":6,\"size_y\":3,\"type\":\"visualization\"}]", 10 | "optionsJSON": "{\"darkTheme\":false}", 11 | "uiStateJSON": "{\"P-1\":{\"vis\":{\"legendOpen\":false}},\"P-2\":{\"vis\":{\"legendOpen\":false}},\"P-3\":{\"vis\":{\"legendOpen\":true}},\"P-4\":{\"vis\":{\"legendOpen\":false}},\"P-7\":{\"vis\":{\"legendOpen\":false}},\"P-8\":{\"vis\":{\"legendOpen\":false}}}", 12 | "version": 1, 13 | "timeRestore": false, 14 | "kibanaSavedObjectMeta": { 15 | "searchSourceJSON": "{\"filter\":[{\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}}}],\"highlightAll\":true,\"version\":true}" 16 | } 17 | } 18 | } 19 | ] -------------------------------------------------------------------------------- /kibana-dashboards/visualizations.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "_id": "osstracker-per-repo-open-pullrequests-stacked", 4 | "_type": "visualization", 5 | "_source": { 6 | "title": "Open Pull Requests per repo (over time)", 7 | "visState": "{\"title\":\"Open Pull Requests per repo (over 
time)\",\"type\":\"area\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"smoothLines\":false,\"scale\":\"linear\",\"interpolate\":\"linear\",\"mode\":\"stacked\",\"times\":[],\"addTimeMarker\":false,\"defaultYExtents\":false,\"setYExtents\":false,\"yAxis\":{},\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{\"text\":\"Time\"}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\",\"setYExtents\":false,\"defaultYExtents\":false},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Open Pull Requests\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"area\",\"mode\":\"stacked\",\"data\":{\"label\":\"Open Pull Requests\",\"id\":\"1\"},\"drawLinesBetweenPoints\":true,\"showCircles\":true,\"interpolate\":\"linear\",\"valueAxis\":\"ValueAxis-1\"}],\"legendPosition\":\"right\",\"type\":\"area\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"pullRequests.openCount\",\"customLabel\":\"Open Pull Requests\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"asOfISO\",\"interval\":\"d\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{},\"customLabel\":\"Time\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"group\",\"params\":{\"field\":\"repo_name\",\"size\":10,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Project\"}}],\"listeners\":{}}", 8 | "uiStateJSON": "{}", 9 | "description": "", 10 | "version": 1, 11 | "kibanaSavedObjectMeta": { 12 | "searchSourceJSON": "{\"index\":\"AWACP2vbrcBqLUquJtNi\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}" 13 | } 14 | } 15 | }, 16 | { 17 | "_id": "osstracker-per-repo-open-issues-stacked", 18 | "_type": "visualization", 19 | "_source": { 20 | "title": "Open Issues per Repo (over time)", 21 | "visState": "{\"title\":\"Open Issues per Repo (over time)\",\"type\":\"area\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"smoothLines\":false,\"scale\":\"linear\",\"interpolate\":\"linear\",\"mode\":\"stacked\",\"times\":[],\"addTimeMarker\":false,\"defaultYExtents\":false,\"setYExtents\":false,\"yAxis\":{},\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{\"text\":\"Time\"}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\",\"setYExtents\":false,\"defaultYExtents\":false},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Open Issues\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"area\",\"mode\":\"stacked\",\"data\":{\"label\":\"Open 
Issues\",\"id\":\"1\"},\"drawLinesBetweenPoints\":true,\"showCircles\":true,\"interpolate\":\"linear\",\"valueAxis\":\"ValueAxis-1\"}],\"legendPosition\":\"right\",\"type\":\"area\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"issues.openCount\",\"customLabel\":\"Open Issues\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"asOfISO\",\"interval\":\"d\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{},\"customLabel\":\"Time\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"group\",\"params\":{\"field\":\"repo_name\",\"size\":10,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Project\"}}],\"listeners\":{}}", 22 | "uiStateJSON": "{}", 23 | "description": "", 24 | "version": 1, 25 | "kibanaSavedObjectMeta": { 26 | "searchSourceJSON": "{\"index\":\"AWACP2vbrcBqLUquJtNi\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}" 27 | } 28 | } 29 | }, 30 | { 31 | "_id": "AWACRuOTrcBqLUquJtNj", 32 | "_type": "visualization", 33 | "_source": { 34 | "title": "Stars per Repo (over time)", 35 | "visState": "{\"title\":\"Stars per Repo (over time)\",\"type\":\"area\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"smoothLines\":false,\"scale\":\"linear\",\"interpolate\":\"linear\",\"mode\":\"stacked\",\"times\":[],\"addTimeMarker\":false,\"defaultYExtents\":false,\"setYExtents\":false,\"yAxis\":{},\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{\"text\":\"Time\"}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\",\"setYExtents\":false,\"defaultYExtents\":false},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Stars\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"area\",\"mode\":\"stacked\",\"data\":{\"label\":\"Stars\",\"id\":\"1\"},\"drawLinesBetweenPoints\":true,\"showCircles\":true,\"interpolate\":\"linear\",\"valueAxis\":\"ValueAxis-1\"}],\"legendPosition\":\"right\",\"type\":\"area\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"stars\",\"customLabel\":\"Stars\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"asOfISO\",\"interval\":\"d\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{},\"customLabel\":\"Time\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"group\",\"params\":{\"field\":\"repo_name\",\"size\":10,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Project\"}}],\"listeners\":{}}", 36 | "uiStateJSON": "{}", 37 | "description": "", 38 | "version": 1, 39 | "kibanaSavedObjectMeta": { 40 | "searchSourceJSON": "{\"index\":\"AWACP2vbrcBqLUquJtNi\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}" 41 | } 42 | } 43 | }, 44 | { 45 | "_id": "osstracker-per-repo-open-issues-bars", 46 | "_type": "visualization", 47 | "_source": { 48 | "title": "Total Open Issues per Repo", 49 | "visState": "{\"title\":\"Total Open Issues per 
Repo\",\"type\":\"histogram\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"scale\":\"linear\",\"mode\":\"stacked\",\"times\":[],\"addTimeMarker\":false,\"defaultYExtents\":false,\"setYExtents\":false,\"yAxis\":{},\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{\"text\":\"Project\"}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\",\"setYExtents\":false,\"defaultYExtents\":false},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Max issues.openCount\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"histogram\",\"mode\":\"stacked\",\"data\":{\"label\":\"Max issues.openCount\",\"id\":\"1\"},\"valueAxis\":\"ValueAxis-1\",\"drawLinesBetweenPoints\":true,\"showCircles\":true,\"interpolate\":\"linear\"}],\"legendPosition\":\"right\",\"type\":\"histogram\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"issues.openCount\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"repo_name\",\"size\":70,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Project\"}}],\"listeners\":{}}", 50 | "uiStateJSON": "{}", 51 | "description": "", 52 | "version": 1, 53 | "kibanaSavedObjectMeta": { 54 | "searchSourceJSON": "{\"index\":\"AWACP2vbrcBqLUquJtNi\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}" 55 | } 56 | } 57 | }, 58 | { 59 | "_id": "AWACTqn9rcBqLUquJtNm", 60 | "_type": "visualization", 61 | "_source": { 62 | "title": "Contributor Actions", 63 | "visState": "{\"title\":\"Contributor Actions\",\"type\":\"line\",\"params\":{\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{\"text\":\"Time\"}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\"},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Contributors & Actions\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"line\",\"mode\":\"normal\",\"data\":{\"label\":\"Avg Forks\",\"id\":\"1\"},\"valueAxis\":\"ValueAxis-1\",\"drawLinesBetweenPoints\":true,\"showCircles\":true},{\"show\":true,\"mode\":\"normal\",\"type\":\"line\",\"drawLinesBetweenPoints\":true,\"showCircles\":true,\"data\":{\"id\":\"2\",\"label\":\"Avg Stars\"},\"valueAxis\":\"ValueAxis-1\"},{\"show\":true,\"mode\":\"normal\",\"type\":\"line\",\"drawLinesBetweenPoints\":true,\"showCircles\":true,\"data\":{\"id\":\"4\",\"label\":\"Avg Contributors\"},\"valueAxis\":\"ValueAxis-1\"}],\"addTooltip\":true,\"addLegend\":true,\"legendPosition\":\"right\",\"times\":[],\"addTimeMarker\":false,\"type\":\"line\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"forks\",\"customLabel\":\"Avg 
Forks\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"stars\",\"customLabel\":\"Avg Stars\"}},{\"id\":\"3\",\"enabled\":true,\"type\":\"date_histogram\",\"schema\":\"segment\",\"params\":{\"field\":\"asOfISO\",\"interval\":\"auto\",\"customInterval\":\"2h\",\"min_doc_count\":1,\"extended_bounds\":{},\"customLabel\":\"Time\"}},{\"id\":\"4\",\"enabled\":true,\"type\":\"avg\",\"schema\":\"metric\",\"params\":{\"field\":\"numContributors\",\"customLabel\":\"Avg Contributors\"}}],\"listeners\":{}}", 64 | "uiStateJSON": "{}", 65 | "description": "", 66 | "version": 1, 67 | "kibanaSavedObjectMeta": { 68 | "searchSourceJSON": "{\"index\":\"AWACP2vbrcBqLUquJtNi\",\"query\":{\"match_all\":{}},\"filter\":[]}" 69 | } 70 | } 71 | }, 72 | { 73 | "_id": "AWACSeg4rcBqLUquJtNl", 74 | "_type": "visualization", 75 | "_source": { 76 | "title": "Total Contributors per Repo", 77 | "visState": "{\"title\":\"Total Contributors per Repo\",\"type\":\"histogram\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"scale\":\"linear\",\"mode\":\"stacked\",\"times\":[],\"addTimeMarker\":false,\"defaultYExtents\":false,\"setYExtents\":false,\"yAxis\":{},\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{\"text\":\"Project\"}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\",\"setYExtents\":false,\"defaultYExtents\":false},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Total Contributors\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"histogram\",\"mode\":\"stacked\",\"data\":{\"label\":\"Total Contributors\",\"id\":\"1\"},\"valueAxis\":\"ValueAxis-1\",\"drawLinesBetweenPoints\":true,\"showCircles\":true,\"interpolate\":\"linear\"}],\"legendPosition\":\"right\",\"type\":\"histogram\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"numContributors\",\"customLabel\":\"Total Contributors\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"repo_name\",\"size\":70,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Project\"}}],\"listeners\":{}}", 78 | "uiStateJSON": "{}", 79 | "description": "", 80 | "version": 1, 81 | "kibanaSavedObjectMeta": { 82 | "searchSourceJSON": "{\"index\":\"AWACP2vbrcBqLUquJtNi\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}" 83 | } 84 | } 85 | }, 86 | { 87 | "_id": "AWACSSTdrcBqLUquJtNk", 88 | "_type": "visualization", 89 | "_source": { 90 | "title": "Total Forks per Repo", 91 | "visState": "{\"title\":\"Total Forks per 
Repo\",\"type\":\"histogram\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"scale\":\"linear\",\"mode\":\"stacked\",\"times\":[],\"addTimeMarker\":false,\"defaultYExtents\":false,\"setYExtents\":false,\"yAxis\":{},\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{\"text\":\"Project\"}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\",\"setYExtents\":false,\"defaultYExtents\":false},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Forks\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"histogram\",\"mode\":\"stacked\",\"data\":{\"label\":\"Forks\",\"id\":\"1\"},\"valueAxis\":\"ValueAxis-1\",\"drawLinesBetweenPoints\":true,\"showCircles\":true,\"interpolate\":\"linear\"}],\"legendPosition\":\"right\",\"type\":\"histogram\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"forks\",\"customLabel\":\"Forks\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"repo_name\",\"size\":70,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Project\"}}],\"listeners\":{}}", 92 | "uiStateJSON": "{}", 93 | "description": "", 94 | "version": 1, 95 | "kibanaSavedObjectMeta": { 96 | "searchSourceJSON": "{\"index\":\"AWACP2vbrcBqLUquJtNi\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}" 97 | } 98 | } 99 | }, 100 | { 101 | "_id": "osstracker-per-repo-open-pullrequests-bars", 102 | "_type": "visualization", 103 | "_source": { 104 | "title": "Total Open Pull Requests per Repo", 105 | "visState": "{\"title\":\"Total Open Pull Requests per Repo\",\"type\":\"histogram\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"scale\":\"linear\",\"mode\":\"stacked\",\"times\":[],\"addTimeMarker\":false,\"defaultYExtents\":false,\"setYExtents\":false,\"yAxis\":{},\"grid\":{\"categoryLines\":false,\"style\":{\"color\":\"#eee\"}},\"categoryAxes\":[{\"id\":\"CategoryAxis-1\",\"type\":\"category\",\"position\":\"bottom\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\"},\"labels\":{\"show\":true,\"truncate\":100},\"title\":{\"text\":\"Project\"}}],\"valueAxes\":[{\"id\":\"ValueAxis-1\",\"name\":\"LeftAxis-1\",\"type\":\"value\",\"position\":\"left\",\"show\":true,\"style\":{},\"scale\":{\"type\":\"linear\",\"mode\":\"normal\",\"setYExtents\":false,\"defaultYExtents\":false},\"labels\":{\"show\":true,\"rotate\":0,\"filter\":false,\"truncate\":100},\"title\":{\"text\":\"Open Pull Requests\"}}],\"seriesParams\":[{\"show\":\"true\",\"type\":\"histogram\",\"mode\":\"stacked\",\"data\":{\"label\":\"Open Pull Requests\",\"id\":\"1\"},\"valueAxis\":\"ValueAxis-1\",\"drawLinesBetweenPoints\":true,\"showCircles\":true,\"interpolate\":\"linear\"}],\"legendPosition\":\"right\",\"type\":\"histogram\"},\"aggs\":[{\"id\":\"1\",\"enabled\":true,\"type\":\"max\",\"schema\":\"metric\",\"params\":{\"field\":\"pullRequests.openCount\",\"customLabel\":\"Open Pull 
Requests\"}},{\"id\":\"2\",\"enabled\":true,\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"repo_name\",\"size\":70,\"order\":\"desc\",\"orderBy\":\"1\",\"customLabel\":\"Project\"}}],\"listeners\":{}}", 106 | "uiStateJSON": "{}", 107 | "description": "", 108 | "version": 1, 109 | "kibanaSavedObjectMeta": { 110 | "searchSourceJSON": "{\"index\":\"AWACP2vbrcBqLUquJtNi\",\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}},\"filter\":[]}" 111 | } 112 | } 113 | } 114 | ] -------------------------------------------------------------------------------- /osstracker-console/.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | -------------------------------------------------------------------------------- /osstracker-console/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:4-slim 2 | 3 | MAINTAINER NetflixOSS 4 | 5 | ADD / / 6 | 7 | RUN npm install 8 | 9 | ENTRYPOINT ["node", "app.js"] -------------------------------------------------------------------------------- /osstracker-console/app.js: -------------------------------------------------------------------------------- 1 | var express = require('express'); 2 | var path = require('path'); 3 | var favicon = require('serve-favicon'); 4 | var logger = require('morgan'); 5 | var cookieParser = require('cookie-parser'); 6 | var bodyParser = require('body-parser'); 7 | var debug = require('debug')('osstracker:server'); 8 | var http = require('http'); 9 | 10 | var routes = require('./routes/index'); 11 | 12 | var app = express(); 13 | 14 | // view engine setup 15 | app.set('views', path.join(__dirname, 'views')); 16 | app.set('view engine', 'pug'); 17 | 18 | // uncomment after placing your favicon in /public 19 | app.use(favicon(path.join(__dirname, 'public', 'favicon.ico'))); 20 | app.use(logger('dev')); 21 | app.use(bodyParser.json()); 22 | app.use(bodyParser.urlencoded({ extended: true })); 23 | app.use(cookieParser()); 24 | app.use(express.static(path.join(__dirname, 'public'))); 25 | 26 | app.use('/', routes); 27 | 28 | // catch 404 and forward to error handler 29 | app.use(function(req, res, next) { 30 | var err = new Error('Not Found'); 31 | err.status = 404; 32 | next(err); 33 | }); 34 | 35 | // error handlers 36 | 37 | // development error handler 38 | // will print stacktrace 39 | if (app.get('env') === 'development') { 40 | app.use(function(err, req, res, next) { 41 | res.status(err.status || 500); 42 | res.render('error', { 43 | message: err.message, 44 | error: err 45 | }); 46 | }); 47 | } 48 | 49 | // production error handler 50 | // no stacktraces leaked to user 51 | app.use(function(err, req, res, next) { 52 | res.status(err.status || 500); 53 | res.render('error', { 54 | message: err.message, 55 | error: {} 56 | }); 57 | }); 58 | 59 | module.exports = app; 60 | 61 | /** 62 | * Get port from environment and store in Express. 63 | */ 64 | 65 | var port = normalizePort(process.env.PORT || '3000'); 66 | app.set('port', port); 67 | 68 | /** 69 | * Create HTTP server. 70 | */ 71 | 72 | var server = http.createServer(app); 73 | 74 | /** 75 | * Listen on provided port, on all network interfaces. 76 | */ 77 | 78 | server.listen(port); 79 | server.on('error', onError); 80 | server.on('listening', onListening); 81 | 82 | /** 83 | * Normalize a port into a number, string, or false. 
84 | */ 85 | 86 | function normalizePort(val) { 87 | var port = parseInt(val, 10); 88 | 89 | if (isNaN(port)) { 90 | // named pipe 91 | return val; 92 | } 93 | 94 | if (port >= 0) { 95 | // port number 96 | return port; 97 | } 98 | 99 | return false; 100 | } 101 | 102 | /** 103 | * Event listener for HTTP server "error" event. 104 | */ 105 | 106 | function onError(error) { 107 | if (error.syscall !== 'listen') { 108 | throw error; 109 | } 110 | 111 | var bind = typeof port === 'string' 112 | ? 'Pipe ' + port 113 | : 'Port ' + port; 114 | 115 | // handle specific listen errors with friendly messages 116 | switch (error.code) { 117 | case 'EACCES': 118 | console.error(bind + ' requires elevated privileges'); 119 | process.exit(1); 120 | break; 121 | case 'EADDRINUSE': 122 | console.error(bind + ' is already in use'); 123 | process.exit(1); 124 | break; 125 | default: 126 | throw error; 127 | } 128 | } 129 | 130 | /** 131 | * Event listener for HTTP server "listening" event. 132 | */ 133 | 134 | function onListening() { 135 | var addr = server.address(); 136 | var bind = typeof addr === 'string' 137 | ? 'pipe ' + addr 138 | : 'port ' + addr.port; 139 | debug('Listening on ' + bind); 140 | } 141 | 142 | -------------------------------------------------------------------------------- /osstracker-console/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "osstracker", 3 | "version": "0.0.0", 4 | "private": true, 5 | "scripts": { 6 | "start": "node app.js" 7 | }, 8 | "dependencies": { 9 | "body-parser": "~1.13.2", 10 | "cassandra-driver": "3.0.1", 11 | "cookie-parser": "~1.3.5", 12 | "debug": "~2.2.0", 13 | "express": "~4.13.1", 14 | "pug": "~2.0.0-beta3", 15 | "log4js": "~0.6.31", 16 | "morgan": "~1.6.1", 17 | "request": "~2.69.0", 18 | "serve-favicon": "~2.3.0" 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /osstracker-console/public/css/osstracker.css: -------------------------------------------------------------------------------- 1 | body { 2 | padding-top: 50px; 3 | } 4 | 5 | .footer { 6 | position: absolute; 7 | bottom: 0; 8 | width: 100%; 9 | /* Set the fixed height of the footer here */ 10 | height: 200px; 11 | background-position: right bottom; 12 | background-repeat: no-repeat; 13 | } 14 | 15 | /* TODO: Need to work on CSS to get it back to one two lines */ 16 | .typeahead, 17 | .tt-query, 18 | .tt-hint { 19 | /*width: 396px; 20 | height: 30px; 21 | padding: 8px 12px; 22 | font-size: 24px; 23 | line-height: 30px; 24 | border: 2px solid #ccc; 25 | -webkit-border-radius: 8px; 26 | -moz-border-radius: 8px; 27 | border-radius: 8px;*/ 28 | outline: none; 29 | } 30 | 31 | .typeahead { 32 | /*background-color: #fff;*/ 33 | } 34 | 35 | .typeahead:focus { 36 | /*border: 2px solid #0097cf;*/ 37 | } 38 | 39 | .tt-query { 40 | -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); 41 | -moz-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); 42 | box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); 43 | } 44 | 45 | .tt-hint { 46 | color: #999 47 | } 48 | 49 | .tt-dropdown-menu { 50 | width: 422px; 51 | margin-top: 12px; 52 | padding: 8px 0; 53 | background-color: #fff; 54 | border: 1px solid #ccc; 55 | border: 1px solid rgba(0, 0, 0, 0.2); 56 | -webkit-border-radius: 8px; 57 | -moz-border-radius: 8px; 58 | border-radius: 8px; 59 | -webkit-box-shadow: 0 5px 10px rgba(0,0,0,.2); 60 | -moz-box-shadow: 0 5px 10px rgba(0,0,0,.2); 61 | box-shadow: 0 5px 10px rgba(0,0,0,.2); 62 | } 63 | 64 | 
.tt-suggestion { 65 | padding: 3px 20px; 66 | font-size: 18px; 67 | line-height: 24px; 68 | } 69 | 70 | .tt-suggestion.tt-cursor { 71 | color: #fff; 72 | background-color: #0097cf; 73 | 74 | } 75 | 76 | .tt-suggestion p { 77 | margin: 0; 78 | } 79 | 80 | .smallmargin { 81 | margin-left: 10px; 82 | padding-left: 10px; 83 | margin-right: 10px; 84 | padding-right: 10px; 85 | width: 1300px; 86 | } -------------------------------------------------------------------------------- /osstracker-console/public/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Netflix/osstracker/ccbe617cab05e432a1736e4560a042f30a56f2bb/osstracker-console/public/favicon.ico -------------------------------------------------------------------------------- /osstracker-console/public/images/netflix-oss-logo-new.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Netflix/osstracker/ccbe617cab05e432a1736e4560a042f30a56f2bb/osstracker-console/public/images/netflix-oss-logo-new.png -------------------------------------------------------------------------------- /osstracker-console/public/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | OSS Tracker 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 21 | 22 | 23 | 45 |
46 | 47 |
48 |

 

49 | 50 |

Welcome to OSS Tracker

51 |

OSS Tracker is an application that collects information about a GitHub organization and aggregates the data across 52 | all projects within that organization into a single user interface to be used by various roles within the owning 53 | organization.

54 | 55 |

For the community manager, all repositories are listed and metrics are combined for the organization 56 | as a whole. A community manager can also organize projects into functional areas and appoint shepherds of these areas to assign 57 | management and engineering leads.

58 | 59 |

The shepherds of each functional area can not only assign and maintain leads for 60 | each project, but also view aggregated metrics for their area.

61 | 62 |

For individual owners, OSS Tracker gives a daily summary as well as historical 63 | information on key repository metrics such as open issues and pull requests, days since last commit, and average time to resolve 64 | issues and pull requests.

65 |

66 |
67 | 68 |
69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | -------------------------------------------------------------------------------- /osstracker-console/public/js/overall.js: -------------------------------------------------------------------------------- 1 | $(document).ready(function(){ 2 | var settings; 3 | var kibanaUrl = "/#/dashboard/OSSTracker-Overall-Stats?_g=(refreshInterval:(display:Off,pause:!f,section:0,value:0),time:(from:now-7d,mode:quick,to:now))"; 4 | 5 | $.getJSON("/js/settings.json", function(json) { 6 | settings = json; 7 | $.get('/hosts/eshost', function(data) { 8 | var hrefLink = 'http://' + data + ':' + settings.kibanaPort + kibanaUrl 9 | $("a[href='http://replaceme']").attr('href', hrefLink) 10 | }); 11 | }); 12 | 13 | $.get('/repos/overview', function(data) { 14 | $('#avgStarsText').text(data.avgStars); 15 | $('#avgForksText').text(data.avgForks); 16 | $('#avgOpenIssuesCountText').text(data.issues.avgOpenCount); 17 | $('#avgClosedIssuesCountText').text(data.issues.avgClosedCount); 18 | $('#totalOpenIssuesCountText').text(data.issues.totalOpenCount); 19 | $('#totalClosedIssuesCountText').text(data.issues.totalClosedCount); 20 | $('#avgOpenPRsCountText').text(data.pullRequests.avgOpenCount); 21 | $('#avgClosedPRsCountText').text(data.pullRequests.avgClosedCount); 22 | $('#totalOpenPRsCountText').text(data.pullRequests.totalOpenCount); 23 | $('#totalClosedPRsCountText').text(data.pullRequests.totalClosedCount); 24 | }); 25 | 26 | }); 27 | -------------------------------------------------------------------------------- /osstracker-console/public/js/ownership.js: -------------------------------------------------------------------------------- 1 | var users; 2 | var repos; 3 | 4 | $(document).ready(function() { 5 | $.get('/repos', function(data) { 6 | repos = data; 7 | }) 8 | .fail(function() { 9 | alert("problem with loading repos data"); 10 | }); 11 | 12 | var substringMatcher = function(strs) { 13 | return function findMatches(q, cb) { 14 | var matches, substringRegex; 15 | 16 | // an array that will be populated with substring matches 17 | matches = []; 18 | 19 | // regex used to determine if a string contains the substring `q` 20 | substrRegex = new RegExp(q, 'i'); 21 | 22 | // iterate through the pool of strings and for any string that 23 | // contains the substring `q`, add it to the `matches` array 24 | $.each(strs, function(i, str) { 25 | if (substrRegex.test(str)) { 26 | // the typeahead jQuery plugin expects suggestions to a 27 | // JavaScript object, refer to typeahead docs for more info 28 | matches.push({ value: str }); 29 | } 30 | }); 31 | 32 | cb(matches); 33 | }; 34 | }; 35 | 36 | var usersREST = "/users" 37 | var names = [] 38 | $.get(usersREST, function(data) { 39 | users = data; 40 | 41 | $.each(data, function(i, item) { 42 | names.push(item.name); 43 | }); 44 | 45 | $('#repoModalRepoDevLead').typeahead({ 46 | hint: true, 47 | highlight: true, 48 | minLength: 1 49 | }, 50 | { 51 | name: 'devNames', 52 | displayKey: 'value', 53 | source: substringMatcher(names) 54 | }); 55 | 56 | $('#repoModalRepoMgrLead').typeahead({ 57 | hint: true, 58 | highlight: true, 59 | minLength: 1 60 | }, 61 | { 62 | name: 'mgrNames', 63 | displayKey: 'value', 64 | source: substringMatcher(names) 65 | }); 66 | }) 67 | .fail(function() { 68 | alert("problem with loading users data"); 69 | }); 70 | 71 | var orgsREST = "/repos/orgs"; 72 | var orgs = [] 73 | $.get(orgsREST, function(data) { 74 | $.each(data, function(i, item) { 75 | orgs.push(item.orgName); 76 | }); 77 | 78 | 
$('#repoModalRepoOrg').typeahead({ 79 | hint : true, 80 | highlight : true, 81 | minLength : 1 82 | }, { 83 | name : 'orgNames', 84 | displayKey : 'value', 85 | source : substringMatcher(orgs) 86 | }); 87 | }) 88 | .fail(function() { 89 | alert("problem with loading repos/orgs data"); 90 | }); 91 | 92 | $(document).ajaxStop(function() { 93 | $('#table').bootstrapTable({ 94 | columns: [{ 95 | field: 'name', 96 | title: 'Repo', 97 | sortable: true, 98 | formatter: repoLinkFormatter 99 | }, { 100 | field: 'orgName', 101 | title: 'OSS Area', 102 | sortable: true 103 | }, { 104 | field: 'mgrLead', 105 | title: 'Mgr Lead', 106 | sortable: true, 107 | formatter: mgrEmpIdToNameFormatter 108 | }, { 109 | field: 'devLead', 110 | title: 'Dev Lead', 111 | sortable: true, 112 | formatter: mgrEmpIdToNameFormatter 113 | }, { 114 | field: 'name', 115 | title: 'Edit', 116 | sortable: false, 117 | formatter: editLinkFormatter 118 | }], 119 | data: repos 120 | }); 121 | 122 | $(window).resize(function () { 123 | $('#table').bootstrapTable('resetView'); 124 | }); 125 | }); 126 | }); 127 | 128 | function repoLinkFormatter(value, row) { 129 | return '' + value + ''; 130 | } 131 | 132 | function editLinkFormatter(value, row) { 133 | return 'Edit'; 134 | } 135 | 136 | function mgrEmpIdToNameFormatter(value, row) { 137 | return getNameFromEmpId(value); 138 | } 139 | 140 | function showRepoEdit(repoName) { 141 | $('#repoModalRepoName').val(repoName); 142 | $('#repoModalRepoOrg').val(getRepoOrg(repoName)); 143 | $('#repoModalRepoDevLead').val(getNameFromEmpId(getRepoDevLeadEmpIp(repoName))); 144 | $('#repoModalRepoMgrLead').val(getNameFromEmpId(getRepoMgrLeadEmpIp(repoName))); 145 | $('#clickRepoLinks').modal('show'); 146 | } 147 | 148 | function updateRepository() { 149 | var repoName = $('#repoModalRepoName').val(); 150 | var repoOrg = $('#repoModalRepoOrg').val(); 151 | var repoMgrLead = $('#repoModalRepoMgrLead').val(); 152 | var repoDevLead = $('#repoModalRepoDevLead').val(); 153 | var mgrLead = getEmpIdFromName(repoMgrLead); 154 | var devLead = getEmpIdFromName(repoDevLead); 155 | 156 | if (mgrLead == -1 || devLead == -1) { 157 | $('#clickRepoLinks').modal('hide'); 158 | $('#errorModalErrorText').text('Currently we only support known Netflix github users. 
Add your github account to whitepages.'); 159 | $('#errorModal').modal('show'); 160 | return; 161 | } 162 | 163 | $.post( 164 | '/repos/' + repoName, 165 | { repoOrg: repoOrg, mgrLead: mgrLead, devLead: devLead}, 166 | function(data) { 167 | console.log(data); 168 | } 169 | ); 170 | $('#table').bootstrapTable('showLoading'); 171 | loadReposData(function() { 172 | $('#table').data = repos; 173 | $('#table').bootstrapTable('hideLoading'); 174 | $('#table').bootstrapTable('load', repos); 175 | $('#clickRepoLinks').modal('hide'); 176 | }) 177 | } 178 | 179 | function getGitHubUser(name) { 180 | var user = users.filter(function(item) { 181 | return item.name == name; 182 | }) 183 | var githubId = user[0].githubId; 184 | return githubId; 185 | } 186 | 187 | function getRepoOrg(repoName) { 188 | var repo = repos.filter(function(item) { 189 | return item.name == repoName; 190 | }) 191 | return repo[0].orgName; 192 | } 193 | 194 | function getRepoMgrLeadEmpIp(repoName) { 195 | var repo = repos.filter(function(item) { 196 | return item.name == repoName; 197 | }) 198 | return repo[0].mgrLead; 199 | } 200 | 201 | function getRepoDevLeadEmpIp(repoName) { 202 | var repo = repos.filter(function(item) { 203 | return item.name == repoName; 204 | }) 205 | return repo[0].devLead; 206 | } 207 | 208 | function getNameFromEmpId(id) { 209 | var emp = users.filter(function(item) { 210 | return item.employeeId == id; 211 | }) 212 | if (emp.length == 0) { 213 | return id; 214 | } 215 | return emp[0].name; 216 | } 217 | 218 | function getEmpIdFromName(name) { 219 | var emp = users.filter(function(item) { 220 | return item.name == name; 221 | }) 222 | if (emp.length == 0) { 223 | return -1; 224 | } 225 | return emp[0].employeeId; 226 | } 227 | 228 | function loadReposData(callback/*()*/) { 229 | $.get('/repos', function(data) { 230 | repos = data; 231 | callback(); 232 | }); 233 | } -------------------------------------------------------------------------------- /osstracker-console/public/js/perrepo.js: -------------------------------------------------------------------------------- 1 | var esHost; 2 | var esPort; 3 | 4 | $(document).ready(function(){ 5 | var settings; 6 | 7 | $.getJSON("/js/settings.json", function(json) { 8 | settings = json; 9 | esPort = settings.kibanaPort; 10 | }); 11 | 12 | $.get('/repos/stats', function(data) { 13 | data = data.filter(function (elem) { 14 | return publicOrPrivate ? 
elem.public : !elem.public; 15 | }) 16 | 17 | $('#statsTable').bootstrapTable({ 18 | columns: [ 19 | { 20 | field: 'name', 21 | title: 'Repo', 22 | sortable: true, 23 | formatter: repoLinkFormatter 24 | }, 25 | { 26 | title: 'Graphs', 27 | formatter: esStatsFormatter 28 | }, 29 | { 30 | field: 'forks', 31 | title: 'Forks', 32 | sortable: true 33 | }, 34 | { 35 | field: 'stars', 36 | title: 'Stars', 37 | sortable: true 38 | }, 39 | { 40 | field: 'issueOpenCount', 41 | title: 'Open Issues', 42 | sortable: true 43 | }, 44 | { 45 | field: 'issueClosedCount', 46 | title: 'Closed Issues', 47 | sortable: true, 48 | }, 49 | { 50 | field: 'issueAvgClose', 51 | title: 'Avg Issue Age', 52 | sortable: true 53 | }, 54 | { 55 | field: 'prOpenCount', 56 | title: 'Open PRs', 57 | sortable: true 58 | }, 59 | { 60 | field: 'prClosedCount', 61 | title: 'Closed PRs', 62 | sortable: true 63 | }, 64 | { 65 | field: 'prAvgClose', 66 | title: 'Avg PR Age', 67 | sortable: true 68 | }, 69 | { 70 | field: 'daysSinceLastCommit', 71 | title: 'Last Commit', 72 | sortable: true 73 | }, 74 | { 75 | field: 'numContributors', 76 | title: 'Contributors', 77 | sortable: true 78 | }, 79 | { 80 | field: 'public', 81 | title: 'Public', 82 | sortable: true 83 | }, 84 | { 85 | field: 'osslifecycle', 86 | title: 'OSS Lifecycle', 87 | sortable: true 88 | }, 89 | ], 90 | data: data 91 | }); 92 | 93 | $(window).resize(function () { 94 | $('#statsTable').bootstrapTable('resetView'); 95 | $('#tags').tagsinput('refresh'); 96 | }); 97 | }); 98 | 99 | $.get('/hosts/eshost', function(data) { 100 | esHost = data; 101 | }); 102 | 103 | }); 104 | 105 | function repoLinkFormatter(value, row) { 106 | return '' + value + ''; 107 | } 108 | 109 | function esStatsFormatter(value, row) { 110 | return ''; 114 | } 115 | -------------------------------------------------------------------------------- /osstracker-console/public/js/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "kibanaPort": 5601 3 | } 4 | -------------------------------------------------------------------------------- /osstracker-console/public/overall.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | OSS Tracker 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 21 | 22 | 23 | 45 |
46 | 47 |
48 |

Entire Organization

49 |

The community manager uses this page to track the overall health of all of our projects.

50 |

If you want to work with the data directly, please see the 51 | Kibana dashboard 52 |

53 |

Critical Overall Statistics

54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 |
Health Metric Per Repository: Current Value
Average number of open issues: ? issues
Total number of open issues: ? issues
Average number of open PRs: ? PRs
Total number of open PRs: ? PRs
Average number of stars: ? stars
Average number of forks: ? forks
84 | 85 |

Additional Overall Statistics

86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 |
Health Metric Per Repository: Current Value
Average number of closed issues: ? issues
Average number of closed PRs: ? PRs
Total number of closed issues: ? issues
Total number of closed PRs: ? PRs
108 |
109 | 110 | 124 | 125 |
126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | -------------------------------------------------------------------------------- /osstracker-console/public/ownership.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | OSS Tracker 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 24 | 25 | 26 | 48 |
49 | 50 |
51 |

Project Ownership

52 |

Functional area shepherds should use this view to assign both management and engineering focal points 53 | for each project.

54 | 55 | 59 | 60 | 61 | 62 | 63 | 97 | 98 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | -------------------------------------------------------------------------------- /osstracker-console/public/perrepoprivate.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | OSS Tracker 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 24 | 25 | 26 | 48 | 49 |
50 |

Private Repositories

51 |

Focal points should use this view to compare their project with other projects. Click the graph 52 | column to see historical views for your project.

53 | 54 |

Statistics

55 | 56 |
60 | 61 | 62 | 63 | 64 | 65 | 78 | 79 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 103 | 104 | 105 | 106 | -------------------------------------------------------------------------------- /osstracker-console/public/perrepopublic.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | OSS Tracker 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 24 | 25 | 26 | 48 | 49 |
50 |

Public Repositories

51 |

Focal points should use this view to compare their project with other projects. Click the graph 52 | column to see historical views for your project.

53 | 54 |

Statistics

55 | 56 |
60 | 61 | 62 | 63 | 64 | 65 | 78 | 79 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 103 | 104 | 105 | 106 | -------------------------------------------------------------------------------- /osstracker-console/routes/fakeEmployeeDirectoryService.js: -------------------------------------------------------------------------------- 1 | exports.getGithubIds = function(callback/*(err, githubIDs)*/) { 2 | var fakeResponse = [ 3 | {"employeeId":"111111","githubId":"ghId1","email":"user1@netflix.com","name":"User One"}, 4 | {"employeeId":"222222","githubId":"ghId2","email":"user2@netflix.com","name":"User Two"}, 5 | {"employeeId":"333333","githubId":"ghId3","email":"user3@netflix.com","name":"User Three"} 6 | ]; 7 | callback(null, fakeResponse); 8 | } -------------------------------------------------------------------------------- /osstracker-console/routes/index.js: -------------------------------------------------------------------------------- 1 | var express = require('express'); 2 | var log4js = require('log4js'); 3 | var request = require('request'); 4 | var cassandra = require('cassandra-driver'); 5 | 6 | var router = express.Router(); 7 | 8 | var settings = require('../settings.json'); 9 | 10 | var employeeDirectory = require(settings.employeeDirectory); 11 | 12 | var CASS_HOST = process.env.CASS_HOST; 13 | if (!CASS_HOST) { 14 | console.error("CASS_HOST environment variable not defined"); 15 | process.exit(1); 16 | } 17 | var CASS_PORT = parseInt(process.env.CASS_PORT) || 7104; 18 | var ES_HOST = process.env.ES_HOST; 19 | if (!ES_HOST) { 20 | console.error("ES_HOST environment variable not defined"); 21 | process.exit(1); 22 | } 23 | var ES_PORT = parseInt(process.env.ES_PORT) || 7104; 24 | 25 | var logger = log4js.getLogger(); 26 | logger.setLevel('INFO'); 27 | var dbClient; 28 | var esBaseUrl = 'http://' + ES_HOST + ':' + ES_PORT; 29 | 30 | var SELECT_ALL_FROM_REPO_ORGS = "SELECT * FROM repo_orgs"; 31 | var INSERT_INTO_REPOS = "INSERT INTO repo_info (gh_repo_name, org_short, dev_lead_empid, mgr_lead_empid) VALUES (?, ?, ?, ?)"; 32 | var SELECT_ALL_FROM_REPO_OWNERSHIP = "SELECT * FROM repo_info"; 33 | 34 | // returns a single string of what elastic search DNS name should 35 | // be used in direct links in the console 36 | router.get('/hosts/eshost', function(req, res, next) { 37 | res.send(ES_HOST); 38 | }); 39 | 40 | // Response is JSON list that has repo items with repo name, repo org (short form), 41 | // employee ids for manager lead and development lead 42 | // [ { name: 'somerepo', orgName: 'CRSL', mgrLead: "12345", devLead: "56789" }, ... ] 43 | router.get('/repos', function(req, res, next) { 44 | dbClient.execute(SELECT_ALL_FROM_REPO_OWNERSHIP, [], {prepare: true}, function(err, result) { 45 | if (err) { 46 | logger.error('error ' + JSON.stringify(err)); 47 | res.status(500).end(); 48 | return; 49 | } 50 | 51 | var repos = []; 52 | for (ii = 0; ii < result.rows.length; ii++) { 53 | var repo = { 54 | name : result.rows[ii].gh_repo_name, 55 | orgName : result.rows[ii].org_short, 56 | mgrLead: result.rows[ii].mgr_lead_empid, 57 | devLead: result.rows[ii].dev_lead_empid 58 | } 59 | repos.push(repo); 60 | } 61 | 62 | res.send(repos); 63 | }); 64 | }); 65 | 66 | // 67 | // Response is JSON list that has users with name, github id, employee id, and email 68 | // [ { employeeId: '123456', githubId: 'githubusername', email: 'user@netflix.com', name: 'First Last' }, ... 
] 69 | router.get('/users', function(req, res, next) { 70 | employeeDirectory.getGithubIds(function(err, response) { 71 | if (err) { 72 | logger.error('error = ' + JSON.stringify(err)); 73 | res.status(500).end(); 74 | } 75 | res.send(response); 76 | }); 77 | }); 78 | 79 | // Response is JSON list that has orgs with long and short name 80 | // [ {"orgName":"DP","orgDesc":"Data Persistence"} , {"orgName":"BDT","orgDesc":"Build and Delivery Tools"}, ... ] 81 | router.get('/repos/orgs', function(req, res, next) { 82 | dbClient.execute(SELECT_ALL_FROM_REPO_ORGS, [], {prepare: true}, function(err, result) { 83 | if (err) { 84 | logger.error('error ' + JSON.stringify(err)); 85 | res.status(500).end(); 86 | return; 87 | } 88 | var orgs = [] 89 | for (ii = 0; ii < result.rows.length; ii++) { 90 | var org = { 91 | "orgName" : result.rows[ii].org_short, 92 | "orgDesc" : result.rows[ii].org_description 93 | } 94 | orgs.push(org); 95 | } 96 | res.send(orgs) 97 | return; 98 | }); 99 | }); 100 | 101 | // Request to update the ownership of a repository 102 | // Expects repoName, repoOrg, mgrLead (employee id ), devLead (employee id) 103 | router.post('/repos/:repoName', function(req, res) { 104 | var repoName = req.params.repoName; 105 | var repoOrg = req.body.repoOrg; 106 | var repoMgrLead = req.body.mgrLead; 107 | var repoDevLead = req.body.devLead; 108 | 109 | var params = [repoName, repoOrg, repoDevLead, repoMgrLead]; 110 | logger.debug(INSERT_INTO_REPOS + ' ' + params); 111 | dbClient.execute(INSERT_INTO_REPOS, params, {prepare: true}, function(err) { 112 | if (err) { 113 | logger.error("err = " + JSON.stringify(err)); 114 | res.status(500).end(); 115 | return; 116 | } 117 | }); 118 | res.status(200).end(); 119 | return; 120 | }); 121 | 122 | // 123 | //Response is JSON list that has repo stats with various feilds of format 124 | // [ { 125 | // "name":"repoName","forks":100,"stars":200,"numContributors":20,"issueOpenCount":10,"issueClosedCount":300, 126 | // "issueAvgClose":13,"prOpenCount":8,"prClosedCount":259,"prAvgClose":3,"daysSinceLastCommit":59,"public":true, 127 | // "osslifecycle":"active"}, ... 
] 128 | router.get('/repos/stats', function (req, res) { 129 | queryLatestStats(function(err, allrepos) { 130 | if (err) { 131 | logger.error("err = " + JSON.stringify(err)); 132 | res.status(500).end(); 133 | return; 134 | } 135 | 136 | var repos = []; 137 | var therepos = allrepos.repos 138 | for (ii = 0; ii < therepos.length; ii++) { 139 | var therepo = therepos[ii]; 140 | var repo = { 141 | name: therepo.repo_name, 142 | forks: therepo.forks, 143 | stars: therepo.stars, 144 | numContributors: therepo.numContributors, 145 | issueOpenCount: therepo.issues.openCount, 146 | issueClosedCount: therepo.issues.closedCount, 147 | issueAvgClose: therepo.issues.avgTimeToCloseInDays, 148 | prOpenCount: therepo.pullRequests.openCount, 149 | prClosedCount: therepo.pullRequests.closedCount, 150 | prAvgClose: therepo.pullRequests.avgTimeToCloseInDays, 151 | daysSinceLastCommit: therepo.commits.daysSinceLastCommit, 152 | public: therepo.public, 153 | osslifecycle: therepo.osslifecycle 154 | }; 155 | repos.push(repo); 156 | } 157 | res.send(repos); 158 | }); 159 | }); 160 | 161 | // Response is a single elasticsearch document with the stats from each project 162 | // format is: 163 | // {"asOfISO":"2016-02-09T08:18:44Z","asOfYYYYMMDD":"2016-02-09","avgForks":134,"avgStars":599, 164 | // "issues":{ "avgOpenCount":39,"avgClosedCount":210,"totalOpenCount":356,"totalClosedCount":1897}, 165 | // "pullRequests":{"avgOpenCount":8,"avgClosedCount":154,"totalOpenCount":73,"totalClosedCount":1389}, 166 | // "commits":{}, 167 | // "repos":[ 168 | // {"asOfISO":"2016-02-09T08:18:44Z","asOfYYYYMMDD":"2016-02-09","repo_name":"repoName","public":true, 169 | // "osslifecycle":"active","forks":172,"stars":821,"numContributors":25, 170 | // "issues": {"openCount":68,"closedCount":323,"avgTimeToCloseInDays":13}, 171 | // "pullRequests":{"openCount":8,"closedCount":259,"avgTimeToCloseInDays":3}, 172 | // "commits":{"daysSinceLastCommit":59},"contributors":["user", ... ] 173 | // }, ... 
174 | // ] 175 | // } 176 | router.get('/repos/overview', function (req, res) { 177 | queryLatestStats(function(err, allrepos) { 178 | if (err) { 179 | logger.error("err = " + JSON.stringify(err)); 180 | res.status(500).end(); 181 | return; 182 | } 183 | res.send(allrepos); 184 | }); 185 | }); 186 | 187 | function queryAllStats(repoName, callback/*(err, hits)*/) { 188 | // query to search for a specific repo returning only the last document (date wise) 189 | var query = { "size": 1, "sort": [{"asOfYYYYMMDD": {"order": "desc"}}]}; 190 | var url = esBaseUrl + '/osstracker/allrepos_stats/_search'; 191 | var qArgs = { method: 'POST', uri: url, json: query}; 192 | request(qArgs, function (err, response, body) { 193 | if (err) { 194 | logger.error('error = ' + err); 195 | callback(err, null); 196 | return; 197 | } 198 | else { 199 | if (response.statusCode == 200) { 200 | callback(null, body.hits.hits[0]); 201 | return; 202 | } 203 | else { 204 | logger.error('error status code = ' + response.statusCode); 205 | callback('error status code = ' + response.statusCode, null); 206 | return; 207 | } 208 | } 209 | }); 210 | } 211 | 212 | function queryLatestStats(callback/*(err, stats)*/) { 213 | // query to search for a specific repo returning only the last document (date wise) 214 | var query = { "size": 1, "sort": [{"asOfYYYYMMDD": {"order": "desc"}}]}; 215 | var url = esBaseUrl + '/osstracker/allrepos_stats/_search'; 216 | 217 | var qArgs = { method: 'POST', uri: url, json: query}; 218 | request(qArgs, function (err, response, body) { 219 | if (err) { 220 | logger.error('error = ' + err); 221 | callback(err, null); 222 | return; 223 | } 224 | else { 225 | logger.debug("response = " + JSON.stringify(body)); 226 | if (response.statusCode == 200) { 227 | callback(null, body.hits.hits[0]._source); 228 | return; 229 | } 230 | else { 231 | logger.error('error status code = ' + response.statusCode); 232 | callback('error status code = ' + response.statusCode, null); 233 | return; 234 | } 235 | } 236 | }); 237 | } 238 | 239 | function connectToDataBase(hosts, callback/*(err, dbClient)*/) { 240 | logger.info("hosts = " + hosts) 241 | client = new cassandra.Client({ contactPoints: hosts, protocolOptions : { port : CASS_PORT }, keyspace: 'osstracker'}); 242 | if (!client) { 243 | callback("error connecting to database", null); 244 | } 245 | else { 246 | logger.info("database client = " + client); 247 | callback(null, client); 248 | } 249 | } 250 | 251 | function getDBClient() { 252 | connectToDataBase([CASS_HOST], function(err, client) { 253 | if (err) { 254 | logger.error("could not get database connection, waiting"); 255 | } 256 | else { 257 | dbClient = client; 258 | } 259 | }); 260 | } 261 | 262 | var waitForDbConnections = setInterval(function () { 263 | if (dbClient) { 264 | clearInterval(waitForDbConnections); 265 | return; 266 | } 267 | else { 268 | getDBClient(); 269 | } 270 | }, 5000); 271 | 272 | module.exports = router; 273 | -------------------------------------------------------------------------------- /osstracker-console/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "employeeDirectory": "./fakeEmployeeDirectoryService" 3 | } -------------------------------------------------------------------------------- /osstracker-console/views/error.jade: -------------------------------------------------------------------------------- 1 | extends layout 2 | 3 | block content 4 | h1= message 5 | h2= error.status 6 | pre #{error.stack} 7 | 
-------------------------------------------------------------------------------- /osstracker-console/views/layout.jade: -------------------------------------------------------------------------------- 1 | doctype html 2 | html 3 | head 4 | title= title 5 | link(rel='stylesheet', href='/stylesheets/style.css') 6 | body 7 | block content -------------------------------------------------------------------------------- /osstracker-ddl/elasticsearch-mappings.json: -------------------------------------------------------------------------------- 1 | { 2 | "properties": { 3 | "repo_name": { 4 | "type": "keyword", 5 | "index": "true" 6 | }, 7 | "osslifecycle": { 8 | "type": "keyword", 9 | "index": "true" 10 | } 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /osstracker-ddl/elasticsearch.txt: -------------------------------------------------------------------------------- 1 | elasticsearch index: 2 | 3 | GET http://elasticsearch:9200/osstracker 4 | 5 | Must configure the index first, or queries will fail: 6 | 7 | PUT http://elasticsearch:9200/osstracker 8 | 9 | { 10 | "mappings": { 11 | "repo_stats" : { 12 | "properties": { 13 | "repo_name": { 14 | "type": "text", 15 | "index": "false" 16 | }, 17 | "osslifecycle": { 18 | "type": "text", 19 | "index": "false" 20 | } 21 | } 22 | } 23 | } 24 | } 25 | 26 | See the size of the index: 27 | GET http://elasticsearch:9200/osstracker/repo_stats/_count 28 | -------------------------------------------------------------------------------- /osstracker-ddl/osstracker.cql: -------------------------------------------------------------------------------- 1 | DROP KEYSPACE IF EXISTS osstracker; 2 | 3 | CREATE KEYSPACE osstracker WITH replication = { 4 | 'class': 'SimpleStrategy', 5 | 'replication_factor': 3 6 | }; 7 | 8 | USE osstracker; 9 | 10 | CREATE TABLE repo_orgs ( 11 | org_short text PRIMARY KEY, 12 | org_description text 13 | ); 14 | 15 | CREATE TABLE repo_info ( 16 | gh_repo_name text PRIMARY KEY, 17 | mgr_lead_empid text, 18 | gh_exists boolean, 19 | gh_org text, 20 | dev_lead_empid text, 21 | org_short text, 22 | gh_public boolean, 23 | osslifecycle text, 24 | db_last_stats_update timestamp, 25 | es_last_stats_update timestamp 26 | ); 27 | -------------------------------------------------------------------------------- /osstracker-scraper/build.gradle: -------------------------------------------------------------------------------- 1 | apply plugin: 'scala' 2 | 3 | sourceCompatibility = 1.8 4 | targetCompatibility = 1.8 5 | 6 | dependencies { 7 | compile 'org.scala-lang:scala-library:2.12.4' 8 | compile 'org.slf4j:slf4j-api:1.7.13' 9 | compile 'org.slf4j:slf4j-log4j12:1.7.13' 10 | compile 'org.kohsuke:github-api:1.92' 11 | compile 'com.typesafe.play:play-json_2.12:2.6.8' 12 | compile 'joda-time:joda-time:2.9.1' 13 | compile 'org.rogach:scallop_2.12:3.1.1' 14 | compile 'com.datastax.cassandra:cassandra-driver-core:3.0.0' 15 | compile 'org.apache.httpcomponents:httpclient:4.5.1' 16 | testCompile 'org.scalatest:scalatest_2.12:3.0.4' 17 | } -------------------------------------------------------------------------------- /osstracker-scraper/src/main/scala/com/netflix/oss/tools/osstrackerscraper/CassandraAccess.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2016 Netflix, Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.netflix.oss.tools.osstrackerscraper 17 | 18 | import java.lang 19 | import java.util.Date 20 | 21 | import com.datastax.driver.core.exceptions.DriverException 22 | import com.datastax.driver.core._ 23 | import com.netflix.oss.tools.osstrackerscraper.OssLifecycle.OssLifecycle 24 | import org.slf4j.LoggerFactory 25 | 26 | class RepoInfo(val name: String, val devLeadId: String, val mgrLeadId: String, val org: String, 27 | val statsLastUpdateDB: Date, val statsLastUpdateES: Date, val public: Boolean, val githubOrg: String, 28 | val githubExists: Boolean, val osslifecycle: OssLifecycle) extends Ordered[RepoInfo] { 29 | 30 | override def equals(o: Any) = o match { 31 | case that: RepoInfo => that.name == this.name 32 | case _ => false 33 | } 34 | 35 | override def hashCode = name.hashCode 36 | 37 | override def compare(that: RepoInfo): Int = this.name compare that.name 38 | 39 | override def toString(): String = s"RepoOwnership($githubOrg/$name, $devLeadId, $mgrLeadId, $org, $statsLastUpdateDB, $statsLastUpdateES, $public, $githubExists, ${osslifecycle.toString}})"; 40 | 41 | } 42 | 43 | object ESDateOrdering extends Ordering[RepoInfo] { 44 | def compare(a:RepoInfo, b:RepoInfo) = { 45 | val dateCompare = a.statsLastUpdateES.compareTo(b.statsLastUpdateES) 46 | if (dateCompare == 0) { 47 | a.name compare b.name 48 | } 49 | else { 50 | dateCompare 51 | } 52 | } 53 | } 54 | 55 | class CassandraAccesss(cassHost: String, cassPort: Int) { 56 | val logger = LoggerFactory.getLogger(getClass) 57 | 58 | val SELECT_ALL_FROM_REPOS_OWNERSHIP = "SELECT * FROM repo_info" 59 | val INSERT_INTO_REPOS_OWNERSHIP = "INSERT INTO repo_info (gh_repo_name, dev_lead_empid, mgr_lead_empid, org_short, db_last_stats_update, es_last_stats_update, gh_public, gh_org, gh_exists, osslifecycle) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)" 60 | val UPDATE_REPOS_INFO_SET_NOT_EXIST = "UPDATE repo_info SET gh_exists = FALSE WHERE gh_repo_name = ?" 61 | val UPDATE_REPOS_TO_CURRENT_TIME_DB = "UPDATE repo_info SET db_last_stats_update = ? WHERE gh_repo_name = ?" 62 | val UPDATE_REPOS_TO_CURRENT_TIME_ES = "UPDATE repo_info SET es_last_stats_update = ? WHERE gh_repo_name = ?" 63 | val UPDATE_REPOS_INFO_SET_PUBLIC = "UPDATE repo_info SET gh_public = ? WHERE gh_repo_name = ?" 64 | val UPDATE_REPOS_INFO_SET_LIFECYCLE = "UPDATE repo_info SET osslifecycle = ? WHERE gh_repo_name = ?" 
65 | 66 | 67 | var cluster: Cluster = Cluster.builder().addContactPoint(cassHost).withPort(cassPort).build() 68 | var session: Session = cluster.connect(Conf.OSSTRACKER_KEYSPACE) 69 | 70 | val selectAllFromReposOwnership = session.prepare(SELECT_ALL_FROM_REPOS_OWNERSHIP) 71 | val insertIntoReposOwnershipPS = session.prepare(INSERT_INTO_REPOS_OWNERSHIP) 72 | val updateReposInfoSetNotExist = session.prepare(UPDATE_REPOS_INFO_SET_NOT_EXIST) 73 | val updateReposToCurrentTimeDB = session.prepare(UPDATE_REPOS_TO_CURRENT_TIME_DB) 74 | val updateReposToCurrentTimeES = session.prepare(UPDATE_REPOS_TO_CURRENT_TIME_ES) 75 | val updateReposInfoSetPublic = session.prepare(UPDATE_REPOS_INFO_SET_PUBLIC) 76 | val updateReposInfoSetLifecycle = session.prepare(UPDATE_REPOS_INFO_SET_LIFECYCLE) 77 | 78 | def close(): Unit = { 79 | session.close() 80 | cluster.close() 81 | } 82 | 83 | def getAllRepos() : List[RepoInfo] = { 84 | try { 85 | val rs = session.execute(selectAllFromReposOwnership.bind()) 86 | import scala.collection.JavaConversions._ 87 | val allRepos = rs.all().map(row => { 88 | val repoName = row.getString("gh_repo_name") 89 | val dev_lead_empid = row.getString("dev_lead_empid") 90 | val mgr_lead_empid = row.getString("mgr_lead_empid") 91 | val org_short = row.getString("org_short") // TODO: Deal with timezones 92 | val last_stats_update_db = row.getTimestamp("db_last_stats_update") match { 93 | case null => new Date(0) 94 | case date: Date => date 95 | } 96 | val last_stats_update_es = row.getTimestamp("es_last_stats_update") match { 97 | case null => new Date(0) 98 | case date: Date => date 99 | } 100 | val public = row.getBool("gh_public") 101 | val repoOrg = row.getString("gh_org") 102 | val exists = row.getBool("gh_exists") 103 | val osslifecycle = row.getString("osslifecycle") 104 | val osslifecycleE = OssLifecycleParser.getOssLifecycle(osslifecycle) 105 | val repoOwnership = new RepoInfo(repoName, dev_lead_empid, mgr_lead_empid, org_short, last_stats_update_db, last_stats_update_es, public, repoOrg, exists, osslifecycleE) 106 | repoOwnership 107 | }) 108 | allRepos.toList 109 | } 110 | catch { 111 | case ex: DriverException => { 112 | logger.error("failed to query all repos", ex) 113 | List[RepoInfo]() 114 | } 115 | } 116 | } 117 | 118 | def newRepo(repo: RepoInfo) : Boolean = { 119 | try { 120 | val statement = new BoundStatement(insertIntoReposOwnershipPS) 121 | session.execute(statement.bind( 122 | repo.name, 123 | repo.devLeadId, 124 | repo.mgrLeadId, 125 | repo.org, 126 | repo.statsLastUpdateDB, 127 | repo.statsLastUpdateES, 128 | new lang.Boolean(repo.public), 129 | repo.githubOrg, 130 | new lang.Boolean(repo.githubExists), 131 | repo.osslifecycle.toString 132 | )) 133 | true 134 | } 135 | catch { 136 | case ex: DriverException => { 137 | logger.error("failed to upsert repo", ex) 138 | false 139 | } 140 | } 141 | } 142 | 143 | def markReposAsNonExistant(repos: Seq[String]) : Boolean = { 144 | for (repo <- repos) { 145 | try { 146 | val statement = new BoundStatement(updateReposInfoSetNotExist) 147 | session.execute(statement.bind( 148 | repo 149 | )) 150 | } 151 | catch { 152 | case ex: DriverException => { 153 | logger.error("failed to upsert repo", ex) 154 | return false 155 | } 156 | } 157 | } 158 | true 159 | } 160 | 161 | def markReposLastUpdateDateDB(repos: Seq[String]) : Boolean = { 162 | val now = new Date() 163 | 164 | for (repo <- repos) { 165 | try { 166 | val statement = new BoundStatement(updateReposToCurrentTimeDB) 167 | session.execute(statement.bind( 168 | now, 169 | 
repo 170 | )) 171 | } 172 | catch { 173 | case ex: DriverException => { 174 | logger.error("failed to upsert repo", ex) 175 | return false 176 | } 177 | } 178 | } 179 | true 180 | } 181 | 182 | def markReposLastUpdateDateES(repoName: String) : Boolean = { 183 | val now = new Date() 184 | 185 | try { 186 | val statement = new BoundStatement(updateReposToCurrentTimeES) 187 | session.execute(statement.bind(now, repoName)) 188 | } 189 | catch { 190 | case ex: DriverException => { 191 | logger.error("failed to upsert repo", ex) 192 | return false 193 | } 194 | } 195 | 196 | true 197 | } 198 | 199 | def updateGHPublicForRepo(repo: String, public: Boolean) : Boolean = { 200 | try { 201 | val statement = new BoundStatement(updateReposInfoSetPublic) 202 | session.execute(statement.bind( 203 | new lang.Boolean(public), 204 | repo 205 | )) 206 | } 207 | catch { 208 | case ex: DriverException => { 209 | logger.error("failed to upsert repo", ex) 210 | return false 211 | } 212 | } 213 | true 214 | } 215 | 216 | def updateLifecycleForRepo(repo: String, ossLifecycle: OssLifecycle) : Boolean = { 217 | try { 218 | val statement = new BoundStatement(updateReposInfoSetLifecycle) 219 | session.execute(statement.bind( 220 | ossLifecycle.toString, 221 | repo 222 | )) 223 | } 224 | catch { 225 | case ex: DriverException => { 226 | logger.error("failed to upsert repo", ex) 227 | return false 228 | } 229 | } 230 | true 231 | } 232 | } -------------------------------------------------------------------------------- /osstracker-scraper/src/main/scala/com/netflix/oss/tools/osstrackerscraper/Conf.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2016 Netflix, Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.netflix.oss.tools.osstrackerscraper 17 | 18 | import org.rogach.scallop.ScallopConf 19 | 20 | class Conf(args: Seq[String]) extends ScallopConf(args) { 21 | val action = opt[String](required = true) 22 | verify() 23 | } 24 | 25 | object Conf { 26 | val ACTION_UPDATE_CASSANDRA = "updatecassandra" 27 | val ACTION_UPDATE_ELASTICSEARCH = "updateelasticsearch" 28 | val OSSTRACKER_KEYSPACE = "osstracker" 29 | val SENTINAL_DEV_LEAD_ID = "111111"; // Assign to valid emp id 30 | val SENTINAL_MGR_LEAD_ID = "222222"; // Assign to valid emp id 31 | val SENTINAL_ORG = "UNKNOWN"; // Assign to unknown org until edited in console 32 | } 33 | -------------------------------------------------------------------------------- /osstracker-scraper/src/main/scala/com/netflix/oss/tools/osstrackerscraper/ElasticSearchAccess.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2016 Netflix, Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.netflix.oss.tools.osstrackerscraper 17 | 18 | import org.apache.http.client.methods._ 19 | import org.apache.http.entity.StringEntity 20 | import org.apache.http.impl.client._ 21 | import org.apache.http.util.EntityUtils 22 | import org.slf4j.LoggerFactory 23 | import play.api.libs.json.{Json, JsObject} 24 | 25 | 26 | class ElasticSearchAccess(esHost: String, esPort: Int) { 27 | val logger = LoggerFactory.getLogger(getClass) 28 | 29 | def indexDocInES(url: String, jsonDoc: String): Boolean = { 30 | 31 | val client = HttpClientBuilder.create().build() 32 | val req = new HttpPost(getFullUrl(url)) 33 | req.addHeader("Content-Type", "application/json") 34 | req.setEntity(new StringEntity(jsonDoc)) 35 | 36 | val resp = client.execute(req) 37 | resp.getStatusLine.getStatusCode match { 38 | case 201 => { 39 | true 40 | } 41 | case _ => { 42 | logger.error(s"error creating es document for url = $url") 43 | val respS = EntityUtils.toString(resp.getEntity) 44 | logger.error(s"return code = ${resp.getStatusLine} and doc = ${respS}") 45 | false 46 | } 47 | } 48 | } 49 | 50 | def getESDocForRepo(simpleDate: String, repoName: String): Option[JsObject] = { 51 | val client = HttpClientBuilder.create().build() 52 | val req = new HttpPost(getFullUrl("/osstracker/repo_stats/_search")) 53 | req.addHeader("Content-Type", "application/json") 54 | val jsonDoc = raw"""{"query":{"bool":{"must":[{"match":{"repo_name":"$repoName"}},{"match":{"asOfYYYYMMDD":"$simpleDate"}}]}}}""" 55 | req.setEntity(new StringEntity(jsonDoc)) 56 | 57 | val resp = client.execute(req) 58 | 59 | val resC = resp.getStatusLine.getStatusCode 60 | resp.getStatusLine.getStatusCode match { 61 | case 404 => None: Option[JsObject] 62 | case _ => 63 | val respS = EntityUtils.toString(resp.getEntity) 64 | val jsVal = Json.parse(respS) 65 | val hits = (jsVal \ "hits" \ "total").as[Int] 66 | hits match { 67 | case 0 => None: Option[JsObject] 68 | case _ => Some(((jsVal \ "hits" \ "hits")(0) \ "_source").get.asInstanceOf[JsObject]) 69 | } 70 | } 71 | } 72 | 73 | def getESDocForRepos(simpleDate: String): Option[JsObject] = { 74 | val client = HttpClientBuilder.create().build() 75 | val req = new HttpPost(getFullUrl("/osstracker/allrepos_stats/_search")) 76 | req.addHeader("Content-Type", "application/json") 77 | val jsonDoc = raw"""{"query":{"match":{"asOfYYYYMMDD":"$simpleDate"}}}""" 78 | req.setEntity(new StringEntity(jsonDoc)) 79 | 80 | val resp = client.execute(req) 81 | 82 | val resC = resp.getStatusLine.getStatusCode 83 | resp.getStatusLine.getStatusCode match { 84 | case 404 => None: Option[JsObject] 85 | case _ => 86 | val respS = EntityUtils.toString(resp.getEntity) 87 | val jsVal = Json.parse(respS) 88 | val hits = (jsVal \ "hits" \ "total").as[Int] 89 | hits match { 90 | case 0 => None: Option[JsObject] 91 | case _ => Some(((jsVal \ "hits" \ "hits")(0) \ "_source").get.asInstanceOf[JsObject]) 92 | } 93 | } 94 | } 95 | 96 | def getFullUrl(uri: String): String = { 97 | s"http://${esHost}:${esPort}${uri}" 98 | } 99 | } 100 | 
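A minimal usage sketch for the ElasticSearchAccess helper above, assuming a reachable Elasticsearch endpoint; the host, port, index path, repo name, and field values here are illustrative assumptions rather than values taken from this repository's configuration (the scraper resolves its real host and port at runtime).

import com.netflix.oss.tools.osstrackerscraper.ElasticSearchAccess
import play.api.libs.json.Json

object ElasticSearchAccessSketch extends App {
  // Hypothetical endpoint for local experimentation.
  val es = new ElasticSearchAccess("localhost", 9200)

  // Index one per-repo stats document; indexDocInES returns false (and logs the response) unless ES answers 201.
  val doc = Json.obj("repo_name" -> "somerepo", "asOfYYYYMMDD" -> "2016-02-09", "forks" -> 1).toString
  val indexed = es.indexDocInES("/osstracker/repo_stats/", doc)

  // Read the document back for that date and repo; the result is None when nothing matches.
  es.getESDocForRepo("2016-02-09", "somerepo").foreach { stats =>
    println((stats \ "forks").as[Int])
  }
}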
-------------------------------------------------------------------------------- /osstracker-scraper/src/main/scala/com/netflix/oss/tools/osstrackerscraper/GithubAccess.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2016 Netflix, Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | package com.netflix.oss.tools.osstrackerscraper 17 | 18 | import java.io.IOException 19 | import java.util.{Date, Properties} 20 | 21 | import com.netflix.oss.tools.osstrackerscraper.OssLifecycle.OssLifecycle 22 | import org.kohsuke.github._ 23 | import org.slf4j.{Logger, LoggerFactory} 24 | import play.api.libs.json.{JsObject, Json} 25 | 26 | import scala.collection.JavaConversions._ 27 | 28 | case class CommitInfo(numCommits: Int, daysSinceLastCommit: Int, contributorLogins: List[String]) {} 29 | case class IssuesInfo( 30 | val closedCount: Int, 31 | val openCount: Int, 32 | val avgDayToClose: Int, 33 | val openCountWithNoLabels: Int, 34 | val openCountWithLabelBug: Int, 35 | val openCountWithLabelDuplicate: Int, 36 | val openCountWithLabelEnhancement: Int, 37 | val openCountWithLabelHelpWanted: Int, 38 | val openCountWithLabelInvalid: Int, 39 | val openCountWithLabelQuestion: Int, 40 | val openCountWithLabelWontfix: Int, 41 | val openCountTrulyOpen: Int 42 | ) {} 43 | case class PRsInfo(val closedPRsSize: Int, val avgPRs: Int) {} 44 | 45 | class GithubAccess(val asOfYYYYMMDD: String, val asOfISO: String, val connectToGithub: Boolean) { 46 | val logger = LoggerFactory.getLogger(getClass) 47 | val github: Option[GitHub] = if (connectToGithub) Some(GitHub.connect()) else None 48 | 49 | def getOSSMetaDataOSSLifecycle(repo: GHRepository): OssLifecycle = { 50 | try { 51 | val content: GHContent = repo.getFileContent("OSSMETADATA", "master") 52 | val contentIs = content.read() 53 | val props = new Properties() 54 | props.load(contentIs) 55 | val osslc = props.getProperty("osslifecycle", "UNKNOWN") 56 | OssLifecycleParser.getOssLifecycle(osslc) 57 | } 58 | catch { 59 | case ioe: IOException => { 60 | ioe.printStackTrace() 61 | OssLifecycle.Unknown 62 | } 63 | } 64 | } 65 | 66 | def getRepoStats(repo: GHRepository, public: Boolean, ossLifecycle: OssLifecycle) : JsObject = { 67 | logger.info(s"repo = ${repo.getName()}, forks = ${repo.getForks}, stars = ${repo.getWatchers}") 68 | 69 | val openPullRequests = repo.getPullRequests(GHIssueState.OPEN) 70 | logger.debug(s" openIssues = ${repo.getOpenIssueCount()}, openPullRequests = ${openPullRequests.size()}") 71 | 72 | // Note that in this case, the github-api will crash on calls to listIssues with java.lang.Error 73 | // https://github.com/kohsuke/github-api/issues/65 74 | var neverPushed = getCloseEnoughForSameDates(repo.getCreatedAt, repo.getPushedAt) 75 | 76 | val (commitInfo: CommitInfo, issuesInfo: IssuesInfo, prsInfo: PRsInfo) = if (neverPushed) { 77 | logger.warn("repo has never been pushed, so providing fake zero counts for issues and pull requests") 
78 | (CommitInfo(0, 0, List[String]()), IssuesInfo(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), PRsInfo(0, 0)) 79 | } else { 80 | val commitInfo = getCommitInfo(repo) 81 | val issuesInfo = getIssuesStats(repo) 82 | val prsInfo = getClosedPullRequestsStats(repo) 83 | (commitInfo, issuesInfo, prsInfo) 84 | } 85 | 86 | val repoJson: JsObject = Json.obj( 87 | "asOfISO" -> asOfISO, 88 | "asOfYYYYMMDD" -> asOfYYYYMMDD, 89 | "repo_name" -> repo.getName(), 90 | "public" -> public, 91 | "osslifecycle" -> ossLifecycle, 92 | "forks" -> repo.getForks(), 93 | "stars" -> repo.getWatchers(), 94 | "numContributors" -> commitInfo.contributorLogins.size, 95 | "issues" -> Json.obj( 96 | "openCount" -> issuesInfo.openCount, 97 | "openCountOnlyIssues" -> issuesInfo.openCountTrulyOpen, 98 | "closedCount" -> issuesInfo.closedCount, 99 | "avgTimeToCloseInDays" -> issuesInfo.avgDayToClose, 100 | "openCountByStandardTags" -> Json.obj( 101 | "bug" -> issuesInfo.openCountWithLabelBug, 102 | "helpWanted" -> issuesInfo.openCountWithLabelHelpWanted, 103 | "question" -> issuesInfo.openCountWithLabelQuestion, 104 | "duplicate" -> issuesInfo.openCountWithLabelDuplicate, 105 | "enhancement" -> issuesInfo.openCountWithLabelEnhancement, 106 | "invalid" -> issuesInfo.openCountWithLabelInvalid, 107 | "wontfix" -> issuesInfo.openCountWithLabelWontfix 108 | ), 109 | ), 110 | "pullRequests" -> Json.obj( 111 | "openCount" -> openPullRequests.size(), 112 | "closedCount" -> prsInfo.closedPRsSize, 113 | "avgTimeToCloseInDays" -> prsInfo.avgPRs 114 | ), 115 | "commits" -> Json.obj( 116 | "daysSinceLastCommit" -> commitInfo.daysSinceLastCommit 117 | ), 118 | "contributors" -> commitInfo.contributorLogins 119 | ) 120 | logger.debug("repo json = " + repoJson) 121 | repoJson 122 | } 123 | 124 | // TODO: Is there a faster way to only pull the last commit? 
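  // A possible answer, unverified against this version of the github-api library: the GitHub commits API
  // returns commits newest-first, so fetching only the first page (for example via repo.queryCommits().pageSize(1).list(),
  // if that query builder is available) would be enough to find the last commit date. The full listing is still
  // needed here because the distinct contributor list below requires every commit.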
125 | def getCommitInfo(repo: GHRepository) : CommitInfo = { 126 | val commits = repo.listCommits().asList() 127 | val orderedCommits = commits.sortBy(_.getCommitShortInfo.getCommitDate()) 128 | val lastCommitDate = orderedCommits(orderedCommits.length - 1).getCommitShortInfo().getCommitDate() 129 | logger.debug(s"commits, first = ${orderedCommits(0).getSHA1}, last = ${orderedCommits(orderedCommits.length - 1).getSHA1()}") 130 | val daysSinceLastCommit = daysBetween(lastCommitDate, new Date()) 131 | logger.debug(s"daysSinceLastCommit = ${daysSinceLastCommit}") 132 | 133 | val contributors = commits.filter { commit => Option(commit.getAuthor()).isDefined } 134 | val contributorLogins = contributors.map(contributor => contributor.getAuthor().getLogin()).distinct 135 | logger.debug(s"numContributors = ${contributorLogins.length}, contributorLogins = ${contributorLogins}") 136 | CommitInfo(commits.length, daysSinceLastCommit, contributorLogins.toList) 137 | } 138 | 139 | def getClosedPullRequestsStats(repo: GHRepository) : PRsInfo = { 140 | val closedPRs = repo.getPullRequests(GHIssueState.CLOSED) 141 | val timeToClosePR = closedPRs.map(pr => { 142 | val opened = pr.getCreatedAt() 143 | val closed = pr.getClosedAt() 144 | val difference = daysBetween(opened, closed) 145 | difference 146 | }) 147 | val sumPRs = timeToClosePR.sum 148 | val avgPRs = timeToClosePR.size match { 149 | case 0 => 0 150 | case _ => sumPRs / timeToClosePR.size 151 | } 152 | logger.debug(s"avg days to close ${closedPRs.size()} pull requests = ${avgPRs} days") 153 | PRsInfo(closedPRs.size, avgPRs) 154 | } 155 | 156 | def getIssuesStats(repo: GHRepository): IssuesInfo = { 157 | val closedIssues = repo.getIssues(GHIssueState.CLOSED).filter(_.getPullRequest == null).toArray 158 | val openIssues = repo.getIssues(GHIssueState.OPEN).filter(_.getPullRequest == null).toArray 159 | getIssuesStats(closedIssues, openIssues) 160 | } 161 | 162 | def getIssuesStats(closedIssues: Array[GHIssue], openIssues: Array[GHIssue]): IssuesInfo = { 163 | val (openCountNoLabels, openCountWithLabelBug, openCountWithLabelDuplicate, 164 | openCountWithLabelEnhancement, openCountWithLabelHelpWanted, 165 | openCountWithLabelInvalid, openCountWithLabelQuestion, openCountWithLabelWontfix, 166 | openCountTrulyOpen) = getIssuesLabelStats(openIssues) 167 | 168 | 169 | val timeToCloseIssue = closedIssues.map(issue => { 170 | val opened = issue.getCreatedAt() 171 | val closed = issue.getClosedAt() 172 | val difference = daysBetween(opened, closed) 173 | difference 174 | }) 175 | val sumIssues = timeToCloseIssue.sum 176 | val avgDaysToCloseIssues = timeToCloseIssue.size match { 177 | case 0 => 0 178 | case _ => sumIssues / timeToCloseIssue.size 179 | } 180 | logger.debug(s"avg days to close ${closedIssues.length} issues = ${avgDaysToCloseIssues} days") 181 | 182 | IssuesInfo(closedIssues.size, openIssues.size, avgDaysToCloseIssues, openCountNoLabels, openCountWithLabelBug, 183 | openCountWithLabelDuplicate, openCountWithLabelEnhancement, 184 | openCountWithLabelHelpWanted, openCountWithLabelInvalid, openCountWithLabelQuestion, openCountWithLabelWontfix, 185 | openCountTrulyOpen) 186 | } 187 | 188 | def getIssuesLabelStats(openIssues: Array[GHIssue]): (Int, Int, Int, Int, Int, Int, Int, Int, Int) = { 189 | val openCountNoLabels = openIssues.count(issue => issue.getLabels.size() == 0) 190 | // standard labels that count 191 | val openCountWithLabelBug = countLabelForIssues(openIssues, "bug") 192 | val openCountWithLabelHelpWanted = 
countLabelForIssues(openIssues, "help wanted") 193 | val openCountWithLabelQuestion = countLabelForIssues(openIssues, "question") 194 | // standard labels that don't count 195 | val openCountWithLabelDuplicate = countLabelForIssues(openIssues, "duplicate") 196 | val openCountWithLabelEnhancement = countLabelForIssues(openIssues, "enhancement") 197 | val openCountWithLabelInvalid = countLabelForIssues(openIssues, "invalid") 198 | val openCountWithLabelWontfix = countLabelForIssues(openIssues, "wontfix") 199 | val openCountTrulyOpen = countLabelsForTrueIssues(openIssues) 200 | ( 201 | openCountNoLabels, openCountWithLabelBug, openCountWithLabelDuplicate, 202 | openCountWithLabelEnhancement, openCountWithLabelHelpWanted, 203 | openCountWithLabelInvalid, openCountWithLabelQuestion, openCountWithLabelWontfix, 204 | openCountTrulyOpen) 205 | } 206 | 207 | def countLabelsForTrueIssues(issues: Array[GHIssue]): Int = { 208 | // note that some issues will have bug and enhancement, we need to honor the worst case label (bug) 209 | // note that some issues will have bug and invalid, we don't want to double count 210 | // so, if no label, count it 211 | // for single labels 212 | // if (bug || help wanted || question) count it 213 | // if (duplicate || enhancement || invalid || wontfix) don't count it 214 | // for multiple labels 215 | // if (bug || help wanted || question) count it 216 | // if no standard github labels count it 217 | val count: Int = issues.count(issue => { 218 | val labels = issue.getLabels.toList 219 | 220 | val shouldCount = if (labels.size == 0) { 221 | true // no labels so counts 222 | } else { 223 | if (hasBugOrQuestionLabel(labels)) { 224 | true // has bug or question, so counts 225 | } 226 | else if (hasInvalidOrWontFix(labels)) { 227 | false // has invalid or wontfix, so doesn't count 228 | } 229 | else { 230 | val duplicate = hasLabelOfName(labels, "duplicate") 231 | val enhancement = hasLabelOfName(labels, "enhancement") 232 | val helpwanted = hasLabelOfName(labels, "helpwanted") // note: matches the literal label name "helpwanted" (no space), not the "help wanted" label counted in getIssuesLabelStats 233 | // by this point bug and question and invalid and wontfix = false 234 | val computed = (duplicate, enhancement, helpwanted) match { 235 | case (false, false, false) => true // no labels except custom labels 236 | case (false, false, true) => true // help wanted and [custom labels] 237 | case (false, true, false) => false // enhancement and [custom labels] 238 | case (false, true, true) => false // enhancement and helpwanted and [custom labels] 239 | case (true, false, false) => true // duplicate and [custom labels] 240 | case (true, false, true) => true // duplicate and helpwanted and [custom labels] 241 | case (true, true, false) => false // duplicate and enhancement and [custom labels] 242 | case (true, true, true) => false // duplicate, enhancement, help wanted and [custom labels] 243 | } 244 | computed 245 | } 246 | } 247 | 248 | 249 | // val shouldCount = if (labels.size == 0) true else { 250 | // // TODO: this doesn't work for enhancement&&help wanted (counts it, but shouldn't) 251 | // val standardCounts = hasLabelOfName(labels, "bug") || hasLabelOfName(labels, "help wanted") || hasLabelOfName(labels, "question") 252 | // val helpWantedAndEnhancement = hasLabelOfName(labels, "help wanted") && hasLabelOfName(labels, "enhancement") 253 | // val doesNotHaveSomeStandardLabels = !hasSomeStandardGithubLabels(labels) 254 | // standardCounts || doesNotHaveSomeStandardLabels 255 | // } 256 | logger.debug(s"issue ${issue.getNumber} counts = ${shouldCount}, labels = ${labels.map{_.getName}}") 
257 | shouldCount 258 | }) 259 | count 260 | } 261 | 262 | // Issues with these labels ALWAYS count 263 | def hasBugOrQuestionLabel(labels: List[GHLabel]): Boolean = { 264 | // Future: Eventually we can let custom labels be configured per scraper or per project (OSSMETADATA) 265 | hasLabelOfName(labels, "bug") || hasLabelOfName(labels, "question") 266 | } 267 | 268 | // Issues with these labels will never count as long as not Bug or Question 269 | def hasInvalidOrWontFix(labels: List[GHLabel]): Boolean = { 270 | // Future: Eventually we can let custom labels be configured per scraper or per project (OSSMETADATA) 271 | hasLabelOfName(labels, "invalid") || hasLabelOfName(labels, "wontfix") 272 | } 273 | 274 | // def hasSomeStandardGithubLabels(labels: List[GHLabel]): Boolean = { 275 | // hasLabelOfName(labels, "bug") || hasLabelOfName(labels, "help wanted") || hasLabelOfName(labels, "question") || 276 | // hasLabelOfName(labels, "duplicate") || hasLabelOfName(labels, "enhancement") || hasLabelOfName(labels, "invalid") || hasLabelOfName(labels, "wontfix") 277 | // } 278 | 279 | def hasLabelOfName(labels: List[GHLabel], name: String): Boolean = { 280 | !labels.find(_.getName == name).isEmpty 281 | } 282 | 283 | def countLabelForIssues(issues: Array[GHIssue], label: String): Int = { 284 | val openCountWithLabelBug: Int = issues.count(issue => 285 | issue.getLabels.size() != 0 && 286 | !issue.getLabels.find(_.getName == label).isEmpty 287 | ) 288 | openCountWithLabelBug 289 | } 290 | 291 | def daysBetween(smaller: Date, bigger: Date): Int = { 292 | val diff = (bigger.getTime() - smaller.getTime()) / (1000 * 60 * 60 * 24) 293 | diff.toInt 294 | } 295 | 296 | def getRemainingHourlyRate(): Int = { 297 | github.get.getRateLimit.remaining 298 | } 299 | 300 | def getAllRepositoriesForOrg(githubOrg: String): List[GHRepository] = { 301 | val org = github.get.getOrganization(githubOrg) 302 | val githubRepos = org.listRepositories(100).asList().toList 303 | logger.info(s"Found ${githubRepos.size} total repos for ${githubOrg}") 304 | githubRepos 305 | } 306 | 307 | def getCloseEnoughForSameDates(d1: Date, d2: Date): Boolean = { 308 | val d1T = d1.getTime 309 | val d2T = d2.getTime 310 | val diff = Math.abs(d1T - d2T) 311 | return diff < 1000*60; // 60 seconds 312 | } 313 | } -------------------------------------------------------------------------------- /osstracker-scraper/src/main/scala/com/netflix/oss/tools/osstrackerscraper/GithubScraper.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2016 Netflix, Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.netflix.oss.tools.osstrackerscraper 17 | 18 | import org.joda.time.format.{DateTimeFormat, ISODateTimeFormat} 19 | import org.slf4j.LoggerFactory 20 | import play.api.libs.json._ 21 | import org.joda.time.{DateTime, DateTimeZone} 22 | import java.util.Date 23 | 24 | import com.netflix.oss.tools.osstrackerscraper.OssLifecycle.OssLifecycle 25 | 26 | import scala.collection.mutable.ListBuffer 27 | 28 | class GithubScraper(githubOrg: String, cassHost: String, cassPort: Int, esHost: String, esPort: Int, reportWriter: ReportWriter) { 29 | val logger = LoggerFactory.getLogger(getClass) 30 | val now = new DateTime().withZone(DateTimeZone.UTC) 31 | val dtfISO8601 = ISODateTimeFormat.dateTimeNoMillis() 32 | val dtfSimple = DateTimeFormat.forPattern("yyyy-MM-dd") 33 | def asOfISO = dtfISO8601.print(now) 34 | def asOfYYYYMMDD = dtfSimple.print(now) 35 | 36 | def updateElasticSearch(): Boolean = { 37 | val es = new ElasticSearchAccess(esHost, esPort) 38 | val cass = new CassandraAccesss(cassHost, cassPort) 39 | val github = new GithubAccess(asOfYYYYMMDD, asOfISO, true) 40 | 41 | try { 42 | println(Console.RED + s"remaining calls ${github.getRemainingHourlyRate()}" + Console.RESET) 43 | 44 | // get all the known repos from cassandra, sorted in case we run out of github API calls 45 | val cassRepos = cass.getAllRepos() 46 | val cassReposNames = cassRepos.map(_.name).toSet 47 | logger.debug(s"repos(${cassReposNames.size}) in cass = $cassReposNames") 48 | 49 | // get all of the known repos from github 50 | val githubRepos = github.getAllRepositoriesForOrg(githubOrg) 51 | val githubReposNames = githubRepos.map(_.getName).toSet 52 | logger.debug(s"repos(${githubReposNames.size}) on GH = $githubReposNames") 53 | 54 | val commonRepoNames = cassReposNames.intersect(githubReposNames) 55 | val onlyInCassReposNames = cassReposNames.diff(githubReposNames) 56 | val onlyInGHReposNames = githubReposNames.diff(cassReposNames) 57 | 58 | logger.error(s"need to delete the following repos from cassandra - $onlyInCassReposNames") 59 | logger.info(s"new repos detected on github that aren't in cassandra - $onlyInGHReposNames") 60 | 61 | val commonReposCassRepos = commonRepoNames.map(name => cassRepos.find(name == _.name).get) 62 | val commonReposCassReposOrderByLastUpdate = collection.SortedSet[RepoInfo]()(ESDateOrdering) ++ commonReposCassRepos 63 | val commonReposCassReposOrderByLastUpdateNames = commonReposCassReposOrderByLastUpdate.toList.map(_.name) 64 | 65 | val orderToUpdate = commonReposCassReposOrderByLastUpdateNames ++ onlyInGHReposNames 66 | 67 | val docsList = new ListBuffer[JsObject]() 68 | 69 | // create or validate that ES document exists for each repo 70 | for (repoName <- orderToUpdate) { 71 | val ghRepo = githubRepos.find(_.getName == repoName).get 72 | val cassRepo = cassRepos.find(_.name == repoName) 73 | val (public, ossLifecycle) = cassRepo match { 74 | case Some(repo) => (repo.public, repo.osslifecycle) 75 | case _ => (false, OssLifecycle.Unknown) 76 | } 77 | 78 | val alreadyExistsDoc = es.getESDocForRepo(asOfYYYYMMDD, repoName) 79 | 80 | if (alreadyExistsDoc.isEmpty) { 81 | val stat = github.getRepoStats(ghRepo, public, ossLifecycle) 82 | val indexed = es.indexDocInES("/osstracker/repo_stats", stat.toString) 83 | if (!indexed) { 84 | return false 85 | } 86 | docsList += stat 87 | } 88 | else { 89 | logger.info(s"skipping indexing of repo doc for ${repoName}, ${asOfYYYYMMDD}") 90 | docsList += alreadyExistsDoc.get 91 | } 92 | 93 | val success = 
cass.markReposLastUpdateDateES(repoName) 94 | if (!success) { 95 | return false 96 | } 97 | } 98 | 99 | val alreadyExists = !es.getESDocForRepos(asOfYYYYMMDD).isEmpty 100 | if (alreadyExists) { 101 | logger.info(s"skipping indexing of all repos doc for ${asOfYYYYMMDD}") 102 | } 103 | else { 104 | val numRepos = docsList.size 105 | 106 | val totalForks = docsList.map(obj => (obj \ "forks").as[Int]).sum 107 | val totalStars = docsList.map(obj => (obj \ "stars").as[Int]).sum 108 | val totalOpenIssues = docsList.map(obj => (obj \ "issues" \ "openCount").as[Int]).sum 109 | val totalClosedIssues = docsList.map(obj => (obj \ "issues" \ "closedCount").as[Int]).sum 110 | val totalOpenPRs = docsList.map(obj => (obj \ "pullRequests" \ "openCount").as[Int]).sum 111 | val totalClosedPRs = docsList.map(obj => (obj \ "pullRequests" \ "closedCount").as[Int]).sum 112 | 113 | val reposJsonDoc: JsObject = Json.obj( 114 | "asOfISO" -> asOfISO, 115 | "asOfYYYYMMDD" -> asOfYYYYMMDD, 116 | "avgForks" -> totalForks / numRepos, 117 | "avgStars" -> totalStars / numRepos, 118 | // "numContributors" -> contributorLogins.length, // TODO: Need to fold all of the repos together 119 | "issues" -> Json.obj( 120 | "avgOpenCount" -> totalOpenIssues / numRepos, 121 | "avgClosedCount" -> totalClosedIssues / numRepos, 122 | "totalOpenCount" -> totalOpenIssues, 123 | "totalClosedCount" -> totalClosedIssues 124 | // "avgTimeToCloseInDays" -> avgIssues // TODO: Need to compute average 125 | ), 126 | "pullRequests" -> Json.obj( 127 | "avgOpenCount" -> totalOpenPRs / numRepos, 128 | "avgClosedCount" -> totalClosedPRs / numRepos, 129 | "totalOpenCount" -> totalOpenPRs, 130 | "totalClosedCount" -> totalClosedPRs 131 | // "avgTimeToCloseInDays" -> avgPRs // TODO: Need to compute average 132 | ), 133 | "commits" -> Json.obj( 134 | // "daysSinceLastCommit" -> daysSinceLastCommit // TODO: Need to compute average 135 | ), 136 | "repos" -> docsList 137 | ) 138 | logger.debug("allrepos info json = " + reposJsonDoc) 139 | val indexed = es.indexDocInES("/osstracker/allrepos_stats", reposJsonDoc.toString) 140 | if (!indexed) { 141 | return false 142 | } 143 | } 144 | 145 | println(Console.RED + s"remaining calls ${github.getRemainingHourlyRate()}" + Console.RESET) 146 | } 147 | finally { 148 | cass.close() 149 | } 150 | 151 | true 152 | } 153 | 154 | 155 | def updateCassandra(): Boolean = { 156 | val cass = new CassandraAccesss(cassHost, cassPort) 157 | val github = new GithubAccess(asOfYYYYMMDD, asOfISO, true) 158 | val report = StringBuilder.newBuilder 159 | 160 | report.append(s"OSSTracker Report for ${asOfYYYYMMDD}\n\n") 161 | 162 | try { 163 | println(Console.RED + s"remaining calls ${github.getRemainingHourlyRate()}" + Console.RESET) 164 | 165 | // get all the known repos from cassandra, sorted in case we run out of github API calls 166 | val cassRepos = cass.getAllRepos() 167 | val cassReposNames = cassRepos.map(_.name).toSet 168 | logger.debug(s"repos(${cassReposNames.size}) in cass = $cassReposNames") 169 | 170 | // get all of the known repos from github 171 | val githubRepos = github.getAllRepositoriesForOrg(githubOrg) 172 | val githubReposNames = githubRepos.map(_.getName).toSet 173 | logger.debug(s"repos(${githubReposNames.size}) on GH = $githubReposNames") 174 | 175 | val commonRepoNames = cassReposNames.intersect(githubReposNames) 176 | val onlyInCassReposNames = cassReposNames.diff(githubReposNames) 177 | val onlyInGHReposNames = githubReposNames.diff(cassReposNames) 178 | 179 | // 
add new repos to cassandra 180 | logger.debug(s"repos that should be added to cassandra = $onlyInGHReposNames") 181 | if (onlyInGHReposNames.size > 0) { 182 | report.append(s"Found the following new repositories:\n") 183 | report.append(s"**************************************************\n") 184 | for (repoName <- onlyInGHReposNames) { 185 | report.append(s"\t$repoName\n") 186 | } 187 | report.append("\n") 188 | } 189 | 190 | onlyInGHReposNames.foreach(repoName => { 191 | val githubRepo = githubRepos.find(ghRepo => ghRepo.getName == repoName).get 192 | val repoInfo = new RepoInfo(repoName, Conf.SENTINAL_DEV_LEAD_ID, Conf.SENTINAL_MGR_LEAD_ID, 193 | Conf.SENTINAL_ORG, new Date(0), new Date(0), !githubRepo.isPrivate, githubOrg, true, OssLifecycle.Unknown) 194 | val success = cass.newRepo(repoInfo) 195 | if (!success) { 196 | return false 197 | } 198 | }) 199 | 200 | // see what repos we should mark as non-existent in cassandra 201 | logger.error(s"repos that should be deleted from the database = $onlyInCassReposNames") 202 | if (onlyInCassReposNames.size > 0) { 203 | report.append(s"These repos should be deleted from the DB:\n") 204 | report.append(s"**************************************************\n") 205 | for (repoName <- onlyInCassReposNames) { 206 | report.append(s"\t$repoName\n") 207 | } 208 | report.append("\n") 209 | } 210 | 211 | val success1 = cass.markReposAsNonExistant(onlyInCassReposNames.toList) 212 | if (!success1) { 213 | return false 214 | } 215 | 216 | 217 | val cassReposNow = cass.getAllRepos() 218 | logger.debug(s"cassReposNow = $cassReposNow") 219 | 220 | val wentPublic = ListBuffer[String]() 221 | val wentPrivate = ListBuffer[String]() 222 | 223 | // see what repos we should change public/private in cassandra 224 | for (repo <- cassReposNow) { 225 | val cassPublic = repo.public 226 | val githubRepo = githubRepos.find(_.getName == repo.name) 227 | githubRepo match { 228 | case Some(ghRepo) => { 229 | val ghPublic = !ghRepo.isPrivate 230 | if (cassPublic != ghPublic) { 231 | logger.info(s"updating repo ${repo.name} with public = $ghPublic") 232 | val success = cass.updateGHPublicForRepo(repo.name, ghPublic) 233 | if (!success) { 234 | return false 235 | } 236 | 237 | if (ghPublic) { 238 | wentPublic += ghRepo.getName 239 | } 240 | else { 241 | wentPrivate += ghRepo.getName 242 | } 243 | } 244 | } 245 | case _ => { 246 | logger.error(s"github no longer has the repo ${repo.name}") 247 | } 248 | } 249 | } 250 | 251 | if (wentPublic.size > 0) { 252 | report.append(s"These repos went public:\n") 253 | report.append(s"**************************************************\n") 254 | for (repoName <- wentPublic) { 255 | report.append(s"\t$repoName\n") 256 | } 257 | report.append("\n") 258 | } 259 | 260 | if (wentPrivate.size > 0) { 261 | report.append(s"These repos went private:\n") 262 | report.append(s"**************************************************\n") 263 | for (repoName <- wentPrivate) { 264 | report.append(s"\t$repoName\n") 265 | } 266 | report.append("\n") 267 | } 268 | 269 | val changedLifecycle = ListBuffer[(String, OssLifecycle, OssLifecycle)]() 270 | val unknownLifecycle = ListBuffer[String]() 271 | 272 | // see what repos have changed OSS Lifecycle 273 | for (repo <- cassReposNow) { 274 | val githubRepo = githubRepos.find(_.getName == repo.name) 275 | githubRepo match { 276 | case Some(ghRepo) => { 277 | val lifecycle = github.getOSSMetaDataOSSLifecycle(ghRepo) 278 | if (lifecycle == OssLifecycle.Unknown) { 279 | unknownLifecycle += ghRepo.getName 280 | 
} 281 | if (lifecycle != repo.osslifecycle) { 282 | logger.info(s"updating repo ${repo.name} lifecycle from ${repo.osslifecycle} to $lifecycle") 283 | val success = cass.updateLifecycleForRepo(repo.name, lifecycle) 284 | if (!success) { 285 | return false 286 | } 287 | changedLifecycle += ((ghRepo.getName, repo.osslifecycle, lifecycle)) 288 | } 289 | } 290 | case _ => { 291 | logger.error(s"github no longer has the repo ${repo.name}") 292 | } 293 | } 294 | } 295 | 296 | if (unknownLifecycle.size > 0) { 297 | report.append(s"These repos do not have correct OSS Lifecycle files:\n") 298 | report.append(s"**************************************************\n") 299 | for (repoName <- unknownLifecycle) { 300 | report.append(s"\t$repoName\n") 301 | } 302 | report.append("\n") 303 | } 304 | 305 | if (changedLifecycle.size > 0) { 306 | report.append(s"These repos changed oss lifecycle:\n") 307 | report.append(s"**************************************************\n") 308 | for (change <- changedLifecycle) { 309 | report.append(s"\t${change._1} went from ${change._2} to ${change._3}\n") 310 | } 311 | report.append("\n") 312 | } 313 | 314 | // mark all of the repos as last updated now 315 | logger.info("updating all repos in cassandra for last updated") 316 | val success2 = cass.markReposLastUpdateDateDB(cassReposNow.map(_.name)) 317 | if (!success2) { 318 | return false 319 | } 320 | 321 | println(Console.RED + s"remaining calls ${github.getRemainingHourlyRate()}" + Console.RESET) 322 | 323 | reportWriter.processReport(report.toString) 324 | } 325 | finally { 326 | cass.close() 327 | } 328 | 329 | true 330 | } 331 | 332 | } -------------------------------------------------------------------------------- /osstracker-scraper/src/main/scala/com/netflix/oss/tools/osstrackerscraper/OssLifecycle.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2016 Netflix, Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.netflix.oss.tools.osstrackerscraper 17 | 18 | import com.netflix.oss.tools.osstrackerscraper.OssLifecycle.OssLifecycle 19 | 20 | object OssLifecycle extends Enumeration { 21 | type OssLifecycle = Value 22 | 23 | val Private = Value("private") 24 | val PrivateCollab = Value("privatecollab") 25 | val Active = Value("active") 26 | val Maintenance = Value("maintenance") 27 | val Archived = Value("archived") 28 | val Unknown = Value("UNKNOWN") 29 | val Invalid = Value("INVALID") 30 | } 31 | 32 | object OssLifecycleParser { 33 | def getOssLifecycle(value: String): OssLifecycle = { 34 | value match { 35 | case "private" => OssLifecycle.Private 36 | case "privatecollab" => OssLifecycle.PrivateCollab 37 | case "active" => OssLifecycle.Active 38 | case "maintenance" => OssLifecycle.Maintenance 39 | case "archived" => OssLifecycle.Archived 40 | case "UNKNOWN" => OssLifecycle.Unknown 41 | case _ => OssLifecycle.Invalid 42 | } 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /osstracker-scraper/src/main/scala/com/netflix/oss/tools/osstrackerscraper/ReportWriter.scala: -------------------------------------------------------------------------------- 1 | package com.netflix.oss.tools.osstrackerscraper 2 | 3 | trait ReportWriter { 4 | def processReport(reportContent: String) 5 | } -------------------------------------------------------------------------------- /osstracker-scraper/src/test/resources/archiaus-issues.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "url": "https://api.github.com/repos/Netflix/archaius/issues/396", 4 | "repository_url": "https://api.github.com/repos/Netflix/archaius", 5 | "labels_url": "https://api.github.com/repos/Netflix/archaius/issues/396/labels{/name}", 6 | "comments_url": "https://api.github.com/repos/Netflix/archaius/issues/396/comments", 7 | "events_url": "https://api.github.com/repos/Netflix/archaius/issues/396/events", 8 | "html_url": "https://github.com/Netflix/archaius/pull/396", 9 | "id": 143549661, 10 | "number": 396, 11 | "title": "Fix interpolation of string in @DefaultValue", 12 | "user": { 13 | "login": "elandau", 14 | "id": 913564, 15 | "avatar_url": "https://avatars3.githubusercontent.com/u/913564?v=4", 16 | "gravatar_id": "", 17 | "url": "https://api.github.com/users/elandau", 18 | "html_url": "https://github.com/elandau", 19 | "followers_url": "https://api.github.com/users/elandau/followers", 20 | "following_url": "https://api.github.com/users/elandau/following{/other_user}", 21 | "gists_url": "https://api.github.com/users/elandau/gists{/gist_id}", 22 | "starred_url": "https://api.github.com/users/elandau/starred{/owner}{/repo}", 23 | "subscriptions_url": "https://api.github.com/users/elandau/subscriptions", 24 | "organizations_url": "https://api.github.com/users/elandau/orgs", 25 | "repos_url": "https://api.github.com/users/elandau/repos", 26 | "events_url": "https://api.github.com/users/elandau/events{/privacy}", 27 | "received_events_url": "https://api.github.com/users/elandau/received_events", 28 | "type": "User", 29 | "site_admin": false 30 | }, 31 | "labels": [], 32 | "state": "closed", 33 | "locked": false, 34 | "assignee": null, 35 | "assignees": [], 36 | "milestone": null, 37 | "comments": 4, 38 | "created_at": "2016-03-25T17:36:57Z", 39 | "updated_at": "2017-11-11T22:17:13Z", 40 | "closed_at": "2016-03-25T19:31:11Z", 41 | "author_association": "MEMBER", 42 | "pull_request": { 43 | "url": 
"https://api.github.com/repos/Netflix/archaius/pulls/396", 44 | "html_url": "https://github.com/Netflix/archaius/pull/396", 45 | "diff_url": "https://github.com/Netflix/archaius/pull/396.diff", 46 | "patch_url": "https://github.com/Netflix/archaius/pull/396.patch" 47 | }, 48 | "body": "" 49 | }, 50 | { 51 | "url": "https://api.github.com/repos/Netflix/archaius/issues/375", 52 | "repository_url": "https://api.github.com/repos/Netflix/archaius", 53 | "labels_url": "https://api.github.com/repos/Netflix/archaius/issues/375/labels{/name}", 54 | "comments_url": "https://api.github.com/repos/Netflix/archaius/issues/375/comments", 55 | "events_url": "https://api.github.com/repos/Netflix/archaius/issues/375/events", 56 | "html_url": "https://github.com/Netflix/archaius/issues/375", 57 | "id": 124955950, 58 | "number": 375, 59 | "title": "PolledConfigurationSource Implementation", 60 | "user": { 61 | "login": "sarginm", 62 | "id": 16557349, 63 | "avatar_url": "https://avatars0.githubusercontent.com/u/16557349?v=4", 64 | "gravatar_id": "", 65 | "url": "https://api.github.com/users/sarginm", 66 | "html_url": "https://github.com/sarginm", 67 | "followers_url": "https://api.github.com/users/sarginm/followers", 68 | "following_url": "https://api.github.com/users/sarginm/following{/other_user}", 69 | "gists_url": "https://api.github.com/users/sarginm/gists{/gist_id}", 70 | "starred_url": "https://api.github.com/users/sarginm/starred{/owner}{/repo}", 71 | "subscriptions_url": "https://api.github.com/users/sarginm/subscriptions", 72 | "organizations_url": "https://api.github.com/users/sarginm/orgs", 73 | "repos_url": "https://api.github.com/users/sarginm/repos", 74 | "events_url": "https://api.github.com/users/sarginm/events{/privacy}", 75 | "received_events_url": "https://api.github.com/users/sarginm/received_events", 76 | "type": "User", 77 | "site_admin": false 78 | }, 79 | "labels": [ 80 | { 81 | "id": 382497790, 82 | "url": "https://api.github.com/repos/Netflix/archaius/labels/v1", 83 | "name": "v1", 84 | "color": "006b75", 85 | "default": false 86 | } 87 | ], 88 | "state": "closed", 89 | "locked": false, 90 | "assignee": null, 91 | "assignees": [], 92 | "milestone": null, 93 | "comments": 7, 94 | "created_at": "2016-01-05T12:12:16Z", 95 | "updated_at": "2016-07-13T16:34:11Z", 96 | "closed_at": "2016-07-13T16:34:11Z", 97 | "author_association": "NONE", 98 | "body": "It would be great if I get some help in consul polling.I am using Abstract Polling Scheduler and once the poling is started , it is polling the source for 5 to 6 times and suspending the thread.\n\nAbstractPollingScheduler scheduler = new FixedDelayPollingScheduler();\n scheduler.startPolling(consulConfigurationSource, myConfiguration);\n\nDo we have to do install our custom configuration to the Configuration manager. \nPlease suggest , to run the thread as long running daemon. Thanks in Advance. 
\n" 99 | }, 100 | { 101 | "url": "https://api.github.com/repos/Netflix/archaius/issues/366", 102 | "repository_url": "https://api.github.com/repos/Netflix/archaius", 103 | "labels_url": "https://api.github.com/repos/Netflix/archaius/issues/366/labels{/name}", 104 | "comments_url": "https://api.github.com/repos/Netflix/archaius/issues/366/comments", 105 | "events_url": "https://api.github.com/repos/Netflix/archaius/issues/366/events", 106 | "html_url": "https://github.com/Netflix/archaius/issues/366", 107 | "id": 117345576, 108 | "number": 366, 109 | "title": "Missing dependencies (PropertyFactory)", 110 | "user": { 111 | "login": "udf2457", 112 | "id": 6958684, 113 | "avatar_url": "https://avatars3.githubusercontent.com/u/6958684?v=4", 114 | "gravatar_id": "", 115 | "url": "https://api.github.com/users/udf2457", 116 | "html_url": "https://github.com/udf2457", 117 | "followers_url": "https://api.github.com/users/udf2457/followers", 118 | "following_url": "https://api.github.com/users/udf2457/following{/other_user}", 119 | "gists_url": "https://api.github.com/users/udf2457/gists{/gist_id}", 120 | "starred_url": "https://api.github.com/users/udf2457/starred{/owner}{/repo}", 121 | "subscriptions_url": "https://api.github.com/users/udf2457/subscriptions", 122 | "organizations_url": "https://api.github.com/users/udf2457/orgs", 123 | "repos_url": "https://api.github.com/users/udf2457/repos", 124 | "events_url": "https://api.github.com/users/udf2457/events{/privacy}", 125 | "received_events_url": "https://api.github.com/users/udf2457/received_events", 126 | "type": "User", 127 | "site_admin": false 128 | }, 129 | "labels": [ 130 | { 131 | "id": 4795551, 132 | "url": "https://api.github.com/repos/Netflix/archaius/labels/invalid", 133 | "name": "invalid", 134 | "color": "e6e6e6", 135 | "default": true 136 | }, 137 | { 138 | "id": 382497826, 139 | "url": "https://api.github.com/repos/Netflix/archaius/labels/v2", 140 | "name": "v2", 141 | "color": "fbca04", 142 | "default": false 143 | } 144 | ], 145 | "state": "closed", 146 | "locked": false, 147 | "assignee": null, 148 | "assignees": [], 149 | "milestone": null, 150 | "comments": 1, 151 | "created_at": "2015-11-17T12:31:58Z", 152 | "updated_at": "2016-05-25T19:57:48Z", 153 | "closed_at": "2016-05-25T19:57:48Z", 154 | "author_association": "NONE", 155 | "body": "Hi,\n\nHave the following defined in my pom.xml:\n\n```\n\n2.0.0-rc.33\n\n \n com.netflix.archaius\n archaius2-core\n ${archaius-version}\n \n```\n\nUpon loading, I get the following:\n\n```\n[2015-11-17 12:25:37,751] Artifact random:war: Artifact is being deployed, please wait...\n[2015-11-17 12:25:39,705] Artifact random:war: Error during artifact deployment. See server log for details.\n[2015-11-17 12:25:39,705] Artifact random:war: java.io.IOException: com.sun.enterprise.admin.remote.RemoteFailureException: Error occurred during deployment: Exception while loading the app : CDI deployment failure:WELD-001408: Unsatisfied dependencies for type PropertyFactory with qualifiers @Default\n at injection point [BackedAnnotatedParameter] Parameter 2 of [BackedAnnotatedConstructor] @Inject public com.netflix.archaius.ProxyFactory(Decoder, PropertyFactory)\n at com.netflix.archaius.ProxyFactory.(ProxyFactory.java:0)\n. 
Please see server.log for more details.\n```\n\nNote that this is before I have even created any classes that use Archaius, this is just loading the war file with the jars in the lib.\n" 156 | } 157 | ] -------------------------------------------------------------------------------- /osstracker-scraper/src/test/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2018 Netflix, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | log4j.rootLogger=DEBUG, stdout 18 | 19 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender 20 | log4j.appender.stdout.Target=System.out 21 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout 22 | log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n 23 | 24 | log4j.logger.com.netflix.oss.tools.osstrackerscraper=DEBUG -------------------------------------------------------------------------------- /osstracker-scraper/src/test/scala/com/netflix/oss/tools/osstrackerscraper/GitHubAccessTest.scala: -------------------------------------------------------------------------------- 1 | package com.netflix.oss.tools.osstrackerscraper 2 | 3 | import com.fasterxml.jackson.databind.introspect.VisibilityChecker.Std 4 | import org.scalatest.FunSuite 5 | import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper} 6 | import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility._ 7 | import org.kohsuke.github.{GHIssue} 8 | 9 | class GitHubAccessTest extends FunSuite { 10 | test("Should correctly count issues based on label") { 11 | // copied from org.kohsuke.github.Requestor 12 | //val github = GitHub.connect("fake", "fake") 13 | val mapper = new ObjectMapper 14 | mapper.setVisibilityChecker(new Std(NONE, NONE, NONE, NONE, ANY)); 15 | mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) 16 | 17 | val issuesJSON = scala.io.Source.fromResource("security_monkey-issues.json").mkString 18 | val issues = mapper.readValue(issuesJSON, classOf[Array[GHIssue]]) 19 | 20 | val access = new GithubAccess("a", "a", false) 21 | 22 | val stats = access.getIssuesStats(new Array[GHIssue](0), issues) 23 | assert(stats.openCountTrulyOpen == 23) 24 | 25 | val issuesJSON2 = scala.io.Source.fromResource("hollow-issues.json").mkString 26 | val issues2 = mapper.readValue(issuesJSON2, classOf[Array[GHIssue]]) 27 | 28 | val stats2 = access.getIssuesStats(new Array[GHIssue](0), issues2) 29 | assert(stats2.openCountTrulyOpen == 13) 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /osstracker-scraperapp/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM openjdk:8-alpine 2 | 3 | MAINTAINER NetflixOSS 4 | 5 | COPY build/libs/osstracker-scraperapp-*-all.jar /osstracker-scraperapp-all.jar 6 | 7 | ENV github_oauth=1111111111111111111111111111111111111111 8 | ENV github_org=Netflix 9 | ENV 
github_login=yourloginhere 10 | 11 | CMD ["java", "-jar", "/osstracker-scraperapp-all.jar", "--action", "updatecassandra"] 12 | -------------------------------------------------------------------------------- /osstracker-scraperapp/build.gradle: -------------------------------------------------------------------------------- 1 | apply plugin: 'scala' 2 | apply plugin: 'com.github.johnrengelman.shadow' 3 | 4 | sourceCompatibility = 1.8 5 | targetCompatibility = 1.8 6 | 7 | buildscript { 8 | repositories { jcenter() } 9 | dependencies { 10 | classpath 'com.github.jengelman.gradle.plugins:shadow:1.2.2' 11 | } 12 | } 13 | 14 | dependencies { 15 | compile project(':osstracker-scraper') 16 | compile 'org.scala-lang:scala-library:2.12.4' 17 | compile 'org.slf4j:slf4j-api:1.7.13' 18 | compile 'org.slf4j:slf4j-log4j12:1.7.13' 19 | } 20 | 21 | task runScraper(type: JavaExec, dependsOn: classes) { 22 | classpath sourceSets.main.runtimeClasspath 23 | 24 | main = 'com.netflix.oss.tools.osstrackerscraper.app.RunGithubScraper' 25 | 26 | environment = [ 27 | // You should update with your GitHub OAuth token and org 28 | "github_oauth" : "1111111111111111111111111111111111111111", 29 | "github_org" : "Netflix", 30 | // You should update with a Cassandra host and port 31 | "CASS_HOST" : "localhost", 32 | "CASS_PORT" : "9042", 33 | // You should update with an Elasticsearch host and port 34 | "ES_HOST" : "localhost", 35 | "ES_PORT" : "9200" 36 | ] 37 | 38 | jvmArgs = [ 39 | //"-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005", 40 | ] 41 | 42 | args = [ 43 | //"--action", "updatecassandra", 44 | "--action", "updateelasticsearch", 45 | ] 46 | } 47 | -------------------------------------------------------------------------------- /osstracker-scraperapp/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -x 3 | 4 | if [ -z "$BASEIMAGE" ] || [ -z "$REGBASE" ]; then 5 | echo need to define BASEIMAGE and REGBASE variables 6 | exit 1 7 | fi 8 | 9 | FINALIMAGE=$REGBASE/netflixoss/osstracker-scraper:latest 10 | 11 | docker pull $BASEIMAGE 12 | docker tag -f $BASEIMAGE javabase:latest 13 | docker build -t netflixoss/osstracker-scraper:latest . 14 | docker tag -f netflixoss/osstracker-scraper:latest $FINALIMAGE 15 | 16 | RETRY_COUNT=5 17 | build_succeeded=0 18 | while [[ $RETRY_COUNT -gt 0 && $build_succeeded != 1 ]]; do 19 | docker push $FINALIMAGE 20 | if [ $? != 0 ]; then 21 | echo "push failed, will retry" 22 | RETRY_COUNT=$((RETRY_COUNT-1)) 23 | else 24 | build_succeeded=1 25 | fi 26 | done 27 | 28 | if [[ $RETRY_COUNT -eq 0 ]]; then 29 | echo "all push retries failed, failing script" 30 | exit 1 31 | fi 32 | -------------------------------------------------------------------------------- /osstracker-scraperapp/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | # 2 | # Copyright 2016 Netflix, Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | # 16 | 17 | log4j.rootLogger=INFO, stdout 18 | 19 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender 20 | log4j.appender.stdout.Target=System.out 21 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout 22 | log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n 23 | 24 | log4j.logger.com.netflix.oss.tools.osstrackerscraper=DEBUG -------------------------------------------------------------------------------- /osstracker-scraperapp/src/main/scala/com/netflix/oss/tools/osstrackerscraper/app/ConsoleReportWriter.scala: -------------------------------------------------------------------------------- 1 | package com.netflix.oss.tools.osstrackerscraper.app 2 | 3 | import com.netflix.oss.tools.osstrackerscraper.ReportWriter 4 | 5 | object ConsoleReportWriter extends ReportWriter { 6 | def processReport(reportContent: String) = { 7 | print(reportContent) 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /osstracker-scraperapp/src/main/scala/com/netflix/oss/tools/osstrackerscraper/app/RunGithubScraper.scala: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2016 Netflix, Inc. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | package com.netflix.oss.tools.osstrackerscraper.app 17 | 18 | import com.netflix.oss.tools.osstrackerscraper.{Conf, GithubScraper} 19 | import org.slf4j.LoggerFactory 20 | 21 | object RunGithubScraper { 22 | val logger = LoggerFactory.getLogger(getClass) 23 | 24 | def main(args: Array[String]) { 25 | val conf = new Conf(args) 26 | 27 | val action = conf.action() 28 | 29 | val cassHost = System.getenv("CASS_HOST") 30 | val cassPort = System.getenv("CASS_PORT").toInt 31 | 32 | val esHost = System.getenv("ES_HOST") 33 | val esPort = System.getenv("ES_PORT").toInt 34 | 35 | val githubOrg = System.getenv("github_org") 36 | 37 | if (action == Conf.ACTION_UPDATE_CASSANDRA) { 38 | val scraper = new GithubScraper(githubOrg, cassHost, cassPort, esHost, esPort, ConsoleReportWriter) 39 | val success = scraper.updateCassandra() 40 | if (!success) { 41 | System.exit(1) 42 | } 43 | logger.info(s"successfully updated the cassandra repo infos") 44 | } 45 | else if (action == Conf.ACTION_UPDATE_ELASTICSEARCH) { 46 | val scraper = new GithubScraper(githubOrg, cassHost, cassPort, esHost, esPort, ConsoleReportWriter) 47 | val success = scraper.updateElasticSearch() 48 | if (!success) { 49 | System.exit(1) 50 | } 51 | logger.info(s"successfully updated the elastic search repo infos") 52 | } 53 | else { 54 | println("you must specify an action") 55 | System.exit(1) 56 | } 57 | } 58 | } -------------------------------------------------------------------------------- /settings.gradle: -------------------------------------------------------------------------------- 1 | rootProject.name = 'osstracker' 2 | include 'osstracker-scraper' 3 | include 'osstracker-scraperapp' 4 | --------------------------------------------------------------------------------
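A minimal sketch (not part of the repository) of an alternative ReportWriter implementation, assuming the updatecassandra report should be persisted to a file rather than printed to the console as ConsoleReportWriter does; the class name FileReportWriter and the output path below are hypothetical.

import java.io.{File, PrintWriter}

import com.netflix.oss.tools.osstrackerscraper.ReportWriter

// Hypothetical ReportWriter that writes the scraper report to a file instead of stdout.
class FileReportWriter(path: String) extends ReportWriter {
  override def processReport(reportContent: String): Unit = {
    val writer = new PrintWriter(new File(path))
    try {
      writer.write(reportContent) // same content that ConsoleReportWriter prints
    } finally {
      writer.close()
    }
  }
}

// It could then be passed to the scraper in place of ConsoleReportWriter, for example:
//   new GithubScraper(githubOrg, cassHost, cassPort, esHost, esPort, new FileReportWriter("osstracker-report.txt"))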