├── .gitignore ├── .npmrc ├── CODE_OF_CONDUCT.md ├── LICENSE ├── README.md ├── index.js ├── lib ├── events │ ├── dedupeLocalEventsFilesInDirectory.js │ ├── filterPublicGitHubEvents.js │ ├── getEventsFrom.js │ ├── getPublicGitHubEvents.js │ └── githubEvents.json ├── files │ ├── safeDirectory.js │ ├── safeWriteFileToPath.js │ ├── writeEventsFile.js │ └── writeMegafile.js └── markdown │ ├── getMarkdownFromEvents.js │ └── writeMarkdownFile.js ├── logging └── logger.js ├── package.json └── test ├── cases └── rawMegafile.js ├── functions ├── filterPublicGitHubEvents.test.js ├── getEventsFrom.test.js ├── getPublicGitHubEvents.test.js ├── writeEventsFile.test.js ├── writeMarkdownFile.test.js └── writeMegafile.test.js ├── megatest.js └── util ├── dates.js ├── headings.js ├── paths.js └── people.js /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | 9 | # Diagnostic reports (https://nodejs.org/api/report.html) 10 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 11 | 12 | # Runtime data 13 | pids 14 | *.pid 15 | *.seed 16 | *.pid.lock 17 | 18 | # Directory for instrumented libs generated by jscoverage/JSCover 19 | lib-cov 20 | 21 | # Coverage directory used by tools like istanbul 22 | coverage 23 | *.lcov 24 | 25 | # nyc test coverage 26 | .nyc_output 27 | 28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 29 | .grunt 30 | 31 | # Bower dependency directory (https://bower.io/) 32 | bower_components 33 | 34 | # node-waf configuration 35 | .lock-wscript 36 | 37 | # Compiled binary addons (https://nodejs.org/api/addons.html) 38 | build/Release 39 | 40 | # Dependency directories 41 | node_modules/ 42 | jspm_packages/ 43 | 44 | # Snowpack dependency directory (https://snowpack.dev/) 45 | web_modules/ 46 | 47 | # TypeScript cache 48 | *.tsbuildinfo 49 | 50 | # Optional npm cache directory 51 | 
.npm 52 | 53 | # Optional eslint cache 54 | .eslintcache 55 | 56 | # Microbundle cache 57 | .rpt2_cache/ 58 | .rts2_cache_cjs/ 59 | .rts2_cache_es/ 60 | .rts2_cache_umd/ 61 | 62 | # Optional REPL history 63 | .node_repl_history 64 | 65 | # Output of 'npm pack' 66 | *.tgz 67 | 68 | # Yarn Integrity file 69 | .yarn-integrity 70 | 71 | # dotenv environment variables file 72 | .env 73 | .env.test 74 | 75 | # parcel-bundler cache (https://parceljs.org/) 76 | .cache 77 | .parcel-cache 78 | 79 | # Next.js build output 80 | .next 81 | out 82 | 83 | # Nuxt.js build / generate output 84 | .nuxt 85 | dist 86 | 87 | # Gatsby files 88 | .cache/ 89 | # Comment in the public line in if your project uses Gatsby and not Next.js 90 | # https://nextjs.org/blog/next-9-1#public-directory-support 91 | # public 92 | 93 | # vuepress build output 94 | .vuepress/dist 95 | 96 | # Serverless directories 97 | .serverless/ 98 | 99 | # FuseBox cache 100 | .fusebox/ 101 | 102 | # DynamoDB Local files 103 | .dynamodb/ 104 | 105 | # TernJS port file 106 | .tern-port 107 | 108 | # Stores VSCode versions used for testing VSCode extensions 109 | .vscode-test 110 | 111 | # yarn v2 112 | .yarn/cache 113 | .yarn/unplugged 114 | .yarn/build-state.yml 115 | .yarn/install-state.gz 116 | .pnp.* 117 | 118 | # local 119 | 120 | raw/ 121 | data/ 122 | reports/ 123 | report.* 124 | team.* 125 | log.* -------------------------------------------------------------------------------- /.npmrc: -------------------------------------------------------------------------------- 1 | package-lock=false 2 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | 2 | # Contributor Covenant Code of Conduct 3 | 4 | ## Our Pledge 5 | 6 | We as members, contributors, and leaders pledge to make participation in our 7 | community a harassment-free experience for everyone, regardless of age, body 8 | 
size, visible or invisible disability, ethnicity, sex characteristics, gender 9 | identity and expression, level of experience, education, socio-economic status, 10 | nationality, personal appearance, race, caste, color, religion, or sexual identity 11 | and orientation. 12 | 13 | We pledge to act and interact in ways that contribute to an open, welcoming, 14 | diverse, inclusive, and healthy community. 15 | 16 | ## Our Standards 17 | 18 | Examples of behavior that contributes to a positive environment for our 19 | community include: 20 | 21 | * Demonstrating empathy and kindness toward other people 22 | * Being respectful of differing opinions, viewpoints, and experiences 23 | * Giving and gracefully accepting constructive feedback 24 | * Accepting responsibility and apologizing to those affected by our mistakes, 25 | and learning from the experience 26 | * Focusing on what is best not just for us as individuals, but for the 27 | overall community 28 | 29 | Examples of unacceptable behavior include: 30 | 31 | * The use of sexualized language or imagery, and sexual attention or 32 | advances of any kind 33 | * Trolling, insulting or derogatory comments, and personal or political attacks 34 | * Public or private harassment 35 | * Publishing others' private information, such as a physical or email 36 | address, without their explicit permission 37 | * Other conduct which could reasonably be considered inappropriate in a 38 | professional setting 39 | 40 | ## Enforcement Responsibilities 41 | 42 | Community leaders are responsible for clarifying and enforcing our standards of 43 | acceptable behavior and will take appropriate and fair corrective action in 44 | response to any behavior that they deem inappropriate, threatening, offensive, 45 | or harmful. 
46 | 47 | Community leaders have the right and responsibility to remove, edit, or reject 48 | comments, commits, code, wiki edits, issues, and other contributions that are 49 | not aligned to this Code of Conduct, and will communicate reasons for moderation 50 | decisions when appropriate. 51 | 52 | ## Scope 53 | 54 | This Code of Conduct applies within all community spaces, and also applies when 55 | an individual is officially representing the community in public spaces. 56 | Examples of representing our community include using an official e-mail address, 57 | posting via an official social media account, or acting as an appointed 58 | representative at an online or offline event. 59 | 60 | ## Enforcement 61 | 62 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 63 | reported to the community leaders responsible for enforcement at 64 | hello@bnb.im. 65 | All complaints will be reviewed and investigated promptly and fairly. 66 | 67 | All community leaders are obligated to respect the privacy and security of the 68 | reporter of any incident. 69 | 70 | ## Enforcement Guidelines 71 | 72 | Community leaders will follow these Community Impact Guidelines in determining 73 | the consequences for any action they deem in violation of this Code of Conduct: 74 | 75 | ### 1. Correction 76 | 77 | **Community Impact**: Use of inappropriate language or other behavior deemed 78 | unprofessional or unwelcome in the community. 79 | 80 | **Consequence**: A private, written warning from community leaders, providing 81 | clarity around the nature of the violation and an explanation of why the 82 | behavior was inappropriate. A public apology may be requested. 83 | 84 | ### 2. Warning 85 | 86 | **Community Impact**: A violation through a single incident or series 87 | of actions. 88 | 89 | **Consequence**: A warning with consequences for continued behavior. 
No 90 | interaction with the people involved, including unsolicited interaction with 91 | those enforcing the Code of Conduct, for a specified period of time. This 92 | includes avoiding interactions in community spaces as well as external channels 93 | like social media. Violating these terms may lead to a temporary or 94 | permanent ban. 95 | 96 | ### 3. Temporary Ban 97 | 98 | **Community Impact**: A serious violation of community standards, including 99 | sustained inappropriate behavior. 100 | 101 | **Consequence**: A temporary ban from any sort of interaction or public 102 | communication with the community for a specified period of time. No public or 103 | private interaction with the people involved, including unsolicited interaction 104 | with those enforcing the Code of Conduct, is allowed during this period. 105 | Violating these terms may lead to a permanent ban. 106 | 107 | ### 4. Permanent Ban 108 | 109 | **Community Impact**: Demonstrating a pattern of violation of community 110 | standards, including sustained inappropriate behavior, harassment of an 111 | individual, or aggression toward or disparagement of classes of individuals. 112 | 113 | **Consequence**: A permanent ban from any sort of public interaction within 114 | the community. 115 | 116 | ## Attribution 117 | 118 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 119 | version 2.0, available at 120 | [https://www.contributor-covenant.org/version/2/0/code_of_conduct.html][v2.0]. 121 | 122 | Community Impact Guidelines were inspired by 123 | [Mozilla's code of conduct enforcement ladder][Mozilla CoC]. 124 | 125 | For answers to common questions about this code of conduct, see the FAQ at 126 | [https://www.contributor-covenant.org/faq][FAQ]. Translations are available 127 | at [https://www.contributor-covenant.org/translations][translations]. 
128 | 129 | [homepage]: https://www.contributor-covenant.org 130 | [v2.0]: https://www.contributor-covenant.org/version/2/0/code_of_conduct.html 131 | [Mozilla CoC]: https://github.com/mozilla/diversity 132 | [FAQ]: https://www.contributor-covenant.org/faq 133 | [translations]: https://www.contributor-covenant.org/translations 134 | 135 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License Copyright (c) 2021 Tierney Cyren 2 | 3 | Permission is hereby granted, free 4 | of charge, to any person obtaining a copy of this software and associated 5 | documentation files (the "Software"), to deal in the Software without 6 | restriction, including without limitation the rights to use, copy, modify, merge, 7 | publish, distribute, sublicense, and/or sell copies of the Software, and to 8 | permit persons to whom the Software is furnished to do so, subject to the 9 | following conditions: 10 | 11 | The above copyright notice and this permission notice 12 | (including the next paragraph) shall be included in all copies or substantial 13 | portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 16 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 17 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO 18 | EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR 19 | OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 20 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Encites 2 | 3 | Encites is a WIP project to enable folks to collect and process public GitHub activity data about users. Sorry if anything's broken, I might fix it if you create an issue. 4 | 5 | ## Usage 6 | 7 | ```bash 8 | npm install encites 9 | ``` 10 | 11 | ### Preface 12 | 13 | There are a few things you'll need: 14 | 15 | - Data directory: You'll need a data directory. Regardless of what you name this directory, almost all operations of encites assume you have one. Encites will try to create one for you if one doesn't already exist when you're running some commands, but does not gaurantee that it'll succeed. 16 | - Megafile name: Since GitHub only returns 30 days or 300 results (whichever is less), Encites provides some functionality that can help you build a cache of all events if you run it often enough (how often is often enough depends on how active any given user is). In the project, this is considered a `megafile` and can be thought of as a kind of cache. To use certain kinds of functionality, you'll need to choose a name for your megafile and keep it consistent. `megafile.json` is a good default. 17 | 18 | Some additional terms and context around them: 19 | 20 | - Events: Events are the primordial that Encites gets from GitHub. Encites manipulates the Event objects it gets from GitHub through `getPublicGitHubEvents`, and can use `filterPublicGitHubEvents` to be more compact, only surfacing information that are useful for accomplishing the goals of Encites. 21 | - Events Array: An array that contains individual Events. 22 | - Events File: A file that contains an Events Array. Generally, written by one of the helper methods. 23 | - Data Directory: The directory where your JSON output files go. 
24 | 25 | ### Environment Variables 26 | 27 | Encites uses [dotenv](https://www.npmjs.com/package/dotenv) to read environment variables from a `.env` file in your project. If you'd prefer not to use a `.env` file, you can define your environment variables in your operating system through normal methods. 28 | 29 | #### `ENCITES_GITHUB_PAT` 30 | 31 | This is expected to be assigned the value of a GitHub Personal Access Token (GitHub PAT, see [GitHub's docs](https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token) for details on how to create one). This is needed to ensure you don't get rate limited, which will almost certainly happen when using Encites without a PAT. 32 | 33 | #### `ENCITES_LOGGER` 34 | 35 | If this is assigned the value of `true`, you will get pretty log output from [pino](https://npm.im/pino). You can use [pino-pretty](http://npm.im/pino-pretty) to prettily view them in your terminal. 36 | 37 | ### Methods 38 | 39 | #### dedupeLocalEventsFilesInDirectory(dataDirectory) 40 | 41 | A utility in the pursuit of building megafiles. It will retrun a deduped array of all entires from entries files in the passed directory. 42 | 43 | * `dataDirectory` (string, required): path to your data directory to step through for events files (Encites presumes that _all_ files in that data directory are events files). 44 | 45 | ```js 46 | const { dedupeLocalEventsFilesInDirectory } = require('encites') 47 | 48 | const directory = './data/` 49 | 50 | // get all data, deduped, from your data directory 51 | const dedupedData = await dedupeLocalEventsFilesInDirectory(`${directory}`) 52 | 53 | console.log(dedupedData) 54 | ``` 55 | #### filterPublicGitHubEvents(events) 56 | 57 | Parses data from the GitHub API, provided to the method via the `events` property, and returns a nice clean array of objects representing events with only relevant information. 58 | 59 | * `events` (array, required): an array of GitHub API response data. 
60 | 61 | ```js 62 | const { filterPublicGitHubEvents, getPublicGitHubEvents } = require('encites') 63 | 64 | const arrayOfGitHubUsers = ['bnb'] 65 | 66 | // get our GitHub data - make sure you've already set ENCITES_GITHUB_PAT 67 | const publicEvents = await getPublicGitHubEvents(arrayOfGitHubUsers) 68 | 69 | // filter our data into the shape the rest of the module expects 70 | const events = await filterPublicGitHubEvents(publicEvents) 71 | ``` 72 | 73 | #### getEventsFrom 74 | 75 | `getLocalEventsFrom()` has two exposed methods - `date` and `period`. These respectively allow you to **filter** events contained within events files, based on date/time information. 76 | 77 | ##### getEventsFrom.date(eventsFile, startDate) 78 | 79 | Gets all events, starting after the date passeed. 80 | 81 | * `eventsFile` (string, required): Path to an events file. You'll probably want to read a megafile rather than any given single events file. 82 | * `startDate` (string, required): The starting date. Any events before this date will be excluded from the results. Accepts any value that is valid in Luxon's [.toISO()](https://moment.github.io/luxon/docs/class/src/datetime.js~DateTime.html#instance-method-toISO) method. 83 | 84 | ```js 85 | const { getEventsFrom } = require('encites') 86 | 87 | const dataPath = './data/' 88 | const megafileName = 'megafile.json' 89 | const users = ['bnb'] 90 | 91 | const dateToCheckFrom = '2021-03-01' // yyyy-mm-dd 92 | 93 | const dateFilteredEvents = await getLocalEventsFrom.date(`${dataPath}${megafileName}`, dateToCheckFrom) 94 | ``` 95 | 96 | ##### getEventsFrom.period(eventsFile, startDate, endDate) 97 | 98 | Get all events within a given period. 99 | 100 | * `eventsFile` (string, required): Path to an events file. You'll probably want to read a megafile rather than any given single events file. 101 | * `startDate` (string, required): The starting date. Any events before this date will be excluded from the results. 
Accepts any value that is valid in Luxon's [.toISO()](https://moment.github.io/luxon/docs/class/src/datetime.js~DateTime.html#instance-method-toISO) method. 102 | * `endDate` (string, required): The ending date. Any events after this date will be excluded from the results. Accepts any value that is valid in Luxon's [.toISO()](https://moment.github.io/luxon/docs/class/src/datetime.js~DateTime.html#instance-method-toISO) method. 103 | 104 | 105 | ```js 106 | const { getEventsFrom } = require('encites') 107 | 108 | const dataPath = './data/' 109 | const megafileName = 'megafile.json' 110 | const users = ['bnb'] 111 | 112 | const dateToCheckFrom = '2021-03-01' // yyyy-mm-dd 113 | const dateToCheckUntil = '2021-03-28' // yyyy-mm-dd 114 | 115 | const dateFilteredEvents = await getLocalEventsFrom.period(`${dataPath}${megafileName}`, dateToCheckFrom, dateToCheckUntil) 116 | ``` 117 | 118 | #### getMarkdownFromEvents(events, title) 119 | 120 | Takes an Events Array, spits out pretty markdown. 121 | 122 | * `events` (array, required): An array of Events. 123 | * `title` (string, required): A string to be used as the title of your Markdown document. 124 | 125 | ```js 126 | const { filterPublicGitHubEvents, getMarkdownFromEvents getPublicGitHubEvents } = require('encites') 127 | 128 | // the users we're getting data for 129 | const users = ['bnb'] 130 | 131 | // fetch new data from GitHub - make sure you've already set ENCITES_GITHUB_PAT 132 | const publicEvents = await getPublicGitHubEvents(users) 133 | 134 | // structure our data in the shape we need 135 | const events = await filterPublicGitHubEvents(users) 136 | 137 | // get a markdown representation of our Events 138 | const markdown = await getMarkdownFromEvents(events) 139 | ``` 140 | 141 | #### getPublicGitHubEvents(githubUsernames) 142 | 143 | Fetches data from the GitHub API. Only fetches public data, zero authenticated/private data is fetched. 
144 | 145 | You can either directly pass this along to something that consumes it, or you can write it to a file. Writing it to a file has the benefit of being able to be consumed again later if you'd like to rebuild your data or if there are new Event parsing capabilities added to the module once you're past 90 days or 300 events. 146 | 147 | * `githubUsernames` (string, required): An Array of GitHub usernames for whom you want to consume public data. 148 | 149 | ```js 150 | const { filterPublicGitHubEvents, getPublicGitHubEvents } = require('encites') 151 | 152 | const arrayOfGitHubUsers = ['bnb'] 153 | 154 | // get our GitHub data - make sure you've already set ENCITES_GITHUB_PAT 155 | const publicEvents = await getPublicGitHubEvents(arrayOfGitHubUsers) 156 | 157 | // filter our data into the shape the rest of the module expects 158 | const events = await filterPublicGitHubEvents(publicEvents) 159 | ``` 160 | 161 | #### writeEventsFile(dataDirectory, events, options) 162 | 163 | Writes an Events File to the passed Data Directory with the passed Events Array. By default, the filename is the current date in `yyyy-mm-dd` format, but you can overwrite that with options. Doing so is useful for writing megafiles. 164 | 165 | * `dataDirectory` (string, required): the path to your data directory, where an Events File will be written to. 166 | * `events` (array, required): Pass in an Event Array to be written to the provided path. 167 | * `options` (object, optional): 168 | * `filename` (string, optional): The name of the file. Useful for writing megafiles. 169 | 170 | ```js 171 | const { filterPublicGitHubEvents, getPublicGitHubEvents, writeEventsFile } = require('encites') 172 | 173 | // path to write all our files to. 
174 | const dataPath = './data/' 175 | 176 | // array of users we'd like data for 177 | const users = ['bnb'] 178 | 179 | const publicEvents = await getPublicGitHubEvents(users) 180 | 181 | // fetches public data from the GitHub API 182 | const events = await filterPublicGitHubEvents(publicEvents) 183 | 184 | // write single instance of data 185 | writeEventsFile(`${dataPath}`, events) 186 | ``` 187 | 188 | #### writeMarkdownFile(markdownPath, markdownFileName, events, title) 189 | 190 | A relatively straightforward wrapper of [`getMarkdownFromEvents()`](#getmarkdownfromeventsevents) to write events out as a pretty Markdown file to a provided path with a provided filename. 191 | 192 | - `markdownPath` (string, required): The _full path_ to which you want the markdown file to be written. Does not include filename. 193 | - `markdownFileName` (string, required): The filename, including extension, that you want your resulting markdown to be written as. 194 | - `events` (array, required): An array of Events that you'd like to be parsed out into Markdown. 195 | - `title` (string, required): A string to be used as the title of your Markdown document. 
196 | 197 | ```js 198 | const { filterPublicGitHubEvents, getPublicGitHubEvents, writeMarkdownFile } = require('encites') 199 | 200 | // in this case, we're fine with our markdown file being in the root 201 | const markdownPath = './' 202 | const markdownFilename = 'output.md' 203 | 204 | // the usernames we want public data for 205 | const users = ['bnb'] 206 | 207 | // fetch the public data - make sure you've already set ENCITES_GITHUB_PAT 208 | const publicEvents = await getPublicGitHubEvents(users) 209 | 210 | // fetches public data from the GitHub API 211 | const events = await getAndFilterPublicGitHubEvents(publicEvents) 212 | 213 | // write single instance of data 214 | writeMarkdownFile(markdownPath, markdownFileName, events) 215 | ``` 216 | #### writemegafile(dataPath, megafileFileName) 217 | 218 | A tiny wrapper over [`writeEventsFile()`](#writeeventsfiledatadirectory-events-options) to simplify writing megafiles. Writes a megafile to the data directory. 219 | 220 | - `dataPath` (string, required): the path to your data directory. 221 | - `megafileFileName` (string, required): the name of your megafile. `megafile.json` is a good choice. 222 | 223 | ```js 224 | const { writeMegafile } = require('encites') 225 | 226 | // data directory that we write all our files to 227 | const dataPath = './data/' 228 | 229 | // name of the megafile. Can be whatever, I've just chosen megafile. Needs to be `.json`. 230 | const megafileName = 'megafile.json' 231 | 232 | // write our megafile to the data directory 233 | writeMegafile(dataPath, megafileName) 234 | ``` 235 | 236 | ## Appendix A: Object Shapes 237 | 238 | ### Event Object 239 | 240 | An event object has a relatively specific structure. This structure is based on which `type` of event the Event Object is representing. These structures are built out in `./lib/getAndFilterPublicGitHubEvents.js`. 
Here's a reference for each kind: 241 | 242 | ```json 243 | 244 | // IssuesEvent 245 | { 246 | "id": "15475051640", 247 | "type": "IssuesEvent", 248 | "author": "bnb", 249 | "repo": "cutenode/delice", 250 | "title": "f", 251 | "link": "https://github.com/cutenode/delice/pull/8", 252 | "number": 8, 253 | "date": "2021-03-10T00:27:37Z", 254 | "action": "opened" 255 | }, 256 | 257 | // PullRequestEvent 258 | { 259 | "id": "15506478969", 260 | "type": "PullRequestEvent", 261 | "author": "bnb", 262 | "repo": "openjs-foundation/cross-project-council", 263 | "title": "doc: add README.md file to /TRAVEL_FUND", 264 | "link": "https://github.com/openjs-foundation/cross-project-council/pull/727", 265 | "number": 727, 266 | "date": "2021-03-11T16:27:38Z", 267 | "action": "opened" 268 | } 269 | ``` -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | const dedupeLocalEventsFilesInDirectory = require('./lib/events/dedupeLocalEventsFilesInDirectory') 2 | const filterPublicGitHubEvents = require('./lib/events/filterPublicGitHubEvents') 3 | const getEventsFrom = require('./lib/events/getEventsFrom') 4 | const getMarkdownFromEvents = require('./lib/markdown/getMarkdownFromEvents') 5 | const getPublicGitHubEvents = require('./lib/events/getPublicGitHubEvents') 6 | const writeEventsFile = require('./lib/files/writeEventsFile') 7 | const writeMarkdownFile = require('./lib/markdown/writeMarkdownFile') 8 | const writeMegafile = require('./lib/files/writeMegafile') 9 | 10 | module.exports = { 11 | dedupeLocalEventsFilesInDirectory, 12 | filterPublicGitHubEvents, 13 | getEventsFrom, 14 | getMarkdownFromEvents, 15 | getPublicGitHubEvents, 16 | writeEventsFile, 17 | writeMarkdownFile, 18 | writeMegafile 19 | } 20 | -------------------------------------------------------------------------------- /lib/events/dedupeLocalEventsFilesInDirectory.js: 
-------------------------------------------------------------------------------- 1 | const { readdir, readFile } = require('fs').promises 2 | const path = require('path') 3 | const logger = require('../../logging/logger') 4 | 5 | async function dedupeLocalEventsFilesInDirectory (dataDirectory, megafileName) { 6 | logger(__filename, `dataDirectory: ${dataDirectory}`) 7 | logger(__filename, `megafileName: ${megafileName}`) 8 | const resolvedDirectory = path.resolve(dataDirectory) 9 | const filesInDirectory = await readdir(resolvedDirectory) 10 | 11 | logger(__filename, `resolvedDirectory: ${resolvedDirectory}`) 12 | logger(__filename, `filesInDirectory: ${filesInDirectory}`) 13 | 14 | let allGitHubEvents = [] 15 | 16 | for await (const file of filesInDirectory) { 17 | if ((file !== `${megafileName}.json`)) { // TODO: validate that megafileName exists. If megafile isn't passed, it'll currently skip `undefined.json`. 18 | logger(__filename, `beginning to read contents of ${file}`) 19 | const rawContents = await readFile(path.resolve(`${resolvedDirectory}/${file}`), { encoding: 'utf-8' }) 20 | logger(__filename, `starting JSON parsing of ${file}`) 21 | const contents = JSON.parse(rawContents) 22 | logger(__filename, `finished JSON parsing of ${file}`) 23 | 24 | if (allGitHubEvents.length === 0) { 25 | logger(__filename, 'allGitHubEvents.length is \'0\' and we\'re direclty assigning \'contents\' to it.') 26 | allGitHubEvents = contents 27 | } else { 28 | logger(__filename, 'about to start mapping events.') 29 | const entriesIds = allGitHubEvents.map((event) => event.id) 30 | const result = contents 31 | .filter((event) => !entriesIds.includes(event.id)) 32 | .map((event) => { 33 | return event 34 | }) 35 | logger(__filename, 'done mapping events.') 36 | 37 | for (const event of result) { 38 | allGitHubEvents.push(event) 39 | } 40 | } 41 | } 42 | } 43 | 44 | return allGitHubEvents 45 | } 46 | 47 | module.exports = dedupeLocalEventsFilesInDirectory 48 | 
-------------------------------------------------------------------------------- /lib/events/filterPublicGitHubEvents.js: -------------------------------------------------------------------------------- 1 | const logger = require('../../logging/logger') 2 | 3 | async function filterPublicGitHubEvents (events) { 4 | logger(__filename, `length of 'events': ${events.length}`) 5 | 6 | // building out the basic structure for what we'd want to surface 7 | const userData = [] 8 | 9 | // loop over our data from GitHub and push it into userData 10 | for (const singleEvent in events) { 11 | const event = events[singleEvent] 12 | 13 | logger(__filename, `event: event.type is ${event.type}`) 14 | logger(__filename, `event: event.payload.action is ${event.payload.action}`) 15 | 16 | const eventDataToReturn = { // got all of the properties in here by just logging event and cherrypicking 17 | id: event.id, 18 | type: event.type, 19 | author: event.actor.display_login, 20 | repo: event.repo.name 21 | } 22 | 23 | if (event.type === 'IssuesEvent' && event.payload.action === 'opened') { 24 | // set up all the IssuesEvent-specific properties 25 | eventDataToReturn.title = event.payload.issue.title 26 | eventDataToReturn.link = event.payload.issue.html_url 27 | eventDataToReturn.number = event.payload.issue.number 28 | eventDataToReturn.date = event.payload.issue.created_at 29 | eventDataToReturn.action = event.payload.action 30 | 31 | // push this instance to final data 32 | userData.push(eventDataToReturn) 33 | 34 | logger(__filename, `event: pushed ${event.type} with action ${event.payload.action}.`) 35 | } 36 | 37 | if (event.type === 'PullRequestEvent' && event.payload.action === 'opened') { 38 | // set up all the PullRequestEvent-specific properties 39 | eventDataToReturn.title = event.payload.pull_request.title 40 | eventDataToReturn.link = event.payload.pull_request.html_url 41 | eventDataToReturn.number = event.payload.pull_request.number 42 | eventDataToReturn.date = 
event.payload.pull_request.created_at 43 | eventDataToReturn.action = event.payload.action 44 | 45 | // push this instance to final data 46 | userData.push(eventDataToReturn) 47 | 48 | logger(__filename, `event: pushed ${event.type} with action ${event.payload.action}.`) 49 | } 50 | 51 | if (event.type === 'IssueCommentEvent' && event.payload.action === 'created') { 52 | // set up all the IssueCommentEvent-specific properties 53 | eventDataToReturn.title = event.payload.issue.title 54 | eventDataToReturn.state = event.payload.issue.state 55 | eventDataToReturn.link = event.payload.issue.html_url 56 | eventDataToReturn.number = event.payload.issue.number 57 | eventDataToReturn.date = event.payload.comment.created_at 58 | eventDataToReturn.authorAssociation = event.payload.comment.author_association 59 | eventDataToReturn.commentLink = event.payload.comment.html_url 60 | eventDataToReturn.reactions = { 61 | total: event.payload.comment.reactions.total_count, 62 | '+1': event.payload.comment.reactions['+1'], 63 | '-1': event.payload.comment.reactions['-1'], 64 | laugh: event.payload.comment.reactions.laugh, 65 | hooray: event.payload.comment.reactions.hooray, 66 | confused: event.payload.comment.reactions.confused, 67 | heart: event.payload.comment.reactions.heart, 68 | rocket: event.payload.comment.reactions.rocket, 69 | eyes: event.payload.comment.reactions.eyes 70 | } 71 | 72 | // push this instance to final data 73 | userData.push(eventDataToReturn) 74 | 75 | logger(__filename, `event: pushed ${event.type} with action ${event.payload.action}.`) 76 | } 77 | 78 | if (event.type === 'PullRequestReviewEvent' && event.payload.action === 'created') { 79 | // set up all the PullRequestReviewEvent-specific properties 80 | eventDataToReturn.title = event.payload.pull_request.title 81 | eventDataToReturn.link = event.payload.pull_request.html_url 82 | eventDataToReturn.number = event.payload.pull_request.number 83 | eventDataToReturn.date = event.payload.review.submitted_at 
84 | eventDataToReturn.action = event.payload.action 85 | eventDataToReturn.state = event.payload.review.state 86 | eventDataToReturn.commitId = event.payload.review.commit_id 87 | eventDataToReturn.reviewLink = event.payload.review.html_url 88 | eventDataToReturn.authorAssociation = event.payload.review.author_association 89 | 90 | // push this instance to final data 91 | userData.push(eventDataToReturn) 92 | 93 | logger(__filename, `event: pushed ${event.type} with action ${event.payload.action}.`) 94 | } 95 | } 96 | 97 | return userData 98 | } 99 | 100 | module.exports = filterPublicGitHubEvents 101 | -------------------------------------------------------------------------------- /lib/events/getEventsFrom.js: -------------------------------------------------------------------------------- 1 | const { readFile, stat } = require('fs').promises 2 | const path = require('path') 3 | const { DateTime } = require('luxon') 4 | const logger = require('../../logging/logger') 5 | 6 | // this is an internal helper to read our megafile which should 7 | // have all of our events 8 | async function fetchMegafile (pathToReadEventsFileFrom) { 9 | logger(__filename, `pathToReadEventsFileFrom: ${pathToReadEventsFileFrom}`) 10 | 11 | const normalizedPath = path.resolve(pathToReadEventsFileFrom) 12 | const megafilePath = await stat(normalizedPath) 13 | logger(__filename, `normalizedPath: ${normalizedPath}`) 14 | logger(__filename, `megafilePath: ${megafilePath}`) 15 | 16 | if (megafilePath.isFile()) { 17 | const megafile = await readFile(normalizedPath, { encoding: 'utf8' }) 18 | logger(__filename, `megafile: ${megafile}`) 19 | 20 | return megafile 21 | } 22 | 23 | if (megafilePath.isDirectory()) { 24 | throw new Error('Path passed was a directory, not a file. You need to pass a file.') 25 | } 26 | } 27 | 28 | // the date method here will give you all events FROM the date pased 29 | // UNTIL when the code is run. 
/**
 * Returns all events from the megafile that occurred AFTER startDate,
 * up until whenever the code is run.
 *
 * @param {string} eventsFile - path to the megafile (a JSON array of events).
 * @param {string} startDate - ISO 8601 date string; events at or before this instant are excluded.
 * @returns {Promise<Array>} events whose `date` property is strictly later than startDate.
 */
async function date (eventsFile, startDate) {
  logger(__filename, `eventsFile: ${eventsFile}`)
  logger(__filename, `startDate: ${startDate}`)

  // compare millisecond timestamps, matching how the sibling period() does it
  const parsedDate = DateTime.fromISO(startDate).toMillis()

  const events = await fetchMegafile(eventsFile)
  const parsedEvents = JSON.parse(events)

  logger(__filename, `parsedDate: ${parsedDate}`)
  logger(__filename, `events: ${events}`)
  logger(__filename, `parsedEvents: ${JSON.stringify(parsedEvents, null, 2)}`)

  // keep only events that happened strictly after the start date
  const eventsAfterDate = parsedEvents.filter(event => {
    const eventDate = DateTime.fromISO(event.date).toMillis()
    const comparison = parsedDate < eventDate
    logger(__filename, `eventDate: ${eventDate}`)
    logger(__filename, `comparison: ${comparison}`)
    return comparison
  })

  return eventsAfterDate
}

// the period method here will give you all events from the start date
// passed UNTIL the end date passed. If there are events after the end date,
// they will not be included.
57 | async function period (eventsFile, startDate, endDate) { 58 | logger(__filename, `eventsFile: ${eventsFile}`) 59 | logger(__filename, `startDate: ${startDate}`) 60 | logger(__filename, `endDate: ${endDate}`) 61 | const parsedStartDate = DateTime.fromISO(startDate).toMillis() 62 | const parsedEndDate = DateTime.fromISO(endDate).toMillis() 63 | 64 | const events = await fetchMegafile(eventsFile) 65 | const parsedEvents = JSON.parse(events) 66 | 67 | logger(__filename, `parsedStartDate: ${parsedStartDate}`) 68 | logger(__filename, `parsedEndDate: ${parsedEndDate}`) 69 | logger(__filename, `events: ${events}`) 70 | logger(__filename, `parsedEvents: ${JSON.stringify(parsedEvents, null, 2)}`) 71 | 72 | const eventsInPeriod = parsedEvents.filter(event => { 73 | const eventDate = DateTime.fromISO(event.date).toMillis() 74 | const comparison = (parsedStartDate < eventDate) && (eventDate < parsedEndDate) 75 | 76 | logger(__filename, `eventDate: ${eventDate}`) 77 | logger(__filename, `comparison: ${comparison}`) 78 | 79 | return comparison 80 | }) 81 | 82 | return eventsInPeriod 83 | } 84 | 85 | module.exports = { 86 | date, 87 | period 88 | } 89 | -------------------------------------------------------------------------------- /lib/events/getPublicGitHubEvents.js: -------------------------------------------------------------------------------- 1 | const { Octokit } = require('@octokit/rest') 2 | const logger = require('../../logging/logger') 3 | 4 | logger(__filename, 'setting up octokit') 5 | const octokit = new Octokit({ 6 | auth: process.env.ENCITES_GITHUB_PAT, // what we use .env for 7 | userAgent: 'fetch-activity' 8 | }) 9 | logger(__filename, 'set up octokit') 10 | 11 | async function getPublicGitHubEvents (githubUsernames) { 12 | // building out the basic structure for what we'd want to surface 13 | const userData = [] 14 | 15 | logger(__filename, 'starting loop over usernames') 16 | for (const githubUsername of githubUsernames) { 17 | logger(__filename, 
`starting ${githubUsername}`) 18 | 19 | for await (const response of octokit.paginate.iterator(octokit.activity.listPublicEventsForUser, { username: githubUsername, per_page: 100 })) { 20 | // make a variable so we don't have to directly refer to response.data without context 21 | const githubActivityEvents = response.data 22 | 23 | logger(__filename, 'pushing response data') 24 | userData.push(...githubActivityEvents) 25 | logger(__filename, 'pushed response data') 26 | } 27 | } 28 | logger(__filename, 'finished loop over usernames') 29 | 30 | return userData 31 | } 32 | 33 | module.exports = getPublicGitHubEvents 34 | -------------------------------------------------------------------------------- /lib/events/githubEvents.json: -------------------------------------------------------------------------------- 1 | { 2 | "events": [ 3 | "CommitCommentEvent", 4 | "CreateEvent", 5 | "DeleteEvent", 6 | "ForkEvent", 7 | "GollumEvent", 8 | "IssueCommentEvent", 9 | "IssuesEvent", 10 | "IssuesEvent", 11 | "MemberEvent", 12 | "PublicEvent", 13 | "PullRequestEvent", 14 | "PullRequestReviewEvent", 15 | "PullRequestReviewCommentEvent", 16 | "PushEvent", 17 | "ReleaseEvent", 18 | "SponsorshipEvent", 19 | "WatchEvent" 20 | ] 21 | } -------------------------------------------------------------------------------- /lib/files/safeDirectory.js: -------------------------------------------------------------------------------- 1 | const path = require('path') 2 | const fs = require('fs-extra') 3 | const logger = require('../../logging/logger') 4 | 5 | async function safeDirectory (dataDirectory) { 6 | const dirname = path.resolve(dataDirectory) 7 | logger(__filename, `dirname of dataDirectory: ${dirname}`) 8 | 9 | await fs.ensureDir(dirname) 10 | return dirname 11 | } 12 | 13 | module.exports = safeDirectory 14 | -------------------------------------------------------------------------------- /lib/files/safeWriteFileToPath.js: 
-------------------------------------------------------------------------------- 1 | const logger = require('../../logging/logger') 2 | const fs = require('fs-extra') 3 | 4 | async function safeWriteFileToPath (pathAndFileName, data) { 5 | logger(__filename, `pathAndFileName: ${pathAndFileName}`) 6 | 7 | await fs.outputFile(pathAndFileName, data, { spaces: 2 }) 8 | } 9 | 10 | module.exports = safeWriteFileToPath 11 | -------------------------------------------------------------------------------- /lib/files/writeEventsFile.js: -------------------------------------------------------------------------------- 1 | const path = require('path') 2 | const safeWriteFileToPath = require('./safeWriteFileToPath') 3 | const safeDirectory = require('./safeDirectory') 4 | const { DateTime } = require('luxon') 5 | const logger = require('../../logging/logger') 6 | 7 | async function writeInstanceOfData (dataDirectory, events, options) { 8 | const now = DateTime.now().toISODate() 9 | const usableOptions = {} 10 | const directory = await safeDirectory(dataDirectory) 11 | 12 | logger(__filename, `now: ${now}`) 13 | logger(__filename, `resolvedPath: ${directory}`) 14 | 15 | if (options === undefined) { 16 | usableOptions.filename = `${now}.json` 17 | logger(__filename, `no custom options being used, setting filename to ${now}.json`) 18 | } else { 19 | usableOptions.filename = options.filename 20 | logger(__filename, `custom options being used, setting filename to ${options.filename}`) 21 | } 22 | const pathAndFileName = path.join(`${directory}/${usableOptions.filename}`) 23 | 24 | // write the actual file 25 | const writeFile = await safeWriteFileToPath(pathAndFileName, JSON.stringify(events, null, 2)) 26 | logger(__filename, `wrote file to ${pathAndFileName}`) 27 | 28 | return writeFile 29 | } 30 | 31 | module.exports = writeInstanceOfData 32 | -------------------------------------------------------------------------------- /lib/files/writeMegafile.js: 
-------------------------------------------------------------------------------- 1 | const writeEventsFile = require('./writeEventsFile') 2 | const safeDirectory = require('./safeDirectory') 3 | const dedupeLocalEventsFilesInDirectory = require('../events/dedupeLocalEventsFilesInDirectory') 4 | const logger = require('../../logging/logger') 5 | 6 | async function writeMegafile (dataDirectory, megafileFileName) { 7 | logger(__filename, `dataDirectory: ${dataDirectory}`) 8 | logger(__filename, `megafileFileName: ${megafileFileName}`) 9 | const directory = await safeDirectory(dataDirectory) 10 | 11 | logger(__filename, 'awaiting deuped data') 12 | const dedupedData = await dedupeLocalEventsFilesInDirectory(directory, megafileFileName) 13 | logger(__filename, 'collected deduped data') 14 | 15 | logger(__filename, 'beginning to write megafile') 16 | const writeFile = await writeEventsFile(`${directory}`, dedupedData, { filename: megafileFileName }) 17 | logger(__filename, 'wrote megafile') 18 | 19 | return writeFile 20 | } 21 | 22 | module.exports = writeMegafile 23 | -------------------------------------------------------------------------------- /lib/markdown/getMarkdownFromEvents.js: -------------------------------------------------------------------------------- 1 | 2 | require('dotenv').config() 3 | const logger = require('../../logging/logger') 4 | 5 | async function getMarkdownFromEvents (events, title) { 6 | const headings = { 7 | page: `# ${title}`, 8 | count: '## Total Counts', 9 | contributors: '## Contributors', 10 | issues: '## Issues Created', 11 | prs: '## Pull Requests Created', 12 | comments: '## Comments Created', 13 | reviews: '## Reviews Submitted' 14 | 15 | } 16 | 17 | // variables we popualte with logic 18 | 19 | const data = { 20 | usersWhoCreatedIssues: [], 21 | usersWhoCreatedPrs: [], 22 | usersWhoCreatedComments: [], 23 | usersWhoReviewedPrs: [], 24 | issueBullets: [], 25 | prBullets: [], 26 | commentBullets: [], 27 | reviewsBullets: [] 28 | } 
29 | 30 | // counts of the things we care about 31 | 32 | const counts = { 33 | issuesCreated: 0, 34 | prsCreated: 0, 35 | reviewsSubmitted: 0, 36 | commentsCreated: 0 37 | } 38 | 39 | logger(__filename, 'beinning loop over events') 40 | for (const event of events) { 41 | logger(__filename, `loop over events number ${events[events]}`) 42 | logger(__filename, `event.type is ${event.type}`) 43 | 44 | // set up pretty markdown shorthand 45 | logger(__filename, 'setting up pretty markdown shorthand') 46 | const author = `[@${event.author}](https://github.com/${event.author})` 47 | const link = `_**${event.title} ([#${event.number}](${event.link}))**_` 48 | const repo = `[${event.repo}](https://github.com/${event.repo})` 49 | logger(__filename, 'set up pretty markdown shorthand') 50 | 51 | if (event.type === 'IssuesEvent') { 52 | logger(__filename, 'event type is IssuesEvent') 53 | 54 | counts.issuesCreated = counts.issuesCreated + 1 55 | const bullet = `* ${author} created ${link} in ${repo}` 56 | data.issueBullets.push(bullet) 57 | 58 | if (!data.usersWhoCreatedIssues.includes(author)) { 59 | logger(__filename, 'user was not in array of those who created issues') 60 | data.usersWhoCreatedIssues.push(author) 61 | } 62 | } 63 | 64 | if (event.type === 'PullRequestEvent') { 65 | logger(__filename, 'event type is PullRequestEvent') 66 | 67 | counts.prsCreated = counts.prsCreated + 1 68 | const bullet = `* ${author} created ${link} in ${repo}` 69 | data.prBullets.push(bullet) 70 | 71 | if (!data.usersWhoCreatedPrs.includes(author)) { 72 | logger(__filename, 'user was not in array of those who created prs') 73 | data.usersWhoCreatedPrs.push(author) 74 | } 75 | } 76 | 77 | if (event.type === 'IssueCommentEvent') { 78 | logger(__filename, 'event type is IssueCommentEvent') 79 | 80 | counts.commentsCreated = counts.commentsCreated + 1 81 | const bullet = `* **[#${event.number} (comment)](${event.commentLink})**: ${author} wrote a comment on ${link} in ${repo}` 82 | 
data.commentBullets.push(bullet) 83 | 84 | if (!data.usersWhoCreatedComments.includes(author)) { 85 | data.usersWhoCreatedComments.push(author) 86 | } 87 | } 88 | 89 | if (event.type === 'PullRequestReviewEvent') { 90 | logger(__filename, 'event type is PullRequestReviewEvent') 91 | 92 | counts.reviewsSubmitted = counts.reviewsSubmitted + 1 93 | const bullet = `* **[#${event.number} (review)](${event.reviewLink})**: ${author} reviewed ${link} in ${repo}: ` 94 | data.reviewsBullets.push(bullet) 95 | 96 | if (!data.usersWhoReviewedPrs.includes(author)) { 97 | logger(__filename, 'user was not in array of those who created prs') 98 | data.usersWhoReviewedPrs.push(author) 99 | } 100 | } 101 | } 102 | 103 | // create the sections of the markdown document 104 | // these are broken up into sections to make it easier to implement limited scope calls of the method later if we want that 105 | logger(__filename, 'setting up markdown elements') 106 | const heading = `${headings.page}` 107 | const countSection = `\n\n${headings.count}\n\n* Issues Created: ${counts.issuesCreated}\n* Pull Requests Created: ${counts.prsCreated}\n* Comments Created: ${counts.commentsCreated}\n* Reviews Submitted: ${counts.reviewsSubmitted}` 108 | const contributorsSection = `\n\n${headings.contributors}\n\n* Contributors who created Issues: ${data.usersWhoCreatedIssues.join(', ')}\n\n* Contributors who created Pull Requests: ${data.usersWhoCreatedPrs.join(', ')}\n\n* Contributors who created comments: ${data.usersWhoCreatedComments.join(', ')}` 109 | const issueSection = `\n\n${headings.issues}\n\n${data.issueBullets.join('\n')}` 110 | const prSection = `\n\n${headings.prs}\n\n${data.prBullets.join('\n')}` 111 | const commentSection = `\n\n${headings.comments}\n\n${data.commentBullets.join('\n')}` 112 | const reviewSection = `\n\n${headings.reviews}\n\n${data.reviewsBullets.join('\n')}` 113 | logger(__filename, 'set up markdown elements') 114 | 115 | logger(__filename, 'setting up markdown 
document') 116 | const document = `${heading}${countSection}${contributorsSection}${issueSection}${prSection}${commentSection}${reviewSection}` 117 | logger(__filename, 'set up markdown document') 118 | 119 | return document 120 | } 121 | 122 | module.exports = getMarkdownFromEvents 123 | -------------------------------------------------------------------------------- /lib/markdown/writeMarkdownFile.js: -------------------------------------------------------------------------------- 1 | const getMarkdownFromEvents = require('./getMarkdownFromEvents') 2 | const safeWriteFileToPath = require('../files/safeWriteFileToPath') 3 | const logger = require('../../logging/logger') 4 | 5 | async function writeMarkdownFile (markdownPath, markdownFileName, events, title) { 6 | logger(__filename, 'building out markdown from data') 7 | const markdown = await getMarkdownFromEvents(events, title) 8 | logger(__filename, 'writing markdown to the file') 9 | await safeWriteFileToPath(`${markdownPath}${markdownFileName}`, markdown) 10 | } 11 | 12 | module.exports = writeMarkdownFile 13 | -------------------------------------------------------------------------------- /logging/logger.js: -------------------------------------------------------------------------------- 1 | require('dotenv').config() 2 | const pino = require('pino')() 3 | const log = process.env.ENCITES_LOGGER 4 | const path = require('path') 5 | 6 | async function logger (filename, message) { 7 | const prettyFileName = filename.slice(filename.lastIndexOf(path.sep)) 8 | if (log) pino.info(`${prettyFileName}: ${message}`) 9 | } 10 | 11 | module.exports = logger 12 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "encites", 3 | "version": "4.1.1", 4 | "description": "fetches GitHub activity, providing an API for that activity and rendering it prettily in Markdown", 5 | "main": 
"index.js", 6 | "scripts": { 7 | "lint": "standard", 8 | "clean": "rm -rf ./data && rm -rf ./reports && rm -rf ./raw && rm team.md" 9 | }, 10 | "keywords": [ 11 | "github", 12 | "insights", 13 | "api", 14 | "markdown" 15 | ], 16 | "author": "Tierney Cyren ", 17 | "license": "MIT", 18 | "devDependencies": { 19 | "standard": "^16.0.3" 20 | }, 21 | "dependencies": { 22 | "@octokit/rest": "^18.3.2", 23 | "dotenv": "^8.2.0", 24 | "fs-extra": "^9.1.0", 25 | "luxon": "^1.26.0", 26 | "pino": "^6.11.2" 27 | }, 28 | "directories": { 29 | "lib": "lib" 30 | }, 31 | "repository": { 32 | "type": "git", 33 | "url": "git+https://github.com/cutenode/encites.git" 34 | }, 35 | "bugs": { 36 | "url": "https://github.com/cutenode/encites/issues" 37 | }, 38 | "homepage": "https://github.com/cutenode/encites#readme" 39 | } 40 | -------------------------------------------------------------------------------- /test/cases/rawMegafile.js: -------------------------------------------------------------------------------- 1 | const { getPublicGitHubEvents, writeEventsFile, writeMegafile } = require('../../') 2 | const users = require('../util/people') 3 | const dates = require('../util/dates') 4 | const createPaths = require('../util/paths') 5 | 6 | const paths = createPaths(dates) 7 | 8 | async function test () { 9 | // fetch data from GitHub 10 | const rawData = await getPublicGitHubEvents(users) 11 | const rawPath = `${paths.raw}` 12 | 13 | await writeEventsFile(rawPath, rawData) 14 | await writeMegafile(rawPath, paths.filenames.megafile) 15 | } 16 | 17 | test() 18 | -------------------------------------------------------------------------------- /test/functions/filterPublicGitHubEvents.test.js: -------------------------------------------------------------------------------- 1 | const { getPublicGitHubEvents, filterPublicGitHubEvents } = require('../../') 2 | const users = require('../util/people') 3 | 4 | async function test () { 5 | // Fetch raw data with a request to GitHub. 
This is required to filter the data, since there's nothing to filter if we don't have the data. 6 | const rawData = await getPublicGitHubEvents(users) 7 | // Applys our filters to the data we get from GitHub and makes it "useful" while also stripping away what we don't need. 8 | const data = await filterPublicGitHubEvents(rawData) 9 | 10 | console.log(data) 11 | } 12 | 13 | test() 14 | -------------------------------------------------------------------------------- /test/functions/getEventsFrom.test.js: -------------------------------------------------------------------------------- 1 | const { getEventsFrom } = require('../../') 2 | const dates = require('../util/dates') 3 | const createPaths = require('../util/paths') 4 | 5 | const paths = createPaths(dates) 6 | 7 | async function test () { 8 | // gets all data locally from the megafile, filtering out events *before* the date passed 9 | // note: this requires a megafile to actually exist. 10 | const dateFilteredEvents = await getEventsFrom.period(`${paths.data}${paths.megafile}`, paths.dateToCheck.from, paths.dateToCheck.until) 11 | 12 | console.log(dateFilteredEvents) 13 | } 14 | 15 | test() 16 | -------------------------------------------------------------------------------- /test/functions/getPublicGitHubEvents.test.js: -------------------------------------------------------------------------------- 1 | const { getPublicGitHubEvents } = require('../../') 2 | const users = require('../util/people') 3 | 4 | async function test () { 5 | // fetch data from GitHub 6 | const rawData = await getPublicGitHubEvents(users) 7 | 8 | console.log(rawData) 9 | } 10 | 11 | test() 12 | -------------------------------------------------------------------------------- /test/functions/writeEventsFile.test.js: -------------------------------------------------------------------------------- 1 | const { getPublicGitHubEvents, writeEventsFile } = require('../../') 2 | const dates = require('../util/dates') 3 | const users = 
require('../util/people') 4 | const createPaths = require('../util/paths') 5 | 6 | const paths = createPaths(dates) 7 | 8 | async function test () { 9 | // fetch raw API data from GitHub 10 | const rawData = await getPublicGitHubEvents(users) 11 | 12 | // save our publicData for future use 13 | writeEventsFile(`${paths.raw}`, rawData) 14 | } 15 | 16 | test() 17 | -------------------------------------------------------------------------------- /test/functions/writeMarkdownFile.test.js: -------------------------------------------------------------------------------- 1 | const { getEventsFrom, writeMarkdownFile } = require('../../') 2 | const dates = require('../util/dates') 3 | const createPaths = require('../util/paths') 4 | const createHeadings = require('../util/headings') 5 | 6 | const paths = createPaths(dates) 7 | const headings = createHeadings(dates) 8 | 9 | async function test () { 10 | // gets all data locally from the megafile, filtering out events *before* the date passed 11 | // note: this requires a megafile to actually exist. 
12 | const dateFilteredEvents = await getEventsFrom.period(`${paths.data}${paths.megafile}`, paths.dateToCheck.from, paths.dateToCheck.until) 13 | 14 | // write the markdown to the correct location 15 | writeMarkdownFile(paths.reports.weekly, paths.filenames.weekly, dateFilteredEvents, headings.weekly) 16 | } 17 | 18 | test() 19 | -------------------------------------------------------------------------------- /test/functions/writeMegafile.test.js: -------------------------------------------------------------------------------- 1 | const { writeMegafile } = require('../../') 2 | const dates = require('../util/dates') 3 | const createPaths = require('../util/paths') 4 | 5 | const paths = createPaths(dates) 6 | 7 | async function test () { 8 | writeMegafile(`${paths.data}`, paths.filenames.megafile) 9 | } 10 | 11 | test() 12 | -------------------------------------------------------------------------------- /test/megatest.js: -------------------------------------------------------------------------------- 1 | const { getPublicGitHubEvents, filterPublicGitHubEvents, getEventsFrom, writeEventsFile, writeMarkdownFile, writeMegafile } = require('../') 2 | const users = require('./util/people') 3 | const dates = require('./util/dates') 4 | const createPaths = require('./util/paths') 5 | const createHeadings = require('./util/headings') 6 | 7 | const paths = createPaths(dates) 8 | const headings = createHeadings(dates) 9 | 10 | async function teamData () { 11 | // fetch data from GitHub 12 | const rawData = await getPublicGitHubEvents(users) 13 | // fetches public data from the GitHub API 14 | const data = await filterPublicGitHubEvents(rawData) 15 | 16 | // // save our publicData for future use 17 | await writeEventsFile(`${paths.raw}`, rawData) 18 | 19 | // attempt to save our data as a Megafile? 
20 | await writeMegafile(`${paths.raw}`, paths.filenames.megafile) 21 | 22 | // write single instance of data 23 | await writeEventsFile(`${paths.data}`, data) 24 | // write the megafile 25 | await writeMegafile(`${paths.data}`, paths.filenames.megafile) 26 | 27 | // gets all data locally from the megafile, filtering out events *before* the date passed 28 | const dateFilteredEvents = await getEventsFrom.period(`${paths.data}${paths.filenames.megafile}`, paths.dateToCheck.from, paths.dateToCheck.until) 29 | 30 | // write the markdown to the correct location 31 | writeMarkdownFile(paths.reports.weekly, paths.filenames.weekly, dateFilteredEvents, headings.weekly) 32 | } 33 | 34 | teamData() 35 | -------------------------------------------------------------------------------- /test/util/dates.js: -------------------------------------------------------------------------------- 1 | const { DateTime } = require('luxon') 2 | 3 | const now = DateTime.now() 4 | const currentYear = now.year 5 | const lastMonth = now.minus({ months: 1 }) 6 | const lastDayOfPreviousMonth = lastMonth.set({ day: lastMonth.daysInMonth }) 7 | 8 | const dates = { 9 | now: now.toISODate(), // make `now` usable with ISO date format 10 | lastDayOfPreviousMonth: lastDayOfPreviousMonth.toISODate(), // gets the last day of the previous month 11 | checkFrom: `${currentYear}-${lastDayOfPreviousMonth.month.toString().padStart(2, 0)}-01`, // yyyy-mm-dd 12 | checkUntil: `${currentYear}-${lastDayOfPreviousMonth.month.toString().padStart(2, 0)}-${lastDayOfPreviousMonth.daysInMonth.toString().padStart(2, 0)}` // yyyy-mm-dd-yyyy-mm-dd 13 | } 14 | 15 | module.exports = dates 16 | -------------------------------------------------------------------------------- /test/util/headings.js: -------------------------------------------------------------------------------- 1 | function createHeadings (dates) { 2 | const headings = { 3 | monthly: `Personal GitHub Activity Report: Monthly Run at ${dates.now}`, 4 | weekly: 
`Personal GitHub Activity Report: Weekly Run at ${dates.now}` 5 | } 6 | 7 | return headings 8 | } 9 | 10 | module.exports = createHeadings 11 | -------------------------------------------------------------------------------- /test/util/paths.js: -------------------------------------------------------------------------------- 1 | function createPaths (dates) { 2 | const paths = { 3 | data: './test/data/', // path to write all our files to. 4 | raw: './test/raw/', // path to write raw GitHub data to - this allows us to consume it later as we add more events to output 5 | reports: { 6 | monthly: './test/reports/monthly/', // path to write monthly reports to 7 | weekly: './test/reports/weekly/' // path to write weekly reports to 8 | }, 9 | filenames: { 10 | monthly: `${dates.lastDayOfPreviousMonth}.md`, // name of the monthly report 11 | weekly: `${dates.now}.md`, // name of the weekly report 12 | megafile: 'megafile.json' // name of the megafile. can be whatever, I've just chosen megafile. Needs to be .json. 13 | }, 14 | dateToCheck: { 15 | from: dates.checkFrom, // date to start checking for events 16 | until: dates.checkUntil // date to stop checking for events 17 | } 18 | } 19 | 20 | return paths 21 | } 22 | 23 | module.exports = createPaths 24 | -------------------------------------------------------------------------------- /test/util/people.js: -------------------------------------------------------------------------------- 1 | const people = [ 2 | 'bnb' 3 | ] 4 | 5 | module.exports = people 6 | --------------------------------------------------------------------------------