├── .ebextensions ├── disable-npm.config └── yarn.config ├── .env.example ├── .env.prod.example ├── .eslintignore ├── .eslintrc.js ├── .gitignore ├── .huskyrc.js ├── .lintstagedrc.js ├── .npmrc ├── .nvmrc ├── .prettierignore ├── .prettierrc.js ├── .travis.yml ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── config └── default.js ├── nodemon.json ├── package.json ├── packages ├── core │ ├── package.json │ └── src │ │ ├── ethereum │ │ ├── get-block.js │ │ └── get-current-block.js │ │ ├── events │ │ ├── get-config-for-event-type.js │ │ ├── get-last-processed-block.js │ │ └── get-next-block-range.js │ │ ├── index.js │ │ ├── jobs │ │ ├── extract-events.js │ │ └── index.js │ │ ├── model │ │ ├── block-range.js │ │ ├── event.js │ │ └── index.js │ │ └── util │ │ ├── db.js │ │ ├── error-logger.js │ │ ├── job-runner.js │ │ ├── logging.js │ │ ├── with-timer.js │ │ └── with-transaction.js ├── fill-extractor-v1 │ ├── package.json │ └── src │ │ ├── 0x.js │ │ ├── fetch-log-entries.js │ │ ├── get-event-data.js │ │ └── index.js ├── fill-extractor-v2 │ ├── package.json │ └── src │ │ ├── 0x.js │ │ ├── fetch-log-entries.js │ │ ├── get-event-data.js │ │ └── index.js ├── fill-extractor-v3 │ ├── package.json │ └── src │ │ ├── 0x.js │ │ ├── fetch-log-entries.js │ │ ├── get-event-data.js │ │ └── index.js ├── limit-order-filled-extractor │ ├── package.json │ └── src │ │ ├── fetch-log-entries.js │ │ ├── get-event-data.js │ │ └── index.js ├── liquidity-provider-swap-extractor │ ├── package.json │ └── src │ │ ├── fetch-log-entries.js │ │ ├── get-event-data.js │ │ └── index.js ├── rfq-order-filled-extractor │ ├── package.json │ └── src │ │ ├── fetch-log-entries.js │ │ ├── get-event-data.js │ │ └── index.js ├── shared │ ├── package.json │ └── src │ │ ├── config.js │ │ ├── index.js │ │ └── web3.js ├── sushiswap-swap-extractor │ ├── package.json │ └── src │ │ ├── fetch-log-entries.js │ │ ├── get-event-data.js │ │ └── index.js ├── transformed-erc20-extractor │ ├── package.json │ └── src │ │ ├── fetch-log-entries.js │ │ ├── get-event-data.js │ │ └── index.js ├── uniswap-v2-swap-extractor │ ├── package.json │ └── src │ │ ├── fetch-log-entries.js │ │ ├── get-event-data.js │ │ └── index.js └── uniswap-v3-swap-extractor │ ├── package.json │ └── src │ ├── fetch-log-entries.js │ ├── get-event-data.js │ └── index.js ├── renovate.json ├── src └── index.js └── yarn.lock /.ebextensions/disable-npm.config: -------------------------------------------------------------------------------- 1 | # This file is controlled by the `eb-disable-npm` Node module. If you'd like to 2 | # modify it, you either should publish a new version of that module and update 3 | # to that version; or, you should uninstall that module and then edit this file 4 | # --uninstallation won't take the file with it. 5 | 6 | # This file prevents Elastic Beanstalk from trying to run `npm install` or 7 | # `npm rebuild` on its EC2 instances. See the README for why. 8 | 9 | files: 10 | "/opt/elasticbeanstalk/hooks/appdeploy/pre/50npm.sh": 11 | mode: "000755" 12 | owner: root 13 | group: users 14 | content: | 15 | #!/usr/bin/env bash 16 | # 17 | # Prevent installing or rebuilding like Elastic Beanstalk tries to do by 18 | # default. 19 | # 20 | # Note that this *overwrites* Elastic Beanstalk's default 50npm.sh script 21 | # (https://gist.github.com/wearhere/de51bb799f5099cec0ed28b9d0eb3663). 
22 | 23 | "/opt/elasticbeanstalk/hooks/configdeploy/pre/50npm.sh": 24 | mode: "000755" 25 | owner: root 26 | group: users 27 | content: | 28 | #!/usr/bin/env bash 29 | # 30 | # Prevent installing or rebuilding like Elastic Beanstalk tries to do by 31 | # default. 32 | # 33 | # Note that this *overwrites* Elastic Beanstalk's default 50npm.sh script. 34 | # But their default script actually doesn't work at all, since the app 35 | # staging dir, where they try to run `npm install`, doesn't exist during 36 | # config deploys, so ebnode.py just aborts: 37 | # https://gist.github.com/wearhere/de51bb799f5099cec0ed28b9d0eb3663#file-ebnode-py-L140 38 | -------------------------------------------------------------------------------- /.ebextensions/yarn.config: -------------------------------------------------------------------------------- 1 | # This file introduces a script into the Elasticbeanstalk deployment process 2 | # which installs modules via Yarn instead of using the default NPM install. 3 | 4 | # NPM install is prevented by .ebextensions/disable-npm.config which overrides 5 | # the 50npm.sh script used by Elasticbeanstalk. Because 50npm.sh also installs Node, 6 | # we also need to install Node ourselves in this script. 7 | 8 | files: 9 | "/opt/elasticbeanstalk/hooks/appdeploy/pre/49_yarn.sh" : 10 | mode: "000775" 11 | owner: root 12 | group: root 13 | content: | 14 | #!/bin/bash 15 | 16 | # Install Node 10.x 17 | curl --silent --location https://rpm.nodesource.com/setup_10.x | bash -; 18 | 19 | # Install Yarn 20 | curl --silent --location https://dl.yarnpkg.com/rpm/yarn.repo | tee /etc/yum.repos.d/yarn.repo; 21 | yum -y install yarn; 22 | 23 | # Install node_modules with Yarn 24 | app="$(/opt/elasticbeanstalk/bin/get-config container -k app_staging_dir)"; 25 | cd "${app}"; 26 | echo "Inside ${app}, about to run Yarn." 
27 | yarn --production; 28 | -------------------------------------------------------------------------------- /.env.example: -------------------------------------------------------------------------------- 1 | CONNECTION_STRING=mongodb://localhost:27017/zrx-tracker 2 | MAX_CHUNK_SIZE=10000 3 | MAX_POLLING_INTERVAL=300000 4 | MIN_POLLING_INTERVAL=30000 5 | NODE_ENV=development 6 | WEB3_ENDPOINT= 7 | -------------------------------------------------------------------------------- /.env.prod.example: -------------------------------------------------------------------------------- 1 | BUGSNAG_TOKEN= 2 | CONNECTION_STRING= 3 | MAX_CHUNK_SIZE=10000 4 | MAX_POLLING_INTERVAL=300000 5 | MIN_POLLING_INTERVAL=30000 6 | WEB3_ENDPOINT= 7 | -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | !.* 3 | -------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | env: { 3 | jest: true, 4 | node: true, 5 | }, 6 | extends: ['airbnb-base', 'plugin:prettier/recommended'], 7 | rules: { 8 | 'no-console': 'error', 9 | 'no-underscore-dangle': 'off', 10 | 'import/prefer-default-export': 'warn', 11 | }, 12 | }; 13 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | config/local.js 2 | coverage 3 | node_modules/ 4 | .env 5 | -------------------------------------------------------------------------------- /.huskyrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | hooks: { 3 | 'pre-commit': 'lint-staged', 4 | }, 5 | }; 6 | -------------------------------------------------------------------------------- /.lintstagedrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | '*.js': 'eslint --quiet', 3 | '*.{json, md, yml}': 'prettier', 4 | }; 5 | -------------------------------------------------------------------------------- /.npmrc: -------------------------------------------------------------------------------- 1 | save-exact=true 2 | 3 | # Force npm to run node-gyp also as root, preventing permission denied errors in AWS 4 | unsafe-perm=true 5 | -------------------------------------------------------------------------------- /.nvmrc: -------------------------------------------------------------------------------- 1 | 10.19.0 2 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | -------------------------------------------------------------------------------- /.prettierrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | singleQuote: true, 3 | trailingComma: 'all', 4 | }; 5 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | branches: 2 | only: 3 | - master 4 | language: node_js 5 | cache: 6 | yarn: true 7 | directories: 8 | - node_modules 9 | before_install: 10 | - curl -o- -L https://yarnpkg.com/install.sh | bash 11 | - export 
PATH="$HOME/.yarn/bin:$PATH" 12 | install: yarn install 13 | script: 14 | - yarn run lint 15 | deploy: 16 | provider: elasticbeanstalk 17 | access_key_id: $AWS_DEPLOY_ACCESS_KEY_ID 18 | secret_access_key: $AWS_DEPLOY_SECRET_ACCESS_KEY 19 | region: $AWS_DEPLOY_REGION 20 | app: $AWS_DEPLOY_APP_NAME 21 | env: $AWS_DEPLOY_ENV 22 | bucket: $AWS_DEPLOY_BUCKET_NAME 23 | edge: true 24 | on: 25 | branch: master 26 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to making participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, sex characteristics, gender identity and expression, 9 | level of experience, education, socio-economic status, nationality, personal 10 | appearance, race, religion, or sexual identity and orientation. 11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | - Using welcoming and inclusive language 18 | - Being respectful of differing viewpoints and experiences 19 | - Gracefully accepting constructive criticism 20 | - Focusing on what is best for the community 21 | - Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | - The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | - Trolling, insulting/derogatory comments, and personal or political attacks 28 | - Public or private harassment 29 | - Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | - Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies both within project spaces and in public spaces 49 | when an individual is representing the project or its community. Examples of 50 | representing a project or community include using an official project e-mail 51 | address, posting via an official social media account, or acting as an appointed 52 | representative at an online or offline event. Representation of a project may be 53 | further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by contacting the project team at craig@bovis.me.uk. All 59 | complaints will be reviewed and investigated and will result in a response that 60 | is deemed necessary and appropriate to the circumstances. 
The project team is 61 | obligated to maintain confidentiality with regard to the reporter of an incident. 62 | Further details of specific enforcement policies may be posted separately. 63 | 64 | Project maintainers who do not follow or enforce the Code of Conduct in good 65 | faith may face temporary or permanent repercussions as determined by other 66 | members of the project's leadership. 67 | 68 | ## Attribution 69 | 70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html 72 | 73 | [homepage]: https://www.contributor-covenant.org 74 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to 0x Tracker 2 | 3 | Thank you for taking the time to contribute to a 0x Tracker project! 4 | 5 | Please note that this project is released with a [Contributor Code of Conduct](CODE_OF_CONDUCT.md). By participating in this project you agree to abide by its terms. 6 | 7 | ## How to contribute 8 | 9 | ### Improve documentation 10 | 11 | Typo corrections, error fixes, better explanations, more examples, etc. Open an issue about anything that you think could be improved! You can use the [`documentation` label](https://github.com/0xTracker/0x-event-extractor/labels/Documentation) to find out what others have suggested! 12 | 13 | ### Improve issues 14 | 15 | Sometimes reported issues lack information, are not reproducible, or are even plain invalid. Help us out by making them easier to resolve. Handling issues takes a lot of time that we would rather spend on fixing bugs and adding features. 16 | 17 | ### Give feedback on issues 18 | 19 | We're always looking for more opinions on discussions in the issue tracker. It's a good opportunity to influence the future direction of the project. 20 | 21 | The [`question` label](https://github.com/0xTracker/0x-event-extractor/labels/Question) is a good place to find ongoing discussions. 22 | 23 | ### Write code 24 | 25 | You can use issue labels to discover issues you could help us out with! 26 | 27 | - [`feature request` issues](https://github.com/0xTracker/0x-event-extractor/labels/Feature%20Request) are features we are open to including 28 | - [`bug` issues](https://github.com/0xTracker/0x-event-extractor/labels/Bug) are known bugs we would like to fix 29 | - [`future feature` issues](https://github.com/0xTracker/0x-event-extractor/labels/Future%20Feature) are those that we'd like to get to, but not anytime soon. Please check before working on these since we may not yet want to take on the burden of supporting those features 30 | - on the [`help wanted`](https://github.com/0xTracker/0x-event-extractor/labels/Help%20Wanted) label you can always find something exciting going on 31 | 32 | You may find that an issue is already assigned. Please double-check before starting work on it because somebody else is likely already working on it. 33 | 34 | ### Say hi 35 | 36 | Come over and say hi anytime you feel like it on [Spectrum](https://spectrum.chat/0x-tracker).
37 | 38 | ### Submitting an issue 39 | 40 | - Search the issue tracker before opening an issue 41 | - Use a descriptive title 42 | - Include as much information as possible; 43 | - Steps to reproduce the issue 44 | - Error message 45 | - Operating system **etc** 46 | 47 | ### Submitting a pull request 48 | 49 | - Non-trivial changes are often best discussed in an issue first, to prevent you from doing unnecessary work 50 | - If this is your first time submitting to an Open Source project then read the [Github guide to opening a pull request](https://opensource.guide/how-to-contribute/#opening-a-pull-request) 51 | - Use a descriptive title for the pull request and commits 52 | - Follow the pull request template for writing a description 53 | - You might be asked to do changes to your pull request, please update your existing PR rather than creating a new one 54 | 55 | > Inspired by [Signale](https://github.com/klauscfhq/signale/blob/master/contributing.md)'s contributing.md 56 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 0x Event Extractor 2 | 3 | [![David](https://img.shields.io/david/0xtracker/0x-event-extractor.svg?style=flat-square)](https://github.com/0xTracker/0x-event-extractor) 4 | 5 | > NodeJS worker originally built for [0x Tracker](https://0xtracker.com) which extracts [0x](https://0xproject.com) fill events from the Ethereum blockchain and persists them to MongoDB. Support for V1, V2 and V3 of the 0x protocol is included with events tagged against the protocol version they belong to. 6 | 7 | ## 🧐 How It Works 8 | 9 | The extractor runs on a configurable interval, scraping a chunk of events from the blockchain using the `getLogsAsync` method of [0x.js](https://www.0xproject.com/docs/0x.js). Events are persisted to MongoDB and the processed block range is logged to ensure the range only gets extracted once. 10 | 11 | ## 👮‍♂️ Requirements 12 | 13 | To run the project locally you'll need the following installed: 14 | 15 | - NodeJS v10.16.3 16 | - Yarn v1.19.0 17 | - MongoDB v4+ 18 | 19 | The project also has support for the following development tools which you may wish to take advantage of: 20 | 21 | - [NVM](https://github.com/creationix/nvm) 22 | - [Nodemon](https://nodemon.io/) 23 | - [Prettier](https://prettier.io/docs/en/editors.html) 24 | - [ESLint](https://eslint.org/docs/user-guide/integrations#editors) 25 | 26 | ## 🐣 Getting Started 27 | 28 | Run `cp .env.example .env` to create a local environment file, then get yourself an [Infura API key](https://infura.io/register) and add your endpoint to the .env file. Update the connection string as well if necessary. 
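As an illustration, a filled-in `.env` for local development might look something like the following. The values mirror .env.example, and the Infura URL is only a placeholder format; substitute whatever endpoint your provider gives you and adjust the MongoDB connection string to match your setup.

```
# Example values only; replace the endpoint and connection string with your own
CONNECTION_STRING=mongodb://localhost:27017/zrx-tracker
MAX_CHUNK_SIZE=10000
MAX_POLLING_INTERVAL=300000
MIN_POLLING_INTERVAL=30000
NODE_ENV=development
WEB3_ENDPOINT=https://mainnet.infura.io/v3/<your-infura-project-id>
```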
29 | 30 | Run `yarn install` to install dependencies and then run `yarn start`/`nodemon` to start the extractor. You should start to see events being persisted. 31 | 32 | ## 🌳 Project Structure 33 | 34 | The project uses a monorepo structure which accommodates different 0x.js dependencies for different versions of the 0x protocol. This structure is mostly invisible in day-to-day work since it is managed by Yarn. To add or remove dependencies in sub-packages, however, you'll need a basic understanding of [Yarn Workspaces](https://yarnpkg.com/en/docs/workspaces). 35 | 36 | ## 🛠 Configuration 37 | 38 | Configuration is handled by a combination of [dotenv](https://github.com/motdotla/dotenv) files and [node-config](https://github.com/lorenwest/node-config). If you need to tweak anything, you can either edit your .env file or create a config/local.js file with overrides for the configuration found in config/default.js. 39 | 40 | ## ⚠️ Caveats 41 | 42 | On July 12th 2019, a [vulnerability was discovered](https://blog.0xproject.com/post-mortem-0x-v2-0-exchange-vulnerability-763015399578) in 0x V2 which resulted in a shutdown and redeployment of the contract. Because of this, the 0x Event Extractor currently only collects event logs from the latest V2 contract (dubbed v2.1), meaning that V2 events before 12th July are not collected. This will be improved in the future by the use of v2 and v2.1 extractors which will handle the pre- and post-vulnerability contracts respectively. 43 | 44 | ## 👨‍💻 Maintainers 45 | 46 | - Craig Bovis ([@cbovis](https://github.com/cbovis)) 47 | 48 | ## Supporters 49 | 50 | Infrastructure for 0x Tracker is generously supported by these companies. 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 |
- Bugsnag
- CryptoCompare
- Netlify
59 | 60 | ## 👩‍⚖️ License 61 | 62 | [Apache 2.0](https://github.com/0xTracker/0x-event-extractor/blob/master/LICENSE) 63 | -------------------------------------------------------------------------------- /config/default.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | bugsnag: { 3 | token: process.env.BUGSNAG_TOKEN || null, 4 | }, 5 | database: { 6 | connectionString: process.env.CONNECTION_STRING, 7 | }, 8 | maxChunkSize: { 9 | default: parseInt(process.env.MAX_CHUNK_SIZE, 10), 10 | sushiswapSwap: { 11 | v3: 1800, // 30 minutes 12 | }, 13 | uniswapV2Swap: { 14 | v3: 1800, // 30 minutes 15 | }, 16 | uniswapV3Swap: { 17 | v3: 1800, // 30 minutes 18 | }, 19 | }, 20 | maxPollingInterval: parseInt(process.env.MAX_POLLING_INTERVAL, 10), 21 | minConfirmations: 12, 22 | minPollingInterval: parseInt(process.env.MIN_POLLING_INTERVAL, 10), 23 | pino: { 24 | elasticsearch: { 25 | batchSize: 200, 26 | index: 'logs_event_extractor', 27 | url: process.env.PINO_ELASTIC_SEARCH_URL || null, 28 | }, 29 | }, 30 | startBlock: { 31 | fill: { 32 | v2: 8140780, 33 | v3: 8952139, 34 | }, 35 | limitOrderFilled: { 36 | v4: 11591021, 37 | }, 38 | liquidityProviderSwap: { 39 | v4: 11377457, 40 | }, 41 | logFill: { 42 | v1: 4145578, 43 | }, 44 | rfqOrderFilled: { 45 | v4: 11591021, 46 | }, 47 | sushiswapSwap: { 48 | v3: 1600834642, // Represents a point in time, not a block number 49 | }, 50 | transformedErc20: { 51 | v3: 10247094, 52 | }, 53 | uniswapV2Swap: { 54 | v3: 1600834642, // Represents a point in time, not a block number 55 | }, 56 | uniswapV3Swap: { 57 | v3: 1622629214, // Represents a point in time, not a block number 58 | }, 59 | }, 60 | web3: { 61 | endpoint: process.env.WEB3_ENDPOINT, 62 | networkId: 1, 63 | }, 64 | }; 65 | -------------------------------------------------------------------------------- /nodemon.json: -------------------------------------------------------------------------------- 1 | { 2 | "ignore": ["*.test.js", "**/*.test.js"], 3 | "watch": ["packages/**/src/", ".env", "config"] 4 | } 5 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@0x-event-extractor/app", 3 | "main": "src/index.js", 4 | "scripts": { 5 | "dev": "nodemon pino . 
| pino-pretty -t", 6 | "lint": "eslint .", 7 | "start": "node src", 8 | "test": "jest", 9 | "test:watch": "jest --watch" 10 | }, 11 | "dependencies": { 12 | "@0x-event-extractor/core": "^1.0.0", 13 | "config": "3.2.3", 14 | "dotenv-safe": "8.1.0" 15 | }, 16 | "devDependencies": { 17 | "eb-disable-npm": "1.0.1", 18 | "eslint": "6.8.0", 19 | "eslint-config-airbnb-base": "14.2.0", 20 | "eslint-config-prettier": "6.14.0", 21 | "eslint-plugin-import": "2.22.1", 22 | "eslint-plugin-prettier": "3.1.4", 23 | "husky": "3.0.8", 24 | "jest": "26.6.1", 25 | "lint-staged": "9.4.2", 26 | "nodemon": "2.0.2", 27 | "pino-pretty": "3.6.1", 28 | "prettier": "1.19.1" 29 | }, 30 | "private": true, 31 | "workspaces": [ 32 | "packages/*" 33 | ] 34 | } 35 | -------------------------------------------------------------------------------- /packages/core/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@0x-event-extractor/core", 3 | "version": "1.0.0", 4 | "main": "src/index.js", 5 | "dependencies": { 6 | "@0x-event-extractor/fill-extractor-v1": "^1.0.0", 7 | "@0x-event-extractor/fill-extractor-v2": "^1.0.0", 8 | "@0x-event-extractor/fill-extractor-v3": "^1.0.0", 9 | "@0x-event-extractor/limit-order-filled-extractor": "^1.0.0", 10 | "@0x-event-extractor/liquidity-provider-swap-extractor": "^1.0.0", 11 | "@0x-event-extractor/rfq-order-filled-extractor": "^1.0.0", 12 | "@0x-event-extractor/shared": "^1.0.0", 13 | "@0x-event-extractor/sushiswap-swap-extractor": "^1.0.0", 14 | "@0x-event-extractor/transformed-erc20-extractor": "^1.0.0", 15 | "@0x-event-extractor/uniswap-v2-swap-extractor": "^1.0.0", 16 | "@0x-event-extractor/uniswap-v3-swap-extractor": "^1.0.0", 17 | "@bugsnag/js": "6.5.2", 18 | "bugsnag": "^2.4.3", 19 | "delay": "^4.3.0", 20 | "lodash": "^4.17.15", 21 | "mongoose": "^5.7.3", 22 | "pino": "6.0.0", 23 | "pino-elasticsearch": "4.4.0", 24 | "promise-poller": "^1.9.1" 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /packages/core/src/ethereum/get-block.js: -------------------------------------------------------------------------------- 1 | const { web3 } = require('@0x-event-extractor/shared'); 2 | 3 | const getBlock = async blockNumber => { 4 | const block = await web3.getWrapper().getBlockIfExistsAsync(blockNumber); 5 | 6 | return block; 7 | }; 8 | 9 | module.exports = getBlock; 10 | -------------------------------------------------------------------------------- /packages/core/src/ethereum/get-current-block.js: -------------------------------------------------------------------------------- 1 | const { web3 } = require('@0x-event-extractor/shared'); 2 | 3 | const getCurrentBlock = async () => { 4 | const blockNumber = await web3.getWrapper().getBlockNumberAsync(); 5 | 6 | return blockNumber; 7 | }; 8 | 9 | module.exports = getCurrentBlock; 10 | -------------------------------------------------------------------------------- /packages/core/src/events/get-config-for-event-type.js: -------------------------------------------------------------------------------- 1 | const _ = require('lodash'); 2 | const { config } = require('@0x-event-extractor/shared'); 3 | 4 | const getConfigForEventType = (eventType, protocolVersion) => { 5 | const camelEventType = _.camelCase(eventType); 6 | const defaultChunkSize = config.get(`maxChunkSize.default`); 7 | 8 | const startBlockKey = `startBlock.${camelEventType}.v${protocolVersion}`; 9 | const startBlock = config.get(startBlockKey); 10 | 11 | const 
chunkSizeKey = `maxChunkSize.${camelEventType}.v${protocolVersion}`; 12 | const maxChunkSize = config.get(chunkSizeKey) || defaultChunkSize; 13 | 14 | if (startBlock === undefined) { 15 | throw new Error( 16 | `Start block config not found for v${protocolVersion} ${eventType} events`, 17 | ); 18 | } 19 | 20 | return { startBlock, maxChunkSize }; 21 | }; 22 | 23 | module.exports = getConfigForEventType; 24 | -------------------------------------------------------------------------------- /packages/core/src/events/get-last-processed-block.js: -------------------------------------------------------------------------------- 1 | const BlockRange = require('../model/block-range'); 2 | 3 | const getLastProcessedBlock = async (eventType, protocolVersion) => { 4 | const query = { eventType, protocolVersion }; 5 | const options = { sort: { toBlock: -1 } }; 6 | const lastRange = await BlockRange.findOne(query, undefined, options); 7 | 8 | if (lastRange === null) { 9 | return null; 10 | } 11 | 12 | return lastRange.toBlock; 13 | }; 14 | 15 | module.exports = getLastProcessedBlock; 16 | -------------------------------------------------------------------------------- /packages/core/src/events/get-next-block-range.js: -------------------------------------------------------------------------------- 1 | const _ = require('lodash'); 2 | 3 | const getConfigForEventType = require('./get-config-for-event-type'); 4 | const getLastProcessedBlock = require('./get-last-processed-block'); 5 | 6 | const getNextBlockRange = async ({ 7 | eventType, 8 | maxBlockNumber, 9 | protocolVersion, 10 | }) => { 11 | const config = getConfigForEventType(eventType, protocolVersion); 12 | const { maxChunkSize, startBlock } = config; 13 | const lastBlock = await getLastProcessedBlock(eventType, protocolVersion); 14 | const fromBlock = lastBlock === null ? 
startBlock : lastBlock + 1; 15 | const toBlock = _.clamp(fromBlock + maxChunkSize, 1, maxBlockNumber); 16 | 17 | // Notify the consumer that there are no blocks to process 18 | if (toBlock < fromBlock) { 19 | return null; 20 | } 21 | 22 | return { fromBlock, toBlock }; 23 | }; 24 | 25 | module.exports = getNextBlockRange; 26 | -------------------------------------------------------------------------------- /packages/core/src/index.js: -------------------------------------------------------------------------------- 1 | const { config } = require('@0x-event-extractor/shared'); 2 | const extractorV1 = require('@0x-event-extractor/fill-extractor-v1'); 3 | const extractorV2 = require('@0x-event-extractor/fill-extractor-v2'); 4 | const extractorV3 = require('@0x-event-extractor/fill-extractor-v3'); 5 | 6 | const db = require('./util/db'); 7 | const errorLogger = require('./util/error-logger'); 8 | const getJobs = require('./jobs'); 9 | const jobRunner = require('./util/job-runner'); 10 | const logging = require('./util/logging'); 11 | const model = require('./model'); 12 | const web3 = require('../../shared/src/web3'); 13 | 14 | const configure = async initialConfig => { 15 | config.init(initialConfig); 16 | logging.init(config.get('pino')); 17 | errorLogger.configure({ 18 | bugsnagToken: config.get('bugsnag.token'), 19 | }); 20 | await db.connect(config.get('database.connectionString')); 21 | await model.init(); 22 | web3.configure({ endpoint: config.get('web3.endpoint') }); 23 | extractorV1.configure({ networkId: config.get('web3.networkId') }); 24 | extractorV2.configure({ networkId: config.get('web3.networkId') }); 25 | extractorV3.configure({ chainId: config.get('web3.networkId') }); 26 | }; 27 | 28 | const start = () => { 29 | jobRunner.runJobs(getJobs()); 30 | }; 31 | 32 | module.exports = { configure, start }; 33 | -------------------------------------------------------------------------------- /packages/core/src/jobs/extract-events.js: -------------------------------------------------------------------------------- 1 | const { config } = require('@0x-event-extractor/shared'); 2 | const fillExtractorV1 = require('@0x-event-extractor/fill-extractor-v1'); 3 | const fillExtractorV2 = require('@0x-event-extractor/fill-extractor-v2'); 4 | const fillExtractorV3 = require('@0x-event-extractor/fill-extractor-v3'); 5 | const limitOrderFilledExtractor = require('@0x-event-extractor/limit-order-filled-extractor'); 6 | const liquidityProviderSwapExtractor = require('@0x-event-extractor/liquidity-provider-swap-extractor'); 7 | const rfqOrderFilledExtractor = require('@0x-event-extractor/rfq-order-filled-extractor'); 8 | const sushiswapSwapExtractor = require('@0x-event-extractor/sushiswap-swap-extractor'); 9 | const transformedERC20Extractor = require('@0x-event-extractor/transformed-erc20-extractor'); 10 | const uniswapV2Extractor = require('@0x-event-extractor/uniswap-v2-swap-extractor'); 11 | const uniswapV3Extractor = require('@0x-event-extractor/uniswap-v3-swap-extractor'); 12 | 13 | const { getLogger } = require('../util/logging'); 14 | const BlockRange = require('../model/block-range'); 15 | const Event = require('../model/event'); 16 | const getBlock = require('../ethereum/get-block'); 17 | const getCurrentBlock = require('../ethereum/get-current-block'); 18 | const getNextBlockRange = require('../events/get-next-block-range'); 19 | const withTransaction = require('../util/with-transaction'); 20 | 21 | const performExtraction = async (maxBlockNumber, extractorConfig) => { 22 | const { 23 | 
eventType, 24 | fetchLogEntries, 25 | getEventData, 26 | protocolVersion, 27 | } = extractorConfig; 28 | 29 | // Scope all logging for the job to the specified protocol version and event type 30 | const logger = getLogger(`extract v${protocolVersion} ${eventType} events`); 31 | 32 | const rangeConfig = { eventType, maxBlockNumber, protocolVersion }; 33 | const nextBlockRange = await getNextBlockRange(rangeConfig); 34 | 35 | if (nextBlockRange === null) { 36 | logger.info('no more blocks to process'); 37 | return; 38 | } 39 | 40 | logger.info( 41 | `${maxBlockNumber - 42 | nextBlockRange.fromBlock} block(s) waiting to be processed`, 43 | ); 44 | 45 | const { fromBlock, toBlock } = nextBlockRange; 46 | 47 | logger.info(`fetching events from blocks ${fromBlock}-${toBlock}`); 48 | 49 | const logEntries = await fetchLogEntries(fromBlock, toBlock); 50 | const entryCount = logEntries.length; 51 | 52 | if (entryCount === 0) { 53 | logger.info(`no events found in blocks ${fromBlock}-${toBlock}`); 54 | } else { 55 | logger.info(`${entryCount} events found in blocks ${fromBlock}-${toBlock}`); 56 | } 57 | 58 | /** 59 | * Persistence operations are wrapped in a transaction to ensure consistency 60 | * between the BlockRange and Event collections. If a document exists within 61 | * the BlockRange collection, then we can assume that all the associated 62 | * events will exist in the Event collection. 63 | */ 64 | await withTransaction(async session => { 65 | if (logEntries.length > 0) { 66 | const events = logEntries.map(logEntry => ({ 67 | blockNumber: logEntry.blockNumber, 68 | data: getEventData(logEntry), 69 | dateIngested: new Date(), 70 | logIndex: logEntry.logIndex, 71 | protocolVersion, 72 | transactionHash: logEntry.transactionHash, 73 | type: eventType, 74 | })); 75 | 76 | await Event.insertMany(events, { session }); 77 | } 78 | 79 | /** 80 | * Log details of the queried block range in MongoDB. This provides 81 | * two functions: 82 | * 83 | * 1. History of scraping activity for debugging 84 | * 2. An indicator for where to scrape from on the next iteration 85 | * 86 | * If collection size became an issue then the BlockRange collection 87 | * could be safely capped at say 100,000 documents. 88 | */ 89 | await BlockRange.create( 90 | [ 91 | { 92 | dateProcessed: new Date(), 93 | events: logEntries.length, 94 | eventType, 95 | fromBlock, 96 | protocolVersion, 97 | toBlock, 98 | }, 99 | ], 100 | { session }, 101 | ); 102 | }); 103 | 104 | if (logEntries.length > 0) { 105 | logger.info(`persisted ${logEntries.length} events to database`); 106 | } 107 | }; 108 | 109 | const determineMaxQueryableBlock = currentBlock => { 110 | const minConfirmations = config.get('minConfirmations'); 111 | const maxBlock = currentBlock - minConfirmations; 112 | 113 | return maxBlock; 114 | }; 115 | 116 | const extractEvents = async () => { 117 | const logger = getLogger('event extractor'); 118 | 119 | logger.info('beginning event extraction'); 120 | logger.info('fetching current block'); 121 | 122 | const currentBlockNumber = await getCurrentBlock(); 123 | const maxBlockNumber = determineMaxQueryableBlock(currentBlockNumber); 124 | 125 | logger.info(`current block is ${currentBlockNumber}`); 126 | logger.info(`max block is ${maxBlockNumber}`); 127 | 128 | /** 129 | * Extractors are run sequentially to help avoid issues with rate 130 | * limiting in the Ethereum RPC provider. 
131 | */ 132 | await performExtraction(maxBlockNumber, fillExtractorV1); 133 | await performExtraction(maxBlockNumber, fillExtractorV2); 134 | await performExtraction(maxBlockNumber, fillExtractorV3); 135 | await performExtraction(maxBlockNumber, transformedERC20Extractor); 136 | await performExtraction(maxBlockNumber, rfqOrderFilledExtractor); 137 | await performExtraction(maxBlockNumber, liquidityProviderSwapExtractor); 138 | await performExtraction(maxBlockNumber, limitOrderFilledExtractor); 139 | 140 | const maxBlock = await getBlock(maxBlockNumber); 141 | 142 | await performExtraction(maxBlock.timestamp, uniswapV2Extractor); 143 | await performExtraction(maxBlock.timestamp, uniswapV3Extractor); 144 | await performExtraction(maxBlock.timestamp, sushiswapSwapExtractor); 145 | 146 | logger.info('finished event extraction'); 147 | }; 148 | 149 | module.exports = extractEvents; 150 | -------------------------------------------------------------------------------- /packages/core/src/jobs/index.js: -------------------------------------------------------------------------------- 1 | const { config } = require('@0x-event-extractor/shared'); 2 | 3 | const extractEvents = require('./extract-events'); 4 | 5 | module.exports = () => [ 6 | { 7 | fn: extractEvents, 8 | minInterval: config.get('minPollingInterval'), 9 | maxInterval: config.get('maxPollingInterval'), 10 | }, 11 | ]; 12 | -------------------------------------------------------------------------------- /packages/core/src/model/block-range.js: -------------------------------------------------------------------------------- 1 | const mongoose = require('mongoose'); 2 | 3 | const { Schema } = mongoose; 4 | 5 | const schema = Schema({ 6 | dateProcessed: { required: true, type: Date }, 7 | events: { required: true, type: Number }, 8 | eventType: { required: true, type: String }, 9 | fromBlock: { required: true, type: Number }, 10 | protocolVersion: { default: 1, required: true, type: Number }, 11 | toBlock: { required: true, type: Number }, 12 | }); 13 | 14 | // Used for determining last processed block 15 | schema.index({ protocolVersion: 1, toBlock: -1 }); 16 | 17 | const BlockRange = mongoose.model('BlockRange', schema); 18 | 19 | module.exports = BlockRange; 20 | -------------------------------------------------------------------------------- /packages/core/src/model/event.js: -------------------------------------------------------------------------------- 1 | const mongoose = require('mongoose'); 2 | 3 | const { Schema } = mongoose; 4 | 5 | const schema = Schema({ 6 | blockNumber: { required: true, type: Number }, 7 | data: { required: true, type: Schema.Types.Mixed }, 8 | dateIngested: { required: true, type: Date }, 9 | logIndex: { required: true, type: Number }, 10 | protocolVersion: { default: 1, required: true, type: Number }, 11 | transactionHash: { required: true, type: String }, 12 | type: { required: true, type: String }, 13 | }); 14 | 15 | // Used to enforce consistency in the data 16 | schema.index({ logIndex: 1, transactionHash: 1 }, { unique: true }); 17 | 18 | const Event = mongoose.model('Event', schema); 19 | 20 | module.exports = Event; 21 | -------------------------------------------------------------------------------- /packages/core/src/model/index.js: -------------------------------------------------------------------------------- 1 | const BlockRange = require('./block-range'); 2 | const Event = require('./event'); 3 | 4 | const init = async () => { 5 | await BlockRange.createCollection(); 6 | await 
Event.createCollection(); 7 | }; 8 | 9 | module.exports = { BlockRange, Event, init }; 10 | -------------------------------------------------------------------------------- /packages/core/src/util/db.js: -------------------------------------------------------------------------------- 1 | const mongoose = require('mongoose'); 2 | 3 | const { logError } = require('./error-logger'); 4 | const { getLogger } = require('./logging'); 5 | 6 | mongoose.Promise = global.Promise; 7 | 8 | module.exports = { 9 | connect: async connectionString => { 10 | const logger = getLogger(); 11 | 12 | mongoose.connection.on('connecting', () => { 13 | logger.info('connecting to database'); 14 | }); 15 | 16 | mongoose.connection.on('connected', () => { 17 | logger.info('database connection established'); 18 | }); 19 | 20 | mongoose.connection.on('error', err => { 21 | logError(err); 22 | }); 23 | 24 | mongoose.connection.on('disconnecting', () => { 25 | logger.warn('disconnecting from database'); 26 | }); 27 | 28 | mongoose.connection.on('disconnected', () => { 29 | logger.warn('database connection terminated'); 30 | }); 31 | 32 | mongoose.connection.on('reconnected', () => { 33 | logger.warn('reconnected to database'); 34 | }); 35 | 36 | mongoose.connection.on('reconnectFailed', () => { 37 | logError('Database reconnection failed'); 38 | }); 39 | 40 | await mongoose.connect(connectionString, { 41 | autoIndex: false, 42 | useFindAndModify: false, 43 | useNewUrlParser: true, 44 | useUnifiedTopology: true, 45 | }); 46 | }, 47 | disconnect: () => { 48 | mongoose.disconnect(); 49 | }, 50 | }; 51 | -------------------------------------------------------------------------------- /packages/core/src/util/error-logger.js: -------------------------------------------------------------------------------- 1 | const _ = require('lodash'); 2 | const bugsnag = require('@bugsnag/js'); 3 | 4 | let bugsnagClient; 5 | 6 | const logError = (error, metaData) => { 7 | if (bugsnagClient !== undefined) { 8 | bugsnagClient.notify(error, { metaData }); 9 | } 10 | 11 | console.error(error); // eslint-disable-line no-console 12 | }; 13 | 14 | const configure = ({ appVersion, bugsnagToken }) => { 15 | if (_.isString(bugsnagToken)) { 16 | // The bugsnag client automatically attaches itself to uncaughtException 17 | // and unhandledRejection events. 
18 | bugsnagClient = bugsnag({ 19 | apiKey: bugsnagToken, 20 | appVersion, 21 | }); 22 | } else { 23 | process.on('uncaughtException', console.error); // eslint-disable-line no-console 24 | process.on('unhandledRejection', console.error); // eslint-disable-line no-console 25 | } 26 | }; 27 | 28 | module.exports = { 29 | configure, 30 | logError, 31 | }; 32 | -------------------------------------------------------------------------------- /packages/core/src/util/job-runner.js: -------------------------------------------------------------------------------- 1 | const delay = require('delay'); 2 | const withRetry = require('promise-poller').default; 3 | 4 | const { logError } = require('./error-logger'); 5 | 6 | const repeatTask = (task, { minInterval, maxInterval }) => 7 | withRetry({ 8 | max: maxInterval, 9 | min: minInterval, 10 | progressCallback: (retriesRemaining, error) => logError(error), 11 | retries: 999999, // Setting a large number because poller does not work properly with Infinity 12 | strategy: 'exponential-backoff', 13 | taskFn: task, 14 | }) 15 | .then(() => delay(minInterval)) 16 | .then(() => repeatTask(task, { maxInterval, minInterval })) 17 | .catch(error => { 18 | logError(error); 19 | logError(`Stopped running ${task.name}.`); 20 | }); 21 | 22 | const runJobs = jobs => { 23 | jobs.forEach(job => 24 | repeatTask(job.fn, { 25 | minInterval: job.minInterval, 26 | maxInterval: job.maxInterval, 27 | }), 28 | ); 29 | }; 30 | 31 | module.exports.runJobs = runJobs; 32 | -------------------------------------------------------------------------------- /packages/core/src/util/logging.js: -------------------------------------------------------------------------------- 1 | const _ = require('lodash'); 2 | const os = require('os'); 3 | const pino = require('pino'); 4 | const pinoElastic = require('pino-elasticsearch'); 5 | 6 | let logger; 7 | 8 | const createIndexFormatter = prefix => logTime => { 9 | const date = new Date(logTime); 10 | const year = date.getUTCFullYear(); 11 | const month = _.padStart(date.getUTCMonth() + 1, 2, 0); 12 | const day = _.padStart(date.getUTCDate(), 2, 0); 13 | 14 | // Log indexes roll over every day 15 | return `${prefix}_${year}_${month}_${day}`; 16 | }; 17 | 18 | const init = config => { 19 | const streamToElasticsearch = 20 | config.elasticsearch.url === null || config.elasticsearch.url.length === 0 21 | ? 
undefined 22 | : pinoElastic({ 23 | 'bulk-size': config.elasticsearch.batchSize, 24 | consistency: 'one', 25 | ecs: true, 26 | index: createIndexFormatter(config.elasticsearch.index), 27 | node: config.elasticsearch.url, 28 | type: 'log', 29 | }); 30 | 31 | logger = pino( 32 | { 33 | base: { group: 'application', pid: process.pid, hostname: os.hostname() }, 34 | level: 'info', 35 | }, 36 | streamToElasticsearch, 37 | ); 38 | }; 39 | 40 | const getLogger = logGroup => { 41 | if (logGroup !== undefined) { 42 | return logger.child({ group: logGroup }); 43 | } 44 | 45 | return logger; 46 | }; 47 | 48 | module.exports = { getLogger, init }; 49 | -------------------------------------------------------------------------------- /packages/core/src/util/with-timer.js: -------------------------------------------------------------------------------- 1 | const withTimer = async (logger, logMessage, func) => { 2 | logger.time(logMessage); 3 | const result = await func(); 4 | logger.timeEnd(logMessage); 5 | 6 | return result; 7 | }; 8 | 9 | module.exports = withTimer; 10 | -------------------------------------------------------------------------------- /packages/core/src/util/with-transaction.js: -------------------------------------------------------------------------------- 1 | const mongoose = require('mongoose'); 2 | 3 | const withTransaction = async func => { 4 | const session = await mongoose.startSession(); 5 | 6 | session.startTransaction(); 7 | 8 | try { 9 | await func(session); 10 | await session.commitTransaction(); 11 | session.endSession(); 12 | } catch (error) { 13 | await session.abortTransaction(); 14 | session.endSession(); 15 | throw error; 16 | } 17 | }; 18 | 19 | module.exports = withTransaction; 20 | -------------------------------------------------------------------------------- /packages/fill-extractor-v1/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@0x-event-extractor/fill-extractor-v1", 3 | "description": "Extractor for v1 fill events", 4 | "version": "1.0.0", 5 | "main": "src/index.js", 6 | "dependencies": { 7 | "0x.js": "0.38.6", 8 | "@0x-event-extractor/shared": "^1.0.0" 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /packages/fill-extractor-v1/src/0x.js: -------------------------------------------------------------------------------- 1 | const { ZeroEx } = require('0x.js'); 2 | const { web3 } = require('@0x-event-extractor/shared'); 3 | 4 | let client; 5 | 6 | const getClient = () => { 7 | return client; 8 | }; 9 | 10 | const configure = options => { 11 | client = new ZeroEx(web3.getProviderEngine(), { 12 | networkId: options.networkId, 13 | }); 14 | }; 15 | 16 | module.exports = { 17 | configure, 18 | getClient, 19 | }; 20 | -------------------------------------------------------------------------------- /packages/fill-extractor-v1/src/fetch-log-entries.js: -------------------------------------------------------------------------------- 1 | const zeroEx = require('./0x'); 2 | 3 | const BLACKLISTED_ORDER_HASHES = [ 4 | '0x1cbf70d8f6dfee99ee740f4e0e90a97e8e1f0c38a14b8604adadbe28469c0ffa', 5 | ]; 6 | 7 | const fetchLogEntries = async (fromBlock, toBlock) => { 8 | const logEntries = await zeroEx 9 | .getClient() 10 | .exchange.getLogsAsync('LogFill', { fromBlock, toBlock }, {}); 11 | 12 | return logEntries.filter( 13 | event => BLACKLISTED_ORDER_HASHES.includes(event.args.orderHash) === false, 14 | ); 15 | }; 16 | 17 | module.exports = fetchLogEntries; 18 | 
-------------------------------------------------------------------------------- /packages/fill-extractor-v1/src/get-event-data.js: -------------------------------------------------------------------------------- 1 | const getEventData = logEntry => ({ 2 | ...logEntry, 3 | args: { 4 | ...logEntry.args, 5 | filledMakerTokenAmount: logEntry.args.filledMakerTokenAmount.toNumber(), 6 | filledTakerTokenAmount: logEntry.args.filledTakerTokenAmount.toNumber(), 7 | paidMakerFee: logEntry.args.paidMakerFee.toNumber(), 8 | paidTakerFee: logEntry.args.paidTakerFee.toNumber(), 9 | }, 10 | }); 11 | 12 | module.exports = getEventData; 13 | -------------------------------------------------------------------------------- /packages/fill-extractor-v1/src/index.js: -------------------------------------------------------------------------------- 1 | const fetchLogEntries = require('./fetch-log-entries'); 2 | const getEventData = require('./get-event-data'); 3 | const zeroEx = require('./0x'); 4 | 5 | const configure = options => { 6 | zeroEx.configure(options); 7 | }; 8 | 9 | module.exports = { 10 | configure, 11 | eventType: 'LogFill', 12 | fetchLogEntries, 13 | getEventData, 14 | protocolVersion: 1, 15 | }; 16 | -------------------------------------------------------------------------------- /packages/fill-extractor-v2/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@0x-event-extractor/fill-extractor-v2", 3 | "description": "Extractor for v2 fill events", 4 | "version": "1.0.0", 5 | "main": "src/index.js", 6 | "dependencies": { 7 | "@0x-event-extractor/shared": "^1.0.0", 8 | "@0x/contract-wrappers": "^12.1.0" 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /packages/fill-extractor-v2/src/0x.js: -------------------------------------------------------------------------------- 1 | const { ContractWrappers } = require('@0x/contract-wrappers'); 2 | const { web3 } = require('@0x-event-extractor/shared'); 3 | 4 | let contractWrappers; 5 | 6 | const configure = ({ networkId }) => { 7 | contractWrappers = new ContractWrappers(web3.getProviderEngine(), { 8 | networkId, 9 | }); 10 | }; 11 | 12 | const getClient = () => contractWrappers; 13 | 14 | module.exports = { configure, getClient }; 15 | -------------------------------------------------------------------------------- /packages/fill-extractor-v2/src/fetch-log-entries.js: -------------------------------------------------------------------------------- 1 | const zeroEx = require('./0x'); 2 | 3 | const fetchEvents = async (fromBlock, toBlock) => { 4 | const events = await zeroEx 5 | .getClient() 6 | .exchange.getLogsAsync('Fill', { fromBlock, toBlock }, {}); 7 | 8 | return events; 9 | }; 10 | 11 | module.exports = fetchEvents; 12 | -------------------------------------------------------------------------------- /packages/fill-extractor-v2/src/get-event-data.js: -------------------------------------------------------------------------------- 1 | const getEventData = logEntry => ({ 2 | ...logEntry, 3 | args: { 4 | ...logEntry.args, 5 | makerAssetFilledAmount: logEntry.args.makerAssetFilledAmount.toNumber(), 6 | takerAssetFilledAmount: logEntry.args.takerAssetFilledAmount.toNumber(), 7 | makerFeePaid: logEntry.args.makerFeePaid.toNumber(), 8 | takerFeePaid: logEntry.args.takerFeePaid.toNumber(), 9 | }, 10 | }); 11 | 12 | module.exports = getEventData; 13 | -------------------------------------------------------------------------------- 
/packages/fill-extractor-v2/src/index.js: -------------------------------------------------------------------------------- 1 | const fetchLogEntries = require('./fetch-log-entries'); 2 | const getEventData = require('./get-event-data'); 3 | const zeroEx = require('./0x'); 4 | 5 | const configure = options => { 6 | zeroEx.configure(options); 7 | }; 8 | 9 | module.exports = { 10 | configure, 11 | eventType: 'Fill', 12 | fetchLogEntries, 13 | getEventData, 14 | protocolVersion: 2, 15 | }; 16 | -------------------------------------------------------------------------------- /packages/fill-extractor-v3/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@0x-event-extractor/fill-extractor-v3", 3 | "description": "Extractor for v3 fill events", 4 | "version": "1.0.0", 5 | "main": "src/index.js", 6 | "dependencies": { 7 | "@0x/contract-wrappers": "12.2.0-beta.3", 8 | "@0x-event-extractor/shared": "^1.0.0" 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /packages/fill-extractor-v3/src/0x.js: -------------------------------------------------------------------------------- 1 | const { ContractWrappers } = require('@0x/contract-wrappers'); 2 | const { web3 } = require('@0x-event-extractor/shared'); 3 | 4 | let contractWrappers; 5 | 6 | const configure = ({ chainId }) => { 7 | contractWrappers = new ContractWrappers(web3.getProviderEngine(), { 8 | chainId, 9 | }); 10 | }; 11 | 12 | const getClient = () => contractWrappers; 13 | 14 | module.exports = { configure, getClient }; 15 | -------------------------------------------------------------------------------- /packages/fill-extractor-v3/src/fetch-log-entries.js: -------------------------------------------------------------------------------- 1 | const zeroEx = require('./0x'); 2 | 3 | const fetchEvents = async (fromBlock, toBlock) => { 4 | const events = await zeroEx 5 | .getClient() 6 | .exchange.getLogsAsync('Fill', { fromBlock, toBlock }, {}); 7 | 8 | return events; 9 | }; 10 | 11 | module.exports = fetchEvents; 12 | -------------------------------------------------------------------------------- /packages/fill-extractor-v3/src/get-event-data.js: -------------------------------------------------------------------------------- 1 | const getEventData = logEntry => ({ 2 | ...logEntry, 3 | args: { 4 | ...logEntry.args, 5 | makerAssetFilledAmount: logEntry.args.makerAssetFilledAmount.toNumber(), 6 | takerAssetFilledAmount: logEntry.args.takerAssetFilledAmount.toNumber(), 7 | makerFeePaid: logEntry.args.makerFeePaid.toNumber(), 8 | protocolFeePaid: logEntry.args.protocolFeePaid.toNumber(), 9 | takerFeePaid: logEntry.args.takerFeePaid.toNumber(), 10 | }, 11 | }); 12 | 13 | module.exports = getEventData; 14 | -------------------------------------------------------------------------------- /packages/fill-extractor-v3/src/index.js: -------------------------------------------------------------------------------- 1 | const fetchLogEntries = require('./fetch-log-entries'); 2 | const getEventData = require('./get-event-data'); 3 | const zeroEx = require('./0x'); 4 | 5 | const configure = options => { 6 | zeroEx.configure(options); 7 | }; 8 | 9 | module.exports = { 10 | configure, 11 | eventType: 'Fill', 12 | fetchLogEntries, 13 | getEventData, 14 | protocolVersion: 3, 15 | }; 16 | -------------------------------------------------------------------------------- /packages/limit-order-filled-extractor/package.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "name": "@0x-event-extractor/limit-order-filled-extractor", 3 | "description": "Extractor for LimitOrderFilled events", 4 | "version": "1.0.0", 5 | "main": "src/index.js", 6 | "dependencies": { 7 | "@0x-event-extractor/shared": "^1.0.0" 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /packages/limit-order-filled-extractor/src/fetch-log-entries.js: -------------------------------------------------------------------------------- 1 | const { web3 } = require('@0x-event-extractor/shared'); 2 | 3 | const EXCHANGE_PROXY_ADDRESS = '0xdef1c0ded9bec7f1a1670819833240f027b25eff'; 4 | const EVENT_TOPIC = 5 | '0xab614d2b738543c0ea21f56347cf696a3a0c42a7cbec3212a5ca22a4dcff2124'; 6 | 7 | const fetchEvents = async (fromBlock, toBlock) => { 8 | const logs = await web3.getWrapper().getLogsAsync({ 9 | address: EXCHANGE_PROXY_ADDRESS, 10 | fromBlock, 11 | toBlock, 12 | topics: [EVENT_TOPIC], 13 | }); 14 | 15 | return logs; 16 | }; 17 | 18 | module.exports = fetchEvents; 19 | -------------------------------------------------------------------------------- /packages/limit-order-filled-extractor/src/get-event-data.js: -------------------------------------------------------------------------------- 1 | const { web3 } = require('@0x-event-extractor/shared'); 2 | 3 | const getEventData = logEntry => { 4 | const decoded = web3.getWrapper().abiDecoder.tryToDecodeLogOrNoop(logEntry); 5 | 6 | const { 7 | feeRecipient, 8 | maker, 9 | makerToken, 10 | makerTokenFilledAmount, 11 | orderHash, 12 | pool, 13 | protocolFeePaid, 14 | taker, 15 | takerToken, 16 | takerTokenFeeFilledAmount, 17 | takerTokenFilledAmount, 18 | } = decoded.args; 19 | 20 | const eventData = { 21 | feeRecipient, 22 | maker, 23 | makerToken, 24 | makerTokenFilledAmount: makerTokenFilledAmount.toString(), 25 | orderHash, 26 | pool, 27 | protocolFeePaid: protocolFeePaid.toString(), 28 | taker, 29 | takerToken, 30 | takerTokenFeeFilledAmount: takerTokenFeeFilledAmount.toString(), 31 | takerTokenFilledAmount: takerTokenFilledAmount.toString(), 32 | }; 33 | 34 | return eventData; 35 | }; 36 | 37 | module.exports = getEventData; 38 | -------------------------------------------------------------------------------- /packages/limit-order-filled-extractor/src/index.js: -------------------------------------------------------------------------------- 1 | const fetchLogEntries = require('./fetch-log-entries'); 2 | const getEventData = require('./get-event-data'); 3 | 4 | module.exports = { 5 | configure: () => {}, 6 | eventType: 'LimitOrderFilled', 7 | fetchLogEntries, 8 | getEventData, 9 | protocolVersion: 4, 10 | }; 11 | -------------------------------------------------------------------------------- /packages/liquidity-provider-swap-extractor/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@0x-event-extractor/liquidity-provider-swap-extractor", 3 | "description": "Extractor for exchange proxy LiquidityProviderSwap events", 4 | "version": "1.0.0", 5 | "main": "src/index.js", 6 | "dependencies": { 7 | "@0x-event-extractor/shared": "^1.0.0" 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /packages/liquidity-provider-swap-extractor/src/fetch-log-entries.js: -------------------------------------------------------------------------------- 1 | const { web3 } = require('@0x-event-extractor/shared'); 2 | 3 | const 
EXCHANGE_PROXY_ADDRESS = '0xdef1c0ded9bec7f1a1670819833240f027b25eff'; 4 | const LIQUIDITY_PROVIDER_SWAP_EVENT_TOPIC = 5 | '0x40a6ba9513d09e3488135e0e0d10e2d4382b792720155b144cbea89ac9db6d34'; 6 | 7 | const fetchEvents = async (fromBlock, toBlock) => { 8 | const logs = await web3.getWrapper().getLogsAsync({ 9 | address: EXCHANGE_PROXY_ADDRESS, 10 | fromBlock, 11 | toBlock, 12 | topics: [LIQUIDITY_PROVIDER_SWAP_EVENT_TOPIC], 13 | }); 14 | 15 | return logs; 16 | }; 17 | 18 | module.exports = fetchEvents; 19 | -------------------------------------------------------------------------------- /packages/liquidity-provider-swap-extractor/src/get-event-data.js: -------------------------------------------------------------------------------- 1 | const { web3 } = require('@0x-event-extractor/shared'); 2 | 3 | const getEventData = logEntry => { 4 | const decoded = web3.getWrapper().abiDecoder.tryToDecodeLogOrNoop(logEntry); 5 | 6 | const { 7 | inputToken, 8 | inputTokenAmount, 9 | outputToken, 10 | outputTokenAmount, 11 | provider, 12 | recipient, 13 | } = decoded.args; 14 | 15 | const eventData = { 16 | inputToken, 17 | inputTokenAmount: inputTokenAmount.toString(), 18 | outputToken, 19 | outputTokenAmount: outputTokenAmount.toString(), 20 | provider, 21 | recipient, 22 | }; 23 | 24 | return eventData; 25 | }; 26 | 27 | module.exports = getEventData; 28 | -------------------------------------------------------------------------------- /packages/liquidity-provider-swap-extractor/src/index.js: -------------------------------------------------------------------------------- 1 | const fetchLogEntries = require('./fetch-log-entries'); 2 | const getEventData = require('./get-event-data'); 3 | 4 | module.exports = { 5 | configure: () => {}, 6 | eventType: 'LiquidityProviderSwap', 7 | fetchLogEntries, 8 | getEventData, 9 | protocolVersion: 4, 10 | }; 11 | -------------------------------------------------------------------------------- /packages/rfq-order-filled-extractor/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@0x-event-extractor/rfq-order-filled-extractor", 3 | "description": "Extractor for RfqOrderFilled events", 4 | "version": "1.0.0", 5 | "main": "src/index.js", 6 | "dependencies": { 7 | "@0x-event-extractor/shared": "^1.0.0" 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /packages/rfq-order-filled-extractor/src/fetch-log-entries.js: -------------------------------------------------------------------------------- 1 | const { web3 } = require('@0x-event-extractor/shared'); 2 | 3 | const EXCHANGE_PROXY_ADDRESS = '0xdef1c0ded9bec7f1a1670819833240f027b25eff'; 4 | const EVENT_TOPIC = 5 | '0x829fa99d94dc4636925b38632e625736a614c154d55006b7ab6bea979c210c32'; 6 | 7 | const fetchEvents = async (fromBlock, toBlock) => { 8 | const logs = await web3.getWrapper().getLogsAsync({ 9 | address: EXCHANGE_PROXY_ADDRESS, 10 | fromBlock, 11 | toBlock, 12 | topics: [EVENT_TOPIC], 13 | }); 14 | 15 | return logs; 16 | }; 17 | 18 | module.exports = fetchEvents; 19 | -------------------------------------------------------------------------------- /packages/rfq-order-filled-extractor/src/get-event-data.js: -------------------------------------------------------------------------------- 1 | const { web3 } = require('@0x-event-extractor/shared'); 2 | 3 | const getEventData = logEntry => { 4 | const decoded = web3.getWrapper().abiDecoder.tryToDecodeLogOrNoop(logEntry); 5 | 6 | const { 7 | maker, 8 | makerToken, 
9 | makerTokenFilledAmount, 10 | orderHash, 11 | pool, 12 | taker, 13 | takerToken, 14 | takerTokenFilledAmount, 15 | } = decoded.args; 16 | 17 | const eventData = { 18 | maker, 19 | makerToken, 20 | makerTokenFilledAmount: makerTokenFilledAmount.toString(), 21 | orderHash, 22 | pool, 23 | taker, 24 | takerToken, 25 | takerTokenFilledAmount: takerTokenFilledAmount.toString(), 26 | }; 27 | 28 | return eventData; 29 | }; 30 | 31 | module.exports = getEventData; 32 | -------------------------------------------------------------------------------- /packages/rfq-order-filled-extractor/src/index.js: -------------------------------------------------------------------------------- 1 | const fetchLogEntries = require('./fetch-log-entries'); 2 | const getEventData = require('./get-event-data'); 3 | 4 | module.exports = { 5 | configure: () => {}, 6 | eventType: 'RfqOrderFilled', 7 | fetchLogEntries, 8 | getEventData, 9 | protocolVersion: 4, 10 | }; 11 | -------------------------------------------------------------------------------- /packages/shared/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@0x-event-extractor/shared", 3 | "version": "1.0.0", 4 | "main": "src/index.js", 5 | "dependencies": { 6 | "@0x/subproviders": "^5.0.4", 7 | "@0x/utils": "^4.5.2", 8 | "@0x/web3-wrapper": "^6.0.13", 9 | "lodash": "^4.17.15" 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /packages/shared/src/config.js: -------------------------------------------------------------------------------- 1 | const _ = require('lodash'); 2 | 3 | let config; 4 | 5 | const init = initialConfig => { 6 | config = initialConfig; 7 | }; 8 | 9 | const get = path => _.get(config, path); 10 | 11 | module.exports = { get, init }; 12 | -------------------------------------------------------------------------------- /packages/shared/src/index.js: -------------------------------------------------------------------------------- 1 | const config = require('./config'); 2 | const web3 = require('./web3'); 3 | 4 | module.exports = { config, web3 }; 5 | -------------------------------------------------------------------------------- /packages/shared/src/web3.js: -------------------------------------------------------------------------------- 1 | const { RPCSubprovider, Web3ProviderEngine } = require('@0x/subproviders'); 2 | const { Web3Wrapper } = require('@0x/web3-wrapper'); 3 | const { providerUtils } = require('@0x/utils'); 4 | 5 | let wrapper; 6 | let providerEngine; 7 | /** 8 | * Initializes a web3 provider engine and create a reusable web3 wrapper. The 9 | * singletons can be accessed via getProviderEngine and getWrapper respectively. 10 | * 11 | * @param {Object} options - Configuration options. 12 | * @param {string} options.endpoint - RPC endpoint of an Ethereum node. 
13 | */ 14 | const configure = ({ endpoint }) => { 15 | providerEngine = new Web3ProviderEngine(); 16 | wrapper = new Web3Wrapper(providerEngine); 17 | 18 | // TransformedERC20 events 19 | wrapper.abiDecoder.addABI([ 20 | { 21 | anonymous: false, 22 | inputs: [ 23 | { 24 | indexed: true, 25 | internalType: 'address', 26 | name: 'taker', 27 | type: 'address', 28 | }, 29 | { 30 | indexed: false, 31 | internalType: 'address', 32 | name: 'inputToken', 33 | type: 'address', 34 | }, 35 | { 36 | indexed: false, 37 | internalType: 'address', 38 | name: 'outputToken', 39 | type: 'address', 40 | }, 41 | { 42 | indexed: false, 43 | internalType: 'uint256', 44 | name: 'inputTokenAmount', 45 | type: 'uint256', 46 | }, 47 | { 48 | indexed: false, 49 | internalType: 'uint256', 50 | name: 'outputTokenAmount', 51 | type: 'uint256', 52 | }, 53 | ], 54 | name: 'TransformedERC20', 55 | type: 'event', 56 | }, 57 | ]); 58 | 59 | // LiquidityProviderSwap events 60 | wrapper.abiDecoder.addABI([ 61 | { 62 | anonymous: false, 63 | inputs: [ 64 | { 65 | indexed: false, 66 | internalType: 'address', 67 | name: 'inputToken', 68 | type: 'address', 69 | }, 70 | { 71 | indexed: false, 72 | internalType: 'address', 73 | name: 'outputToken', 74 | type: 'address', 75 | }, 76 | { 77 | indexed: false, 78 | internalType: 'uint256', 79 | name: 'inputTokenAmount', 80 | type: 'uint256', 81 | }, 82 | { 83 | indexed: false, 84 | internalType: 'uint256', 85 | name: 'outputTokenAmount', 86 | type: 'uint256', 87 | }, 88 | { 89 | indexed: false, 90 | internalType: 'address', 91 | name: 'provider', 92 | type: 'address', 93 | }, 94 | { 95 | indexed: false, 96 | internalType: 'address', 97 | name: 'recipient', 98 | type: 'address', 99 | }, 100 | ], 101 | name: 'LiquidityProviderSwap', 102 | type: 'event', 103 | }, 104 | ]); 105 | 106 | // RfqOrderFilled events 107 | wrapper.abiDecoder.addABI([ 108 | { 109 | anonymous: false, 110 | inputs: [ 111 | { 112 | indexed: false, 113 | internalType: 'bytes32', 114 | name: 'orderHash', 115 | type: 'bytes32', 116 | }, 117 | { 118 | indexed: false, 119 | internalType: 'address', 120 | name: 'maker', 121 | type: 'address', 122 | }, 123 | { 124 | indexed: false, 125 | internalType: 'address', 126 | name: 'taker', 127 | type: 'address', 128 | }, 129 | { 130 | indexed: false, 131 | internalType: 'address', 132 | name: 'makerToken', 133 | type: 'address', 134 | }, 135 | { 136 | indexed: false, 137 | internalType: 'address', 138 | name: 'takerToken', 139 | type: 'address', 140 | }, 141 | { 142 | indexed: false, 143 | internalType: 'uint128', 144 | name: 'takerTokenFilledAmount', 145 | type: 'uint128', 146 | }, 147 | { 148 | indexed: false, 149 | internalType: 'uint128', 150 | name: 'makerTokenFilledAmount', 151 | type: 'uint128', 152 | }, 153 | { 154 | indexed: false, 155 | internalType: 'bytes32', 156 | name: 'pool', 157 | type: 'bytes32', 158 | }, 159 | ], 160 | name: 'RfqOrderFilled', 161 | type: 'event', 162 | }, 163 | ]); 164 | 165 | // LimitOrderFilled events 166 | wrapper.abiDecoder.addABI([ 167 | { 168 | anonymous: false, 169 | inputs: [ 170 | { 171 | indexed: false, 172 | internalType: 'bytes32', 173 | name: 'orderHash', 174 | type: 'bytes32', 175 | }, 176 | { 177 | indexed: false, 178 | internalType: 'address', 179 | name: 'maker', 180 | type: 'address', 181 | }, 182 | { 183 | indexed: false, 184 | internalType: 'address', 185 | name: 'taker', 186 | type: 'address', 187 | }, 188 | { 189 | indexed: false, 190 | internalType: 'address', 191 | name: 'feeRecipient', 192 | type: 'address', 193 | }, 194 
| { 195 | indexed: false, 196 | internalType: 'address', 197 | name: 'makerToken', 198 | type: 'address', 199 | }, 200 | { 201 | indexed: false, 202 | internalType: 'address', 203 | name: 'takerToken', 204 | type: 'address', 205 | }, 206 | { 207 | indexed: false, 208 | internalType: 'uint128', 209 | name: 'takerTokenFilledAmount', 210 | type: 'uint128', 211 | }, 212 | { 213 | indexed: false, 214 | internalType: 'uint128', 215 | name: 'makerTokenFilledAmount', 216 | type: 'uint128', 217 | }, 218 | { 219 | indexed: false, 220 | internalType: 'uint128', 221 | name: 'takerTokenFeeFilledAmount', 222 | type: 'uint128', 223 | }, 224 | { 225 | indexed: false, 226 | internalType: 'uint256', 227 | name: 'protocolFeePaid', 228 | type: 'uint256', 229 | }, 230 | { 231 | indexed: false, 232 | internalType: 'bytes32', 233 | name: 'pool', 234 | type: 'bytes32', 235 | }, 236 | ], 237 | name: 'LimitOrderFilled', 238 | type: 'event', 239 | }, 240 | ]); 241 | 242 | providerEngine.addProvider(new RPCSubprovider(endpoint)); 243 | providerUtils.startProviderEngine(providerEngine); 244 | }; 245 | 246 | /** 247 | * Get the current global web3 wrapper instance. 248 | * 249 | * @returns {Web3Wrapper} 250 | */ 251 | const getWrapper = () => wrapper; 252 | 253 | const getProviderEngine = () => providerEngine; 254 | 255 | module.exports = { configure, getProviderEngine, getWrapper }; 256 | -------------------------------------------------------------------------------- /packages/sushiswap-swap-extractor/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@0x-event-extractor/sushiswap-swap-extractor", 3 | "description": "Extractor for Sushiswap Swap events", 4 | "version": "1.0.0", 5 | "main": "src/index.js", 6 | "dependencies": { 7 | "@0x-event-extractor/shared": "^1.0.0", 8 | "@0x/utils": "5.5.1", 9 | "graphql": "15.3.0", 10 | "graphql-request": "3.1.0" 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /packages/sushiswap-swap-extractor/src/fetch-log-entries.js: -------------------------------------------------------------------------------- 1 | const { request, gql } = require('graphql-request'); 2 | const { BigNumber } = require('@0x/utils'); 3 | 4 | const EXCHANGE_PROXY_ADDRESS = '0xdef1c0ded9bec7f1a1670819833240f027b25eff'; 5 | 6 | const convertAmount = (amount, decimals) => { 7 | return new BigNumber(amount).times( 8 | new BigNumber(10).pow(new BigNumber(decimals)), 9 | ); 10 | }; 11 | 12 | const fetchEvents = async (fromBlock, toBlock, skip = 0) => { 13 | const pageSize = 100; 14 | const query = gql`{ 15 | swaps ( 16 | first: ${pageSize}, 17 | skip: ${skip}, 18 | where: { 19 | sender: "${EXCHANGE_PROXY_ADDRESS}", 20 | timestamp_gte: ${fromBlock}, 21 | timestamp_lte: ${toBlock}, 22 | }) { 23 | id 24 | transaction { 25 | id 26 | blockNumber 27 | timestamp 28 | } 29 | pair { 30 | id 31 | token0 { 32 | id 33 | decimals 34 | } 35 | token1 { 36 | id 37 | decimals 38 | } 39 | } 40 | amount0In 41 | amount1In 42 | amount0Out 43 | amount1Out 44 | to 45 | logIndex 46 | } 47 | }`; 48 | 49 | const response = await request( 50 | 'https://api.thegraph.com/subgraphs/name/zippoxer/sushiswap-subgraph-fork', 51 | query, 52 | ); 53 | 54 | const events = response.swaps.map(swap => { 55 | const maker = swap.pair.id; // The Sushiswap pool for traded pair 56 | const taker = swap.to; 57 | const transactionHash = swap.transaction.id; 58 | const { logIndex } = swap; 59 | const { blockNumber } = swap.transaction; 60 | 61 | const 
amount0In = convertAmount(swap.amount0In, swap.pair.token0.decimals); 62 | const amount1In = convertAmount(swap.amount1In, swap.pair.token1.decimals); 63 | 64 | const amount0Out = convertAmount( 65 | swap.amount0Out, 66 | swap.pair.token0.decimals, 67 | ); 68 | 69 | const amount1Out = convertAmount( 70 | swap.amount1Out, 71 | swap.pair.token1.decimals, 72 | ); 73 | 74 | const fromToken = amount0In.gt(amount1In) 75 | ? swap.pair.token0 76 | : swap.pair.token1; 77 | 78 | const toToken = amount0Out.gt(amount1Out) 79 | ? swap.pair.token0 80 | : swap.pair.token1; 81 | 82 | const fromTokenAmount = amount0In.gt(amount1In) ? amount0In : amount1In; 83 | const toTokenAmount = amount0Out.gt(amount1Out) ? amount0Out : amount1Out; 84 | 85 | return { 86 | blockNumber, 87 | data: { 88 | maker, 89 | makerAmount: fromTokenAmount, 90 | makerToken: fromToken.id, 91 | taker, 92 | takerAmount: toTokenAmount, 93 | takerToken: toToken.id, 94 | }, 95 | logIndex, 96 | transactionHash, 97 | }; 98 | }); 99 | 100 | if (events.length === pageSize) { 101 | const nextEvents = await fetchEvents(fromBlock, toBlock, skip + pageSize); 102 | 103 | return events.concat(nextEvents); 104 | } 105 | 106 | return events; 107 | }; 108 | 109 | module.exports = fetchEvents; 110 | -------------------------------------------------------------------------------- /packages/sushiswap-swap-extractor/src/get-event-data.js: -------------------------------------------------------------------------------- 1 | const getEventData = logEntry => { 2 | return logEntry.data; 3 | }; 4 | 5 | module.exports = getEventData; 6 | -------------------------------------------------------------------------------- /packages/sushiswap-swap-extractor/src/index.js: -------------------------------------------------------------------------------- 1 | const fetchLogEntries = require('./fetch-log-entries'); 2 | const getEventData = require('./get-event-data'); 3 | 4 | module.exports = { 5 | configure: () => {}, 6 | eventType: 'SushiswapSwap', 7 | fetchLogEntries, 8 | getEventData, 9 | protocolVersion: 3, 10 | }; 11 | -------------------------------------------------------------------------------- /packages/transformed-erc20-extractor/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@0x-event-extractor/transformed-erc20-extractor", 3 | "description": "Extractor for exchange proxy ERC20 transform events", 4 | "version": "1.0.0", 5 | "main": "src/index.js", 6 | "dependencies": { 7 | "@0x-event-extractor/shared": "^1.0.0" 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /packages/transformed-erc20-extractor/src/fetch-log-entries.js: -------------------------------------------------------------------------------- 1 | const { web3 } = require('@0x-event-extractor/shared'); 2 | 3 | const EXCHANGE_PROXY_ADDRESS = '0xdef1c0ded9bec7f1a1670819833240f027b25eff'; 4 | const TRANSFORMED_ERC20_EVENT_TOPIC = 5 | '0x0f6672f78a59ba8e5e5b5d38df3ebc67f3c792e2c9259b8d97d7f00dd78ba1b3'; 6 | 7 | const fetchEvents = async (fromBlock, toBlock) => { 8 | const logs = await web3.getWrapper().getLogsAsync({ 9 | address: EXCHANGE_PROXY_ADDRESS, 10 | fromBlock, 11 | toBlock, 12 | topics: [TRANSFORMED_ERC20_EVENT_TOPIC], 13 | }); 14 | 15 | return logs; 16 | }; 17 | 18 | module.exports = fetchEvents; 19 | -------------------------------------------------------------------------------- /packages/transformed-erc20-extractor/src/get-event-data.js: 
-------------------------------------------------------------------------------- 1 | const { web3 } = require('@0x-event-extractor/shared'); 2 | 3 | const getEventData = logEntry => { 4 | const decoded = web3.getWrapper().abiDecoder.tryToDecodeLogOrNoop(logEntry); 5 | 6 | const { 7 | inputToken, 8 | inputTokenAmount, 9 | outputToken, 10 | outputTokenAmount, 11 | taker, 12 | } = decoded.args; 13 | 14 | const eventData = { 15 | inputToken, 16 | inputTokenAmount: inputTokenAmount.toString(), 17 | outputToken, 18 | outputTokenAmount: outputTokenAmount.toString(), 19 | taker, 20 | }; 21 | 22 | return eventData; 23 | }; 24 | 25 | module.exports = getEventData; 26 | -------------------------------------------------------------------------------- /packages/transformed-erc20-extractor/src/index.js: -------------------------------------------------------------------------------- 1 | const fetchLogEntries = require('./fetch-log-entries'); 2 | const getEventData = require('./get-event-data'); 3 | 4 | module.exports = { 5 | configure: () => {}, 6 | eventType: 'TransformedERC20', 7 | fetchLogEntries, 8 | getEventData, 9 | protocolVersion: 3, 10 | }; 11 | -------------------------------------------------------------------------------- /packages/uniswap-v2-swap-extractor/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@0x-event-extractor/uniswap-v2-swap-extractor", 3 | "description": "Extractor for UniswapV2 Swap events", 4 | "version": "1.0.0", 5 | "main": "src/index.js", 6 | "dependencies": { 7 | "@0x-event-extractor/shared": "^1.0.0", 8 | "@0x/utils": "5.5.1", 9 | "graphql": "15.3.0", 10 | "graphql-request": "3.1.0" 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /packages/uniswap-v2-swap-extractor/src/fetch-log-entries.js: -------------------------------------------------------------------------------- 1 | const { request, gql } = require('graphql-request'); 2 | const { BigNumber } = require('@0x/utils'); 3 | 4 | const EXCHANGE_PROXY_ADDRESS = '0xdef1c0ded9bec7f1a1670819833240f027b25eff'; 5 | 6 | const convertAmount = (amount, decimals) => { 7 | return new BigNumber(amount).times( 8 | new BigNumber(10).pow(new BigNumber(decimals)), 9 | ); 10 | }; 11 | 12 | const fetchEvents = async (fromBlock, toBlock, skip = 0) => { 13 | const pageSize = 100; 14 | const query = gql`{ 15 | swaps ( 16 | first: ${pageSize}, 17 | skip: ${skip}, 18 | where: { 19 | sender: "${EXCHANGE_PROXY_ADDRESS}", 20 | timestamp_gte: ${fromBlock}, 21 | timestamp_lte: ${toBlock}, 22 | }) { 23 | id 24 | transaction { 25 | id 26 | blockNumber 27 | timestamp 28 | } 29 | pair { 30 | id 31 | token0 { 32 | id 33 | decimals 34 | } 35 | token1 { 36 | id 37 | decimals 38 | } 39 | } 40 | amount0In 41 | amount1In 42 | amount0Out 43 | amount1Out 44 | to 45 | logIndex 46 | } 47 | }`; 48 | 49 | const response = await request( 50 | 'https://api.thegraph.com/subgraphs/name/uniswap/uniswap-v2', 51 | query, 52 | ); 53 | 54 | const events = response.swaps.map(swap => { 55 | const maker = swap.pair.id; // The Uniswap pool for traded pair 56 | const taker = swap.to; 57 | const transactionHash = swap.transaction.id; 58 | const { logIndex } = swap; 59 | const { blockNumber } = swap.transaction; 60 | 61 | const amount0In = convertAmount(swap.amount0In, swap.pair.token0.decimals); 62 | const amount1In = convertAmount(swap.amount1In, swap.pair.token1.decimals); 63 | 64 | const amount0Out = convertAmount( 65 | swap.amount0Out, 66 | 
swap.pair.token0.decimals, 67 | ); 68 | 69 | const amount1Out = convertAmount( 70 | swap.amount1Out, 71 | swap.pair.token1.decimals, 72 | ); 73 | 74 | const fromToken = amount0In.gt(amount1In) 75 | ? swap.pair.token0 76 | : swap.pair.token1; 77 | 78 | const toToken = amount0Out.gt(amount1Out) 79 | ? swap.pair.token0 80 | : swap.pair.token1; 81 | 82 | const fromTokenAmount = amount0In.gt(amount1In) ? amount0In : amount1In; 83 | const toTokenAmount = amount0Out.gt(amount1Out) ? amount0Out : amount1Out; 84 | 85 | return { 86 | blockNumber, 87 | data: { 88 | maker, 89 | makerAmount: fromTokenAmount, 90 | makerToken: fromToken.id, 91 | taker, 92 | takerAmount: toTokenAmount, 93 | takerToken: toToken.id, 94 | }, 95 | logIndex, 96 | transactionHash, 97 | }; 98 | }); 99 | 100 | if (events.length === pageSize) { 101 | const nextEvents = await fetchEvents(fromBlock, toBlock, skip + pageSize); 102 | 103 | return events.concat(nextEvents); 104 | } 105 | 106 | return events; 107 | }; 108 | 109 | module.exports = fetchEvents; 110 | -------------------------------------------------------------------------------- /packages/uniswap-v2-swap-extractor/src/get-event-data.js: -------------------------------------------------------------------------------- 1 | const getEventData = logEntry => { 2 | return logEntry.data; 3 | }; 4 | 5 | module.exports = getEventData; 6 | -------------------------------------------------------------------------------- /packages/uniswap-v2-swap-extractor/src/index.js: -------------------------------------------------------------------------------- 1 | const fetchLogEntries = require('./fetch-log-entries'); 2 | const getEventData = require('./get-event-data'); 3 | 4 | module.exports = { 5 | configure: () => {}, 6 | eventType: 'UniswapV2Swap', 7 | fetchLogEntries, 8 | getEventData, 9 | protocolVersion: 3, 10 | }; 11 | -------------------------------------------------------------------------------- /packages/uniswap-v3-swap-extractor/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@0x-event-extractor/uniswap-v3-swap-extractor", 3 | "description": "Extractor for UniswapV3 Swap events", 4 | "version": "1.0.0", 5 | "main": "src/index.js", 6 | "dependencies": { 7 | "@0x-event-extractor/shared": "^1.0.0", 8 | "@0x/utils": "5.5.1", 9 | "graphql": "15.3.0", 10 | "graphql-request": "3.1.0" 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /packages/uniswap-v3-swap-extractor/src/fetch-log-entries.js: -------------------------------------------------------------------------------- 1 | const { request, gql } = require('graphql-request'); 2 | const { BigNumber } = require('@0x/utils'); 3 | 4 | const EXCHANGE_PROXY_ADDRESS = '0xdef1c0ded9bec7f1a1670819833240f027b25eff'; 5 | 6 | const convertAmount = (amount, decimals) => { 7 | return new BigNumber(amount).times( 8 | new BigNumber(10).pow(new BigNumber(decimals)), 9 | ); 10 | }; 11 | 12 | const fetchEvents = async (fromBlock, toBlock, skip = 0) => { 13 | const pageSize = 100; 14 | const query = gql`{ 15 | swaps ( 16 | first: ${pageSize}, 17 | skip: ${skip}, 18 | where: { 19 | sender: "${EXCHANGE_PROXY_ADDRESS}", 20 | timestamp_gte: ${fromBlock}, 21 | timestamp_lte: ${toBlock}, 22 | }) { 23 | id 24 | transaction { 25 | id 26 | blockNumber 27 | timestamp 28 | } 29 | pool { 30 | id 31 | token0 { 32 | id 33 | decimals 34 | } 35 | token1 { 36 | id 37 | decimals 38 | } 39 | } 40 | amount0 41 | amount1 42 | recipient 43 | logIndex 44 | } 45 
| }`; 46 | 47 | const response = await request( 48 | 'https://api.thegraph.com/subgraphs/name/uniswap/uniswap-v3', 49 | query, 50 | ); 51 | 52 | const events = response.swaps.map(swap => { 53 | const maker = swap.pool.id; // The Uniswap pool for traded pool 54 | const taker = swap.recipient; 55 | const transactionHash = swap.transaction.id; 56 | const { logIndex } = swap; 57 | const { blockNumber } = swap.transaction; 58 | 59 | const amount0 = convertAmount(swap.amount0, swap.pool.token0.decimals); 60 | const amount1 = convertAmount(swap.amount1, swap.pool.token1.decimals); 61 | 62 | const fromToken = amount0.lt(0) ? swap.pool.token0 : swap.pool.token1; 63 | const toToken = amount0.gt(0) ? swap.pool.token0 : swap.pool.token1; 64 | 65 | const fromTokenAmount = amount0.lt(0) ? amount0 : amount1; 66 | const toTokenAmount = amount0.gt(0) ? amount0 : amount1; 67 | 68 | return { 69 | blockNumber, 70 | data: { 71 | maker, 72 | makerAmount: Math.abs(fromTokenAmount), 73 | makerToken: fromToken.id, 74 | taker, 75 | takerAmount: Math.abs(toTokenAmount), 76 | takerToken: toToken.id, 77 | }, 78 | logIndex, 79 | transactionHash, 80 | }; 81 | }); 82 | 83 | if (events.length === pageSize) { 84 | const nextEvents = await fetchEvents(fromBlock, toBlock, skip + pageSize); 85 | 86 | return events.concat(nextEvents); 87 | } 88 | 89 | return events; 90 | }; 91 | 92 | module.exports = fetchEvents; 93 | -------------------------------------------------------------------------------- /packages/uniswap-v3-swap-extractor/src/get-event-data.js: -------------------------------------------------------------------------------- 1 | const getEventData = logEntry => { 2 | return logEntry.data; 3 | }; 4 | 5 | module.exports = getEventData; 6 | -------------------------------------------------------------------------------- /packages/uniswap-v3-swap-extractor/src/index.js: -------------------------------------------------------------------------------- 1 | const fetchLogEntries = require('./fetch-log-entries'); 2 | const getEventData = require('./get-event-data'); 3 | 4 | module.exports = { 5 | configure: () => {}, 6 | eventType: 'UniswapV3Swap', 7 | fetchLogEntries, 8 | getEventData, 9 | protocolVersion: 3, 10 | }; 11 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": ["github>0xTracker/renovate-config-0x-tracker"] 3 | } 4 | -------------------------------------------------------------------------------- /src/index.js: -------------------------------------------------------------------------------- 1 | require('dotenv-safe').config({ 2 | example: 3 | process.env.NODE_ENV === 'production' 4 | ? '.env.prod.example' 5 | : '.env.example', 6 | }); 7 | 8 | const extractor = require('@0x-event-extractor/core'); 9 | const config = require('config'); 10 | 11 | extractor 12 | .configure(config.util.toObject()) 13 | .then(() => { 14 | extractor.start(); 15 | }) 16 | .catch(console.error); // eslint-disable-line no-console 17 | --------------------------------------------------------------------------------
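
Every extractor package in this listing exports the same five-member interface (configure, eventType, fetchLogEntries, getEventData, protocolVersion), which the core package consumes once src/index.js boots it with the application config. A hypothetical skeleton for an additional log-based extractor might look like the sketch below; the contract address, event topic and event name are placeholders, and how the core discovers or registers a new package is not shown in this listing.

// Hypothetical skeleton for a new extractor package (not part of this
// repository). Address, topic and event name are placeholders.
const { web3 } = require('@0x-event-extractor/shared');

const CONTRACT_ADDRESS = '0x0000000000000000000000000000000000000000'; // placeholder
const EVENT_TOPIC =
  '0x0000000000000000000000000000000000000000000000000000000000000000'; // placeholder

// Fetch raw log entries for the block range the core asks for.
const fetchLogEntries = async (fromBlock, toBlock) =>
  web3.getWrapper().getLogsAsync({
    address: CONTRACT_ADDRESS,
    fromBlock,
    toBlock,
    topics: [EVENT_TOPIC],
  });

// Decode a raw log entry into the event data that gets persisted. This
// assumes the matching ABI has been registered in shared/src/web3.js, as
// the existing exchange-proxy extractors do.
const getEventData = logEntry =>
  web3.getWrapper().abiDecoder.tryToDecodeLogOrNoop(logEntry).args;

module.exports = {
  configure: () => {},
  eventType: 'HypotheticalEvent',
  fetchLogEntries,
  getEventData,
  protocolVersion: 4,
};

The existing packages normalize any uint fields with .toString() inside getEventData before returning them; a real extractor would do the same for whatever amounts its event carries.
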