├── tests
│   ├── __init__.py
│   ├── unit
│   │   ├── __init__.py
│   │   ├── Services
│   │   │   ├── __init__.py
│   │   │   └── coingecko_test.py
│   │   ├── Variables
│   │   │   ├── __init__.py
│   │   │   ├── graph_test.py
│   │   │   ├── trade_test.py
│   │   │   └── pool_test.py
│   │   ├── Actions
│   │   │   ├── __init__.py
│   │   │   ├── pool_updater_test.py
│   │   │   ├── conftest.py
│   │   │   ├── whitelist_test.py
│   │   │   ├── action_tree_test.py
│   │   │   ├── path_finder_test.py
│   │   │   ├── redis_state_test.py
│   │   │   ├── trader_test.py
│   │   │   ├── arbitrage_test.py
│   │   │   └── action_test.py
│   │   ├── Contracts
│   │   │   ├── __init__.py
│   │   │   └── pool_contracts_test.py
│   │   ├── big_number_test.py
│   │   ├── token_test.py
│   │   ├── async_helpers_test.py
│   │   ├── conftest.py
│   │   ├── main_test.py
│   │   └── settings_parser_test.py
│   └── system
│       ├── Actions
│       │   ├── __init__.py
│       │   ├── conftest.py
│       │   ├── redis_state_test.py
│       │   ├── action_tree_test.py
│       │   └── trader_test.py
│       ├── __init__.py
│       ├── Contracts
│       │   ├── __init__.py
│       │   ├── uniswap_router_test.py
│       │   ├── token_test.py
│       │   ├── uniswap_test.py
│       │   └── balancer_test.py
│       ├── Services
│       │   ├── __init__.py
│       │   └── coingecko_test.py
│       └── app_test.py
├── Arbie
│   ├── resources
│   │   ├── __init__.py
│   │   └── contracts
│   │       ├── __init__.py
│   │       ├── tokens
│   │       │   ├── __init__.py
│   │       │   └── IERC20.json
│   │       ├── balancer
│   │       │   └── __init__.py
│   │       ├── uniswap
│   │       │   ├── __init__.py
│   │       │   └── UniswapV2Pair.json
│   │       └── contract_addresses.json
│   ├── Store
│   │   └── __init__.py
│   ├── Services
│   │   ├── __init__.py
│   │   └── coingecko.py
│   ├── Variables
│   │   ├── __init__.py
│   │   ├── big_number.py
│   │   ├── trades.py
│   │   ├── token.py
│   │   ├── graph.py
│   │   └── pool.py
│   ├── address.py
│   ├── __init__.py
│   ├── Contracts
│   │   ├── __init__.py
│   │   ├── uniswap_router.py
│   │   ├── event_filter.py
│   │   ├── pool_contract.py
│   │   ├── tokens.py
│   │   ├── uniswap.py
│   │   ├── balancer.py
│   │   └── contract.py
│   ├── Actions
│   │   ├── __init__.py
│   │   ├── whitelist.py
│   │   ├── pool_updater.py
│   │   ├── action_tree.py
│   │   ├── arbitrage.py
│   │   ├── path_finder.py
│   │   ├── redis_state.py
│   │   ├── trader.py
│   │   ├── pool_finder.py
│   │   └── action.py
│   ├── exception.py
│   ├── app.py
│   ├── prometheus.py
│   ├── async_helpers.py
│   ├── __main__.py
│   └── settings_parser.py
├── settings.gradle
├── .idea
│   ├── .gitignore
│   ├── vcs.xml
│   ├── compiler.xml
│   ├── inspectionProfiles
│   │   └── profiles_settings.xml
│   ├── misc.xml
│   ├── modules.xml
│   ├── arbie.iml
│   ├── gradle.xml
│   └── runConfigurations
│       ├── unitTests.xml
│       └── PoolFinder.xml
├── assets
│   └── icon
│       ├── arbie-icon.png
│       └── arbie-icon-192x192.png
├── pyproject.toml
├── pip
│   ├── requirements-tools.txt
│   ├── requirements.txt
│   └── requirements-dev.txt
├── .github
│   ├── .codecov.yml
│   └── workflows
│       ├── python_publish.yml
│       └── python-branch.yml
├── gradle
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── Brig
│   ├── README.md
│   ├── hardhat.config.js
│   ├── Trader
│   │   ├── test_account.json
│   │   └── trader.yml
│   ├── Dockerfile.hardhat
│   ├── Dockerfile.arbie
│   ├── build.gradle
│   ├── PoolUpdater
│   │   └── pool_updater.yml
│   ├── PoolFinder
│   │   └── pool_finder.yml
│   ├── PathFinder
│   │   └── path_finder.yml
│   ├── docker-compose.yml
│   └── Storage
│       └── docker-compose.yml
├── .pre-commit-config.yaml
├── .gitignore
├── example
│   └── conf.yml
├── doc
│   └── Install.md
├── docker-compose.yml
├── Dockerfile
├── conftest.py
├── setup.py
├── LICENSE
├── README.md
├── setup.cfg
└── gradlew
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | """Test pacakge."""
2 |
--------------------------------------------------------------------------------
/tests/unit/__init__.py:
--------------------------------------------------------------------------------
1 | """Unit test package."""
2 |
--------------------------------------------------------------------------------
/Arbie/resources/__init__.py:
--------------------------------------------------------------------------------
1 | """Resource package."""
2 |
--------------------------------------------------------------------------------
/tests/unit/Services/__init__.py:
--------------------------------------------------------------------------------
1 | """Unit test package."""
2 |
--------------------------------------------------------------------------------
/tests/unit/Variables/__init__.py:
--------------------------------------------------------------------------------
1 | """Unit test package."""
2 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | rootProject.name = "Arbie"
2 | include 'Brig'
3 |
--------------------------------------------------------------------------------
/tests/system/Actions/__init__.py:
--------------------------------------------------------------------------------
1 | """System tests for Actions."""
2 |
--------------------------------------------------------------------------------
/tests/system/__init__.py:
--------------------------------------------------------------------------------
1 | """Base module for system tests."""
2 |
--------------------------------------------------------------------------------
/tests/unit/Actions/__init__.py:
--------------------------------------------------------------------------------
1 | """Test package for Actions."""
2 |
--------------------------------------------------------------------------------
/Arbie/resources/contracts/__init__.py:
--------------------------------------------------------------------------------
1 | """Ethereum contract abi."""
2 |
--------------------------------------------------------------------------------
/tests/system/Contracts/__init__.py:
--------------------------------------------------------------------------------
1 | """Test package for contracts."""
2 |
--------------------------------------------------------------------------------
/tests/system/Services/__init__.py:
--------------------------------------------------------------------------------
1 | """Test module for 3pp services."""
2 |
--------------------------------------------------------------------------------
/.idea/.gitignore:
--------------------------------------------------------------------------------
1 | # Default ignored files
2 | /shelf/
3 | /workspace.xml
4 |
--------------------------------------------------------------------------------
/tests/unit/Contracts/__init__.py:
--------------------------------------------------------------------------------
1 | """Unit test of utility contract functions."""
2 |
--------------------------------------------------------------------------------
/Arbie/Store/__init__.py:
--------------------------------------------------------------------------------
1 | """Store is responsible for storing the application state."""
2 |
--------------------------------------------------------------------------------
/assets/icon/arbie-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/owodunni/arbie/HEAD/assets/icon/arbie-icon.png
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.commitizen]
2 | name = "cz_conventional_commits"
3 | tag_format = "v$version"
4 |
--------------------------------------------------------------------------------
/pip/requirements-tools.txt:
--------------------------------------------------------------------------------
1 | python-semantic-release
2 | pre-commit
3 | commitizen
4 | pytest-profiling
5 |
--------------------------------------------------------------------------------
/.github/.codecov.yml:
--------------------------------------------------------------------------------
1 | coverage:
2 | status:
3 | project:
4 | tests:
5 | target: 96%
6 |
--------------------------------------------------------------------------------
/assets/icon/arbie-icon-192x192.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/owodunni/arbie/HEAD/assets/icon/arbie-icon-192x192.png
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/owodunni/arbie/HEAD/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/Arbie/resources/contracts/tokens/__init__.py:
--------------------------------------------------------------------------------
1 | """Token contract abi.
2 |
3 | Token ABIs can be found at https://etherscan.io/tokens
4 | """
5 |
--------------------------------------------------------------------------------
/Brig/README.md:
--------------------------------------------------------------------------------
1 | # Birka
2 |
3 | ## Deploy database
4 |
5 | ```
6 | docker stack deploy --compose-file docker-compose.yml birka
7 | ```
8 |
--------------------------------------------------------------------------------
/Brig/hardhat.config.js:
--------------------------------------------------------------------------------
1 | /**
2 | * @type import('hardhat/config').HardhatUserConfig
3 | */
4 | module.exports = {
5 | solidity: "0.7.3",
6 | };
7 |
8 |
--------------------------------------------------------------------------------
/Arbie/Services/__init__.py:
--------------------------------------------------------------------------------
1 | """Services is responsible for interacting with 3pp APIs."""
2 |
3 | from Arbie.Services.coingecko import Coingecko # noqa: F401
4 |
--------------------------------------------------------------------------------
/Brig/Trader/test_account.json:
--------------------------------------------------------------------------------
1 | {"address":"0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266",
2 | "key":"0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80"}
--------------------------------------------------------------------------------
/pip/requirements.txt:
--------------------------------------------------------------------------------
1 | wheel
2 | web3
3 | sympy
4 | networkx
5 | pyyaml
6 | docopt
7 | redis
8 | hiredis
9 | prometheus_client
10 | prometheus_async
11 | requests
--------------------------------------------------------------------------------
/pip/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | pytest
2 | pytest-mock
3 | pytest-cov
4 | pytest-asyncio
5 | flake8
6 | autopep8
7 | wemake-python-styleguide
8 | isort < 5.0.0
9 | black
10 |
--------------------------------------------------------------------------------
/Brig/Dockerfile.hardhat:
--------------------------------------------------------------------------------
1 | FROM node:15-slim
2 |
3 | WORKDIR /hardhat
4 |
5 | ADD hardhat.config.js hardhat.config.js
6 |
7 | RUN npm init --yes && npm install --save-dev hardhat
8 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - hooks:
3 | - id: commitizen
4 | stages:
5 | - commit-msg
6 | repo: https://github.com/commitizen-tools/commitizen
7 | rev: v2.8.2
8 |
--------------------------------------------------------------------------------
/Arbie/resources/contracts/balancer/__init__.py:
--------------------------------------------------------------------------------
1 | """Balancer contract abis.
2 |
3 | Addresses and more info can be found at https://docs.balancer.finance/smart-contracts/addresses
4 | """
5 |
--------------------------------------------------------------------------------
/Arbie/resources/contracts/uniswap/__init__.py:
--------------------------------------------------------------------------------
1 | """Uniswap contract abi.
2 |
3 | Contracts, addresses and abis can be found at https://uniswap.org/docs/v2/smart-contracts/factory
4 | """
5 |
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/.idea/compiler.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/.idea/inspectionProfiles/profiles_settings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | venv
2 | .vscode
3 | Arbie.egg-info
4 | Arbie/__init__.pyc
5 | __pycache__/
6 | build
7 | .doit.db
8 | .coverage
9 | .gradle
10 | .ipynb_checkpoints/
11 | .venv/
12 | .idea/.name
13 | fragmap.html
14 | real_account.json
15 | arbie.log*
16 |
--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Sun Jul 12 08:10:10 CEST 2020
2 | distributionUrl=https\://services.gradle.org/distributions/gradle-6.5.1-all.zip
3 | distributionBase=GRADLE_USER_HOME
4 | distributionPath=wrapper/dists
5 | zipStorePath=wrapper/dists
6 | zipStoreBase=GRADLE_USER_HOME
7 |
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/Brig/Dockerfile.arbie:
--------------------------------------------------------------------------------
1 | FROM python:3.8-slim
2 |
3 |
4 | RUN apt-get update && \
5 | DEBIAN_FRONTEND=noninteractive \
6 | apt-get -y --no-install-recommends install build-essential && \
7 | apt-get clean && \
8 | rm -rf /var/lib/apt/lists/*
9 |
10 | WORKDIR /arbie
11 |
12 | RUN pip install Arbie
13 |
--------------------------------------------------------------------------------
/Arbie/Variables/__init__.py:
--------------------------------------------------------------------------------
1 | """Variable is a module with basic building blocks."""
2 |
3 | from Arbie.Variables.big_number import BigNumber # noqa: F401
4 | from Arbie.Variables.pool import Pool, Pools, PoolType # noqa: F401
5 | from Arbie.Variables.token import Balance, Balances, Token, Tokens # noqa: F401
6 | from Arbie.Variables.trades import Trade # noqa: F401
7 |
--------------------------------------------------------------------------------
/tests/system/Actions/conftest.py:
--------------------------------------------------------------------------------
1 | """Common configuration for Action system tests."""
2 |
3 | import pytest
4 |
5 | from Arbie.Actions import RedisState, Store
6 |
7 |
8 | @pytest.fixture
9 | def redis_state(redis_server):
10 | return RedisState(redis_server)
11 |
12 |
13 | @pytest.fixture
14 | def redis_store(redis_state):
15 | return Store(redis_state)
16 |
--------------------------------------------------------------------------------
/Arbie/address.py:
--------------------------------------------------------------------------------
1 | """Helper functions for interacting with eth addresses."""
2 |
3 | import os
4 |
5 | from Arbie.Variables.token import Token
6 |
7 |
8 | def dummy_address_generator() -> str:
9 | return f"0x{os.urandom(20).hex()}" # noqa: WPS432
10 |
11 |
12 | def dummy_token_generator(name, value=None) -> Token:
13 | return Token(name, dummy_address_generator(), value)
14 |
--------------------------------------------------------------------------------
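Note: a minimal usage sketch of the helpers above (not a file in the repository); the token name and price are illustrative values.

```python
# Hypothetical example: create throw-away tokens with random addresses for tests.
from Arbie.address import dummy_address_generator, dummy_token_generator

address = dummy_address_generator()        # "0x" followed by 20 random bytes in hex
dai = dummy_token_generator("dai", 300)    # Token named "dai" with price 300
print(address, dai)
```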
/Arbie/__init__.py:
--------------------------------------------------------------------------------
1 | """Main init."""
2 |
3 | from Arbie.exception import ( # noqa: F401
4 | DeployContractError,
5 | IERC20TokenError,
6 | PoolValueError,
7 | StateError,
8 | TransactionError,
9 | )
10 | from Arbie.settings_parser import SettingsParser # noqa: F401
11 |
12 | __version__ = "0.10.2" # noqa: WPS410
13 | __version_info__ = tuple(
14 | int(i) for i in __version__.split(".") if i.isdigit()
15 | ) # noqa: WPS221
16 |
--------------------------------------------------------------------------------
/tests/unit/Variables/graph_test.py:
--------------------------------------------------------------------------------
1 | """Unittest for the action structure."""
2 | from Arbie.Variables.graph import FilteredTradingGraph, TradingGraph
3 |
4 |
5 | def test_create(pools):
6 | graph = TradingGraph(pools)
7 | assert len(graph) == 4
8 | assert len(graph.get_edges()) == 16
9 |
10 |
11 | def test_create_filter(pools):
12 | f_graph = FilteredTradingGraph(TradingGraph(pools), 0)
13 | assert len(f_graph) == 4
14 | assert len(f_graph.get_edges()) == 10
15 |
--------------------------------------------------------------------------------
/Arbie/resources/contracts/contract_addresses.json:
--------------------------------------------------------------------------------
1 | {
2 | "tokens":{
3 | "Weth":{"mainnet":"0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"}
4 | },
5 | "balancer":{
6 | "BFactory":{
7 | "mainnet":"0x9424B1412450D0f8Fc2255FAf6046b98213B76Bd"}
8 | },
9 | "uniswap":{
10 | "UniswapV2Factory":{
11 | "mainnet":"0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f"},
12 | "UniswapV2Router02":{
13 | "mainnet":"0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D"}
14 | }
15 | }
--------------------------------------------------------------------------------
/Brig/build.gradle:
--------------------------------------------------------------------------------
1 | task dockerArbie(type: Exec) {
2 | workingDir projectDir
3 |
4 | commandLine "docker", "build", "-f", "Dockerfile.arbie",
5 | "-t", "arbie:prod", ".", "--no-cache"
6 | }
7 |
8 |
9 | task dockerHardhat(type: Exec) {
10 | workingDir projectDir
11 |
12 | commandLine "docker", "build", "-f", "Dockerfile.hardhat",
13 | "-t", "hardhat:node", "."
14 | }
15 |
16 | task dockerBuild {
17 | dependsOn dockerHardhat, dockerArbie
18 | }
19 |
--------------------------------------------------------------------------------
/example/conf.yml:
--------------------------------------------------------------------------------
1 | store:
2 | address: 'redis:6379'
3 |
4 | web3:
5 | address: 'http://gluteus:8545'
6 |
7 | variables:
8 | weth:
9 | type: Token
10 | address: '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2'
11 | uniswap_factory:
12 | type: UniswapFactory
13 | network: mainnet
14 | balancer_factory:
15 | type: BalancerFactory
16 | network: mainnet
17 |
18 | action_tree:
19 | actions:
20 | PoolFinder:
21 | output:
22 | pools: 'PoolFinder.1.pools'
23 | tokens: 'PoolFinder.1.tokens'
24 |
--------------------------------------------------------------------------------
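Note: a minimal sketch of how a configuration like the one above is consumed (not a file in the repository), following Arbie/app.py and the PathFinder unit test; it assumes the configured redis and web3 endpoints are reachable and that the script runs from the repository root.

```python
# Hypothetical example: load example/conf.yml and run the configured action tree.
import asyncio

import yaml

from Arbie.app import App

with open("example/conf.yml") as conf_file:
    config = yaml.safe_load(conf_file)

app = App(config)        # builds the store and the action tree from the config
asyncio.run(app.run())   # runs the configured actions
```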
/doc/Install.md:
--------------------------------------------------------------------------------
1 | # Install
2 | This document describes what needs to be installed to develop Arbie.
3 |
4 |
5 | ## Solidity
6 | For developing Solidity it is highly recommended to set up Ganache and Truffle.
7 |
8 | Install [NVM](https://github.com/nvm-sh/nvm).
9 |
10 | Use NVM to install Node.js version 8:
11 | ```
12 | nvm install 8
13 | ```
14 |
15 | Install Truffle and Ganache-cli
16 | ```
17 | npm install -g ganache-cli truffle
18 | ```
19 |
20 | (Optional) Install Ganache [GUI](https://www.trufflesuite.com/ganache)
21 |
22 |
--------------------------------------------------------------------------------
/tests/unit/big_number_test.py:
--------------------------------------------------------------------------------
1 | """Tests for BigNumber."""
2 |
3 | from Arbie.Variables import BigNumber
4 |
5 |
6 | def test_create_big_number():
7 | bg = BigNumber(5)
8 | assert bg.to_number() == 5
9 |
10 |
11 | def test_equals():
12 | assert BigNumber(5) == BigNumber(5)
13 |
14 |
15 | def test_list_equals():
16 | assert [BigNumber(5)] == [BigNumber(5)]
17 |
18 |
19 | def test_not_equal():
20 | assert BigNumber(5) != BigNumber(6)
21 |
22 |
23 | def test_not_list_equals():
24 | assert [BigNumber(5)] != [BigNumber(6)]
25 |
--------------------------------------------------------------------------------
/Arbie/Contracts/__init__.py:
--------------------------------------------------------------------------------
1 | """Contracts enable interacton with ETH contracts."""
2 | from Arbie.Contracts.balancer import BalancerFactory, BalancerPool # noqa: F401
3 | from Arbie.Contracts.contract import Contract, ContractFactory, Network # noqa: F401
4 | from Arbie.Contracts.event_filter import EventFilter # noqa: F401
5 | from Arbie.Contracts.tokens import GenericToken, IERC20Token, Weth # noqa: F401
6 | from Arbie.Contracts.uniswap import UniswapFactory, UniswapPair # noqa: F401
7 | from Arbie.Contracts.uniswap_router import UniswapV2Router # noqa: F401
8 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.8'
2 | services:
3 | ganache:
4 | image: trufflesuite/ganache-cli
5 | ports:
6 | - 7545:7545
7 | command: --gasLimit 0xfffffffffff --callGasLimit 0xfffffffffff -p 7545 --allowUnlimitedContractSize
8 |
9 | redis:
10 | image: "redis:alpine"
11 | ports:
12 | - "6379:6379"
13 | environment:
14 | - REDIS_REPLICATION_MODE=master
15 |
16 | arbie:
17 | image: arbie
18 | depends_on:
19 | - ganache
20 | - redis
21 | volumes:
22 | - .:/arbie
23 | command: /arbie/gradlew test
24 |
--------------------------------------------------------------------------------
/Brig/Trader/trader.yml:
--------------------------------------------------------------------------------
1 | store:
2 | address: 'gluteus:6379'
3 | web3:
4 | address: 'http://gluteus:8545'
5 | account:
6 | path: test_account.json
7 |
8 | variables:
9 | weth:
10 | type: Weth
11 | network: mainnet
12 | router:
13 | type: UniswapV2Router
14 | network: mainnet
15 |
16 | action_tree:
17 | event:
18 | 'Arbitrage.1.filtered_trades'
19 | actions:
20 | SetUpTrader:
21 | Trader:
22 | input:
23 | trades: 'Arbitrage.1.filtered_trades'
24 | dry_run: False
25 | output:
26 | profit: 'Trader.1.profit'
27 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.8-slim
2 |
3 | # Install OpenJDK-8
4 | RUN mkdir /usr/share/man/man1 && \
5 | apt-get update && \
6 | DEBIAN_FRONTEND=noninteractive \
7 | apt-get -y --no-install-recommends install build-essential \
8 | default-jdk-headless && \
9 | apt-get clean && \
10 | rm -rf /var/lib/apt/lists/*
11 |
12 | WORKDIR /arbie
13 |
14 | ADD gradlew gradlew
15 | ADD gradle gradle
16 |
17 | RUN ./gradlew
18 |
19 | ADD pip pip
20 | RUN pip install -r pip/requirements.txt
21 | RUN pip install -r pip/requirements-dev.txt
22 |
23 | ENV RUNNING_IN_DOCKER True
24 |
--------------------------------------------------------------------------------
/.idea/arbie.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/tests/unit/Actions/pool_updater_test.py:
--------------------------------------------------------------------------------
1 | """Unittest for pool updater."""
2 |
3 | from unittest.mock import MagicMock
4 |
5 | import pytest
6 |
7 | from Arbie.Actions import ActionTree, PoolUpdater, Store
8 |
9 | pytestmark = pytest.mark.asyncio
10 |
11 |
12 | class TestPoolUpdater(object):
13 | async def test_on_next(self, pools):
14 | store = Store()
15 | store.add("web3", MagicMock())
16 | store.add("all_pools", pools)
17 | tree = ActionTree(store)
18 | tree.add_action(PoolUpdater())
19 | with pytest.raises(ValueError):
20 | await tree.run()
21 |
--------------------------------------------------------------------------------
/tests/unit/Actions/conftest.py:
--------------------------------------------------------------------------------
1 | """Common fixtures for Actions."""
2 | from unittest.mock import MagicMock
3 |
4 | import pytest
5 | from pytest_mock.plugin import MockerFixture
6 |
7 | from Arbie.Actions import RedisState
8 |
9 |
10 | @pytest.fixture
11 | def redis_mock(mocker: MockerFixture) -> MagicMock:
12 | mock = MagicMock()
13 | mocker.patch("Arbie.Actions.redis_state.redis.Redis", return_value=mock)
14 | return mock
15 |
16 |
17 | @pytest.fixture
18 | def redis_state(redis_mock):
19 | state = RedisState("good.host.org:1337")
20 | assert redis_mock.ping.called
21 | return state
22 |
--------------------------------------------------------------------------------
/Brig/PoolUpdater/pool_updater.yml:
--------------------------------------------------------------------------------
1 | store:
2 | address: 'gluteus:6379'
3 |
4 | web3:
5 | address: 'http://gluteus:8545'
6 |
7 | variables:
8 | weth:
9 | type: Weth
10 | network: mainnet
11 | uniswap_factory:
12 | type: UniswapFactory
13 | network: mainnet
14 | balancer_factory:
15 | type: BalancerFactory
16 | network: mainnet
17 |
18 | action_tree:
19 | event: ['PoolFinder.1.pools', 'PoolUpdater.1.pools']
20 | actions:
21 | PoolUpdater:
22 | input:
23 | web3: web3
24 | pools: 'PoolFinder.1.pools'
25 | output:
26 | new_pools: 'PoolUpdater.1.pools'
27 |
--------------------------------------------------------------------------------
/Brig/PoolFinder/pool_finder.yml:
--------------------------------------------------------------------------------
1 | store:
2 | address: 'gluteus:6379'
3 |
4 | web3:
5 | address: 'http://gluteus:8545'
6 |
7 | variables:
8 | weth:
9 | type: Weth
10 | network: mainnet
11 | uniswap_factory:
12 | type: UniswapFactory
13 | network: mainnet
14 | balancer_factory:
15 | type: BalancerFactory
16 | network: mainnet
17 |
18 | action_tree:
19 | actions:
20 | Whitelist:
21 | output:
22 | whitelist: 'Whitelist.1.addresses'
23 | PoolFinder:
24 | input:
25 | whitelist: 'Whitelist.1.addresses'
26 | output:
27 | pools: 'PoolFinder.1.pools'
28 | tokens: 'PoolFinder.1.tokens'
29 |
--------------------------------------------------------------------------------
/Arbie/Actions/__init__.py:
--------------------------------------------------------------------------------
1 | """Actions are used for building complex interactions with smart contracts."""
2 |
3 | from Arbie.Actions.action import Action, Store # noqa: F401
4 | from Arbie.Actions.action_tree import ActionTree # noqa: F401
5 | from Arbie.Actions.arbitrage import Arbitrage # noqa:F401
6 | from Arbie.Actions.path_finder import PathFinder # noqa: F401
7 | from Arbie.Actions.pool_finder import PoolFinder # noqa: F401
8 | from Arbie.Actions.pool_updater import PoolUpdater # noqa: F401
9 | from Arbie.Actions.redis_state import RedisState # noqa: F401
10 | from Arbie.Actions.trader import SetUpTrader, Trader # noqa: F401
11 | from Arbie.Actions.whitelist import Whitelist # noqa: F401
12 |
--------------------------------------------------------------------------------
/Brig/PathFinder/path_finder.yml:
--------------------------------------------------------------------------------
1 | store:
2 | address: 'gluteus:6379'
3 |
4 | web3:
5 | address: 'http://gluteus:8545'
6 |
7 | variables:
8 | weth:
9 | type: Weth
10 | network: mainnet
11 |
12 | action_tree:
13 | event:
14 | 'PoolUpdater.1.pools'
15 | actions:
16 | PathFinder:
17 | input:
18 | pools: 'PoolUpdater.1.pools'
19 | max_depth: 4
20 | weth: weth
21 | output:
22 | trades: trades
23 | Arbitrage:
24 | input:
25 | trades: trades
26 | process_trades: 10000
27 | processes: 20
28 | top_trades: 20
29 | output:
30 | out_trades: 'Arbitrage.1.filtered_trades'
31 |
--------------------------------------------------------------------------------
/Arbie/exception.py:
--------------------------------------------------------------------------------
1 | """Unique exceptions for Arbie."""
2 |
3 |
4 | class DeployContractError(Exception):
5 | """Raised when a contract can not be deployed."""
6 |
7 |
8 | class IERC20TokenError(ValueError):
9 | """Raised when a IErc20 token is not Erc20 compliant."""
10 |
11 |
12 | class PoolValueError(ValueError):
13 | """Raised when a Pool is not initialized properly.
14 |
15 | This can happen when a pool doesn't have at least 2 tokens,
16 | or when the token weights don't add up to one.
17 | """
18 |
19 |
20 | class StateError(Exception):
21 | """Raised when there is an issue with the state."""
22 |
23 |
24 | class TransactionError(RuntimeError):
25 | """Raised when a Transaction unexpectedly fails."""
26 |
--------------------------------------------------------------------------------
/Brig/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 | services:
3 | PoolFinder:
4 | image: arbie:prod
5 | volumes:
6 | - ./PoolFinder:/arbie
7 | command: Arbie -f pool_finder.yml
8 | ports:
9 | - 8001:8000
10 |
11 | PoolUpdater:
12 | image: arbie:prod
13 | volumes:
14 | - ./PoolUpdater:/arbie
15 | command: Arbie -f pool_updater.yml
16 | ports:
17 | - 8002:8000
18 |
19 | PathFinder:
20 | image: arbie:prod
21 | volumes:
22 | - ./PathFinder:/arbie
23 | command: Arbie -f path_finder.yml
24 | ports:
25 | - 8003:8000
26 |
27 | Trader:
28 | image: arbie:prod
29 | volumes:
30 | - ./Trader:/arbie
31 | command: Arbie -f trader.yml
32 | ports:
33 | - 8004:8000
34 |
--------------------------------------------------------------------------------
/tests/unit/Contracts/pool_contracts_test.py:
--------------------------------------------------------------------------------
1 | """Unittests of PoolContracts."""
2 |
3 | from unittest.mock import MagicMock
4 |
5 | import pytest
6 |
7 | from Arbie.Contracts.pool_contract import PoolContract
8 |
9 | pytestmark = pytest.mark.asyncio
10 |
11 |
12 | async def should_raise(promise):
13 | with pytest.raises(NotImplementedError):
14 | await promise
15 |
16 |
17 | class TestPoolContract(object):
18 | async def test_should_throw(self):
19 | pc = PoolContract(MagicMock(), None, MagicMock())
20 |
21 | await should_raise(pc.create_tokens())
22 |
23 | await should_raise(pc.get_balances())
24 |
25 | await should_raise(pc.get_weights())
26 |
27 | await should_raise(pc.get_fee())
28 |
29 | with pytest.raises(NotImplementedError):
30 | pc.get_type()
31 |
--------------------------------------------------------------------------------
/conftest.py:
--------------------------------------------------------------------------------
1 | """Help module for web3 tests."""
2 | import pytest
3 |
4 |
5 | def pytest_addoption(parser):
6 | parser.addoption("--web3_server", action="store", default="http://127.0.0.1:7545")
7 | parser.addoption("--redis_server", action="store", default="127.0.0.1:6379")
8 | parser.addoption(
9 | "--run_slow", action="store_true", default=False, help="run slow tests"
10 | )
11 |
12 |
13 | def pytest_configure(config):
14 | config.addinivalue_line("markers", "slow: mark test as slow to run")
15 |
16 |
17 | def pytest_collection_modifyitems(config, items):
18 | if config.getoption("--run_slow"):
19 | # --run_slow given in cli: do not skip slow tests
20 | return
21 | skip_slow = pytest.mark.skip(reason="need --run_slow option to run")
22 | for item in items:
23 | if "slow" in item.keywords:
24 | item.add_marker(skip_slow)
25 |
--------------------------------------------------------------------------------
/Arbie/app.py:
--------------------------------------------------------------------------------
1 | """Main application."""
2 |
3 | import logging
4 |
5 | import Arbie
6 | from Arbie.settings_parser import SettingsParser
7 |
8 | logger = logging.getLogger()
9 |
10 |
11 | class App(object):
12 | """App is used for configuring and running Arbie."""
13 |
14 | def __init__(self, config):
15 | logger.info(f"Arbie version {Arbie.__version__}") # noqa: WPS609
16 | sp = SettingsParser(config)
17 | self.store = sp.setup_store()
18 | self.action_tree = sp.action_tree(self.store)
19 |
20 | async def run(self):
21 | if self.action_tree is None:
22 | logger.warning("No actions given in configuration")
23 | return
24 | try:
25 | await self.action_tree.run()
26 | except Exception as e:
27 | logger.fatal(e, exc_info=True)
28 | raise e
29 |
30 | def stop(self):
31 | self.action_tree.stop()
32 |
--------------------------------------------------------------------------------
/Brig/Storage/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.8'
2 | services:
3 | eth-node:
4 | image: ethereum/client-go:stable
5 | volumes:
6 | - eth-data:/data
7 | ports:
8 | - 30303:30303/tcp
9 | - 30304:30304/udp
10 | - 8545:8545
11 | command:
12 | - --http
13 | - --http.addr=0.0.0.0
14 | - --http.vhosts=*
15 | - --http.corsdomain=*
16 | - --allow-insecure-unlock
17 | - --metrics
18 | - --metrics.expensive
19 | - --pprof
20 | - --pprof.addr=0.0.0.0
21 | - --datadir=/data
22 | networks:
23 | - core
24 |
25 | redis:
26 | image: redis:alpine
27 | volumes:
28 | - redis-data:/var/lib/redis
29 | ports:
30 | - 6379:6379
31 | environment:
32 | - REDIS_REPLICATION_MODE=master
33 | networks:
34 | - core
35 |
36 | volumes:
37 | redis-data:
38 | eth-data:
39 |
40 | networks:
41 | core:
42 | external: true
43 |
--------------------------------------------------------------------------------
/.idea/gradle.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
21 |
22 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | """Packaging logic for arbie."""
2 | import os
3 |
4 | from setuptools import find_packages, setup
5 |
6 |
7 | def read(path):
8 | with open(path) as req:
9 | return req.read().splitlines()
10 |
11 |
12 | def package_files(directory, file_filter):
13 | paths = []
14 | for (path, directories, filenames) in os.walk(directory):
15 | for filename in filenames:
16 | if any(substring in filename for substring in file_filter):
17 | paths.append(os.path.join("..", path, filename))
18 | return paths
19 |
20 |
21 | extra_files = package_files("Arbie/resources", [".json"])
22 |
23 | setup(
24 | packages=find_packages(exclude=["tests", "unit", "system"]),
25 | package_data={"": extra_files},
26 | install_requires=read("pip/requirements.txt"),
27 | tests_require=read("pip/requirements-dev.txt"),
28 | entry_points={"console_scripts": ["Arbie = Arbie.__main__:main"]},
29 | )
30 |
--------------------------------------------------------------------------------
/tests/system/Services/coingecko_test.py:
--------------------------------------------------------------------------------
1 | """Integration test for Coingecko API."""
2 |
3 | import pytest
4 |
5 | from Arbie.Services import Coingecko
6 |
7 | pytestmark = pytest.mark.asyncio
8 |
9 |
10 | class TestCoingecko(object):
11 | async def test_btc_coin(self):
12 | response = await Coingecko()._coin_ticker("01coin") # noqa: WPS437
13 | assert response is None
14 |
15 | async def test_eth_coin(self):
16 | address = await Coingecko()._coin_ticker("velo-token") # noqa: WPS437
17 | assert address == "0x98ad9B32dD10f8D8486927D846D4Df8BAf39Abe2".lower()
18 |
19 | async def test_usd_coin(self):
20 | address = await Coingecko()._coin_ticker("usd-coin") # noqa: WPS437
21 | assert address == "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48".lower()
22 |
23 | async def test_get_ids(self):
24 | ids = await Coingecko().ids() # noqa: WPS437
25 | assert len(ids) > 6000 # noqa: WPS432
26 |
--------------------------------------------------------------------------------
/tests/unit/Variables/trade_test.py:
--------------------------------------------------------------------------------
1 | """Unittest for Trade."""
2 | import pytest
3 |
4 | from Arbie.Variables import Pool, Trade
5 |
6 |
7 | @pytest.fixture
8 | def tokens(dai, eth):
9 | return dai, eth
10 |
11 |
12 | class TestTrade(object):
13 | @pytest.fixture
14 | def trade(self, tokens, eth, dai): # noqa: WPS442
15 | pool1 = Pool(tokens, [400, 1], [0.5, 0.5])
16 | pool2 = Pool(tokens, [410, 1], [0.5, 0.5])
17 | return Trade([pool1, pool2], [dai, eth, dai])
18 |
19 | def test_create(self, trade):
20 | assert len(trade.pools) == 2
21 | assert len(trade.path) == 3
22 |
23 | def test_get(self, trade, eth, dai):
24 | _, token_in, token_out = trade[0]
25 | assert token_in == dai
26 | assert token_out == eth
27 |
28 | def test_iter(self, trade, eth, dai):
29 | pools = []
30 | for pool, _in, _out in trade:
31 | pools.append(pool)
32 | assert len(pools) == 2
33 |
--------------------------------------------------------------------------------
/Arbie/Variables/big_number.py:
--------------------------------------------------------------------------------
1 | """Utility functions for createting large numbers."""
2 |
3 |
4 | class BigNumber(object):
5 | """BigNumber is used for creating etheruem friendly numbers."""
6 |
7 | def __init__(self, value, exp=18):
8 | self.value = int(round(value * 10 ** exp))
9 | self.exp = exp
10 |
11 | def __eq__(self, other):
12 | if isinstance(other, BigNumber):
13 | return self.value == other.value and self.exp == other.exp
14 | return self.value == other
15 |
16 | def __gt__(self, other):
17 | if isinstance(other, BigNumber):
18 | return self.value > other.value
19 | return self.value > other
20 |
21 | def __truediv__(self, other):
22 | return self.to_number() / other.to_number()
23 |
24 | @classmethod
25 | def from_value(cls, value, exp=18):
26 | bg = cls(0, exp)
27 | bg.value = int(value)
28 | return bg
29 |
30 | def to_number(self) -> float:
31 | return self.value / (10 ** self.exp)
32 |
--------------------------------------------------------------------------------
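Note: a minimal usage sketch of BigNumber (not a file in the repository), based only on the class above.

```python
# Hypothetical example: convert between human-readable amounts and 18-decimal integers.
from Arbie.Variables import BigNumber

amount = BigNumber(1.5)                      # stored as 1.5 * 10**18
assert amount.value == 1500000000000000000
assert amount.to_number() == 1.5

raw = BigNumber.from_value(2 * 10**18)       # wrap a raw integer from a contract call
assert raw.to_number() == 2.0
assert BigNumber(2) == raw
```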
/.github/workflows/python_publish.yml:
--------------------------------------------------------------------------------
1 | name: Upload Python Package
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 |
8 | jobs:
9 | release:
10 | runs-on: ubuntu-latest
11 |
12 | steps:
13 | - uses: actions/checkout@v2
14 | with:
15 | fetch-depth: 0
16 |
17 | - name: Temporarily disable "include administrators" branch protection
18 | uses: benjefferies/branch-protection-bot@master
19 | if: always()
20 | with:
21 | access-token: ${{ secrets.ACCESS_TOKEN }}
22 |
23 | - name: Python Semantic Release
24 | uses: relekang/python-semantic-release@master
25 | with:
26 | github_token: ${{ secrets.GITHUB_TOKEN }}
27 | pypi_token: ${{ secrets.PYPI_TOKEN }}
28 |
29 | - name: Enable "include administrators" branch protection
30 | uses: benjefferies/branch-protection-bot@master
31 | if: always() # Force to always run this step to ensure "include administrators" is always turned back on
32 | with:
33 | access-token: ${{ secrets.ACCESS_TOKEN }}
34 | enforce_admins: true
35 |
--------------------------------------------------------------------------------
/Arbie/Variables/trades.py:
--------------------------------------------------------------------------------
1 | """Trades contain variables for trading."""
2 |
3 | from Arbie.Variables.pool import Pools
4 | from Arbie.Variables.token import Tokens
5 |
6 |
7 | class Trade(object):
8 | def __init__(self, pools: Pools, path: Tokens, ratio=None, amount_in=None):
9 | self.pools = pools
10 | self.path = path
11 | self.amount_in = amount_in
12 | self.profit = None
13 | self.balance = None
14 | self.ratio = ratio
15 |
16 | def __len__(self):
17 | return len(self.path)
18 |
19 | def __str__(self):
20 | return f"""
21 | Trade(
22 | pools:{self.pools},
23 | path:{self.path},
24 | amount_in:{self.amount_in},
25 | profit:{self.profit},
26 | balance:{self.balance},
27 | ratio:{self.ratio})""" # noqa: WPS221
28 |
29 | def __iter__(self):
30 | return iter(self._generator())
31 |
32 | def __getitem__(self, i):
33 | return self.pools[i], self.path[i], self.path[i + 1] # noqa: WPS221
34 |
35 | def _generator(self):
36 | yield from (self[i] for i, _ in enumerate(self.pools))
37 |
--------------------------------------------------------------------------------
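Note: a minimal sketch of iterating a Trade (not a file in the repository), mirroring tests/unit/Variables/trade_test.py; the token addresses are illustrative.

```python
# Hypothetical example: a two-hop dai -> eth -> dai trade across two pools.
from Arbie.Variables import Pool, Token, Trade

dai = Token("dai", "0xaaaa")
eth = Token("eth", "0xbbbb")

pool1 = Pool([dai, eth], [400, 1], [0.5, 0.5])
pool2 = Pool([dai, eth], [410, 1], [0.5, 0.5])
trade = Trade([pool1, pool2], [dai, eth, dai])

for pool, token_in, token_out in trade:      # each hop yields (pool, token_in, token_out)
    print(pool, token_in, token_out)
```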
/.idea/runConfigurations/unitTests.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
--------------------------------------------------------------------------------
/tests/unit/token_test.py:
--------------------------------------------------------------------------------
1 | """Unittest of Token and Balance."""
2 | import pytest
3 |
4 | from Arbie.Variables import Balance, Token
5 |
6 | dai_string = "dai"
7 |
8 | dai = Token(dai_string, "aaaa")
9 | eth = Token("eth", "bbbb")
10 | tokens = [dai, eth]
11 |
12 |
13 | class TestToken(object):
14 | def test_str(self):
15 | dai_str = dai.__str__()
16 |
17 | assert dai_string in dai_str
18 |
19 | def test_repr(self):
20 | dai_repr = dai.__repr__()
21 |
22 | assert dai_string in dai_repr
23 | assert "aaaa" in dai_repr
24 |
25 |
26 | class TestBalance(object):
27 | def test_create(self):
28 | Balance.create(tokens, [1, 2])
29 |
30 | def test_create_wrong_len(self):
31 | with pytest.raises(ValueError):
32 | Balance.create(tokens, [1, 2, 3])
33 | with pytest.raises(ValueError):
34 | Balance.create(tokens, [1])
35 |
36 | def test_str_and_repr(self):
37 | bal = Balance(dai, 2.5)
38 | bal_str = bal.__str__()
39 | bal_repr = bal.__repr__()
40 |
41 | assert bal_str == bal_repr
42 | assert dai_string in bal_str
43 | assert "2.5" in bal_str
44 |
--------------------------------------------------------------------------------
/tests/unit/Actions/whitelist_test.py:
--------------------------------------------------------------------------------
1 | """Test whitelist."""
2 |
3 | import pytest
4 | from pytest_mock.plugin import MockerFixture
5 |
6 | from Arbie.Actions import ActionTree, Store, Whitelist
7 | from Arbie.Services import Coingecko
8 |
9 |
10 | class TestWhitelist(object):
11 | @pytest.mark.asyncio
12 | async def test_on_next(self, mocker: MockerFixture):
13 | mocker.patch.object(
14 | Coingecko,
15 | "coins",
16 | return_value=[
17 | "0xdac17f958d2ee523a2206206994597c13d831ec7",
18 | "0x514910771af9ca656af840dff83e8264ecf986ca",
19 | ],
20 | )
21 | store = Store()
22 | tree = ActionTree(store)
23 | tree.add_action(Whitelist())
24 | await tree.run()
25 |
26 | assert len(store.get("whitelist")) == 2
27 |
28 | @pytest.mark.asyncio
29 | async def test_on_next_no_coins(self, mocker: MockerFixture):
30 | mocker.patch.object(Coingecko, "coins", return_value=None)
31 | store = Store()
32 | tree = ActionTree(store)
33 | tree.add_action(Whitelist())
34 | with pytest.raises(ConnectionError):
35 | await tree.run()
36 |
--------------------------------------------------------------------------------
/Arbie/Actions/whitelist.py:
--------------------------------------------------------------------------------
1 | """Whitelist contains actions for finding good tokens."""
2 |
3 | import logging
4 |
5 | from Arbie.Actions import Action
6 | from Arbie.Services import Coingecko
7 |
8 | logger = logging.getLogger()
9 |
10 |
11 | class Whitelist(Action):
12 | """SetUp Trader account for trading.
13 |
14 | Coingecko is used for finding whitelisted tokens it has a limit
15 | of 10 requests/second. However in reality it is much lower.
16 | The base settings allow for 1 requests every 0.6 seconds
17 | is equal to 4 req/sec. In total we need to do ~6000 request
18 | which will take ~25 min.
19 | [Settings]
20 | input:
21 | requests: 4
22 | delay: 2
23 | retries: 4
24 | retrie_delay: 60
25 | output:
26 | whitelist: whitelist
27 | """
28 |
29 | async def on_next(self, data):
30 | gecko = Coingecko(
31 | data.requests(), data.delay(), data.retries(), data.retrie_delay()
32 | )
33 | coins = await gecko.coins()
34 | if not coins:
35 | raise ConnectionError("No Coins from Coingecko")
36 | data.whitelist(coins)
37 | logger.info(f"Whitelist: {coins}")
38 |
--------------------------------------------------------------------------------
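Note: a back-of-the-envelope check of the timing mentioned in the docstring above (not a file in the repository); the request count and rate are the rough figures from the docstring, not measured values.

```python
# Hypothetical example: estimate how long a full whitelist run takes.
total_requests = 6000        # roughly one request per Coingecko id
requests_per_second = 4      # effective rate assumed by the base settings
minutes = total_requests / requests_per_second / 60
print(minutes)               # 25.0 -> about 25 minutes per run
```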
/tests/unit/async_helpers_test.py:
--------------------------------------------------------------------------------
1 | """Unittest of async_helpers.py."""
2 | import asyncio
3 | from unittest.mock import MagicMock
4 |
5 | import pytest
6 |
7 | from Arbie.async_helpers import CircuitBreaker, async_map
8 |
9 |
10 | async def pass_through(some_value):
11 | await asyncio.sleep(0)
12 | return some_value
13 |
14 |
15 | @pytest.mark.asyncio
16 | async def test_async_map():
17 | res = await async_map(pass_through, range(3))
18 | assert res == [0, 1, 2]
19 |
20 |
21 | @pytest.mark.asyncio
22 | async def test_async_map_chunk():
23 | res = await async_map(pass_through, range(3), 2)
24 | assert res == [0, 1, 2]
25 |
26 |
27 | class TestCircuitBreaker(object):
28 | def test_safe_call(self):
29 | mock = MagicMock(side_effect=[Exception(), 1337]) # noqa: WPS432
30 | breaker = CircuitBreaker(2, 0, mock)
31 | value = breaker.safe_call()
32 | assert value == 1337 # noqa: WPS432
33 | assert mock.call_count == 2
34 |
35 | def test_safe_call_raises_after_retrie_count(self):
36 | mock = MagicMock(side_effect=ValueError()) # noqa: WPS432
37 | breaker = CircuitBreaker(1, 0, mock)
38 | with pytest.raises(ValueError):
39 | breaker.safe_call()
40 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (C) 2010-2011 Hideo Hattori
2 | Copyright (C) 2011-2013 Hideo Hattori, Steven Myint
3 | Copyright (C) 2013-2016 Hideo Hattori, Steven Myint, Bill Wendling
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining
6 | a copy of this software and associated documentation files (the
7 | "Software"), to deal in the Software without restriction, including
8 | without limitation the rights to use, copy, modify, merge, publish,
9 | distribute, sublicense, and/or sell copies of the Software, and to
10 | permit persons to whom the Software is furnished to do so, subject to
11 | the following conditions:
12 |
13 | The above copyright notice and this permission notice shall be
14 | included in all copies or substantial portions of the Software.
15 |
16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
19 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
20 | BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
21 | ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
22 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
23 | SOFTWARE.
--------------------------------------------------------------------------------
/.github/workflows/python-branch.yml:
--------------------------------------------------------------------------------
1 | name: Python Branch Workflow
2 | on: [push]
3 |
4 | jobs:
5 | build-python:
6 |
7 | runs-on: ubuntu-latest
8 |
9 | steps:
10 | - uses: actions/checkout@v2
11 | - name: Build and push docker
12 | id: build-docker
13 | run: |
14 | echo ${{ secrets.GITHUB_TOKEN }} | docker login docker.pkg.github.com -u $GITHUB_ACTOR --password-stdin
15 | docker pull docker.pkg.github.com/$GITHUB_REPOSITORY/build-cache-no-buildkit || true
16 | docker build . -t arbie --cache-from=docker.pkg.github.com/$GITHUB_REPOSITORY/build-cache-no-buildkit
17 | docker tag arbie docker.pkg.github.com/$GITHUB_REPOSITORY/build-cache-no-buildkit && docker push docker.pkg.github.com/$GITHUB_REPOSITORY/build-cache-no-buildkit || true
18 | - name: Test and Lint
19 | run: |
20 | docker-compose run arbie ./gradlew testAndLint
21 | - name: Publish Test Report
22 | if: always()
23 | uses: scacap/action-surefire-report@v1
24 | with:
25 | github_token: ${{ secrets.GITHUB_TOKEN }}
26 | - name: Upload coverage to Codecov
27 | uses: codecov/codecov-action@v1
28 | with:
29 | token: ${{ secrets.CODECOV_TOKEN }}
30 | file: build/tests/cov/coverage.xml
31 |
--------------------------------------------------------------------------------
/.idea/runConfigurations/PoolFinder.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/tests/unit/Actions/action_tree_test.py:
--------------------------------------------------------------------------------
1 | """Unittest of ActionTree."""
2 |
3 | import pytest
4 | import yaml
5 |
6 | from Arbie.Actions import Action, ActionTree, Store
7 |
8 |
9 | class DummyAction(Action):
10 | """
11 | Dummy description for dummy action.
12 |
13 | [Settings]
14 | input:
15 | output:
16 | """
17 |
18 |
19 | store = Store()
20 |
21 |
22 | class TestActionTree(object):
23 | def test_create(self):
24 | tree = ActionTree.create({"PathFinder": {}}, store)
25 | assert len(tree.actions) == 1
26 |
27 | def test_create_bad_arg(self):
28 | config = """
29 | PathFinder:
30 | input:
31 | non_existing: []
32 | """
33 | with pytest.raises(ValueError):
34 | ActionTree.create(yaml.safe_load(config), store)
35 |
36 | def test_create_bad_action(self):
37 | with pytest.raises(ValueError):
38 | ActionTree.create({"NonExistingAction": {}}, store)
39 |
40 | def test_create_dummy_action(self):
41 | config = """
42 | PathFinder:
43 | DummyAction:
44 | """
45 |
46 | tree = ActionTree.create(
47 | yaml.safe_load(config), store, [("DummyAction", DummyAction)]
48 | )
49 | assert len(tree.actions) == 2
50 |
--------------------------------------------------------------------------------
/Arbie/prometheus.py:
--------------------------------------------------------------------------------
1 | """Helper for using prometheus_client."""
2 |
3 | import logging
4 |
5 | from prometheus_client import Gauge, Summary, start_http_server
6 |
7 |
8 | def singleton(class_):
9 | instances = {}
10 |
11 | def getinstance(*args, **kwargs): # noqa: WPS430
12 | if class_ not in instances:
13 | instances[class_] = class_(*args, **kwargs)
14 | return instances[class_]
15 |
16 | return getinstance
17 |
18 |
19 | @singleton
20 | class Prometheus(object):
21 | def __init__(self):
22 | try:
23 | start_http_server(8000) # noqa: WPS432
24 | except OSError as e:
25 | logging.getLogger().info(e)
26 |
27 | self.metric_store = {}
28 |
29 | def gauge(self, name, description):
30 | return self._get_or_create(name, description, Gauge)
31 |
32 | def summary(self, name, description):
33 | return self._get_or_create(name, description, Summary)
34 |
35 | def _get_or_create(self, name, description, metric):
36 | valid_name = self._to_valid_name(name)
37 | if valid_name in self.metric_store:
38 | return self.metric_store[valid_name]
39 | self.metric_store[valid_name] = metric(valid_name, description)
40 | return self._get_or_create(valid_name, description, metric)
41 |
42 | def _to_valid_name(self, name):
43 | return name.replace(".", "_").lower()
44 |
45 |
46 | def get_prometheus():
47 | return Prometheus()
48 |
--------------------------------------------------------------------------------
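Note: a minimal usage sketch of the Prometheus helper above (not a file in the repository); the metric name is illustrative.

```python
# Hypothetical example: record a gauge through the singleton Prometheus wrapper.
from Arbie.prometheus import get_prometheus

prometheus = get_prometheus()    # starts the metrics HTTP server on port 8000 once
profit = prometheus.gauge("Trader.1.profit", "Profit of the last trade")
profit.set(1.37)

# Dots are normalised, so the same name always maps to the same metric object.
assert prometheus.gauge("Trader.1.profit", "Profit of the last trade") is profit
```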
/tests/unit/Actions/path_finder_test.py:
--------------------------------------------------------------------------------
1 | """Unittests of PathFinder."""
2 | import asyncio
3 | from unittest.mock import MagicMock
4 |
5 | import pytest
6 | import yaml
7 |
8 | from Arbie import SettingsParser
9 | from Arbie.Actions import Store
10 | from Arbie.app import App
11 |
12 |
13 | @pytest.fixture
14 | def config_file():
15 | return """
16 | action_tree:
17 | actions:
18 | PathFinder:
19 | input:
20 | weth: eth
21 | min_liquidity: 4
22 | output:
23 | cycles: found_cycles
24 | Arbitrage:
25 | """
26 |
27 |
28 | @pytest.fixture
29 | def store(pools, eth) -> Store:
30 | store = Store()
31 | store.add("pools", pools)
32 | future = asyncio.Future()
33 | future.set_result(eth)
34 | weth_mock = MagicMock()
35 | weth_mock.create_token.return_value = future
36 | store.add("eth", weth_mock)
37 | return store
38 |
39 |
40 | class TestPathFinder(object):
41 | @pytest.mark.asyncio
42 | async def test_run(self, store, config_file, mocker):
43 | mocker.patch.object(SettingsParser, "setup_store", return_value=store)
44 |
45 | config = yaml.safe_load(config_file)
46 | app = App(config)
47 | assert len(app.action_tree.actions) == 2
48 | await app.run()
49 | assert len(store["found_cycles"]) == 5
50 | trades = app.store["filtered_trades"]
51 | assert trades[0].profit == pytest.approx(1.354642)
52 |
--------------------------------------------------------------------------------
/tests/system/Contracts/uniswap_router_test.py:
--------------------------------------------------------------------------------
1 | """Test arbie smart contracts."""
2 |
3 | import pytest
4 |
5 | from Arbie.Actions.arbitrage import ArbitrageFinder
6 | from Arbie.Contracts import GenericToken, UniswapV2Router
7 | from Arbie.Variables import BigNumber
8 |
9 |
10 | class TestRouter(object):
11 | @pytest.mark.asyncio
12 | def test_out_given_in(self, router, trade):
13 | trade.amount_in = 1
14 | amount_out = router.check_out_given_in(trade)
15 | assert amount_out == pytest.approx(4.008, 1e-4)
16 |
17 | profit = ArbitrageFinder(trade).calculate_profit(1)
18 | assert profit == pytest.approx(amount_out - trade.amount_in)
19 |
20 | @pytest.mark.asyncio
21 | async def test_swap(
22 | self, trade, router: UniswapV2Router, weth: GenericToken, dummy_account
23 | ):
24 | trade.amount_in = 1
25 | weth.transfer(dummy_account.address, BigNumber(2))
26 | weth.set_account(dummy_account)
27 | balance_before = await weth.balance_of(dummy_account.address)
28 |
29 | assert balance_before > 1
30 |
31 | weth.approve(router.get_address(), BigNumber(2))
32 |
33 | router.set_account(dummy_account)
34 | assert router.swap(trade)
35 | amount_out = router.check_out_given_in(trade)
36 |
37 | balance_after = await weth.balance_of(dummy_account.address)
38 | assert (
39 | balance_after.to_number() - balance_before.to_number()
40 | > amount_out - trade.amount_in - 0.001
41 | )
42 |
--------------------------------------------------------------------------------
/Arbie/Contracts/uniswap_router.py:
--------------------------------------------------------------------------------
1 | """Utility functions for interacting with Arbie.sol."""
2 |
3 | import logging
4 |
5 | from Arbie.Contracts.contract import Contract
6 | from Arbie.Contracts.tokens import GenericToken
7 | from Arbie.Variables import BigNumber, Trade
8 |
9 | logger = logging.getLogger()
10 |
11 |
12 | class UniswapV2Router(Contract):
13 | name = "UniswapV2Router02"
14 | protocol = "uniswap"
15 |
16 | def approve(self, weth: GenericToken):
17 | if weth.allowance(self.get_address()) < BigNumber(10e6): # noqa: WPS432
18 | return weth.approve(self.get_address(), BigNumber(10e8)) # noqa: WPS432
19 | return True
20 |
21 | def check_out_given_in(self, trade: Trade):
22 | path_address = list(map(lambda t: t.address, trade.path))
23 | amount_out = self.contract.functions.getAmountsOut(
24 | BigNumber(trade.amount_in).value, path_address
25 | ).call()
26 | return BigNumber.from_value(amount_out[-1]).to_number()
27 |
28 | def swap(self, trade, dry_run=False):
29 | return self._transact_info(self._swap_transaction(trade), dry_run=dry_run)
30 |
31 | def _swap_transaction(self, trade):
32 | path = list(map(lambda t: t.address, trade.path))
33 | return self.contract.functions.swapExactTokensForTokens(
34 | BigNumber(trade.amount_in).value,
35 | BigNumber(trade.amount_in).value,
36 | path,
37 | self._get_account(),
38 | self.w3.eth.getBlock("latest").timestamp + self.timeout,
39 | )
40 |
--------------------------------------------------------------------------------
/Arbie/Variables/token.py:
--------------------------------------------------------------------------------
1 | """Basic token representation."""
2 | from typing import List, NewType
3 |
4 |
5 | class Token(object):
6 | """Token is a light weight representation of a ERC20 token."""
7 |
8 | def __init__(self, name: str, address: str, price: float = None):
9 | self.name = name
10 | self.price = price
11 | self.address = address
12 |
13 | def __str__(self):
14 | return self.name
15 |
16 | def __repr__(self):
17 | return f"Token(Name: {self.name}, Price: {self.price}, Address: {self.address})" # noqa: WPS221
18 |
19 | def __eq__(self, other):
20 | return hash(self) == hash(other)
21 |
22 | def __hash__(self):
23 | return hash(self.address)
24 |
25 |
26 | class Balance(object):
27 | """Balance of token."""
28 |
29 | def __init__(self, token: Token, value: float):
30 | self.token = token
31 | self.value = value
32 |
33 | def __str__(self):
34 | return f"Balance(Token: {self.token}, Value: {self.value})"
35 |
36 | def __repr__(self):
37 | return self.__str__()
38 |
39 | @classmethod
40 | def create(cls, tokens: List[Token], values: List[float]) -> List[object]:
41 | if len(tokens) != len(values):
42 | raise ValueError("All inputs must be of same length.")
43 |
44 | balances = []
45 | for token, value in zip(tokens, values):
46 | balances.append(cls(token, value))
47 | return balances
48 |
49 |
50 | Balances = NewType("Balances", List[Balance])
51 | Tokens = NewType("Tokens", List[Token])
52 |
--------------------------------------------------------------------------------
/tests/unit/conftest.py:
--------------------------------------------------------------------------------
1 | """Help module for web3 tests."""
2 | from typing import List
3 |
4 | import pytest
5 |
6 | from Arbie.address import dummy_token_generator
7 | from Arbie.Variables import Pool, PoolType, Token
8 |
9 | small = 10000
10 | medium = 1000000
11 | large = 100000000
12 |
13 |
14 | @pytest.fixture
15 | def eth() -> Token:
16 | return dummy_token_generator("eth", 1)
17 |
18 |
19 | @pytest.fixture
20 | def dai() -> Token:
21 | return dummy_token_generator("dai", 300)
22 |
23 |
24 | @pytest.fixture
25 | def btc() -> Token:
26 | return dummy_token_generator("btc", 3.0 / 100)
27 |
28 |
29 | @pytest.fixture
30 | def yam() -> Token:
31 | return dummy_token_generator("yam", 3000)
32 |
33 |
34 | @pytest.fixture
35 | def pools(dai, eth, btc, yam) -> List[Pool]:
36 | return [
37 | Pool(
38 | [eth, dai, yam],
39 | [small / 303.0, small / 0.9, small / 0.1],
40 | [1 / 3.0, 1 / 3.0, 1 / 3.0],
41 | PoolType.uniswap,
42 | 0.004,
43 | ),
44 | Pool(
45 | [eth, btc],
46 | [large / 305.0, large / 10000],
47 | [5 / 6, 1 / 6],
48 | PoolType.uniswap,
49 | 0.01,
50 | ),
51 | Pool(
52 | [eth, dai, btc],
53 | [medium / 301.0, medium / 1.1, medium / 10020],
54 | [1 / 2.0, 1 / 4.0, 1 / 4.0],
55 | PoolType.uniswap,
56 | 0.004,
57 | ),
58 | Pool(
59 | [dai, yam],
60 | [small / 1.1, small / 0.1],
61 | [1 / 2.0, 1 / 2.0],
62 | PoolType.balancer,
63 | 0.001,
64 | ),
65 | ]
66 |
--------------------------------------------------------------------------------
/Arbie/async_helpers.py:
--------------------------------------------------------------------------------
1 | """Helper functions for working with async."""
2 |
3 | import asyncio
4 | import logging
5 | import time
6 | from concurrent.futures import ThreadPoolExecutor
7 |
8 | logger = logging.getLogger()
9 |
10 | thread_pool = ThreadPoolExecutor(max_workers=15) # noqa: WPS432
11 |
12 |
13 | def chunked(seq, chunk_size):
14 | yield from (
15 | seq[index : index + chunk_size]
16 | for index in range(0, len(seq), chunk_size) # noqa: WPS518
17 | )
18 |
19 |
20 | async def async_map_chunk(function, seq):
21 | tasks = [function(i) for i in seq]
22 | return await asyncio.gather(*tasks)
23 |
24 |
25 | async def async_map(function, seq, chunk_size=100, wait_time=0):
26 | res = []
27 | for chunk in chunked(seq, chunk_size):
28 | res.extend(await async_map_chunk(function, chunk))
29 | await asyncio.sleep(wait_time)
30 | return res
31 |
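# Illustrative usage of async_map (a minimal sketch; ``work`` is any coroutine
# function and exists only for this example). The helper splits the sequence
# into chunks of ``chunk_size``, awaits each chunk with asyncio.gather and
# sleeps ``wait_time`` seconds after each chunk, which is useful against
# rate-limited APIs:
#
#     async def work(i):
#         return i * i
#
#     asyncio.run(async_map(work, range(5), chunk_size=2))  # [0, 1, 4, 9, 16]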
32 |
33 | async def run_async(function, *args):
34 | loop = asyncio.get_running_loop()
35 | return await loop.run_in_executor(thread_pool, function, *args)
36 |
37 |
38 | class CircuitBreaker(object):
39 | def __init__(self, retries, timeout, func):
40 | self.retries = retries
41 | self.timeout = timeout
42 | self.func = func
43 |
44 | def safe_call(self, *args):
45 | for i in range(0, self.retries):
46 | try:
47 | return self.func(*args)
48 | except Exception as e:
49 | logger.warning(f"Failed to use network resource, retry number {i}")
50 | logger.warning(f"{e}")
51 | if i == self.retries - 1:
52 | raise e
53 | time.sleep(self.timeout)
54 |
--------------------------------------------------------------------------------
/tests/unit/Variables/pool_test.py:
--------------------------------------------------------------------------------
1 | """Unittest of pool."""
2 | import pytest
3 |
4 | from Arbie import PoolValueError
5 | from Arbie.address import dummy_address_generator, dummy_token_generator
6 | from Arbie.Variables import Pool
7 |
8 |
9 | @pytest.fixture
10 | def tokens(dai, eth):
11 | return [dai, eth]
12 |
13 |
14 | @pytest.fixture
15 | def pool(tokens):
16 | return Pool(tokens, [4000, 10], [0.5, 0.5])
17 |
18 |
19 | class TestPool(object):
20 | def test_init_multi(self, tokens):
21 | new_tokens = tokens + [dummy_token_generator("sai", 100)]
22 | pool = Pool(
23 | new_tokens, [4e3, 10, 10e5], [1 / 3.0, 1 / 3.0, 1 / 3.0] # noqa: WPS221
24 | ) # noqa: WPS221
25 | assert pool.spot_price(new_tokens[0], new_tokens[1]) == 400
26 | assert pool.spot_price(new_tokens[2], new_tokens[1]) == 100000
27 |
28 | def test_init_fee_raises(self, tokens):
29 | with pytest.raises(PoolValueError):
30 | Pool(tokens, [400, 1], [0.75, 0.25], fee=2)
31 |
32 | def test_init_fee(self, tokens):
33 | address = dummy_address_generator()
34 | pool = Pool(tokens, [400, 1], [0.75, 0.25], address=address)
35 | assert pool.address == address
36 |
37 | def test_pool_bad_weights(self, tokens):
38 | with pytest.raises(PoolValueError):
39 | Pool(tokens, [400, 1], [0.7, 0.25])
40 |
41 | def test_spot_price(self, pool, dai, eth):
42 | assert pool.spot_price(dai, eth) == 400 # noqa: WPS432
43 |
44 | def test_out_give_in(self, pool, dai, eth):
45 | assert pool.out_given_in(eth, dai, 1) == pytest.approx(
46 | 363.636363636364
47 | ) # noqa: WPS432
48 |
49 | def test_in_give_out(self, pool, dai, eth):
50 | assert pool.in_given_out(dai, eth, 1) == pytest.approx(
51 | 444.444444444444
52 | ) # noqa: WPS432
53 |
--------------------------------------------------------------------------------
/Arbie/__main__.py:
--------------------------------------------------------------------------------
1 | """Arbie.
2 |
3 | Arbie is configured through a Yaml file.
4 |
5 | Usage:
6 | Arbie -f config.yml [-l arbie.log.txt]
7 | Arbie (-h | --help)
8 | Arbie (-v | --version)
9 |
10 | Options:
11 | -f --file=config.yml Load configuration file.
12 |
13 | -l --log=arbie.log Path to log files [default: arbie.log]
14 |
15 | -h --help Show this screen.
16 | -v --version Show version.
17 |
18 | """
19 | import asyncio
20 | import logging
21 | from logging.handlers import RotatingFileHandler
22 |
23 | import yaml
24 | from docopt import docopt
25 |
26 | import Arbie
27 | from Arbie.app import App
28 |
29 | max_log_size = int(10e6) # noqa: WPS432
30 |
31 |
32 | def setup_logging(log_file: str, severity=logging.INFO):
33 |     formatter = "%(asctime)s [%(threadName)-12.12s] [%(levelname)-5.5s] %(message)s"
34 |     logging.basicConfig(
35 |         level=severity, format=formatter, datefmt="%m-%d %H:%M"
36 |     ) # noqa: WPS323
37 | 
38 |     root_logger = logging.getLogger()
39 | 
40 |     file_handler = RotatingFileHandler(log_file, maxBytes=max_log_size, backupCount=5)
41 |     file_handler.setFormatter(logging.Formatter(formatter))
42 | file_handler.setLevel(severity)
43 | root_logger.addHandler(file_handler)
44 |
45 |
46 | def main(argv=None):
47 | arguments = docopt(__doc__, argv, version=Arbie.__version__) # noqa: WPS609
48 |
49 |     setup_logging(str(arguments["--log"]))
50 | 
51 |     logging.getLogger().info(f"arguments: {arguments}")
52 |
53 | config_path = str(arguments["--file"])
54 | config = None
55 | with open(config_path, "r") as config_file:
56 | config = yaml.safe_load(config_file)
57 |
58 | app = App(config)
59 |
60 | try:
61 | asyncio.run(app.run())
62 | except Exception as ex:
63 | logging.getLogger().error(ex)
64 | raise ex
65 |
66 |
67 | if __name__ == "__main__":
68 | main()
69 |
--------------------------------------------------------------------------------
/tests/system/Contracts/token_test.py:
--------------------------------------------------------------------------------
1 | """Test uniswap contracts."""
2 | import pytest
3 |
4 | from Arbie.Contracts import ContractFactory
5 | from Arbie.Contracts.tokens import GenericToken, Weth
6 | from Arbie.Variables import BigNumber
7 |
8 | bg10 = BigNumber(10)
9 |
10 |
11 | @pytest.fixture
12 | def token_factory(w3) -> ContractFactory:
13 | return ContractFactory(w3, GenericToken)
14 |
15 |
16 | @pytest.fixture
17 | def dai(deploy_address, token_factory) -> GenericToken:
18 | return token_factory.deploy_contract(deploy_address, "Dai", "DAI", bg10.value)
19 |
20 |
21 | @pytest.mark.asyncio
22 | async def test_decimals(dai: GenericToken):
23 | assert await dai.decimals() == 18
24 |
25 |
26 | class TestToken(object):
27 | def test_equals(self, dai, token_factory):
28 | dai2 = token_factory.load_contract(
29 | owner_address=dai.owner_address, address=dai.get_address()
30 | )
31 | assert dai == dai2
32 |
33 | def test_approve(self, dai: GenericToken, deploy_address):
34 | assert dai.approve(deploy_address, bg10)
35 |
36 | @pytest.mark.asyncio
37 | async def test_transfer(self, dai: GenericToken, deploy_address, dummy_address):
38 | dai.approve(deploy_address, bg10)
39 | dai.transfer(dummy_address, bg10)
40 | bg = await dai.balance_of(dummy_address)
41 | assert bg.to_number() == 10
42 |
43 | @pytest.mark.asyncio
44 | async def test_name(self, dai: GenericToken):
45 | name = await dai.get_name()
46 | assert name == "Dai"
47 |
48 |
49 | class TestWeth(object):
50 | @pytest.mark.asyncio
51 | async def test_deposit_withdraw(self, real_weth: Weth, dummy_address):
52 | real_weth.set_owner_address(dummy_address)
53 | real_weth.deposit(2)
54 | assert await real_weth.balance_of(dummy_address) == BigNumber(2)
55 |
56 | real_weth.withdraw(2)
57 | assert await real_weth.balance_of(dummy_address) == BigNumber(0)
58 |
--------------------------------------------------------------------------------
/tests/system/Actions/redis_state_test.py:
--------------------------------------------------------------------------------
1 | """System tests for RedisState."""
2 | import pickle # noqa: S403
3 |
4 | import pytest
5 |
6 | from Arbie.Actions.redis_state import RedisState
7 | from Arbie.address import dummy_token_generator
8 |
9 | collection_key = "pool_finder.1.pools"
10 | item_key = "pool_finder.1.pools.0xAb12C"
11 |
12 |
13 | @pytest.fixture
14 | def token():
15 | return dummy_token_generator("my_token")
16 |
17 |
18 | @pytest.fixture
19 | def tokens():
20 | return [dummy_token_generator("token1"), dummy_token_generator("token2")]
21 |
22 |
23 | class TestRedisState(object):
24 | @pytest.fixture
25 | def redis_item(self, redis_state, token):
26 | redis_state.r.set(item_key, pickle.dumps(token))
27 | yield None
28 | redis_state.delete(item_key)
29 |
30 | @pytest.fixture
31 | def redis_collection(self, redis_state: RedisState, tokens):
32 | redis_state[collection_key] = tokens
33 | yield None
34 | redis_state.delete(collection_key)
35 |
36 | def test_get_empty_state(self, redis_state):
37 | with pytest.raises(KeyError):
38 | redis_state[collection_key]
39 | with pytest.raises(KeyError):
40 | redis_state[item_key]
41 |
42 | def test_get_item(self, redis_state, redis_item, token):
43 | assert redis_state[item_key] == token
44 |
45 | def test_get_collection(self, redis_state: RedisState, redis_collection, tokens):
46 | collection = redis_state[collection_key]
47 |
48 | for t in tokens:
49 | collection.index(t)
50 |
51 | def test_add_and_get(self, redis_state: RedisState, token):
52 | redis_state[item_key] = token
53 | token_round_trip = redis_state[item_key]
54 | redis_state.delete(item_key)
55 | assert token_round_trip == token
56 |
57 | def test_multiple_add_and_get(self, redis_state):
58 | redis_state[collection_key] = [1, 2]
59 | redis_state[collection_key] = [3, 4]
60 | collection = redis_state[collection_key]
61 | redis_state.delete(collection_key)
62 | assert len(collection) == 2
63 |
--------------------------------------------------------------------------------
/tests/unit/main_test.py:
--------------------------------------------------------------------------------
1 | """Unittest of __main__.py."""
2 | import pytest
3 | from pytest_mock.plugin import MockerFixture
4 |
5 | from Arbie.__main__ import main
6 | from Arbie.Actions import Store
7 |
8 |
9 | @pytest.fixture
10 | def config_file() -> str:
11 | return """
12 | action_tree:
13 | actions:
14 | PathFinder:
15 | input:
16 | weth: weth
17 | min_liquidity: 4
18 | output:
19 | cycles: found_cycles
20 | Arbitrage:
21 | """
22 |
23 |
24 | @pytest.fixture
25 | def store() -> Store:
26 | store = Store()
27 | store.add("pools", None)
28 | store.add("weth", None)
29 | return store
30 |
31 |
32 | def setup_mocks(mocker, config_file):
33 | mocker.patch("Arbie.__main__.RotatingFileHandler")
34 | mocker.patch("Arbie.__main__.logging.getLogger")
35 | if config_file is not None:
36 | mocker.patch("builtins.open", mocker.mock_open(read_data=config_file))
37 |
38 |
39 | def run_main():
40 | main(["-f", "giberich.yml"])
41 |
42 |
43 | class TestMain(object):
44 | def test_config_not_found(self, mocker: MockerFixture):
45 | setup_mocks(mocker, None)
46 | with pytest.raises(FileNotFoundError):
47 | run_main()
48 |
49 | def test_setup_and_run(self, config_file: str, store: Store, mocker: MockerFixture):
50 | setup_mocks(mocker, config_file)
51 | run_mock = mocker.patch("Arbie.settings_parser.ActionTree.run")
52 |
53 | run_main()
54 | assert run_mock.called
55 |
56 | def test_fail_on_run(self, config_file: str, store: Store, mocker: MockerFixture):
57 | setup_mocks(mocker, config_file)
58 | run_mock = mocker.patch("Arbie.settings_parser.ActionTree.run")
59 | run_mock.side_effect = ValueError("Failed to run action")
60 |
61 | with pytest.raises(ValueError):
62 | run_main()
63 |
64 | def test_key_not_in_store(self, config_file: str, mocker: MockerFixture):
65 | setup_mocks(mocker, config_file)
66 |
67 | with pytest.raises(ValueError):
68 | run_main()
69 |
--------------------------------------------------------------------------------
/Arbie/Actions/pool_updater.py:
--------------------------------------------------------------------------------
1 | """Pool updater updates pools and tokens."""
2 | import logging
3 |
4 | from Arbie import PoolValueError
5 | from Arbie.Actions import Action
6 | from Arbie.async_helpers import async_map, run_async
7 | from Arbie.Contracts import BalancerPool, ContractFactory, UniswapPair
8 | from Arbie.Variables import PoolType
9 |
10 | logger = logging.getLogger()
11 |
12 |
13 | class PoolUpdater(Action):
14 | """Pool Updater updates pools and tokens.
15 |
16 | [Settings]
17 | input:
18 | web3: web3
19 | pools: all_pools
20 | output:
21 | new_pools: all_pools
22 | """
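    # The [Settings] block above maps the names used in on_next() onto store
    # keys. A configuration sketch (following the YAML shape used in
    # tests/unit/main_test.py; not a complete, runnable setup) would wire this
    # action up as:
    #
    #   action_tree:
    #     actions:
    #       PoolUpdater:
    #         input:
    #           web3: web3
    #           pools: all_pools
    #         output:
    #           new_pools: all_pools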
23 |
24 | def __init__(self, config=None):
25 | self.pair_factory = None
26 | self.pool_factory = None
27 | super().__init__(config)
28 |
29 | async def on_next(self, data):
30 | web3 = data.web3()
31 |
32 | self.pair_factory = ContractFactory(web3, UniswapPair)
33 | self.pool_factory = ContractFactory(web3, BalancerPool)
34 |
35 | pools = await self._update_pools(data.pools())
36 |
37 | data.new_pools(list(filter(None, pools)))
38 |
39 | def _get_contract(self, address, pool_type):
40 | if pool_type == PoolType.uniswap:
41 | return self.pair_factory.load_contract(address=address)
42 | if pool_type == PoolType.balancer:
43 | return self.pool_factory.load_contract(address=address)
44 | raise ValueError("Cannot update pool with unknown type")
45 |
46 | async def _update_pool(self, pool):
47 | pool_contract = await run_async(
48 | self._get_contract, pool.address, pool.pool_type
49 | )
50 | balances = await pool_contract.get_balances()
51 | balances_numb = [balance.to_number() for balance in balances]
52 | try:
53 | pool.update_balances(balances_numb)
54 | except PoolValueError as e:
55 |             logger.warning(e)
56 | return None
57 | logger.info(f"Updated Pool balance {pool.address}")
58 | return pool
59 |
60 | async def _update_pools(self, pools):
61 | logger.info(f"Updating pools, total {len(pools)}")
62 | return await async_map(self._update_pool, pools)
63 |
--------------------------------------------------------------------------------
/Arbie/Contracts/event_filter.py:
--------------------------------------------------------------------------------
1 | """Event filter is a helper class for filtering events with asyncio."""
2 | import logging
3 |
4 | from prometheus_async.aio import time
5 |
6 | from Arbie.async_helpers import async_map, run_async
7 | from Arbie.prometheus import get_prometheus
8 |
9 | GET_ENTRIES = get_prometheus().summary(
10 | "event_filter_get_entries", "Time for getting entries"
11 | )
12 |
13 |
14 | class EventTransform(object):
15 | def __init__(self, event_filter, event_transform):
16 | self.event_filter = event_filter
17 | self.event_transform = event_transform
18 |
19 | def run(self):
20 | events = self.event_filter.get_all_entries()
21 | return self.event_transform(events)
22 |
23 |
24 | class EventFilter(object):
25 | def __init__(self, event, event_transform, from_block, to_block, steps):
26 | self.from_block = from_block
27 | self.to_block = to_block
28 | self.steps = steps
29 | self.event = event
30 | self.event_transform = event_transform
31 |
32 | async def find_events(self):
33 | nmb_event_chunks = int((self.to_block - self.from_block) / self.steps) + 1
34 | results = await async_map(self._get_entries, range(nmb_event_chunks))
35 |
36 | # results is a list of lists, that we need to flatten
37 | # https://stackoverflow.com/questions/952914/how-to-make-a-flat-list-out-of-list-of-lists
38 | return [item for sublist in results for item in sublist]
39 |
40 | async def _get_entries(self, index):
41 | from_block = index * self.steps + self.from_block
42 | to_block = from_block + self.steps - 1
43 | if to_block > self.to_block:
44 | to_block = self.to_block
45 | logging.getLogger().info(
46 | f"Searching for Pools in block range [{from_block}:{to_block}]"
47 | )
48 | return await self._get_entries_range(from_block, to_block)
49 |
50 | @time(GET_ENTRIES)
51 | async def _get_entries_range(self, from_block, to_block):
52 | event_filter = self.event.createFilter(
53 | fromBlock=int(from_block), toBlock=int(to_block)
54 | )
55 | ev = EventTransform(event_filter, self.event_transform)
56 | return await run_async(ev.run)
57 |
--------------------------------------------------------------------------------
/Arbie/Contracts/pool_contract.py:
--------------------------------------------------------------------------------
1 | """Base abstract class for pool contracts."""
2 |
3 | from math import isclose
4 | from typing import List
5 |
6 | from Arbie import PoolValueError
7 | from Arbie.async_helpers import async_map
8 | from Arbie.Contracts.contract import Contract
9 | from Arbie.Contracts.tokens import GenericToken
10 | from Arbie.Variables import BigNumber, Pool, PoolType, Token
11 |
12 |
13 | async def create_token(token_contract: GenericToken):
14 | return await token_contract.create_token()
15 |
16 |
17 | class PoolContract(Contract):
18 | async def get_tokens(self) -> List[GenericToken]:
19 | raise NotImplementedError()
20 |
21 | async def get_balances(self) -> List[BigNumber]:
22 | raise NotImplementedError()
23 |
24 | async def get_weights(self) -> List[float]:
25 | raise NotImplementedError()
26 |
27 | async def get_fee(self) -> float:
28 | raise NotImplementedError()
29 |
30 | def get_type(self) -> PoolType:
31 | raise NotImplementedError()
32 |
33 | async def create_tokens(self) -> List[Token]:
34 | tokens = await self.get_tokens()
35 | return await async_map(create_token, tokens)
36 |
37 | async def create_pool(self) -> Pool:
38 | tokens = await self.create_tokens()
39 | if len(tokens) < 2:
40 | raise PoolValueError(
41 | f"Pool: {self.get_address()}, has insufficient tokens: {len(tokens)}"
42 | )
43 | balances_bg = await self.get_balances()
44 | balances = list(map((lambda bg: bg.to_number()), balances_bg))
45 | if isclose(sum(balances), 0, abs_tol=1e-3): # noqa: WPS432
46 | raise PoolValueError(
47 | f"Pool: {self.get_address()}, balances {sum(balances)} == 0"
48 | )
49 |
50 | weights = await self.get_weights()
51 | if not isclose(sum(weights), 1, abs_tol=1e-3): # noqa: WPS432
52 | raise PoolValueError(
53 | f"Pool: {self.get_address()}, weights {sum(weights)} != 1"
54 | )
55 | return Pool(
56 | tokens,
57 | balances,
58 | weights,
59 | self.get_type(),
60 | await self.get_fee(),
61 | address=self.get_address(),
62 | )
63 |
--------------------------------------------------------------------------------
/tests/system/Contracts/uniswap_test.py:
--------------------------------------------------------------------------------
1 | """Test uniswap contracts."""
2 | import asyncio
3 |
4 | import pytest
5 |
6 | from Arbie import IERC20TokenError
7 | from Arbie.Contracts.tokens import BadERC20Token, GenericToken
8 | from Arbie.Contracts.uniswap import UniswapFactory, UniswapPair
9 | from Arbie.Variables import BigNumber, PoolType
10 |
11 | bg10 = BigNumber(10)
12 | bg5 = BigNumber(5)
13 |
14 | pytestmark = pytest.mark.asyncio
15 |
16 |
17 | @pytest.fixture
18 | async def factory_with_pair(factory, dai, weth) -> UniswapFactory:
19 | await factory.create_pair(dai, weth)
20 | return factory
21 |
22 |
23 | async def test_get_all_pairs_length(factory):
24 | assert await factory.all_pairs_length() == 0
25 |
26 |
27 | async def test_create_pair(factory_with_pair):
28 | assert await factory_with_pair.all_pairs_length() == 1
29 |
30 |
31 | async def test_get_all_pairs(factory_with_pair):
32 | assert len(await factory_with_pair.all_pairs()) == 1
33 |
34 |
35 | @pytest.fixture
36 | async def pair(factory_with_pair) -> UniswapPair:
37 | pairs = await factory_with_pair.all_pairs()
38 | return pairs[0]
39 |
40 |
41 | async def test_get_weights(pair):
42 | assert await pair.get_balances() == [0, 0]
43 |
44 |
45 | async def test_mint(
46 | pair: UniswapPair, dai: GenericToken, weth: GenericToken, deploy_address
47 | ):
48 | assert dai.transfer(pair.get_address(), bg10)
49 | assert weth.transfer(pair.get_address(), bg10)
50 | assert pair.mint(deploy_address)
51 | assert await pair.get_balances() == [bg10, bg10]
52 |
53 |
54 | async def test_create_pool(
55 | pair: UniswapPair, dai: GenericToken, weth: GenericToken, deploy_address
56 | ):
57 | dai.transfer(pair.get_address(), bg5)
58 | weth.transfer(pair.get_address(), bg10)
59 | pair.mint(deploy_address)
60 | pool = await pair.create_pool()
61 | tokens = await asyncio.gather(weth.create_token(), dai.create_token())
62 |
63 | assert pool.pool_type == PoolType.uniswap
64 | assert pool.spot_price(tokens[0], tokens[1]) == 2
65 | balances = pool.get_balances(tokens[0], tokens[1])
66 | assert balances[0] == 10
67 | assert balances[1] == 5
68 |
69 |
70 | async def test_create_bad_pool(
71 | factory: UniswapFactory, bad: BadERC20Token, dai: GenericToken
72 | ):
73 | pair = await factory.create_pair(bad, dai)
74 | with pytest.raises(IERC20TokenError):
75 | await pair.create_pool()
76 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Arbie
2 |
3 | [](https://www.codefactor.io/repository/github/owodunni/arbie) [](https://github.com/wemake-services/wemake-python-styleguide) [](https://codecov.io/gh/owodunni/Arbie) [](https://github.com/owodunni/arbie) [](https://github.com/owodunni/GageRnR/blob/master/LICENSE)
4 | [](https://pypi.org/project/Arbie/)
5 |
6 |
7 | Arbie is a greedy crypto pirate!
8 |
9 | 
10 |
11 | ## Run
12 |
13 | Run Brig with docker-compose:
14 |
15 | ```
16 | cd Brig && docker-compose up -d
17 | ```
18 |
19 | ## Getting started
20 |
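Arbie is driven by a YAML configuration file, see `Arbie/__main__.py` for the
full usage text. A minimal sketch, assuming the package is installed and a
`conf.yml` exists in the working directory:

```
python -m Arbie -f conf.yml -l arbie.log
```

The configuration wires actions together through the store. The snippet below
only illustrates the `action_tree` layout used in the unit tests and is not a
complete, runnable setup:

```
action_tree:
  actions:
    PathFinder:
      input:
        weth: weth
        min_liquidity: 4
      output:
        cycles: found_cycles
```
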
21 | ## Develop
22 |
23 | Instructions for developing Arbie using Docker or a virtual-env.
24 |
25 | To setup the development environment run:
26 |
27 | ```
28 | ./gradlew venv && source .venv/bin/activate && ./gradlew setup
29 | ```
30 |
31 | It will run the steps below and make sure that all tools required for Arbie
32 | are set up.
33 |
34 | ### Docker
35 |
36 | The Arbie repository can be built using Docker. This is probably the simplest
37 | approach if you just want to get things building.
38 |
39 | ```
40 | docker build . -t arbie
41 | ```
42 |
43 | You can now use the newly created docker image to build and test with.
44 |
45 | test:
46 | ```
47 | docker-compose run --rm arbie ./gradlew tAL
48 | ```
49 |
50 | ### Virtual-env
51 |
52 | Create a virtual env:
53 | ```
54 | ./gradlew venv
55 | ```
56 |
57 | Activate the virtual env:
58 | ```
59 | source .venv/bin/activate
60 | ```
61 |
62 | Install requirements:
63 | ```
64 | ./gradlew pip
65 | ```
66 |
67 | lint:
68 | ```
69 | ./gradlew lint
70 | ```
71 |
72 | ### Commits
73 |
74 | Arbie works with [semantic-release](https://python-semantic-release.readthedocs.io/en/latest/)
75 | and therefore has a special commit style. We use [Angular style](https://github.com/angular/angular.js/blob/master/DEVELOPERS.md#commits) commits. A helpful tool for ensuring the correct commit style is [commitizen](https://github.com/commitizen/cz-cli).
76 |
77 | Simply run when committing:
78 | ```
79 | cz c
80 | ```
81 |
82 | ### Pre commit hooks
83 |
84 | To enforce code standards we use [pre-commit](https://pre-commit.com/), which manages
85 | our pre-commit hooks.
86 | 
87 | To set it up, run:
88 | ```
89 | pre-commit install
90 | ```
91 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | name = Arbie
3 | version = attr: Arbie.__version__
4 | author = Alexander Poole
5 | author-email = alex.o.poole@gmail.com
6 | long-description = file: README.md
7 | long-description-content-type = text/markdown
8 | requires-python = >=3.7
9 | license = MIT
10 | include_package_data=True
11 |
12 | [semantic_release]
13 | version_variable = Arbie/__init__.py:__version__
14 | commit_subject = bump(version): Release v{version}
15 |
16 | [isort]
17 | multi_line_output = 3
18 | include_trailing_comma = True
19 | force_grid_wrap = 0
20 | use_parentheses = True
21 | ensure_newline_before_comments = True
22 | line_length = 88
23 |
24 | [flake8]
25 | exclude = .git,__pycache__,build,dist
26 | max-complexity = 10
27 | inline-quotes = "
28 | max-line-length = 127
29 | max-returns = 2
30 | max-arguments = 3
31 | ignore = D102,D103,D107,DAR101,DAR201,WPS412,WPS305,WPS111,WPS211,WPS214,WPS110,D105,D101,WPS212,WPS318,WPS319,S101,WPS337,W503,W504,WPS529,RST301,RST201,C812,WPS355,WPS237,WPS615,WPS462
32 | per-file-ignores =
33 | Arbie/Actions/pool_finder.py: WPS232, WPS231
34 | Arbie/Actions/redis_state.py: WPS226
35 | Arbie/Variables/graph.py: WPS220
36 | Arbie/__main__.py: WPS323
37 | tests/system/Actions/action_tree_test.py: WPS432,WPS442
38 | tests/system/Actions/conftest.py: WPS442
39 | tests/system/Actions/pool_updater_test.py: WPS442
40 | tests/system/Actions/redis_state_test.py: WPS428, WPS442
41 | tests/system/Actions/trader_test.py: WPS442, WPS432
42 | tests/system/Contracts/balancer_test.py: WPS202,WPS442
43 | tests/system/Contracts/token_test.py: WPS432,WPS442
44 | tests/system/Contracts/uniswap_router_test.py: WPS432
45 | tests/system/Contracts/uniswap_test.py: WPS202,WPS442
46 | tests/system/app_test.py: WPS442, WPS432, E800, WPS226, WPS218
47 | tests/system/conftest.py: WPS432,WPS202,WPS442
48 | tests/unit/Actions/action_test.py: WPS432, WPS442, WPS226, WPS202
49 | tests/unit/Actions/arbitrage_test.py: WPS432, WPS442
50 | tests/unit/Actions/conftest.py: WPS442
51 | tests/unit/Actions/path_finder_test.py: WPS432, WPS442
52 | tests/unit/Actions/redis_state_test.py: WPS428, WPS432
53 | tests/unit/Actions/trader_test.py: WPS432
54 | tests/unit/Services/coingecko_test.py: WPS226, WPS114, WPS442
55 | tests/unit/Variables/graph_test.py: WPS432, WPS317, WPS345, WPS442
56 | tests/unit/Variables/pool_test.py: WPS432, WPS442
57 | tests/unit/conftest.py: WPS432,WPS202,WPS442,WPS317,WPS221
58 | tests/unit/main_test.py: WPS442
59 | tests/unit/settings_parser_test.py: WPS442
60 | tests/unit/token_test.py: WPS432,WPS609
61 |
62 | extend-ignore = E203, W503
63 |
64 | [tool:pytest]
65 | junit_family=xunit2
66 |
--------------------------------------------------------------------------------
/Arbie/Services/coingecko.py:
--------------------------------------------------------------------------------
1 | """Module for getting tokens from Coingecko."""
2 |
3 | import logging
4 | from urllib.parse import urljoin
5 |
6 | import requests
7 |
8 | from Arbie.async_helpers import CircuitBreaker, async_map, run_async
9 |
10 | logger = logging.getLogger()
11 |
12 | COINGECKO_URL = "https://api.coingecko.com"
13 |
14 | COINS_URL = urljoin(COINGECKO_URL, "api/v3/coins/list")
15 |
16 |
17 | class Coingecko(object):
18 | def __init__(self, batch_size=5, timeout=0.6, retries=3, retrie_timeout=10):
19 | self.batch_size = batch_size
20 | self.timeout = timeout
21 | self.breaker = CircuitBreaker(retries, retrie_timeout, self._get_json)
22 |
23 | async def coins(self):
24 | ids = await self.ids()
25 | if ids:
26 | return await self.coins_from_ids(ids)
27 |
28 | async def coins_from_ids(self, ids):
29 | urls = list(map(self._coin_url, ids))
30 | addresses = await async_map(
31 | self._parse_eth_coin, urls, self.batch_size, self.timeout
32 | )
33 | addresses = set(addresses)
34 |         addresses.discard(None)
35 | return list(addresses)
36 |
37 | async def ids(self):
38 | rs_json = await self._get(COINS_URL)
39 | return list(map(lambda i: i["id"], rs_json))
40 |
41 | async def _coin_ticker(self, coin_id):
42 | url = self._coin_url(coin_id)
43 | return await self._parse_eth_coin(url)
44 |
45 | def _coin_url(self, coin_id):
46 | return urljoin(COINGECKO_URL, f"api/v3/coins/{coin_id}/tickers")
47 |
48 | async def _parse_eth_coin(self, url):
49 | body = await self._get(url)
50 |
51 | for ticker in self._tickers(body):
52 | if (
53 | ticker is None
54 | or ticker["target"] != "ETH"
55 | or not self._ok(ticker["is_anomaly"])
56 | ):
57 | continue
58 |
59 | address = ticker["base"].lower()
60 | logger.info(f"Found token: {address}")
61 |
62 | eth_address_length = 42 # noqa: WPS432
63 | if len(address) == eth_address_length:
64 | return address
65 |
66 | def _tickers(self, body):
67 | tickers = body["tickers"]
68 | if tickers:
69 | return tickers
70 | return []
71 |
72 | def _ok(self, is_anomaly):
73 | return not is_anomaly
74 |
75 | def _get_json(self, url):
76 | response = requests.get(url)
77 | if not response.ok:
78 | raise ConnectionError(f"Failed to connect to {url}, response: {response}")
79 | return response.json()
80 |
81 | async def _get(self, url):
82 |         logger.info(f"Requesting endpoint {url}")
83 | return await run_async(self.breaker.safe_call, url)
84 |
--------------------------------------------------------------------------------
/tests/unit/Actions/redis_state_test.py:
--------------------------------------------------------------------------------
1 | """Unittest of redis state."""
2 | import pickle # noqa: S403
3 | from unittest.mock import MagicMock
4 |
5 | import pytest
6 | from pytest_mock.plugin import MockerFixture
7 |
8 | from Arbie.Actions.redis_state import RedisState
9 |
10 | item_key = "pool_finder.1.unit_of_account"
11 | collection_key = "pool_finder.1.pools"
12 | collection_item_key = "pool_finder.1.pools.0xAb12C"
13 |
14 |
15 | class TestRedisState(object):
16 | def test_get(self, redis_state, mocker: MockerFixture):
17 | mock_collection = mocker.patch.object(
18 | RedisState, "_get_collection", return_value=[]
19 | )
20 | mock_item = mocker.patch.object(
21 | RedisState, "_get_item", return_value=MagicMock()
22 | )
23 |
24 | with pytest.raises(KeyError):
25 | redis_state[""]
26 | assert isinstance(redis_state[collection_key], list)
27 | assert mock_collection.called
28 | assert isinstance(redis_state[collection_item_key], MagicMock)
29 | assert mock_item.called
30 | assert isinstance(redis_state[item_key], MagicMock)
31 |
32 | def test_add(self, redis_state, mocker: MockerFixture):
33 | mock_collection = mocker.patch.object(RedisState, "_add_collection")
34 | mock_item = mocker.patch.object(RedisState, "_add_item")
35 |
36 | redis_state["my_key"] = None
37 | assert len(redis_state.local_state.keys()) == 1
38 |
39 | redis_state[collection_key] = None
40 | assert mock_collection.called
41 |
42 | redis_state[item_key] = None
43 | assert mock_item.called
44 |
45 | def test_get_item_raises(self, redis_state, redis_mock):
46 | redis_mock.exists.return_value = 0
47 | with pytest.raises(KeyError):
48 | redis_state[item_key]
49 | assert redis_mock.exists.called
50 |
51 | def test_get_collection_raises(self, redis_state, redis_mock):
52 | redis_mock.exists.return_value = 0
53 | with pytest.raises(KeyError):
54 | redis_state[collection_key]
55 | assert redis_mock.exists.called
56 |
57 | def test_get_collection_item_raises(self, redis_state, redis_mock):
58 | redis_mock.exists.return_value = 0
59 | with pytest.raises(KeyError):
60 | redis_state[collection_item_key]
61 | assert redis_mock.exists.called
62 |
63 | def test_get_keys(self, redis_state):
64 | redis_state["item"] = 1
65 | assert len(redis_state.keys()) == 1
66 |
67 | def test_in_redis_state(self, redis_state, redis_mock):
68 | redis_mock.exists.side_effect = Exception
69 | assert None not in redis_state
70 |
71 | def test_get_item(self, redis_state, redis_mock):
72 | redis_mock.exists.return_value = 1
73 | redis_mock.get.return_value = pickle.dumps(1337)
74 | assert redis_state[item_key] == 1337
75 |
--------------------------------------------------------------------------------
/tests/system/Contracts/balancer_test.py:
--------------------------------------------------------------------------------
1 | """Test balancer contracts."""
2 | import pytest
3 |
4 | from Arbie import IERC20TokenError
5 | from Arbie.Contracts import ContractFactory
6 | from Arbie.Contracts.balancer import BalancerFactory, BalancerPool
7 | from Arbie.Contracts.tokens import BadERC20Token, GenericToken
8 | from Arbie.Variables import BigNumber, PoolType
9 |
10 | bg10 = BigNumber(10)
11 | bg5 = BigNumber(5)
12 |
13 |
14 | @pytest.fixture
15 | def pool_factory(deploy_address, w3) -> BalancerFactory:
16 | return ContractFactory(w3, BalancerFactory).deploy_contract(deploy_address)
17 |
18 |
19 | @pytest.mark.asyncio
20 | async def test_create_new_pool(pool_factory):
21 | pool_factory.new_pool()
22 | assert len(await pool_factory.all_pools()) == 1
23 |
24 |
25 | @pytest.mark.asyncio
26 | async def test_get_number_of_tokens(pool_factory):
27 | pool_factory.new_pool()
28 | pool_factory.new_pool()
29 | pools = await pool_factory.all_pools()
30 | assert len(pools) == 2
31 | assert pools[0].get_number_of_tokens() == 0
32 |
33 |
34 | def test_bind_weight(pool_factory):
35 | pool = pool_factory.new_pool()
36 | with pytest.raises(ValueError):
37 | amount = BigNumber(5)
38 | weight = 0.2
39 | pool.bind("", amount, weight)
40 |
41 |
42 | @pytest.fixture
43 | async def pool_with_tokens(
44 | pool_factory: BalancerFactory, dai: GenericToken, weth: GenericToken
45 | ) -> BalancerPool:
46 | weight = 5
47 | return await pool_factory.setup_pool([dai, weth], [weight, weight], [bg5, bg10])
48 |
49 |
50 | @pytest.fixture
51 | async def factory_with_bad_token(
52 | pool_factory: BalancerFactory, dai: GenericToken, bad: BadERC20Token
53 | ):
54 | weight = 5
55 | await pool_factory.setup_pool(
56 | [bad, dai], [weight, weight], [bg5, bg10], approve_owner=False
57 | )
58 | return pool_factory
59 |
60 |
61 | @pytest.mark.asyncio
62 | async def test_bind_token_to_pool(pool_with_tokens: BalancerPool):
63 | tokens = await pool_with_tokens.get_tokens()
64 | assert len(tokens) == 2
65 | balances = await pool_with_tokens.get_balances()
66 | assert balances == [bg5, bg10]
67 | assert await pool_with_tokens.get_weights() == [0.5, 0.5]
68 |
69 |
70 | @pytest.mark.asyncio
71 | async def test_create_pool(
72 | pool_with_tokens: BalancerPool, dai: GenericToken, weth: GenericToken
73 | ):
74 | pool = await pool_with_tokens.create_pool()
75 | weth_token = await weth.create_token()
76 | dai_token = await dai.create_token()
77 | assert pool.pool_type == PoolType.balancer
78 | assert pool.spot_price(weth_token, dai_token) == 2
79 | assert pool.balances[0].value == 5
80 | assert pool.balances[1].value == 10
81 |
82 |
83 | @pytest.mark.asyncio
84 | async def test_create_bad_pool(factory_with_bad_token: BalancerFactory):
85 | pools = await factory_with_bad_token.all_pools()
86 | with pytest.raises(IERC20TokenError):
87 | await pools[0].create_pool()
88 |
--------------------------------------------------------------------------------
/Arbie/Actions/action_tree.py:
--------------------------------------------------------------------------------
1 | """ActionTree is a set of Actions."""
2 |
3 | import asyncio
4 | import inspect
5 | import logging
6 | import sys
7 | from typing import Dict, List, Tuple, Type
8 |
9 | from Arbie.Actions.action import Action, Store
10 | from Arbie.prometheus import get_prometheus
11 |
12 | RUN_TIME = get_prometheus().summary("arbie_run", "Time spent running actions")
13 |
14 |
15 | def is_class_action(member):
16 | return inspect.isclass(member) and Action in member.__bases__ # noqa: WPS609
17 |
18 |
19 | def get_all_actions(extra_actions=None) -> List[Tuple[str, Type[Action]]]:
20 | actions = inspect.getmembers(sys.modules["Arbie.Actions"], is_class_action)
21 | if extra_actions is not None:
22 | return actions + extra_actions
23 | return actions
24 |
25 |
26 | def create_action(name, config, extra_actions):
27 | for name_cls, action_cls in get_all_actions(extra_actions):
28 | if name_cls == name:
29 | return action_cls(config)
30 | raise ValueError(f"Action: {name} not found.")
31 |
32 |
33 | class ActionTree(object):
34 | def __init__(self, store: Store):
35 | self.store = store
36 | self.actions = []
37 | self.channels = []
38 | self.is_stopped = False
39 |
40 | def register_event(self, event_channel):
41 | self.channels.append(self.store.subscribe(event_channel))
42 |
43 | @classmethod
44 | def create(cls, action_configs: Dict, store: Store, extra_actions=None):
45 | tree = cls(store)
46 | for key, config in action_configs.items():
47 | action = create_action(key, config, extra_actions)
48 | tree.add_action(action)
49 | return tree
50 |
51 | def add_action(self, action: Action):
52 | self.actions.append(action)
53 |
54 | async def run(self):
55 | with RUN_TIME.time():
56 | self.is_stopped = False
57 | if self.channels:
58 | await self._run_continuous()
59 | else:
60 | await self._run_once()
61 |
62 | def stop(self):
63 | self.is_stopped = True
64 |
65 | async def _run_continuous(self):
66 | while not self.is_stopped:
67 | new_messages = self._get_messages()
68 | if new_messages:
69 | logging.getLogger().info(f"New messages {new_messages}")
70 | await self._run_once()
71 | else:
72 | await asyncio.sleep(0.1)
73 |
74 | async def _run_once(self):
75 | for action in self.actions:
76 | data = self.store.create_input(action)
77 | action_name = action.__class__.__name__
78 | with get_prometheus().summary(
79 | f"{action_name.lower()}_time",
80 | f"Time taken to process action {action_name}",
81 | ).time():
82 | await action.on_next(data)
83 |
84 | def _get_messages(self):
85 | messages = list(map(lambda c: c.get_message(True), self.channels))
86 | # Remove None
87 | return [message for message in messages if message]
88 |
--------------------------------------------------------------------------------
/tests/system/Actions/action_tree_test.py:
--------------------------------------------------------------------------------
1 | """System tests for ActionTree."""
2 | import asyncio
3 |
4 | import pytest
5 |
6 | from Arbie.Actions import Action, ActionTree, Store
7 |
8 |
9 | class ActionA(Action):
10 | """Dummy Action for testing.
11 |
12 | [Settings]
13 | input:
14 | times_ran_old: actionA.1.times_ran
15 | output:
16 | times_ran: actionA.1.times_ran
17 | my_result: actionA.1.result
18 | """
19 |
20 | async def on_next(self, data):
21 | data.times_ran(data.times_ran_old() + 1)
22 | data.my_result(1337)
23 |
24 |
25 | class ActionB(Action):
26 | """Dummy Action for testing.
27 |
28 | [Settings]
29 | input:
30 | times_ran_old: actionB.1.times_ran
31 | someone_elses_result: actionA.1.result
32 | output:
33 | times_ran: actionB.1.times_ran
34 | my_result: result
35 | """
36 |
37 | async def on_next(self, data):
38 | data.times_ran(data.times_ran_old() + 1)
39 | data.my_result(data.someone_elses_result() + 1)
40 |
41 |
42 | a_times_ran = "actionA.1.times_ran"
43 | b_times_ran = "actionB.1.times_ran"
44 | a_result = "actionA.1.result"
45 | b_result = "result"
46 |
47 | redis_channel = a_result
48 |
49 |
50 | @pytest.fixture
51 | def tree_a(redis_store: Store):
52 | tree = ActionTree(redis_store)
53 | tree.add_action(ActionA())
54 |
55 | redis_store.add(a_times_ran, 0)
56 | yield tree
57 | redis_store.delete(a_times_ran)
58 | redis_store.delete(a_result)
59 |
60 |
61 | @pytest.fixture
62 | def tree_b(redis_store: Store):
63 | tree = ActionTree(redis_store)
64 | tree.add_action(ActionB())
65 |
66 | redis_store.add(b_times_ran, 0)
67 | yield tree
68 | redis_store.delete(b_times_ran)
69 |
70 |
71 | pytestmark = pytest.mark.asyncio
72 |
73 |
74 | async def wait_and_stop(tree):
75 | await asyncio.sleep(0.2)
76 | tree.stop()
77 |
78 |
79 | class TestActionTree(object):
80 | async def test_subscribe_no_publisher(self, tree_a, redis_store):
81 | await tree_a.run()
82 | await tree_a.run()
83 | assert redis_store.get(a_times_ran) == 2
84 |
85 | async def test_subscribe(self, redis_store: Store, tree_a):
86 | tree_a.register_event("my_channel")
87 | redis_store.publish("my_channel", "new pool added")
88 | await asyncio.gather(tree_a.run(), wait_and_stop(tree_a))
89 | assert redis_store.get(a_times_ran) == 1
90 |
91 |     # Whenever a result is added to redis,
92 |     # a message with that variable's name is
93 |     # published to the channel with the
94 |     # same name as the variable.
95 | async def test_subscribe_publish(
96 | self, redis_store: Store, tree_a: ActionTree, tree_b: ActionTree
97 | ):
98 | tree_b.register_event(redis_channel)
99 |
100 | await asyncio.gather(tree_b.run(), tree_a.run(), wait_and_stop(tree_b))
101 | assert redis_store.get(b_times_ran) == 1
102 | assert redis_store.get(a_times_ran) == 1
103 | assert redis_store.get(a_result) == 1337
104 | assert redis_store.get(b_result) == 1338
105 |
--------------------------------------------------------------------------------
/tests/unit/Actions/trader_test.py:
--------------------------------------------------------------------------------
1 | """Unittest trader."""
2 |
3 | import asyncio
4 | from unittest.mock import MagicMock
5 |
6 | import pytest
7 |
8 | from Arbie import TransactionError
9 | from Arbie.Actions.trader import BalanceChecker
10 | from Arbie.Variables import BigNumber
11 |
12 |
13 | def amount_to_future(amount):
14 | future = asyncio.Future()
15 | future.set_result(BigNumber(amount))
16 | return future
17 |
18 |
19 | def setup_eth(amounts):
20 | mock = MagicMock()
21 | mock.eth = mock
22 | mock.getBalance.side_effect = list(
23 | map(lambda a: BigNumber(a).value, amounts)
24 | ) # noqa: WPS221
25 | return mock
26 |
27 |
28 | def setup_weth(amounts):
29 | mock = MagicMock()
30 | mock.balance_of.side_effect = list(map(amount_to_future, amounts))
31 | return mock
32 |
33 |
34 | def setup_mocks(eth_amounts, weth_amounts):
35 | return setup_eth(eth_amounts), setup_weth(weth_amounts)
36 |
37 |
38 | some_address = "any address"
39 |
40 |
41 | class TestBalanceChecker(object):
42 | @pytest.mark.asyncio
43 | async def test_check(self):
44 | checker = BalanceChecker(*setup_mocks([1], [2]))
45 | eth, weth = await checker.check(some_address)
46 | assert eth == 1
47 | assert weth == 2
48 |
49 | @pytest.mark.asyncio
50 | async def test_low_eth(self):
51 | checker = BalanceChecker(*setup_mocks([1], [0]))
52 | with pytest.raises(ValueError):
53 | await checker.check_and_convert(some_address, 1, 2, 10)
54 |
55 | @pytest.mark.asyncio
56 | async def test_high_eth_and_weth(self):
57 | checker = BalanceChecker(*setup_mocks([12], [11]))
58 | eth, weth = await checker.check_and_convert(some_address, 1, 2, 10)
59 | assert eth == 12
60 | assert weth == 11
61 |
62 | @pytest.mark.asyncio
63 | async def test_high_eth(self):
64 | eth_mock, weth_mock = setup_mocks([12, 2], [0, 10])
65 | checker = BalanceChecker(eth_mock, weth_mock)
66 |
67 | eth, weth = await checker.check_and_convert(some_address, 1, 2, 10)
68 | assert eth == 2
69 | assert weth == 10
70 |
71 | weth_mock.deposit.assert_called_once_with(10)
72 |
73 | @pytest.mark.asyncio
74 | async def test_high_weth(self):
75 | eth_mock, weth_mock = setup_mocks([0.5, 1], [10, 9.5])
76 | checker = BalanceChecker(eth_mock, weth_mock)
77 |
78 | eth, weth = await checker.check_and_convert(some_address, 1, 2, 10)
79 | assert eth == 1
80 | assert weth == pytest.approx(9.5)
81 |
82 | weth_mock.withdraw.assert_called_once_with(0.5)
83 |
84 | @pytest.mark.asyncio
85 | async def test_high_eth_transaction_error(self):
86 | eth_mock, weth_mock = setup_mocks([12], [0])
87 | weth_mock.deposit.return_value = False
88 |
89 | checker = BalanceChecker(eth_mock, weth_mock)
90 | with pytest.raises(TransactionError):
91 | await checker.check_and_convert(some_address, 1, 2, 10)
92 |
93 | @pytest.mark.asyncio
94 | async def test_high_weth_transaction_error(self):
95 | eth_mock, weth_mock = setup_mocks([0.5], [10])
96 | weth_mock.withdraw.return_value = False
97 |
98 | checker = BalanceChecker(eth_mock, weth_mock)
99 | with pytest.raises(TransactionError):
100 | await checker.check_and_convert(some_address, 1, 2, 10)
101 |
--------------------------------------------------------------------------------
/tests/unit/settings_parser_test.py:
--------------------------------------------------------------------------------
1 | """Unittest for settings parser."""
2 | from unittest.mock import MagicMock
3 |
4 | import pytest
5 | import yaml
6 |
7 | from Arbie import StateError
8 | from Arbie.settings_parser import Keys, SettingsParser
9 |
10 |
11 | @pytest.fixture
12 | def config():
13 | raw_conf = """
14 | version: "1.0"
15 | store:
16 | address: host_name:port
17 |
18 | web3:
19 | address: url:port
20 |
21 | variables:
22 | my_weth_token:
23 | type: Weth
24 | address: "0xdac17f958d2ee523a2206206994597c13d831ec7"
25 | uniswap_factory:
26 | type: UniswapFactory
27 | address: "0xdac17f958d2ee523a2206206994597c13d831ec7"
28 | balancer_factory:
29 | type: BalancerFactory
30 | address: "0xdac17f958d2ee523a2206206994597c13d831ec7"
31 | router:
32 | type: UniswapV2Router
33 | address: "0xdac17f958d2ee523a2206206994597c13d831ec7"
34 | amount:
35 | type: float
36 | value: 26.3
37 | start_block:
38 | type: int
39 | value: 1337
40 | token_name:
41 | type: str
42 | value: ethereum
43 |
44 | action_tree:
45 | event:
46 | arbie.1.pools
47 | actions:
48 | PathFinder:
49 | input:
50 | weth: my_weth_token
51 | output:
52 | trades: all_trades
53 | """
54 | return yaml.safe_load(raw_conf)
55 |
56 |
57 | def mock_web3(mocker):
58 | mocker.patch("Arbie.settings_parser.Web3")
59 |
60 |
61 | class TestSettingsParser(object):
62 | def test_redis_store(self, config, mocker):
63 | mocker.patch("Arbie.settings_parser.RedisState")
64 |
65 | sp = SettingsParser(config)
66 | store = sp.store()
67 | assert isinstance(store.state, MagicMock)
68 |
69 | def test_local_store(self, config):
70 | del config[Keys.store] # noqa: WPS420
71 |
72 | sp = SettingsParser(config)
73 | store = sp.store()
74 | assert isinstance(store.state, dict)
75 |
76 | def test_set_up_store(self, config, mocker):
77 | mock_web3(mocker)
78 | del config[Keys.store] # noqa: WPS420
79 |
80 | sp = SettingsParser(config)
81 | store = sp.setup_store()
82 | assert len(store.state) == 8
83 |
84 | def test_set_up_store_no_variables(self, config, mocker):
85 | mock_web3(mocker)
86 | del config[Keys.store] # noqa: WPS420
87 | del config[Keys.variables] # noqa: WPS420
88 |
89 | sp = SettingsParser(config)
90 | store = sp.setup_store()
91 | assert not store.state
92 |
93 | def test_set_up_action_tree(self, config, mocker):
94 | mock_web3(mocker)
95 | mocker.patch("Arbie.settings_parser.RedisState")
96 |
97 | sp = SettingsParser(config)
98 | store = sp.setup_store()
99 | tree = sp.action_tree(store)
100 | assert len(tree.actions) == 1
101 |
102 | def test_subscribe_raises(self, config, mocker):
103 | mock_web3(mocker)
104 | del config[Keys.store] # noqa: WPS420
105 |
106 | sp = SettingsParser(config)
107 | store = sp.setup_store()
108 | with pytest.raises(StateError):
109 | sp.action_tree(store)
110 |
--------------------------------------------------------------------------------
/tests/unit/Actions/arbitrage_test.py:
--------------------------------------------------------------------------------
1 | """Unittest of arbitrage."""
2 |
3 | import pytest
4 |
5 | from Arbie.Actions import ActionTree, Store
6 | from Arbie.Actions.arbitrage import Arbitrage, ArbitrageFinder
7 | from Arbie.address import dummy_token_generator
8 | from Arbie.Variables import Pool, Trade
9 |
10 |
11 | @pytest.fixture
12 | def tokens(dai, eth):
13 | return [dai, eth]
14 |
15 |
16 | @pytest.fixture
17 | def trade1(tokens, dai, eth):
18 | pool1 = Pool(tokens, [400, 1], [0.5, 0.5])
19 | pool2 = Pool(tokens, [410, 1], [0.5, 0.5])
20 | return Trade([pool1, pool2], [dai, eth, dai])
21 |
22 |
23 | @pytest.fixture
24 | def trade2(tokens, dai, eth):
25 | pool1 = Pool(tokens, [400, 1], [0.9, 0.1])
26 | pool2 = Pool(tokens, [410, 1], [0.1, 0.9])
27 | return Trade([pool1, pool2], [dai, eth, dai])
28 |
29 |
30 | class TestArbitrage(object):
31 | """Test Arbitrage."""
32 |
33 | def test_find_arbitrage(self, trade1: Trade):
34 | ArbitrageFinder(trade1).find_arbitrage_and_update_trade()
35 | assert trade1.amount_in == pytest.approx(2.48456731316587) # noqa: WPS432
36 |
37 | def test_find_arbitrage_unbalanced(self, trade2: Trade):
38 | ArbitrageFinder(trade2).find_arbitrage_and_update_trade()
39 | assert trade2.amount_in == pytest.approx(27.8547574719045) # noqa: WPS432
40 |
41 | def test_find_arbitrage_no_opportunity(self, tokens, dai, eth):
42 | pool1 = Pool(tokens, [400, 1], [0.9, 0.1])
43 | pool2 = Pool(tokens, [410, 1], [0.1, 0.9])
44 | trade = [Trade(pool1, eth, dai), Trade(pool2, dai, eth)]
45 |
46 | with pytest.raises(ValueError):
47 | ArbitrageFinder(trade).find_arbitrage_and_update_trade()
48 |
49 | def test_calc_optimal_arbitrage_no_opportunity(self, tokens, dai, eth):
50 | pool1 = Pool(tokens, [400, 1], [0.9, 0.1])
51 | pool2 = Pool(tokens, [410, 1], [0.1, 0.9])
52 | trade = Trade([pool1, pool2], [eth, dai, eth])
53 |
54 |         with pytest.raises(ValueError) as e:
55 |             ArbitrageFinder(trade).calculate_optimal_arbitrage()
56 |         assert str(e.value) == "No arbitrage opportunity found."
57 |
58 | def test_find_arbitrage_wrong_token(self, tokens, dai):
59 | pool1 = Pool(tokens, [400, 1], [0.9, 0.1])
60 | pool2 = Pool(tokens, [410, 1], [0.1, 0.9])
61 | sai = dummy_token_generator("sai", 300.0)
62 | trade = Trade([pool1, pool2], [dai, sai, dai])
63 |
64 | with pytest.raises(ValueError):
65 | ArbitrageFinder(trade).find_arbitrage_and_update_trade()
66 |
67 | def test_find_arbitrage_hard(self, dai, eth, btc):
68 | pool_dai_weth = Pool(
69 | [dai, eth], [1e9, 1e7 / 3], [0.5, 0.5], fee=0.003 # noqa: WPS221
70 | )
71 | pool_wbtc_dai = Pool(
72 | [btc, dai], [1e5, 1e9], [0.5, 0.5], fee=0.003 # noqa: WPS221
73 | )
74 | pool_wbtc_weth = Pool(
75 | [btc, eth], [1e5, 3508772], [0.5, 0.5], fee=0.003 # noqa: WPS221
76 | )
77 |
78 | trade = Trade(
79 | [pool_dai_weth, pool_wbtc_dai, pool_wbtc_weth], [eth, dai, btc, eth]
80 | )
81 | ArbitrageFinder(trade).find_arbitrage_and_update_trade()
82 |
83 | assert trade.profit == pytest.approx(510.233, 1e-4)
84 |
85 |
86 | class TestArbitrageAction(object):
87 | @pytest.mark.asyncio
88 | async def test_on_next(self, trade1, trade2):
89 | store = Store()
90 | store.add("all_trades", [trade1, trade2])
91 | tree = ActionTree(store)
92 | tree.add_action(Arbitrage())
93 | await tree.run()
94 |
95 | assert len(store.get("filtered_trades")) == 2
96 |
--------------------------------------------------------------------------------
/Arbie/Contracts/tokens.py:
--------------------------------------------------------------------------------
1 | """Utility functions for interacting with Tokens."""
2 |
3 | import asyncio
4 | import logging
5 |
6 | from Arbie.Contracts.contract import Contract
7 | from Arbie.Variables import BigNumber, Token
8 |
9 | token_protocol = "tokens" # noqa: S105
10 |
11 |
12 | class IERC20Token(Contract):
13 | name = "IERC20"
14 | protocol = token_protocol
15 |
16 | def __eq__(self, other):
17 | return self.get_address() == other.get_address()
18 |
19 | async def decimals(self) -> int:
20 | return await self._call_async(self.contract.functions.decimals())
21 |
22 | async def balance_of(self, owner: str) -> BigNumber:
23 | result = await asyncio.gather(
24 | self._call_async(self.contract.functions.balanceOf(owner)), self.decimals()
25 | )
26 | return BigNumber.from_value(result[0], result[1])
27 |
28 | def transfer(self, to: str, bg_number: BigNumber) -> bool:
29 | transaction = self.contract.functions.transfer(to, bg_number.value)
30 | return self._transact_info(transaction)
31 |
32 | def allowance(self, to: str):
33 | return self.contract.functions.allowance(self._get_account(), to).call()
34 |
35 | def approve(self, spender: str, bg_number: BigNumber) -> bool:
36 | transaction = self.contract.functions.approve(spender, bg_number.value)
37 | return self._transact_info(transaction)
38 |
39 | async def approve_owner(self):
40 | bg = await self.balance_of(self.owner_address)
41 | return self.approve(self.owner_address, bg)
42 |
43 | def create_token(self, price=0):
44 | return Token("", self.get_address(), price)
45 |
46 |
47 | class BadERC20Token(IERC20Token):
48 | name = "BadERC20"
49 | protocol = token_protocol
50 |
51 |
52 | class GenericToken(IERC20Token):
53 | name = "ERC20"
54 | protocol = token_protocol
55 |
56 | def __str__(self):
57 | return (
58 | f"GenericToken, name: {self._name().call()} address: {self.get_address()}"
59 | )
60 |
61 | def __repr__(self):
62 | return self.__str__()
63 |
64 | async def get_name(self) -> str:
65 | return await self._call_async(self._name())
66 |
67 | async def create_token(self, price=0):
68 | try:
69 | name = await self.get_name()
70 | except Exception:
71 | name = ""
72 | logging.getLogger().warning(
73 | f"Token: {self.get_address()} dosn't have a name."
74 | )
75 | return Token(name, self.get_address(), price)
76 |
77 | def _name(self):
78 | return self.contract.functions.name()
79 |
80 |
81 | class Weth(GenericToken):
82 | name = "Weth"
83 | protocol = token_protocol
84 |
85 | def deposit(self, amount, dry_run=False):
86 | transaction = self.contract.functions.deposit()
87 |         # The gas amount 44249 was determined by testing the contract and checking how much gas was needed.
88 | return self._transact_info(
89 | transaction,
90 | value=BigNumber(amount).value,
91 | gas=44249, # noqa: WPS432
92 | dry_run=dry_run,
93 | )
94 |
95 | def withdraw(self, amount, dry_run=False):
96 | transaction = self.contract.functions.withdraw(BigNumber(amount).value)
97 | return self._transact_info(transaction, dry_run=dry_run)
98 |
99 |
100 | class MaliciousToken(GenericToken):
101 | name = "MaliciousToken"
102 | protocol = token_protocol
103 |
104 | def pause(self):
105 | transaction = self.contract.functions.pause()
106 | return self._transact_info(transaction)
107 |
--------------------------------------------------------------------------------
/tests/system/Actions/trader_test.py:
--------------------------------------------------------------------------------
1 | """System tests for Trader."""
2 |
3 | import pytest
4 |
5 | from Arbie.Actions import ActionTree, SetUpTrader, Store, Trader
6 | from Arbie.Actions.arbitrage import ArbitrageFinder
7 | from Arbie.Contracts import GenericToken, UniswapV2Router, Weth
8 | from Arbie.Variables import BigNumber
9 |
10 |
11 | def send_eth(web3, from_address, to_address, value):
12 | tx_hash = web3.eth.sendTransaction(
13 | {"to": to_address, "from": from_address, "value": BigNumber(value).value}
14 | )
15 | return web3.eth.waitForTransactionReceipt(tx_hash, 180)
16 |
17 |
18 | min_profit = "min_profit"
19 | dry_run = "dry_run"
20 |
21 |
22 | @pytest.fixture
23 | def conf_dict():
24 | return {"input": {min_profit: min_profit}, "output": {}}
25 |
26 |
27 | @pytest.fixture
28 | def trade_conf(conf_dict):
29 | conf_dict["input"][dry_run] = False
30 | return conf_dict
31 |
32 |
33 | @pytest.fixture
34 | def trader_account(w3, weth: GenericToken, router, dummy_account):
35 | router.set_account(dummy_account)
36 | weth.transfer(dummy_account.address, BigNumber(2))
37 | weth.set_account(dummy_account)
38 | weth.approve(router.get_address(), BigNumber(2))
39 | return dummy_account
40 |
41 |
42 | @pytest.fixture
43 | def trade_store(w3_with_gas_strategy, router, bad_trade, trade, weth, trader_account):
44 | store = Store()
45 | store.add("router", router)
46 | store.add("filtered_trades", [bad_trade, trade])
47 | store.add("weth", weth)
48 | store.add("web3", w3_with_gas_strategy)
49 | store.add("account", trader_account)
50 | return store
51 |
52 |
53 | class TestTrader(object):
54 | @pytest.mark.asyncio
55 | async def test_on_next(
56 | self, trade_store, trade, router: UniswapV2Router, trade_conf
57 | ):
58 | trade = ArbitrageFinder(trade).find_arbitrage_and_update_trade()
59 | _, gas_cost = router.swap(trade, dry_run=True)
60 |
61 | # min profit is set to -1 because we want to execute a bad trade
62 | # and see that it is reverted without costing us gas
63 | trade_store.add(min_profit, -1)
64 | tree = ActionTree(trade_store)
65 | tree.add_action(Trader(trade_conf))
66 | await tree.run()
67 | assert trade_store.get("profit") == pytest.approx(
68 | trade.profit - gas_cost, rel=1e-4
69 | )
70 |
71 | @pytest.mark.asyncio
72 | async def test_no_profit(self, trade_store, trade_conf):
73 | trade_store.add(min_profit, 4)
74 | tree = ActionTree(trade_store)
75 | tree.add_action(Trader(trade_conf))
76 | await tree.run()
77 | assert trade_store.get("profit") == 0
78 |
79 | @pytest.mark.asyncio
80 | async def test_dry_run(self, trade_store, trade, router, conf_dict):
81 | trade = ArbitrageFinder(trade).find_arbitrage_and_update_trade()
82 |
83 | trade_store.add(min_profit, -1)
84 | tree = ActionTree(trade_store)
85 | tree.add_action(Trader(conf_dict))
86 | await tree.run()
87 | assert trade_store.get("profit") == 0
88 |
89 |
90 | class TestSetUpTrader(object):
91 | @pytest.fixture
92 | def trade_store(self, w3, router, real_weth, dummy_account):
93 | real_weth.set_account(dummy_account)
94 | store = Store()
95 | store.add("weth", real_weth)
96 | store.add("web3", w3)
97 | store.add("account", dummy_account)
98 | store.add("router", router)
99 | return store
100 |
101 | @pytest.mark.asyncio
102 | async def test_setup_trader(self, trade_store, real_weth: Weth):
103 | tree = ActionTree(trade_store)
104 | tree.add_action(SetUpTrader())
105 | await tree.run()
106 | assert trade_store.get("balance_weth") == 10
107 | real_weth.withdraw(10)
108 |
--------------------------------------------------------------------------------
/Arbie/Actions/arbitrage.py:
--------------------------------------------------------------------------------
1 | """arbitrage can be used to find to arbitrage opertunity between two Pools."""
2 |
3 | import logging
4 | from functools import partial
5 | from multiprocessing import Pool
6 | from operator import is_not
7 | from typing import Tuple
8 |
9 | from sympy import nsolve, symbols
10 |
11 | from Arbie.Actions import Action
12 | from Arbie.Variables import Trade
13 |
14 | logger = logging.getLogger()
15 | x = symbols("x")
16 |
17 |
18 | class ArbitrageFinder(object):
19 | def __init__(self, trade: Trade, precision=10e3):
20 | self.trade = trade
21 | self.precision = precision
22 |
23 | def find_arbitrage_and_update_trade(self) -> Tuple[float, float]:
24 | if len(self.trade) < 3:
25 | raise ValueError(
26 | "Can only find arbitrage opportunity between at leat 3 tokens"
27 | )
28 |
29 | if not self.token_in_pools():
30 | raise ValueError("Tokens does not exist in pools")
31 |
32 | self.trade.amount_in = self.calculate_optimal_arbitrage()
33 | self.trade.profit = self.calculate_profit(self.trade.amount_in)
34 |
35 | return self.trade
36 |
37 | def token_in_pools(self) -> bool:
38 | for pool, token_in, token_out in self.trade:
39 | if token_in not in pool.tokens or token_out not in pool.tokens:
40 | return False
41 | return True
42 |
43 | def trade_expr(self):
44 | return self.expr_builder(lambda inner, expr: inner.subs(x, expr))
45 |
46 | def arbitrage_expr(self):
47 | return self.trade_expr() - x
48 |
49 | def expr_builder(self, subs_expr): # noqa: WPS210
50 | i = 0
51 | expr = None
52 | for pool, token_in, token_out in self.trade:
53 | inner_expr = pool.out_given_in_expr(token_in, token_out, self.precision)
54 | if i > 0:
55 | expr = subs_expr(inner_expr, expr)
56 | else:
57 | expr = inner_expr
58 | i = i + 1
59 | return expr
60 |
61 | def arbitrage_diff_expr(self):
62 | return self.arbitrage_expr().diff(x)
63 |
64 | def calculate_optimal_arbitrage(self) -> float:
65 | sol = nsolve(self.arbitrage_diff_expr(), 0)
66 | if sol <= 0:
67 | raise AssertionError("No arbitrage opportunity found.")
68 | return sol / self.precision
69 |
70 | def calculate_profit(self, value) -> float:
71 | return self.arbitrage_expr().subs(x, value * self.precision) / self.precision
72 |
73 | def _initial_expr(self):
74 | pool, trade_in, trade_out = self.trade[0]
75 | return pool.out_given_in_expr(trade_in, trade_out)
76 |
77 |
78 | def _check_opportunity(trade):
79 | try:
80 | return ArbitrageFinder(trade).find_arbitrage_and_update_trade()
81 | except (AssertionError, ValueError, TypeError):
82 | return None
83 |
84 |
85 | class Arbitrage(Action):
86 | """Find optimal arbitrage opportunity for a list sorted trades.
87 |
88 | Remove all trades that are not profitable.
89 |
90 | [Settings]
91 | input:
92 | trades: all_trades
93 | process_trades: 20000
94 | top_trades: 10
95 | processes: 10
96 | output:
97 | out_trades: filtered_trades
98 | """
99 |
100 | async def on_next(self, data):
101 | raw_trades = data.trades()[: data.process_trades()]
102 |
103 | with Pool(data.processes()) as p: # noqa: WPS432
104 | trades = p.map(_check_opportunity, raw_trades)
105 |
106 | trades = list(filter(partial(is_not, None), trades))
107 | sorted_trade = sorted(
108 | trades, key=lambda trade: trade.profit / trade.amount_in, reverse=True
109 | )
110 | logger.info(f"Top trade has {sorted_trade[0].profit}")
111 | data.out_trades(sorted_trade[: data.top_trades()])
112 |
--------------------------------------------------------------------------------
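
ArbitrageFinder chains each pool's out_given_in expression along the trade path, subtracts the input x, and finds the profit-maximising input by solving the derivative for zero with nsolve. A standalone sketch of the same optimisation for two 50/50 constant-product pools, using made-up reserves and plain sympy instead of the Pool and Trade classes:

# Standalone sketch of the optimisation ArbitrageFinder performs (made-up reserves).
from sympy import diff, nsolve, symbols

x = symbols("x")
fee = 0.003

def out_given_in(balance_in, balance_out, amount_in):
    # Constant-product quote for a 50/50 pool, matching Pool.out_given_in_expr.
    return balance_out * (1 - balance_in / (balance_in + amount_in * (1 - fee)))

dai_out = out_given_in(100, 40_000, x)           # pool A: 100 WETH / 40_000 DAI
weth_back = out_given_in(41_000, 105, dai_out)   # pool B: 41_000 DAI / 105 WETH

profit = weth_back - x                           # same shape as arbitrage_expr()
optimal_in = nsolve(diff(profit, x), x, 1)       # root of the derivative, initial guess 1 WETH
print(optimal_in, profit.subs(x, optimal_in))    # optimal input and the expected profit
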
/Arbie/Variables/graph.py:
--------------------------------------------------------------------------------
1 | """Graph contains different trading graphs.
2 |
3 | To create efficient Arbitrage algorithms we need graphs
4 | suited for modelling relationships between Automated Market Makers.
5 | """
6 | from typing import List
7 |
8 | import networkx as nx
9 |
10 | from Arbie.Variables.pool import Pool
11 | from Arbie.Variables.token import Token
12 |
13 |
14 | class Graph(object):
15 | """Base class for wrapping networkx graph."""
16 |
17 | def __len__(self):
18 | return len(self.graph)
19 |
20 | def __getitem__(self, key):
21 | return self.graph[key]
22 |
23 | def get_edges(self):
24 | return self.graph.edges
25 |
26 | def get_nodes(self):
27 | return self.graph.nodes
28 |
29 | def _add_edge(self, start_node: Token, end_node: Token, weight, pool):
30 | if start_node is end_node:
31 | return
32 |
33 | self.graph.add_edge(start_node, end_node, weight=weight, object=pool)
34 |
35 |
36 | class TradingGraph(Graph):
37 | """A trading graph.
38 |
39 | Represents each possible trading path between all tokens.
40 | """
41 |
42 | def __init__(self, pools: List[Pool]):
43 | self.graph = nx.MultiDiGraph()
44 | self._create_graph(pools)
45 |
46 | def _create_graph(self, pools: List[Pool]):
47 | """Create graph from a set of Pools."""
48 | for pool in pools:
49 | self._add_nodes(pool.tokens)
50 |
51 | for pool in pools: # noqa: WPS440,WPS441
52 | self._add_edges(pool) # noqa: WPS441
53 |
54 | def _add_nodes(self, tokens: List[Token]):
55 | self.graph.add_nodes_from(tokens)
56 |
57 | def _add_edges(self, pool: Pool): # noqa: WPS231
58 | for start_node in pool.tokens:
59 | for end_node in pool.tokens:
60 | if start_node == end_node:
61 | continue
62 | try:
63 | weight = pool.spot_price(start_node, end_node)
64 | except ZeroDivisionError:
65 | raise ZeroDivisionError(
66 | f"""Zero division error in
67 | pool: {pool}
68 | between token {repr(start_node)} and {repr(end_node)}
69 | """
70 | )
71 |
72 | self._add_edge(start_node, end_node, weight, pool)
73 |
74 |
75 | class FilteredTradingGraph(Graph):
76 | """A Filtered Trading Graph.
77 |
78 | Represents the best trades between each token pair.
79 | The best trade is the one that has the highest ratio.
80 | """
81 |
82 | weight_key = "weight"
83 | pool_key = "object"
84 |
85 | def __init__(self, trading_graph: TradingGraph, min_edge_liquidity):
86 | self.min_edge_liquidity = min_edge_liquidity
87 | self.graph = nx.DiGraph()
88 | self._filter_graph(trading_graph)
89 |
90 | def _check_node_liquidity(self, balance, node: Token) -> bool:
91 | return balance * node.price < self.min_edge_liquidity
92 |
93 | def _check_liquidity(self, start_node: Token, end_node: Token, pool: Pool) -> bool:
94 | t1, t2 = pool.get_balances(start_node, end_node)
95 | return self._check_node_liquidity(t1, start_node) or self._check_node_liquidity(
96 | t2, end_node
97 | )
98 |
99 | def _filter_graph(self, trading_graph: TradingGraph): # noqa: WPS231
100 | self.graph.add_nodes_from(trading_graph.get_nodes())
101 |
102 | for (start_node, end_node, data) in trading_graph.get_edges().data():
103 |
104 | # Check if there is an edge between two nodes in the filtered graph
105 | edge_data = self.graph.get_edge_data(start_node, end_node)
106 |
107 | if self._check_liquidity(start_node, end_node, data[self.pool_key]):
108 | continue
109 |
110 | if edge_data is not None:
111 | if edge_data[self.weight_key] < data[self.weight_key]:
112 | # The most valuable path is already added
113 | continue
114 |
115 | self._add_edge(
116 | start_node, end_node, data[self.weight_key], data[self.pool_key]
117 | )
118 |
--------------------------------------------------------------------------------
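
TradingGraph adds one directed, weighted edge per tradable token pair in every pool, with the spot price as weight, and FilteredTradingGraph collapses that multigraph into a DiGraph, dropping edges whose liquidity falls below min_edge_liquidity and keeping one edge per token pair. A minimal sketch with made-up pools; the Token(name, address, price) call mirrors how tokens are constructed elsewhere in this repo and is an assumption:

# Minimal sketch with made-up pools and placeholder addresses.
from Arbie.Variables.graph import FilteredTradingGraph, TradingGraph
from Arbie.Variables.pool import Pool
from Arbie.Variables.token import Token

weth = Token("weth", "0x01", 1)       # price in the unit of account
dai = Token("dai", "0x02", 0.0025)

pool_a = Pool([weth, dai], [100, 40_000], [0.5, 0.5], fee=0.003, address="0xA")
pool_b = Pool([weth, dai], [105, 41_000], [0.5, 0.5], fee=0.003, address="0xB")

graph = TradingGraph([pool_a, pool_b])      # multigraph: four directed edges
filtered = FilteredTradingGraph(graph, 10)  # min_edge_liquidity = 10

for start, end, data in filtered.get_edges().data():
    print(start, "->", end, "weight", data["weight"], "via", data["object"].address)
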
/Arbie/Actions/path_finder.py:
--------------------------------------------------------------------------------
1 | """Path finder contains Actions for finding paths between nodes."""
2 |
3 | import logging
4 | from typing import List
5 |
6 | import networkx as nx
7 |
8 | from Arbie.Actions import Action
9 | from Arbie.Variables import Pool, Token, Trade
10 | from Arbie.Variables.graph import FilteredTradingGraph, TradingGraph
11 |
12 |
13 | class Node(object):
14 | """A edge in a cycle."""
15 |
16 | def __init__(self, token: Token, pool: Pool = None):
17 | self.token = token
18 | self.pool = pool
19 |
20 | def __eq__(self, other):
21 | return self.token == other.token
22 |
23 |
24 | class Cycle(object):
25 | """A cycle of nodes with the ratio of the cycle."""
26 |
27 | def __init__(self, nodes: List[Node], ratio):
28 | self.nodes = nodes
29 | self.ratio = ratio
30 |
31 |
32 | class CycleFinder(object):
33 | """Algorithm for finding cycles and the ratio of the cycle.
34 |
35 | A recursive depth-first search is done over all nodes. When the start node is
36 | reached again, a cycle has been found. The search also accumulates the weights for
37 | taking a cycle, which is useful for sorting the cycles by trade value.
38 | """
39 |
40 | def __init__(self, graph: nx.DiGraph, start_node: Token, max_depth):
41 | self.max_depth = max_depth
42 | self.graph = graph
43 | self.start_node = Node(start_node)
44 |
45 | def find_all_cycles(self):
46 | cycles = []
47 | self._visit_neighbours(cycles, [self.start_node], self.start_node, 1)
48 | return cycles
49 |
50 | def _visit_neighbours(
51 | self,
52 | found_cycles: List[Cycle],
53 | visited: List[Node],
54 | current_node: Node,
55 | ratio_to_current_node,
56 | ):
57 |
58 | for _, next_token, data in self.graph.edges(current_node.token, data=True):
59 | ratio_to_next_node = ratio_to_current_node * data["weight"]
60 | next_node = Node(next_token, data["object"])
61 | self._visit_node(found_cycles, visited, next_node, ratio_to_next_node)
62 |
63 | def _visit_node(
64 | self,
65 | found_cycles: List[Cycle],
66 | visited: List[Node],
67 | current_node: Node,
68 | ratio_to_current_node,
69 | ):
70 | if current_node == self.start_node:
71 | # We have come back to start. Append cycle to result
72 | found_cycles.append(Cycle(visited + [current_node], ratio_to_current_node))
73 | logging.getLogger(f"Found cycle {len(found_cycles)}!")
74 | return
75 |
76 | if current_node in visited:
77 | # We have found a cycle back to the current node. Stop
78 | return
79 |
80 | if len(visited) > self.max_depth:
81 | # We have gone too deep, let's stop.
82 | return
83 |
84 | self._visit_neighbours(
85 | found_cycles, visited + [current_node], current_node, ratio_to_current_node
86 | )
87 |
88 |
89 | def create_trade(cycle: Cycle) -> Trade:
90 | path = [cycle.nodes[0].token]
91 | pools = []
92 | for node in cycle.nodes[1:]:
93 | pools.append(node.pool)
94 | path.append(node.token)
95 | return Trade(pools, path, ratio=cycle.ratio)
96 |
97 |
98 | class PathFinder(Action):
99 | """Find all trades from a trading graph.
100 |
101 | Also find the ratio of taking that trade.
102 |
103 | [Settings]
104 | input:
105 | pools: pools
106 | weth: None
107 | min_liquidity: 1
108 | max_depth: 5
109 | output:
110 | cycles: all_cycles
111 | trades: all_trades
112 | """
113 |
114 | async def on_next(self, data):
115 | logging.getLogger().info("Searching for path")
116 | graph = FilteredTradingGraph(TradingGraph(data.pools()), data.min_liquidity())
117 | finder = CycleFinder(
118 | graph.graph, await data.weth().create_token(1), data.max_depth()
119 | )
120 | cycles = sorted(finder.find_all_cycles(), key=lambda x: x.ratio)
121 | logging.getLogger().info(f"Found {len(cycles)} cycles")
122 | data.cycles(cycles)
123 | trades = []
124 | for cycle in cycles:
125 | trades.append(create_trade(cycle))
126 | data.trades(trades)
127 |
--------------------------------------------------------------------------------
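
CycleFinder starts from one token (WETH in PathFinder), walks the filtered graph depth-first up to max_depth, and records a Cycle with the accumulated weight product whenever it gets back to the start token; create_trade then turns each cycle into a Trade. A sketch driving it directly on the same made-up pools as in the graph example (the Token and Pool constructor calls remain assumptions):

# Sketch: run CycleFinder on a small filtered graph built from made-up pools.
from Arbie.Actions.path_finder import CycleFinder, create_trade
from Arbie.Variables.graph import FilteredTradingGraph, TradingGraph
from Arbie.Variables.pool import Pool
from Arbie.Variables.token import Token

weth = Token("weth", "0x01", 1)
dai = Token("dai", "0x02", 0.0025)
pools = [
    Pool([weth, dai], [100, 40_000], [0.5, 0.5], fee=0.003, address="0xA"),
    Pool([weth, dai], [105, 41_000], [0.5, 0.5], fee=0.003, address="0xB"),
]

graph = FilteredTradingGraph(TradingGraph(pools), 10)
finder = CycleFinder(graph.graph, weth, max_depth=5)

cycles = sorted(finder.find_all_cycles(), key=lambda c: c.ratio)
trades = [create_trade(c) for c in cycles]
print(len(trades), "candidate trade(s); first ratio:", cycles[0].ratio if cycles else None)
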
/Arbie/resources/contracts/tokens/IERC20.json:
--------------------------------------------------------------------------------
1 | {"abi":
2 | [
3 | {
4 | "anonymous": false,
5 | "inputs": [
6 | {
7 | "indexed": true,
8 | "internalType": "address",
9 | "name": "owner",
10 | "type": "address"
11 | },
12 | {
13 | "indexed": true,
14 | "internalType": "address",
15 | "name": "spender",
16 | "type": "address"
17 | },
18 | {
19 | "indexed": false,
20 | "internalType": "uint256",
21 | "name": "value",
22 | "type": "uint256"
23 | }
24 | ],
25 | "name": "Approval",
26 | "type": "event"
27 | },
28 | {
29 | "anonymous": false,
30 | "inputs": [
31 | {
32 | "indexed": true,
33 | "internalType": "address",
34 | "name": "from",
35 | "type": "address"
36 | },
37 | {
38 | "indexed": true,
39 | "internalType": "address",
40 | "name": "to",
41 | "type": "address"
42 | },
43 | {
44 | "indexed": false,
45 | "internalType": "uint256",
46 | "name": "value",
47 | "type": "uint256"
48 | }
49 | ],
50 | "name": "Transfer",
51 | "type": "event"
52 | },
53 | {
54 | "inputs": [],
55 | "name": "totalSupply",
56 | "outputs": [
57 | {
58 | "internalType": "uint256",
59 | "name": "",
60 | "type": "uint256"
61 | }
62 | ],
63 | "stateMutability": "view",
64 | "type": "function"
65 | },
66 | {
67 | "inputs": [
68 | {
69 | "internalType": "address",
70 | "name": "account",
71 | "type": "address"
72 | }
73 | ],
74 | "name": "balanceOf",
75 | "outputs": [
76 | {
77 | "internalType": "uint256",
78 | "name": "",
79 | "type": "uint256"
80 | }
81 | ],
82 | "stateMutability": "view",
83 | "type": "function"
84 | },
85 | {
86 | "inputs": [
87 | {
88 | "internalType": "address",
89 | "name": "recipient",
90 | "type": "address"
91 | },
92 | {
93 | "internalType": "uint256",
94 | "name": "amount",
95 | "type": "uint256"
96 | }
97 | ],
98 | "name": "transfer",
99 | "outputs": [
100 | {
101 | "internalType": "bool",
102 | "name": "",
103 | "type": "bool"
104 | }
105 | ],
106 | "stateMutability": "nonpayable",
107 | "type": "function"
108 | },
109 | {
110 | "inputs": [
111 | {
112 | "internalType": "address",
113 | "name": "owner",
114 | "type": "address"
115 | },
116 | {
117 | "internalType": "address",
118 | "name": "spender",
119 | "type": "address"
120 | }
121 | ],
122 | "name": "allowance",
123 | "outputs": [
124 | {
125 | "internalType": "uint256",
126 | "name": "",
127 | "type": "uint256"
128 | }
129 | ],
130 | "stateMutability": "view",
131 | "type": "function"
132 | },
133 | {
134 | "inputs": [
135 | {
136 | "internalType": "address",
137 | "name": "spender",
138 | "type": "address"
139 | },
140 | {
141 | "internalType": "uint256",
142 | "name": "amount",
143 | "type": "uint256"
144 | }
145 | ],
146 | "name": "approve",
147 | "outputs": [
148 | {
149 | "internalType": "bool",
150 | "name": "",
151 | "type": "bool"
152 | }
153 | ],
154 | "stateMutability": "nonpayable",
155 | "type": "function"
156 | },
157 | {
158 | "inputs": [
159 | {
160 | "internalType": "address",
161 | "name": "sender",
162 | "type": "address"
163 | },
164 | {
165 | "internalType": "address",
166 | "name": "recipient",
167 | "type": "address"
168 | },
169 | {
170 | "internalType": "uint256",
171 | "name": "amount",
172 | "type": "uint256"
173 | }
174 | ],
175 | "name": "transferFrom",
176 | "outputs": [
177 | {
178 | "internalType": "bool",
179 | "name": "",
180 | "type": "bool"
181 | }
182 | ],
183 | "stateMutability": "nonpayable",
184 | "type": "function"
185 | }
186 | ]
187 | }
188 |
--------------------------------------------------------------------------------
/Arbie/Actions/redis_state.py:
--------------------------------------------------------------------------------
1 | """A store object that uses redis for backend."""
2 |
3 | import pickle # noqa: S403
4 |
5 | import redis
6 |
7 |
8 | def init_redis(address):
9 | host_and_port = address.split(":")
10 | if len(host_and_port) != 2:
11 | raise ValueError(f"Invalid Address to redis server: {address}")
12 | return redis.Redis(host=host_and_port[0], port=host_and_port[1], db=0)
13 |
14 |
15 | def _is_collection(key):
16 | parts = key.split(".")
17 | return len(parts) == 3 and parts[2].endswith("s")
18 |
19 |
20 | def _is_item(key):
21 | parts = key.split(".")
22 | if len(parts) == 3 and not _is_collection(key):
23 | return True
24 | return len(parts) == 4
25 |
26 |
27 | class RedisState(object):
28 | """A bridge to access the state stored in redis.
29 |
30 | This makes it possible for different Arbie instances to share state.
31 |
32 | Keys for redis are stored in the following format:
33 | namespace.version.category.identifier
34 |
35 | Namespace is the name of the Arbie instance running, or the name
36 | of the Arbie instance that we want to get data from.
37 |
38 | Version is used so that we can change the API without worrying about breaking
39 | Arbie instances that rely on old data. When we update how we store data,
40 | we should update the version. This makes more sense later once we have started
41 | using protobuf.
42 |
43 | Category contains information regarding what keys are available. If we store
44 | a list of Tokens then the category would contain the names of the tokens. So
45 | to get a token we first get the category to figure out which tokens are
46 | available. A category containing a list of items ends with an s. A category
47 | is stored as a redis set.
48 |
49 | Identifier contains the identifier of the actual data. In the case of a Token
50 | this would be the address of that token.
51 | """
52 |
53 | def __init__(self, host):
54 | self.r = init_redis(host)
55 | self.r.ping()
56 | self.local_state = {}
57 |
58 | def __getitem__(self, key):
59 | if _is_collection(key):
60 | return self._get_collection(key)
61 | elif _is_item(key):
62 | return self._get_item(key)
63 | return self.local_state[key]
64 |
65 | def __setitem__(self, key, value):
66 | if _is_collection(key):
67 | self._add_collection(key, value)
68 | elif _is_item(key):
69 | self._add_item(key, value)
70 | else:
71 | self.local_state[key] = value
72 | return
73 | self.publish(key, "updated")
74 |
75 | def __contains__(self, item) -> bool:
76 | if item is None:
77 | return False
78 | if item in self.local_state:
79 | return True
80 | return self.r.exists(item) == 1
81 |
82 | def subscribe(self, event_channel):
83 | p = self.r.pubsub()
84 | p.psubscribe(event_channel)
85 | return p
86 |
87 | def publish(self, event_channel, message):
88 | self.r.publish(event_channel, message)
89 |
90 | def keys(self):
91 | return self.local_state.keys()
92 |
93 | def delete(self, key):
94 | pipe = self.r.pipeline()
95 | if _is_collection(key):
96 | collection = self.r.smembers(key)
97 | for item in collection:
98 | pipe.delete(f"{key}.{item}")
99 | pipe.delete(key)
100 | pipe.execute()
101 |
102 | def _get_collection(self, key):
103 | self._exist_or_raise(key)
104 | pipe = self.r.pipeline()
105 | for item in self.r.smembers(key):
106 | item_name = item.decode("utf-8")
107 | item_key = f"{key}.{item_name}"
108 | pipe.get(item_key)
109 | raw_items = pipe.execute()
110 | return list(map(lambda i: pickle.loads(i), raw_items)) # noqa: S301
111 |
112 | def _get(self, key):
113 | self._exist_or_raise(key)
114 | return self.r.get(key)
115 |
116 | def _get_item(self, key):
117 | return pickle.loads(self._get(key)) # noqa: S301
118 |
119 | def _add_collection(self, collection_key, collection):
120 | self.delete(collection_key)
121 | pipe = self.r.pipeline()
122 | for item in collection:
123 | item_key = f"{collection_key}.{item}"
124 | pipe.set(item_key, pickle.dumps(item))
125 | pipe.sadd(collection_key, str(item))
126 | pipe.execute()
127 |
128 | def _add_item(self, key, value):
129 | self.r.set(key, pickle.dumps(value))
130 |
131 | def _exist_or_raise(self, key):
132 | if not self.__contains__(key):
133 | raise KeyError(f"key: {key} was not found in Redis")
134 |
--------------------------------------------------------------------------------
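
The key layout described in the docstring decides how RedisState stores a value: keys without dots stay in the in-memory local_state, namespace.version.identifier keys become single pickled entries, and namespace.version.category keys whose last part ends in an s become a redis set of member names plus one pickled entry per member. A hedged sketch against a locally reachable redis instance; the host, port, and key names are assumptions:

# Hedged sketch of the key convention; requires a reachable redis server.
from Arbie.Actions.redis_state import RedisState

state = RedisState("localhost:6379")            # host:port, validated by init_redis

state["scratch"] = 42                           # no dots: kept only in local_state
state["PoolFinder.1.weth"] = "0x00"             # three parts, not plural: single pickled item
state["PoolFinder.1.tokens"] = ["dai", "usdc"]  # plural category: stored as a collection

print(state["PoolFinder.1.tokens"])             # members are unpickled on read
state.delete("PoolFinder.1.tokens")             # removes the set and each member key
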
/Arbie/Contracts/uniswap.py:
--------------------------------------------------------------------------------
1 | """Utility functions for interacting with Uniswap."""
2 | import asyncio
3 | import logging
4 | from typing import List, Tuple
5 |
6 | from prometheus_async.aio import time
7 |
8 | from Arbie import DeployContractError, IERC20TokenError
9 | from Arbie.async_helpers import async_map, run_async
10 | from Arbie.Contracts.contract import Contract, ContractFactory
11 | from Arbie.Contracts.pool_contract import PoolContract
12 | from Arbie.Contracts.tokens import GenericToken
13 | from Arbie.prometheus import get_prometheus
14 | from Arbie.Variables import BigNumber, PoolType
15 |
16 | logger = logging.getLogger()
17 |
18 | CREATE_PAIR = get_prometheus().summary(
19 | "uniswap_factory_create_pair_index", "Time for creating a pair"
20 | )
21 |
22 |
23 | async def create_reserve(result: Tuple[float, GenericToken]):
24 | (value, token) = result
25 | try:
26 | exp = await token.decimals()
27 | except Exception:
28 | raise IERC20TokenError("Token doesn't contain decimals.")
29 | return BigNumber.from_value(value, exp)
30 |
31 |
32 | class UniswapPair(PoolContract):
33 | name = "UniswapV2Pair"
34 | protocol = "uniswap"
35 |
36 | fee = 0.003
37 | weight = 0.5
38 |
39 | pool_type = PoolType.uniswap
40 |
41 | def mint(self, address: str) -> bool:
42 | transaction = self.contract.functions.mint(address)
43 | return self._transact_info(transaction)
44 |
45 | async def get_token0(self) -> GenericToken:
46 | return await self._get_token(self.contract.functions.token0())
47 |
48 | async def get_token1(self) -> GenericToken:
49 | return await self._get_token(self.contract.functions.token1())
50 |
51 | async def get_tokens(self) -> List[GenericToken]:
52 | return await asyncio.gather(self.get_token0(), self.get_token1())
53 |
54 | async def get_balances(self) -> List[BigNumber]:
55 | result = await asyncio.gather(self._get_reserves(), self.get_tokens())
56 | zipped_result = list(zip(result[0], result[1]))
57 | return await async_map(create_reserve, zipped_result)
58 |
59 | async def get_fee(self) -> float:
60 | return self.fee
61 |
62 | def get_type(self) -> PoolType:
63 | return UniswapPair.pool_type
64 |
65 | async def get_weights(self) -> List[float]:
66 | return [self.weight, self.weight]
67 |
68 | async def _get_token(self, function) -> GenericToken:
69 | token_address = await self._call_async(function)
70 | cf = ContractFactory(self.w3, GenericToken)
71 | return cf.load_contract(self.owner_address, address=token_address)
72 |
73 | async def _get_reserves(self):
74 | return await self._call_async(self.contract.functions.getReserves())
75 |
76 |
77 | class UniswapFactory(Contract):
78 | name = "UniswapV2Factory"
79 | protocol = "uniswap"
80 |
81 | def __init__(self, w3, contract, timeout, **kwargs):
82 | self.cf = ContractFactory(w3, UniswapPair)
83 | super().__init__(w3, contract, timeout, **kwargs)
84 |
85 | async def all_pairs_length(self) -> int:
86 | return await self._call_async(self.contract.functions.allPairsLength())
87 |
88 | async def get_pair_address(self, index) -> str:
89 | return await self._call_async(self.contract.functions.allPairs(index))
90 |
91 | async def all_pairs(self, sleep=0) -> List[UniswapPair]:
92 | number_of_pairs = await self.all_pairs_length()
93 | return await async_map(self._create_pair_index, range(number_of_pairs))
94 |
95 | async def create_pair(
96 | self, token_a: GenericToken, token_b: GenericToken
97 | ) -> UniswapPair:
98 | transaction = self.contract.functions.createPair(
99 | token_a.get_address(), token_b.get_address()
100 | )
101 |
102 | if not self._transact_info(transaction):
103 | raise DeployContractError("Failed to deploy UniswapPair")
104 | pair_nmb = await self.all_pairs_length()
105 | return await self._create_pair_index(pair_nmb - 1)
106 |
107 | async def setup_pair(
108 | self, tokens: List[GenericToken], amounts: List[BigNumber]
109 | ) -> UniswapPair:
110 | pair = await self.create_pair(tokens[0], tokens[1])
111 |
112 | for token, amount in zip(tokens, amounts):
113 | token.transfer(pair.get_address(), amount)
114 | try:
115 | pair.mint(self.owner_address)
116 | except Exception:
117 | raise ValueError(f"Failed to mint tokens {tokens[0]},{tokens[1]}")
118 | return pair
119 |
120 | @time(CREATE_PAIR)
121 | async def _create_pair_index(self, index) -> UniswapPair:
122 | address = await self.get_pair_address(index)
123 | logger.info(f"Creating pair number {index}")
124 | return await run_async(self._load_pair, address)
125 |
126 | def _load_pair(self, address):
127 | return self.cf.load_contract(self.owner_address, address=address)
128 |
--------------------------------------------------------------------------------
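
UniswapFactory enumerates pairs by index through allPairsLength and allPairs, wrapping each address in a UniswapPair whose get_balances fetches reserves and token decimals concurrently. A sketch of reading the first pair; it assumes the factory wrapper can be loaded through ContractFactory the same way the pair and token wrappers are loaded above, and every address and URL is a placeholder:

# Hedged sketch: enumerate pairs and read the first pair's reserves.
import asyncio

from web3 import Web3

from Arbie.Contracts.contract import ContractFactory
from Arbie.Contracts.uniswap import UniswapFactory

async def main():
    w3 = Web3(Web3.HTTPProvider("http://localhost:8545"))  # assumed local node
    owner = w3.eth.accounts[0]
    factory = ContractFactory(w3, UniswapFactory).load_contract(owner, address="0x...")

    print(await factory.all_pairs_length())
    first_pair = (await factory.all_pairs())[0]  # UniswapPair wrappers
    print(await first_pair.get_balances())       # reserves as BigNumber, decimals applied

asyncio.run(main())
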
/Arbie/Variables/pool.py:
--------------------------------------------------------------------------------
1 | """Test class for setting up AMMs."""
2 | from enum import Enum
3 | from math import isclose
4 | from typing import List, NewType, Tuple
5 |
6 | from sympy import symbols
7 |
8 | from Arbie import PoolValueError
9 | from Arbie.Variables.token import Balance, Balances, Token, Tokens
10 |
11 | x = symbols("x")
12 |
13 |
14 | def get_value(values: Balances, token: Token) -> Balance:
15 | for v in values:
16 | if v.token == token:
17 | return v.value
18 |
19 |
20 | class PoolType(Enum):
21 | uniswap = 0
22 | balancer = 1
23 | unknown = 99
24 |
25 |
26 | class Pool(object):
27 | """Pool can create a arbitrary AMM that can be used for testing.
28 |
29 | The math can be found here https://balancer.finance/whitepaper/
30 | """
31 |
32 | def __init__(
33 | self,
34 | tokens: Tokens,
35 | balances: List[float],
36 | weights: List[float],
37 | pool_type: PoolType = PoolType.unknown,
38 | fee: float = 0,
39 | **kwargs,
40 | ): # noqa: WPS221
41 |
42 | self.tokens = tokens
43 | self.balances = Balance.create(tokens, balances)
44 | self.weights = Balance.create(tokens, weights)
45 | self.fee = fee
46 | self.pool_type = pool_type
47 |
48 | self.address = None
49 | if "address" in kwargs:
50 | self.address = kwargs.get("address")
51 |
52 | if not isclose(sum(weights), 1, abs_tol=1e-3): # noqa: WPS432
53 | raise PoolValueError(
54 | f"Weights are not normalized, sum is {sum(weights)} for pool {self.address}"
55 | )
56 |
57 | self._check_balances()
58 |
59 | if self.fee > 1 or self.fee < 0:
60 | raise PoolValueError(
61 | f"Fee: {self.fee}, should be between 0 and 1. Fee is {self.fee}, for pool {self.address}"
62 | )
63 |
64 | def __str__(self):
65 | return f"""
66 | Pool(
67 | Tokens: {self.tokens},
68 | Balances: {self.balances},
69 | Weights: {self.weights},
70 | Fee: {self.fee}
71 | Address: {self.address})"""
72 |
73 | def __repr__(self):
74 | return self.__str__()
75 |
76 | def __hash__(self):
77 | return hash(self.address)
78 |
79 | def update_balances(self, balances: List[float]):
80 | self.balances = Balance.create(self.tokens, balances)
81 | self._check_balances()
82 |
83 | def get_weights(self, token_in: Token, token_out: Token) -> Tuple[float, float]:
84 | return (get_value(self.weights, token_in), get_value(self.weights, token_out))
85 |
86 | def get_balances(self, token_in: Token, token_out: Token) -> Tuple[float, float]:
87 | return (get_value(self.balances, token_in), get_value(self.balances, token_out))
88 |
89 | def spot_price(self, token_in: Token, token_out: Token) -> float:
90 | """Ratio between token_in and token_out.
91 |
92 | A ratio > 1 means that token_in is less valuable than token_out
93 | A ratio < 1 means that token_in is more valuable than token_out
94 |
95 | token_in / ratio = token_out
96 |
97 | If the ratio is 400 then it takes 400 token_in for 1 token_out
98 | """
99 | bi, bo = self.get_balances(token_in, token_out)
100 | wi, wo = self.get_weights(token_in, token_out)
101 | return (bi / wi) / (bo / wo)
102 |
103 | def in_given_out_expr(self, token_in: Token, token_out: Token, k=1):
104 | bi, bo = self.get_balances(token_in, token_out)
105 | wi, wo = self.get_weights(token_in, token_out)
106 |
107 | bi *= k
108 | bo *= k
109 |
110 | return bi * ((bo / (bo - x * (1 - self.fee))) ** (wo / wi) - 1) # noqa: WPS221
111 |
112 | def out_given_in_expr(self, token_in: Token, token_out: Token, k=1):
113 | """Mathematical expression for out given in.
114 |
115 | To increase the accuracy of floating point numbers the parameter k can be provided.
116 | """
117 | bi, bo = self.get_balances(token_in, token_out)
118 | wi, wo = self.get_weights(token_in, token_out)
119 |
120 | bi *= k
121 | bo *= k
122 |
123 | return bo * (1 - (bi / (bi + x * (1 - self.fee))) ** (wi / wo)) # noqa: WPS221
124 |
125 | def in_given_out(self, token_in: Token, token_out: Token, amount: float) -> float:
126 | """Mathematical expression for in given out.
127 |
128 | To increase the accuracy of floating point numbers the parameter k can be provided.
129 | """
130 | expr = self.in_given_out_expr(token_in, token_out)
131 | return expr.subs(x, amount)
132 |
133 | def out_given_in(self, token_in: Token, token_out: Token, amount: float) -> float:
134 | expr = self.out_given_in_expr(token_in, token_out)
135 | return expr.subs(x, amount)
136 |
137 | def _check_balances(self):
138 | for balance in self.balances:
139 | if isclose(balance.value, 0, abs_tol=1e-3): # noqa: WPS432
140 | raise PoolValueError(f"Balance {balance} ~ 0 for pool {self.address}")
141 |
142 |
143 | Pools = NewType("Pools", List[Pool])
144 |
--------------------------------------------------------------------------------
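
A worked numeric example of the formulas above for a 50/50 pool: spot_price is the ratio of balance-per-weight terms, and out_given_in applies the constant-mean quote with the fee charged on the input side. The Token(name, address, price) call mirrors usage elsewhere in the repo and is an assumption; the balances are made up:

# Worked example for a 50/50 pool with made-up balances.
from Arbie.Variables.pool import Pool
from Arbie.Variables.token import Token

weth = Token("weth", "0x01", 1)
dai = Token("dai", "0x02", 0.0025)

pool = Pool([weth, dai], [100, 40_000], [0.5, 0.5], fee=0.003, address="0xPool")

print(pool.spot_price(weth, dai))       # (100 / 0.5) / (40_000 / 0.5) = 0.0025
print(pool.out_given_in(weth, dai, 1))  # ~394.9 DAI out for 1 WETH in, after the 0.3% fee
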
/tests/unit/Actions/action_test.py:
--------------------------------------------------------------------------------
1 | """Unittest of redis state."""
2 | from unittest.mock import MagicMock
3 |
4 | import pytest
5 | from pytest_mock.plugin import MockerFixture
6 |
7 | from Arbie import StateError
8 | from Arbie.Actions import Action, RedisState, Store
9 | from Arbie.Actions.action import Argument
10 |
11 |
12 | @pytest.fixture
13 | def local_store():
14 | return Store()
15 |
16 |
17 | @pytest.fixture
18 | def redis_store(redis_state):
19 | return Store(redis_state)
20 |
21 |
22 | class MockAction(object):
23 | in_variable_name = "pools"
24 | in_constant_name = "amount"
25 | out_variable_name = "tokens"
26 |
27 | redis_item_key = "arbie.1.amount"
28 | redis_collection_key = "arbie.1.pools"
29 | redis_collection_item_key = "arbie.1.pools.0xA1cB32"
30 |
31 | in_settings = {in_variable_name: "uniswap", in_constant_name: 1337}
32 | out_settings = {out_variable_name: "good_tokens"}
33 |
34 | in_settings_parsed = {
35 | in_variable_name: Argument("uniswap"),
36 | in_constant_name: Argument(1337),
37 | }
38 |
39 | redis_in_settings_parsed = {
40 | in_variable_name: Argument("arbie.1.pools"),
41 | in_constant_name: Argument(1337),
42 | }
43 |
44 | out_settings_parsed = {out_variable_name: Argument("good_tokens")}
45 |
46 |
47 | class TestAction(object):
48 | def test_get_input_output_settings(self, mocker: MockerFixture):
49 | mocker.patch(
50 | "Arbie.Actions.action.yaml.safe_load",
51 | return_value={
52 | Action.input_key: MockAction.in_settings,
53 | Action.output_key: MockAction.out_settings,
54 | },
55 | )
56 | action = Action()
57 | assert MockAction.in_settings_parsed == action.get_input_settings()
58 | assert MockAction.out_settings_parsed == action.get_output_settings()
59 |
60 | @pytest.mark.asyncio
61 | async def test_on_next(self):
62 | with pytest.raises(NotImplementedError):
63 | await Action().on_next(None)
64 |
65 |
66 | @pytest.fixture
67 | def mock_action():
68 | mock = MagicMock()
69 | mock.get_input_settings.return_value = MockAction.in_settings_parsed
70 | mock.get_output_settings.return_value = MockAction.out_settings_parsed
71 | return mock
72 |
73 |
74 | @pytest.fixture
75 | def mock_action_redis():
76 | mock = MagicMock()
77 | mock.get_input_settings.return_value = MockAction.redis_in_settings_parsed
78 | mock.get_output_settings.return_value = MockAction.out_settings_parsed
79 | return mock
80 |
81 |
82 | class TestStore(object):
83 | def test_get_raises(self, local_store):
84 | with pytest.raises(KeyError):
85 | local_store.get("key")
86 |
87 | def test_get_add(self, local_store):
88 | local_store.add("key", 1)
89 | assert local_store.get("key") == 1
90 |
91 | def test_create_input_value_error(self, local_store, mock_action):
92 | with pytest.raises(ValueError):
93 | local_store.create_input(mock_action)
94 |
95 | def test_create_input(self, local_store, mock_action):
96 | local_store.add("uniswap", 1337)
97 | local_store.create_input(mock_action)
98 |
99 | def test_publish(self, local_store: Store):
100 | with pytest.raises(StateError):
101 | local_store.publish("random channel", "random message")
102 |
103 | def test_subscribe(self, local_store: Store):
104 | with pytest.raises(StateError):
105 | local_store.subscribe("random channel")
106 |
107 | def test_delete(self, local_store: Store):
108 | with pytest.raises(StateError):
109 | local_store.delete("random key")
110 |
111 |
112 | class TestRedisStore(object):
113 | def test_get_raises(self, redis_store):
114 | with pytest.raises(KeyError):
115 | redis_store.get("key")
116 |
117 | def test_get_add(self, redis_store):
118 | redis_store.add("key", 1)
119 | assert redis_store.get("key") == 1
120 |
121 | def test_store_get(self, redis_state, mocker: MockerFixture):
122 | mock_get = mocker.patch.object(RedisState, "__getitem__")
123 | store = Store(redis_state)
124 |
125 | store[MockAction.redis_item_key] # noqa: WPS428
126 | assert mock_get.called
127 |
128 | def test_store_add(self, redis_state, mocker: MockerFixture):
129 | mock_set = mocker.patch.object(RedisState, "__setitem__")
130 | store = Store(redis_state)
131 |
132 | store.add(MockAction.redis_collection_key, None)
133 | assert mock_set.called
134 |
135 | def test_create_input_value_error(self, redis_store, mock_action):
136 | with pytest.raises(ValueError):
137 | redis_store.create_input(mock_action)
138 |
139 | def test_create_input(self, redis_store, mock_action):
140 | redis_store.add("uniswap", 1337)
141 | redis_store.create_input(mock_action)
142 |
143 | def test_create_input_from_redis(self, redis_store, mock_action_redis, mocker):
144 | mocker.patch.object(RedisState, "__contains__", return_value=True)
145 | redis_store.create_input(mock_action_redis)
146 |
--------------------------------------------------------------------------------
/Arbie/Contracts/balancer.py:
--------------------------------------------------------------------------------
1 | """Utility functions for interacting with balancer."""
2 |
3 | import logging
4 | from typing import List
5 |
6 | from Arbie import DeployContractError, IERC20TokenError
7 | from Arbie.Contracts.contract import Contract, ContractFactory
8 | from Arbie.Contracts.event_filter import EventFilter
9 | from Arbie.Contracts.pool_contract import PoolContract
10 | from Arbie.Contracts.tokens import GenericToken
11 | from Arbie.Variables import BigNumber, PoolType
12 |
13 | logger = logging.getLogger()
14 |
15 |
16 | class BalancerPool(PoolContract):
17 | name = "BPool"
18 | protocol = "balancer"
19 | pool_type = PoolType.balancer
20 |
21 | def get_number_of_tokens(self):
22 | return self.contract.functions.getNumTokens().call()
23 |
24 | async def get_tokens(self) -> List[GenericToken]:
25 | token_addresses = await self._call_async(
26 | self.contract.functions.getCurrentTokens()
27 | )
28 | cf = ContractFactory(self.w3, GenericToken)
29 | return list(
30 | map(
31 | (
32 | lambda a: cf.load_contract(
33 | owner_address=self.owner_address, address=a
34 | )
35 | ),
36 | token_addresses,
37 | )
38 | )
39 |
40 | async def get_balances(self) -> List[BigNumber]:
41 | tokens = await self.get_tokens()
42 | balances = []
43 | for token in tokens:
44 | b = self.contract.functions.getBalance(token.get_address()).call()
45 | try:
46 | decimals = await token.decimals()
47 | except Exception:
48 | raise IERC20TokenError("Bad token in balancer pool")
49 | balances.append(BigNumber.from_value(b, decimals))
50 | return balances
51 |
52 | async def get_weights(self) -> List[float]:
53 | tokens = await self.get_tokens()
54 | weights = list(
55 | map(
56 | (
57 | lambda t: self.contract.functions.getNormalizedWeight(
58 | t.get_address()
59 | ).call()
60 | ),
61 | tokens,
62 | )
63 | )
64 | sum_of_weights = sum(weights)
65 | return list(map((lambda x: x / sum_of_weights), weights))
66 |
67 | async def get_fee(self) -> float:
68 | fee = await self._call_async(self.contract.functions.getSwapFee())
69 | return BigNumber.from_value(fee).to_number()
70 |
71 | def get_type(self) -> PoolType:
72 | return BalancerPool.pool_type
73 |
74 | def bind(self, address: str, balance: BigNumber, denorm_weight: int) -> bool:
75 | if denorm_weight < 1:
76 | raise ValueError("Weight should be larger than 1")
77 | eth_safe_weight = BigNumber(denorm_weight)
78 | transaction = self.contract.functions.bind(
79 | address, balance.value, eth_safe_weight.value
80 | )
81 | return self._transact_info(transaction)
82 |
83 | def finalize(self) -> bool:
84 | return self._transact_info(self.contract.functions.finalize())
85 |
86 |
87 | class BalancerLoader(object):
88 | def __init__(self, factory: ContractFactory, address):
89 | self.factory = factory
90 | self.address = address
91 |
92 | def load_from_event(self, events):
93 | logger.info(f"Loading pool contracts in events {events}")
94 | return [
95 | self.factory.load_contract(
96 | owner_address=self.address, address=event.args.pool
97 | )
98 | for event in events
99 | ]
100 |
101 |
102 | class BalancerFactory(Contract):
103 | name = "BFactory"
104 | protocol = "balancer"
105 |
106 | def __init__(self, w3, contract, **kwargs):
107 | self.cf = ContractFactory(w3, BalancerPool)
108 | super().__init__(w3, contract, **kwargs)
109 |
110 | async def setup_pool(
111 | self,
112 | tokens: List[GenericToken],
113 | weights: List[float],
114 | amounts: List[BigNumber],
115 | approve_owner=True,
116 | ) -> BalancerPool:
117 | pool = self.new_pool()
118 |
119 | for token, weight, amount in zip(tokens, weights, amounts):
120 | if approve_owner:
121 | await token.approve_owner()
122 | token.approve(pool.get_address(), amount)
123 | pool.bind(token.get_address(), amount, weight)
124 |
125 | pool.finalize()
126 | return pool
127 |
128 | def new_pool(self) -> BalancerPool:
129 | transaction = self.contract.functions.newBPool()
130 | status, address = self._transact_status_and_contract(transaction)
131 |
132 | if not status:
133 | raise DeployContractError("Failed to deploy BalancerPool.")
134 |
135 | return self.cf.load_contract(self.owner_address, address=address)
136 |
137 | async def all_pools(self, start=0, steps=100) -> List[BalancerPool]:
138 | last_block = self.w3.eth.blockNumber
139 | bf = EventFilter(
140 | self.contract.events.LOG_NEW_POOL,
141 | self._create_pools,
142 | start,
143 | last_block,
144 | steps,
145 | )
146 | return await bf.find_events()
147 |
148 | def _create_pools(self, new_pool_events):
149 | loader = BalancerLoader(self.cf, self.owner_address)
150 | return loader.load_from_event(new_pool_events)
151 |
--------------------------------------------------------------------------------
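
BalancerPool.get_weights reads the scaled normalized weight for every bound token and divides by the sum, so the weights handed on to Pool are plain floats that sum to 1. A tiny standalone illustration of that rescaling step, with made-up scaled values:

# Standalone illustration of the rescaling done in BalancerPool.get_weights.
raw_weights = [2 * 10**17, 8 * 10**17]      # made-up values as returned by getNormalizedWeight
total = sum(raw_weights)
weights = [w / total for w in raw_weights]  # -> [0.2, 0.8], suitable for a 20/80 Pool
print(weights)
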
/tests/system/app_test.py:
--------------------------------------------------------------------------------
1 | """Systemtests of app."""
2 |
3 | import asyncio
4 |
5 | import pytest
6 | import yaml
7 | from pytest_mock.plugin import MockerFixture
8 |
9 | from Arbie.app import App
10 | from Arbie.Contracts import GenericToken
11 | from Arbie.Services import Coingecko
12 | from Arbie.Variables import BigNumber
13 |
14 |
15 | class Result(object):
16 | pool_finder_pools = "PoolFinder.1.pools"
17 | pool_finder_tokens = "PoolFinder.1.tokens"
18 | whitelist_addresses = "Whitelist.1.addresses"
19 | pool_updater_pools = "PoolUpdater.1.pools"
20 | arbitrage_filtered_trades = "Arbitrage.1.filtered_trades"
21 | trader_profit = "Trader.1.profit"
22 |
23 |
24 | pytestmark = pytest.mark.asyncio
25 |
26 |
27 | async def wait_and_stop(tree, key):
28 | while True:
29 | if key in tree.store.state:
30 | tree.stop()
31 | return
32 | await asyncio.sleep(0.1)
33 |
34 |
35 | async def wait_and_run(app):
36 | await asyncio.sleep(0.2)
37 | await app.run()
38 |
39 |
40 | def replace_base_conf(config, base_config):
41 | split_conf = config.split("action_tree:")
42 | return f"""
43 | {base_config}
44 |
45 | action_tree:
46 | {split_conf[1]}
47 | """
48 |
49 |
50 | def setup_config(path, base_config):
51 | with open(f"Brig/{path}", "r") as f:
52 | return replace_base_conf(f.read(), base_config)
53 |
54 |
55 | class TestApp(object):
56 | @pytest.fixture
57 | def base_config(
58 | self,
59 | web3_server,
60 | redis_server,
61 | weth,
62 | pool_factory,
63 | pair_factory,
64 | router,
65 | deploy_address,
66 | ):
67 | return f"""
68 | store:
69 | address: {redis_server}
70 | web3:
71 | address: {web3_server}
72 | variables:
73 | weth:
74 | type: Weth
75 | address: '{weth.get_address()}'
76 | uniswap_factory:
77 | type: UniswapFactory
78 | address: '{pair_factory.get_address()}'
79 | balancer_factory:
80 | type: BalancerFactory
81 | address: '{pool_factory.get_address()}'
82 | router:
83 | type: UniswapV2Router
84 | address: '{router.get_address()}'
85 | """ # noqa: WPS221
86 |
87 | @pytest.fixture
88 | def base_config_with_account(self, base_config):
89 | split_conf = base_config.split("variables:")
90 | return f"""
91 | {split_conf[0]}
92 | account:
93 | path: Brig/Trader/test_account.json
94 | variables:
95 | {split_conf[1]}
96 | """
97 |
98 | @pytest.fixture
99 | def pool_finder_config(self, base_config):
100 | return setup_config("PoolFinder/pool_finder.yml", base_config)
101 |
102 | @pytest.fixture
103 | def path_finder_config(self, base_config):
104 | return setup_config("PathFinder/path_finder.yml", base_config)
105 |
106 | @pytest.fixture
107 | def pool_updater_config(self, base_config):
108 | return setup_config("PoolUpdater/pool_updater.yml", base_config)
109 |
110 | @pytest.fixture
111 | def trader_config(self, base_config_with_account):
112 | return setup_config("Trader/trader.yml", base_config_with_account)
113 |
114 | @pytest.fixture
115 | def pool_finder(self, pool_finder_config, mocker: MockerFixture, whitelist):
116 | mocker.patch.object(Coingecko, "coins", return_value=whitelist)
117 |
118 | config = yaml.safe_load(pool_finder_config)
119 | app = App(config)
120 | assert len(app.action_tree.actions) == 2
121 | yield app
122 | app.store.delete(Result.pool_finder_pools)
123 | app.store.delete(Result.pool_finder_tokens)
124 | app.store.delete(Result.whitelist_addresses)
125 |
126 | @pytest.fixture
127 | def pool_updater(self, pool_updater_config):
128 | config = yaml.safe_load(pool_updater_config)
129 | app = App(config)
130 | yield app
131 | app.store.delete(Result.pool_updater_pools)
132 |
133 | @pytest.fixture
134 | def path_finder(self, path_finder_config):
135 | config = yaml.safe_load(path_finder_config)
136 | app = App(config)
137 | yield app
138 | app.store.delete(Result.arbitrage_filtered_trades)
139 |
140 | @pytest.fixture
141 | def trader(self, trader_config, weth: GenericToken, dummy_account):
142 | weth.transfer(dummy_account.address, BigNumber(4))
143 | config = yaml.safe_load(trader_config)
144 | app = App(config)
145 | yield app
146 | app.store.delete(Result.trader_profit)
147 |
148 | @pytest.mark.slow
149 | async def test_full_pipeline(self, pool_finder, pool_updater, path_finder, trader):
150 | await asyncio.gather(
151 | wait_and_run(pool_finder),
152 | pool_updater.run(),
153 | wait_and_stop(pool_updater, Result.pool_updater_pools),
154 | path_finder.run(),
155 | wait_and_stop(path_finder, Result.arbitrage_filtered_trades),
156 | trader.run(),
157 | wait_and_stop(trader, Result.trader_profit),
158 | )
159 |
160 | assert len(pool_finder.store.get(Result.pool_finder_pools)) == 4
161 | assert len(pool_finder.store.get(Result.pool_finder_tokens)) == 3
162 | assert len(pool_finder.store.get(Result.whitelist_addresses)) == 4
163 | assert len(pool_updater.store.get(Result.pool_updater_pools)) == 4
164 | assert len(path_finder.store.get(Result.arbitrage_filtered_trades)) == 1
165 | assert trader.store.get(Result.trader_profit) > 3.278 # noqa: WPS459
166 |
--------------------------------------------------------------------------------
/tests/unit/Services/coingecko_test.py:
--------------------------------------------------------------------------------
1 | """unit test for Coingecko."""
2 |
3 | from unittest.mock import MagicMock
4 |
5 | import pytest
6 | from pytest_mock.plugin import MockerFixture
7 |
8 | from Arbie.Services import Coingecko
9 |
10 | pytestmark = pytest.mark.asyncio
11 |
12 | coins = [
13 | {"id": "1ai", "symbol": "1ai", "name": "1AI"},
14 | {"id": "velo-token", "symbol": "VLO", "name": "VELO Token"},
15 | {"id": "1inch", "symbol": "1inch", "name": "1inch"},
16 | {"id": "weird-coin", "symbol": "", "name": ""},
17 | ]
18 |
19 | coin_1a1 = {
20 | "name": "1AI",
21 | "tickers": [
22 | {
23 | "base": "1AI",
24 | "target": "BTC",
25 | "market": {
26 | "name": "Vindax",
27 | "identifier": "vindax",
28 | "has_trading_incentive": False,
29 | },
30 | "last": 1e-8,
31 | "volume": 51070,
32 | "converted_last": {"btc": 1e-8, "eth": 2.919e-7, "usd": 0.00036442},
33 | "converted_volume": {"btc": 0.0005107, "eth": 0.01490667, "usd": 18.61},
34 | "trust_score": None,
35 | "bid_ask_spread_percentage": 50,
36 | "timestamp": "2021-01-17T02:39:22+00:00",
37 | "last_traded_at": "2021-01-17T02:39:22+00:00",
38 | "last_fetch_at": "2021-01-18T02:17:05+00:00",
39 | "is_anomaly": False,
40 | "is_stale": True,
41 | "trade_url": "https://vindax.com/exchange-base.html?symbol=1AI_BTC",
42 | "token_info_url": None,
43 | "coin_id": "1ai",
44 | "target_coin_id": "bitcoin",
45 | }
46 | ],
47 | }
48 |
49 | coin_vlo = {
50 | "name": "VELO Token",
51 | "tickers": [
52 | {
53 | "base": "0X98AD9B32DD10F8D8486927D846D4DF8BAF39ABE2",
54 | "target": "ETH",
55 | "market": {
56 | "name": "Uniswap (v2)",
57 | "identifier": "uniswap",
58 | "has_trading_incentive": False,
59 | },
60 | "last": 0.00000243692314918803,
61 | "volume": 932296.695673962,
62 | "converted_last": {"btc": 9.41e-8, "eth": 0.00000244, "usd": 0.00333838},
63 | "converted_volume": {"btc": 0.08776486, "eth": 2.274565, "usd": 3112.36},
64 | "trust_score": "green",
65 | "bid_ask_spread_percentage": 0.614416,
66 | "timestamp": "2021-01-20T05:32:35+00:00",
67 | "last_traded_at": "2021-01-20T05:32:35+00:00",
68 | "last_fetch_at": "2021-01-20T06:01:50+00:00",
69 | "is_anomaly": False,
70 | "is_stale": False,
71 | "trade_url": "https://app.uniswap.org/#/swap?outputCurrency=0x98ad9b32dd10f8d8486927d846d4df8baf39abe2",
72 | "token_info_url": "https://info.uniswap.org/token/0x98ad9b32dd10f8d8486927d846d4df8baf39abe2",
73 | "coin_id": "velo-token",
74 | "target_coin_id": "ethereum",
75 | }
76 | ],
77 | }
78 |
79 | coin_1inch = {
80 | "name": "1inch",
81 | "tickers": [
82 | {
83 | "base": "0X111111111117DC0AA78B770FA6A738034120C302",
84 | "target": "ETH",
85 | "market": {
86 | "name": "1inch Liquidity Protocol",
87 | "identifier": "one_inch_liquidity_protocol",
88 | "has_trading_incentive": False,
89 | },
90 | "last": 0.0013669431402898,
91 | "volume": 4600000.73782359,
92 | "converted_last": {"btc": 0.0000527, "eth": 0.00136693, "usd": 1.87},
93 | "converted_volume": {"btc": 242.4, "eth": 6288, "usd": 8592901},
94 | "trust_score": "green",
95 | "bid_ask_spread_percentage": 0.602722,
96 | "timestamp": "2021-01-20T06:04:08+00:00",
97 | "last_traded_at": "2021-01-20T06:04:08+00:00",
98 | "last_fetch_at": "2021-01-20T06:04:08+00:00",
99 | "is_anomaly": True, # Modified from original inorder to test anomaly detection
100 | "is_stale": False,
101 | "token_info_url": None,
102 | "coin_id": "1inch",
103 | "target_coin_id": "ethereum",
104 | },
105 | ],
106 | }
107 |
108 |
109 | weird_coin = {
110 | "name": "1inch",
111 | "tickers": [
112 | {
113 | "base": "0X7DC0AA78B770FA6A73803",
114 | "target": "ETH",
115 | "is_anomaly": False,
116 | },
117 | ],
118 | }
119 |
120 |
121 | def bad_request(endpoint):
122 | mock = MagicMock()
123 | mock.ok = False
124 | mock.json.return_value = None
125 | return mock
126 |
127 |
128 | def load_data(endpoint): # noqa: WPS231
129 | mock = MagicMock()
130 | mock.ok = True
131 | json = None
132 | if "coins/list" in endpoint: # noqa: WPS223
133 | json = coins
134 | elif "velo" in endpoint:
135 | json = coin_vlo
136 | elif "1inch" in endpoint:
137 | json = coin_1inch
138 | elif "1ai" in endpoint:
139 | json = coin_1a1
140 | elif "weird" in endpoint:
141 | json = weird_coin
142 | mock.json.return_value = json
143 | return mock
144 |
145 |
146 | @pytest.fixture
147 | def request_mock(mocker: MockerFixture):
148 | mock = mocker.patch("Arbie.Services.coingecko.requests")
149 | mock.get.side_effect = load_data
150 | return mock
151 |
152 |
153 | class TestCoingecko(object):
154 | async def test_coins(self, request_mock: MagicMock):
155 | addresses = await Coingecko().coins()
156 | assert len(addresses) == 1
157 | assert addresses[0] == "0x98ad9b32dd10f8d8486927d846d4df8baf39abe2"
158 |
159 | async def test_bad_request(self, request_mock: MagicMock):
160 | request_mock.get.side_effect = bad_request
161 | with pytest.raises(ConnectionError):
162 | await Coingecko(2, 0, 1, 0).coins()
163 |
--------------------------------------------------------------------------------
/Arbie/Actions/trader.py:
--------------------------------------------------------------------------------
1 | """Trader contains actions for executing trades."""
2 |
3 | import logging
4 |
5 | from web3.exceptions import ContractLogicError
6 |
7 | from Arbie import TransactionError
8 | from Arbie.Actions import Action
9 | from Arbie.Variables import BigNumber
10 |
11 | logger = logging.getLogger()
12 |
13 |
14 | class BalanceChecker(object):
15 | def __init__(self, web3, weth):
16 | self.web3 = web3
17 | self.weth = weth
18 |
19 | async def check(self, address):
20 | amount_weth = await self.weth.balance_of(address)
21 | amount_eth = BigNumber.from_value(self.web3.eth.getBalance(address))
22 | return amount_eth.to_number(), amount_weth.to_number()
23 |
24 | async def check_total(self, address):
25 | return sum(await self.check(address))
26 |
27 | async def check_and_convert(self, trader_address, min_eth, min_weth, max_weth):
28 | amount_eth, amount_weth = await self.check(trader_address)
29 |
30 | # Already have sufficient amount setup
31 | if amount_eth > min_eth and amount_weth > min_weth:
32 | return amount_eth, amount_weth
33 |
34 | # We don't have sufficient supply
35 | if amount_eth + amount_weth < min_eth + min_weth:
36 | raise ValueError(
37 | f"Not enough liquidity eth: {amount_eth}, min_eth: {min_eth}, weth {amount_weth}, min_weth: {min_weth}"
38 | )
39 |
40 | # ETH is good but not weth
41 | if amount_eth > min_eth + min_weth:
42 | self._to_weth(amount_eth, min_eth, max_weth)
43 | else:
44 | self._to_eth(amount_eth, amount_weth, min_eth, min_weth)
45 |
46 | return await self.check(trader_address)
47 |
48 | def _to_weth(self, amount_eth, min_eth, max_weth):
49 | max_deposit = amount_eth - min_eth
50 | max_deposit = min(max_deposit, max_weth)
51 | status = self.weth.deposit(max_deposit)
52 | if not status:
53 | raise TransactionError("Failed to deposit eth for weth")
54 |
55 | def _to_eth(self, amount_eth, amount_weth, min_eth, min_weth):
56 | max_withdraw = amount_weth - min_weth
57 | max_withdraw = min(max_withdraw, min_eth - amount_eth)
58 | status = self.weth.withdraw(max_withdraw)
59 | if not status:
60 | raise TransactionError("Failed to withdraw weth for eth")
61 |
62 |
63 | class SetUpTrader(Action):
64 | """SetUp Trader account for trading.
65 |
66 | [Settings]
67 | input:
68 | web3: web3
69 | weth: weth
70 | router: router
71 | min_eth: 1
72 | min_weth: 2
73 | max_weth: 10
74 | account: account
75 | output:
76 | balance_eth: balance_eth
77 | balance_weth: balance_weth
78 | """
79 |
80 | async def on_next(self, data):
81 | balance_checker = BalanceChecker(data.web3(), data.weth())
82 | trader_account = data.account()
83 | amount_eth, amount_weth = await balance_checker.check_and_convert(
84 | trader_account.address,
85 | data.min_eth(),
86 | data.min_weth(),
87 | data.max_weth(),
88 | )
89 | router = data.router()
90 | if not router.approve(data.weth()):
91 | raise RuntimeError("Failed to authorize arbie to spend tokens.")
92 |
93 | logger.info(
94 | f"Finished setting up trader, eth: {amount_eth}, weth: {amount_weth}"
95 | )
96 |
97 | data.balance_eth(amount_eth)
98 | data.balance_weth(amount_weth)
99 |
100 |
101 | def _perform_trade(trade, router, min_profit, dry_run):
102 | amount_out = router.check_out_given_in(trade)
103 | try:
104 | _, gas_cost = router.swap(trade, dry_run=True)
105 | except ContractLogicError as e:
106 | logger.warning(e)
107 | return False
108 |
109 | profit = amount_out - trade.amount_in - gas_cost
110 |
111 | logger.info(
112 | f"Checking trade with profit {profit}, amount_in: {trade.amount_in}, amount out: {amount_out}, gas cost: {gas_cost}"
113 | )
114 | if profit < min_profit:
115 | return False
116 |
117 | logger.info(f"Executing trade {trade}")
118 | if dry_run:
119 | return False
120 | return router.swap(trade)
121 |
122 |
123 | def perform_trade(data, amount_weth):
124 | router = data.router()
125 | min_profit = data.min_profit()
126 | for trade in data.trades():
127 | # Make sure that we don't try to trade with more weth than we have
128 | trade.amount_in = min(trade.amount_in, amount_weth)
129 | if _perform_trade(trade, router, min_profit, data.dry_run()):
130 | return True
131 | return False
132 |
133 |
134 | class Trader(Action):
135 | """Find optimal arbitrage opportunity for a list sorted trades.
136 |
137 | Remove all trades that are not profitable.
138 |
139 | [Settings]
140 | input:
141 | web3: web3
142 | router: router
143 | trades: filtered_trades
144 | min_profit: 0.3
145 | weth: weth
146 | account: account
147 | dry_run: True
148 | output:
149 | profit: profit
150 | """
151 |
152 | async def on_next(self, data): # noqa: WPS210
153 | trader_account = data.account()
154 | balance_checker = BalanceChecker(data.web3(), data.weth())
155 | amount_eth, amount_weth = await balance_checker.check(trader_account.address)
156 | balance_pre = amount_weth + amount_eth
157 | logger.info(f"amount_eth: {amount_eth}, amount_weth: {amount_weth}")
158 |
159 | if not perform_trade(data, amount_weth):
160 | logger.warning("No trade opportunity found.")
161 |
162 | balance_post = await balance_checker.check_total(trader_account.address)
163 |
164 | data.profit(balance_post - balance_pre)
165 | logger.info("Finished trading")
166 |
--------------------------------------------------------------------------------
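
BalanceChecker.check_and_convert keeps the trader account inside its configured bounds: do nothing if both ETH and WETH are above their minimums, fail if the combined balance cannot cover them, otherwise wrap surplus ETH (capped at max_weth) or unwrap WETH back into ETH. A plain-number sketch of those branches with made-up balances and no chain access:

# Plain-number sketch of the branches in BalanceChecker.check_and_convert.
def plan_conversion(amount_eth, amount_weth, min_eth, min_weth, max_weth):
    if amount_eth > min_eth and amount_weth > min_weth:
        return "nothing to do"
    if amount_eth + amount_weth < min_eth + min_weth:
        raise ValueError("not enough total liquidity")
    if amount_eth > min_eth + min_weth:
        deposit = min(amount_eth - min_eth, max_weth)
        return f"deposit {deposit} ETH into WETH"
    withdraw = min(amount_weth - min_weth, min_eth - amount_eth)
    return f"withdraw {withdraw} WETH back to ETH"

print(plan_conversion(5, 0.5, min_eth=1, min_weth=2, max_weth=10))  # deposit 4 ETH into WETH
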
/Arbie/Actions/pool_finder.py:
--------------------------------------------------------------------------------
1 | """Pool finder is responsible for finding all pools."""
2 | import asyncio
3 | import logging
4 | from math import isclose
5 | from typing import List, Set, Tuple
6 |
7 | from prometheus_async.aio import time
8 |
9 | from Arbie import IERC20TokenError, PoolValueError
10 | from Arbie.Actions.action import Action
11 | from Arbie.async_helpers import async_map
12 | from Arbie.Contracts import GenericToken, UniswapFactory, UniswapPair
13 | from Arbie.Contracts.pool_contract import PoolContract
14 | from Arbie.prometheus import get_prometheus
15 | from Arbie.Variables import Pools, Token, Tokens
16 |
17 | logger = logging.getLogger()
18 |
19 | CREATE_TOKEN_TIME = get_prometheus().summary(
20 | "pool_finder_create_and_check_token", "Time for creating and checking a token"
21 | )
22 |
23 |
24 | def check_and_get_price(balances) -> Tuple[bool, float]:
25 | for balance in balances:
26 | if isclose(balance.value, 0, abs_tol=1e-3): # noqa: WPS432
27 | return True, 0
28 | return False, balances[0] / balances[1]
29 |
30 |
31 | class TokenFinder(object):
32 | def __init__(self, uoa, whitelist: Set[str]):
33 | self.uoa = uoa
34 | self.whitelist = whitelist
35 |
36 | async def create_token(self, pair: UniswapPair, price):
37 | tokens = await pair.get_tokens()
38 | t0 = tokens[0]
39 | t1 = tokens[1]
40 | if self.uoa.address == t0.get_address():
41 | return await t1.create_token(price)
42 | if self.uoa.address == t1.get_address():
43 | return await t0.create_token(1 / price)
44 |
45 | @time(CREATE_TOKEN_TIME)
46 | async def create_and_check_token(self, pair: UniswapPair) -> GenericToken:
47 | balances = None
48 | try:
49 | balances = await pair.get_balances()
50 | except IERC20TokenError:
51 | return None
52 |
53 | is_zero, price = check_and_get_price(balances)
54 | if is_zero:
55 | return None
56 |
57 | token = await self.create_token(pair, price)
58 | logger.info(f"Finished creating token from {pair.get_address()}")
59 | return token
60 |
61 | async def create_tokens(self, pairs: List[UniswapPair]) -> List[Token]:
62 | tokens = await async_map(self.create_and_check_token, pairs)
63 | tokens.append(self.uoa)
64 | token_set = set(tokens)
65 | token_set.discard(None)
66 | return list(token_set)
67 |
68 | async def filter_pairs(self, pairs: List[UniswapPair]) -> List[UniswapPair]:
69 | pairs = await async_map(self._filter_pair_in_whitelist, pairs)
70 | pair_set = set(pairs)
71 | pair_set.discard(None)
72 | return list(pair_set)
73 |
74 | async def _filter_pair_in_whitelist(self, pair: UniswapPair) -> UniswapPair:
75 | tokens = await pair.get_tokens()
76 | t0 = tokens[0].get_address().lower()
77 | t1 = tokens[1].get_address().lower()
78 | logger.info(f"Checking whitelist for tokens {t0} and {t1}")
79 | if t0 in self.whitelist and t1 in self.whitelist:
80 | return pair
81 |
82 |
83 | async def create_and_filter_pools(
84 | pool_contracts: List[PoolContract], tokens: List[Tokens]
85 | ) -> Pools:
86 | pools = []
87 | for contract in pool_contracts:
88 | logger.info(
89 | f"Filtering contract {pool_contracts.index(contract)} of {len(pool_contracts)}"
90 | )
91 | try:
92 | pool = await contract.create_pool()
93 | except IERC20TokenError as e:
94 | logger.warning(f"Failed to create pool, bad token. {e}")
95 | continue
96 | except PoolValueError as e:
97 | logger.warning(f"Failed to create pool, bad pool. {e}")
98 | continue
99 | for token in pool.tokens:
100 | try:
101 | index = tokens.index(token)
102 | except ValueError:
103 | # Token not found in the known token list, skip it
104 | continue
105 | # Copy the price from the already-priced token
106 | token.price = tokens[index].price
107 | pools.append(pool)
108 | return pools
109 |
110 |
111 | class PoolFinder(Action):
112 | """PoolFinder is responsible for finding pools.
113 |
114 | [Settings]
115 | input:
116 | weth: weth
117 | uniswap_factory: uniswap_factory
118 | whitelist: addresses
119 | output:
120 | pools: all_pools
121 | tokens: all_tokens
122 | """
123 |
124 | async def on_next(self, data):
125 | weth = await data.weth().create_token(1)
126 |
127 | uni_coro = self._get_pairs_and_tokens(
128 | data.uniswap_factory(), TokenFinder(weth, set(data.whitelist()))
129 | )
130 |
131 | pools, tokens = await self._get_pools_and_tokens(
132 | *await self._get_results(uni_coro),
133 | )
134 |
135 | data.pools(pools)
136 | data.tokens(tokens)
137 |
138 | async def _get_pools_and_tokens(self, pair_contracts, tokens):
139 | result = await asyncio.gather(
140 | create_and_filter_pools(pair_contracts, tokens),
141 | )
142 | return result[0], tokens
143 |
144 | async def _get_results(
145 | self, uni_coro
146 | ) -> Tuple[List[UniswapPair], List[Token]]:
147 | results = await asyncio.gather(uni_coro)
148 | pairs = results[0][0]
149 | tokens = results[0][1]
150 | return pairs, tokens
151 |
152 | async def _get_pairs_and_tokens(
153 | self, factory: UniswapFactory, token_finder: TokenFinder
154 | ):
155 | pairs = await factory.all_pairs()
156 | pairs = await token_finder.filter_pairs(pairs)
157 | logger.info("Found all uniswap pairs, filtering tokens.")
158 | tokens = await token_finder.create_tokens(pairs)
159 | return pairs, tokens
160 |
--------------------------------------------------------------------------------
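
A small sketch of how check_and_get_price treats near-empty reserves, importing it from Arbie.Actions.pool_finder. FakeBalance is a made-up stand-in for the balance objects returned by pair.get_balances(); the project uses its own BigNumber type, so this class exists only for illustration.

from Arbie.Actions.pool_finder import check_and_get_price


class FakeBalance:
    """Hypothetical stand-in exposing .value and division like the real balances."""

    def __init__(self, value):
        self.value = value

    def __truediv__(self, other):
        return self.value / other.value


empty_pair = [FakeBalance(0.0), FakeBalance(10.0)]
healthy_pair = [FakeBalance(2.0), FakeBalance(8.0)]

print(check_and_get_price(empty_pair))    # (True, 0) -> the pair is skipped
print(check_and_get_price(healthy_pair))  # (False, 0.25) -> price used for the token
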
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 |
3 | #
4 | # Copyright 2015 the original author or authors.
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License");
7 | # you may not use this file except in compliance with the License.
8 | # You may obtain a copy of the License at
9 | #
10 | # https://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | #
18 |
19 | ##############################################################################
20 | ##
21 | ## Gradle start up script for UN*X
22 | ##
23 | ##############################################################################
24 |
25 | # Attempt to set APP_HOME
26 | # Resolve links: $0 may be a link
27 | PRG="$0"
28 | # Need this for relative symlinks.
29 | while [ -h "$PRG" ] ; do
30 | ls=`ls -ld "$PRG"`
31 | link=`expr "$ls" : '.*-> \(.*\)$'`
32 | if expr "$link" : '/.*' > /dev/null; then
33 | PRG="$link"
34 | else
35 | PRG=`dirname "$PRG"`"/$link"
36 | fi
37 | done
38 | SAVED="`pwd`"
39 | cd "`dirname \"$PRG\"`/" >/dev/null
40 | APP_HOME="`pwd -P`"
41 | cd "$SAVED" >/dev/null
42 |
43 | APP_NAME="Gradle"
44 | APP_BASE_NAME=`basename "$0"`
45 |
46 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
47 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
48 |
49 | # Use the maximum available, or set MAX_FD != -1 to use that value.
50 | MAX_FD="maximum"
51 |
52 | warn () {
53 | echo "$*"
54 | }
55 |
56 | die () {
57 | echo
58 | echo "$*"
59 | echo
60 | exit 1
61 | }
62 |
63 | # OS specific support (must be 'true' or 'false').
64 | cygwin=false
65 | msys=false
66 | darwin=false
67 | nonstop=false
68 | case "`uname`" in
69 | CYGWIN* )
70 | cygwin=true
71 | ;;
72 | Darwin* )
73 | darwin=true
74 | ;;
75 | MINGW* )
76 | msys=true
77 | ;;
78 | NONSTOP* )
79 | nonstop=true
80 | ;;
81 | esac
82 |
83 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
84 |
85 |
86 | # Determine the Java command to use to start the JVM.
87 | if [ -n "$JAVA_HOME" ] ; then
88 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
89 | # IBM's JDK on AIX uses strange locations for the executables
90 | JAVACMD="$JAVA_HOME/jre/sh/java"
91 | else
92 | JAVACMD="$JAVA_HOME/bin/java"
93 | fi
94 | if [ ! -x "$JAVACMD" ] ; then
95 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
96 |
97 | Please set the JAVA_HOME variable in your environment to match the
98 | location of your Java installation."
99 | fi
100 | else
101 | JAVACMD="java"
102 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
103 |
104 | Please set the JAVA_HOME variable in your environment to match the
105 | location of your Java installation."
106 | fi
107 |
108 | # Increase the maximum file descriptors if we can.
109 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
110 | MAX_FD_LIMIT=`ulimit -H -n`
111 | if [ $? -eq 0 ] ; then
112 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
113 | MAX_FD="$MAX_FD_LIMIT"
114 | fi
115 | ulimit -n $MAX_FD
116 | if [ $? -ne 0 ] ; then
117 | warn "Could not set maximum file descriptor limit: $MAX_FD"
118 | fi
119 | else
120 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
121 | fi
122 | fi
123 |
124 | # For Darwin, add options to specify how the application appears in the dock
125 | if $darwin; then
126 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
127 | fi
128 |
129 | # For Cygwin or MSYS, switch paths to Windows format before running java
130 | if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
131 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
132 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
133 |
134 | JAVACMD=`cygpath --unix "$JAVACMD"`
135 |
136 | # We build the pattern for arguments to be converted via cygpath
137 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
138 | SEP=""
139 | for dir in $ROOTDIRSRAW ; do
140 | ROOTDIRS="$ROOTDIRS$SEP$dir"
141 | SEP="|"
142 | done
143 | OURCYGPATTERN="(^($ROOTDIRS))"
144 | # Add a user-defined pattern to the cygpath arguments
145 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
146 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
147 | fi
148 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
149 | i=0
150 | for arg in "$@" ; do
151 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
152 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
153 |
154 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
155 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
156 | else
157 | eval `echo args$i`="\"$arg\""
158 | fi
159 | i=`expr $i + 1`
160 | done
161 | case $i in
162 | 0) set -- ;;
163 | 1) set -- "$args0" ;;
164 | 2) set -- "$args0" "$args1" ;;
165 | 3) set -- "$args0" "$args1" "$args2" ;;
166 | 4) set -- "$args0" "$args1" "$args2" "$args3" ;;
167 | 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
168 | 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
169 | 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
170 | 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
171 | 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
172 | esac
173 | fi
174 |
175 | # Escape application args
176 | save () {
177 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
178 | echo " "
179 | }
180 | APP_ARGS=`save "$@"`
181 |
182 | # Collect all arguments for the java command, following the shell quoting and substitution rules
183 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
184 |
185 | exec "$JAVACMD" "$@"
186 |
--------------------------------------------------------------------------------
/Arbie/Actions/action.py:
--------------------------------------------------------------------------------
1 | """Base class for actions."""
2 |
3 | from typing import Dict
4 |
5 | import yaml
6 |
7 | from Arbie import StateError
8 | from Arbie.prometheus import get_prometheus
9 |
10 | ADD_TIME = get_prometheus().summary(
11 | "store_add_time", "Time spent adding items from store"
12 | )
13 | GET_TIME = get_prometheus().summary(
14 | "store_get_time", "Time spent getting items from store"
15 | )
16 |
17 |
18 | class Argument(object):
19 | def __init__(self, default_value):
20 | self.name = None
21 | self.value = None
22 | if default_value == "None":
23 | return
24 | if isinstance(default_value, str):
25 | self.name = default_value
26 | else:
27 | self.value = default_value
28 |
29 | def __eq__(self, other):
30 | return self.name == other.name and self.value == other.value
31 |
32 |
33 | def parse_settings(settings: Dict):
34 | if settings is None:
35 | return None
36 | parsed_settings = {}
37 | for key, argument in settings.items():
38 | parsed_settings[key] = Argument(argument)
39 | return parsed_settings
40 |
41 |
42 | def to_none_safe_dict(items) -> Dict[str, str]:
43 | if items:
44 | return items
45 | return {}
46 |
47 |
48 | class Action(object):
49 | """Action is a base class for data processing actions.
50 |
51 | Actions are combined to ActionTrees. Actions are configured
52 | by parsing their __doc__ and comparing with the settings that
53 | have been given when starting Arbie.
54 |
55 | Everything below the [Settings] section is parsed as yaml.
56 | There are two lists that need to be configured: the input
57 | list and the output list. These are mapped to the items
58 | that are read from and written to the store.
59 | [Settings]
60 | input:
61 | key_name: 1 # default_value
62 | output:
63 | key_name: variable_name
64 | """
65 |
66 | input_key = "input"
67 | output_key = "output"
68 |
69 | def __init__(self, config=None):
70 | self.settings = self._create_settings()
71 | self.settings = self._update_settings_with_config(config)
72 |
73 | def get_input_settings(self) -> Dict[str, str]:
74 | return to_none_safe_dict(self.settings[self.input_key])
75 |
76 | def get_output_settings(self) -> Dict[str, str]:
77 | return to_none_safe_dict(self.settings[self.output_key])
78 |
79 | async def on_next(self, data):
80 | raise NotImplementedError("Action does not implement on_next")
81 |
82 | def _create_settings(self):
83 | settings = yaml.safe_load(self.__doc__.split("[Settings]\n")[1])
84 |
85 | return {
86 | self.input_key: parse_settings(settings[self.input_key]),
87 | self.output_key: parse_settings(settings[self.output_key]),
88 | }
89 |
90 | def _update_settings_with_config(self, config):
91 | if config is None:
92 | return self.settings
93 |
94 | if self.input_key in config and config[self.input_key] is not None:
95 | self._emplace_settings(config[self.input_key], self.get_input_settings())
96 |
97 | if self.output_key in config and config[self.output_key] is not None:
98 | self._emplace_settings(config[self.output_key], self.get_output_settings())
99 |
100 | return self.settings
101 |
102 | def _emplace_settings(self, config, settings):
103 | for key, name in config.items():
104 | if key not in settings:
105 | raise ValueError(
106 | f"""Argument: {key} is not a valid input/output for action {type(self).__name__},
107 | look over your configuration"""
108 | )
109 | settings[key] = Argument(name)
110 |
111 |
112 | def get_value_lambda(value):
113 | return lambda _: value
114 |
115 |
116 | class Store(object):
117 | """Store the state of the Action Tree.
118 |
119 | Making it possible for Actions to share state between each other.
120 | It also makes it possible for actions to export state.
121 | """
122 |
123 | def __init__(self, state=None):
124 | if state is None:
125 | self.state = {}
126 | else:
127 | self.state = state
128 |
129 | def __getitem__(self, key):
130 | return self.state[key]
131 |
132 | @ADD_TIME.time()
133 | def add(self, key, item):
134 | if hasattr(item, "__len__"): # noqa: WPS421
135 | get_prometheus().gauge(
136 | f"{key.lower()}_number", f"Number of {key}"
137 | ).set( # noqa: WPS221
138 | len(item)
139 | ) # noqa: WPS221
140 | self.state[key] = item
141 |
142 | @GET_TIME.time()
143 | def get(self, key):
144 | return self.state[key]
145 |
146 | def delete(self, key):
147 | try:
148 | self.state.delete(key)
149 | except AttributeError:
150 | raise StateError("RedisState is required to remove keys from store.")
151 |
152 | def subscribe(self, event_channel):
153 | try:
154 | return self.state.subscribe(event_channel)
155 | except AttributeError:
156 | raise StateError("RedisState is required to subscribe to store.")
157 |
158 | def publish(self, event_channel, message):
159 | try:
160 | self.state.publish(event_channel, message)
161 | except AttributeError:
162 | raise StateError("RedisState is required to publish event.")
163 |
164 | def create_input(self, action):
165 | return self._create_data(
166 | action.get_input_settings(), action.get_output_settings()
167 | )
168 |
169 | def _create_data(self, input_settings, output_settings):
170 | methods = {}
171 | for key, argument in input_settings.items():
172 | if argument.name in self.state:
173 | methods[key] = self._get_lambda(argument.name)
174 | elif argument.value is not None:
175 | methods[key] = get_value_lambda(argument.value)
176 | else:
177 | raise ValueError(
178 | f"Argument {key}, with name {argument.name} not found in state and no default value"
179 | )
180 |
181 | for key_out, argument_out in output_settings.items():
182 | methods[key_out] = self._add_lambda(argument_out.name)
183 | return type("ActionData", (), methods)()
184 |
185 | def _get_lambda(self, key):
186 | return lambda _: self.get(key)
187 |
188 | def _add_lambda(self, key):
189 | return lambda _, value: self.add(key, value)
190 |
--------------------------------------------------------------------------------
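
A minimal sketch of how an Action's [Settings] docstring is wired to the Store, using only the classes defined above. The Echo action and the greeting/shout keys are invented for illustration.

import asyncio

from Arbie.Actions.action import Action, Store


class Echo(Action):
    """Copy an input value to an output key, upper-cased.

    [Settings]
    input:
        message: greeting
    output:
        result: shout
    """

    async def on_next(self, data):
        data.result(data.message().upper())


store = Store()
store.add("greeting", "hello arbie")  # the input argument resolves to this state key

action = Echo()
data = store.create_input(action)
asyncio.run(action.on_next(data))

print(store.get("shout"))  # HELLO ARBIE
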
/Arbie/settings_parser.py:
--------------------------------------------------------------------------------
1 | """settings parser can be used for parsing input yaml."""
2 |
3 | import json
4 | import logging
5 |
6 | from eth_account import Account
7 | from requests import Session
8 | from requests.adapters import HTTPAdapter
9 | from web3 import Web3, middleware
10 | from web3.gas_strategies.time_based import construct_time_based_gas_price_strategy
11 |
12 | from Arbie.Actions import ActionTree, RedisState, Store
13 | from Arbie.Contracts import (
14 | BalancerFactory,
15 | ContractFactory,
16 | UniswapFactory,
17 | UniswapV2Router,
18 | Weth,
19 | )
20 | from Arbie.Contracts.contract import Network
21 |
22 |
23 | def to_network(string: str) -> Network:
24 | if string.lower() == "mainnet":
25 | return Network.mainnet
26 | if string.lower() == "kovan":
27 | return Network.kovan
28 | return Network.ropsten
29 |
30 |
31 | class Keys(object):
32 | actions = "actions"
33 | address = "address"
34 | network = "network"
35 | store = "store"
36 | type_key = "type"
37 | variables = "variables"
38 | value = "value"
39 | version = "version"
40 | web3 = "web3"
41 | action_tree = "action_tree"
42 | event = "event"
43 | redis = "redis"
44 | account = "account"
45 | key = "key"
46 | path = "path"
47 | transaction_wait = "transaction_wait"
48 |
49 |
50 | def setup_gas_strategy(w3, transaction_wait):
51 | w3.eth.setGasPriceStrategy(
52 | construct_time_based_gas_price_strategy(transaction_wait)
53 | )
54 |
55 | w3.middleware_onion.add(middleware.time_based_cache_middleware)
56 | w3.middleware_onion.add(middleware.latest_block_based_cache_middleware)
57 | w3.middleware_onion.add(middleware.simple_cache_middleware)
58 |
59 |
60 | class VariableParser(object):
61 | """VariableParser parses settings config and adds variable to store."""
62 |
63 | def __init__(self, config, w3_config, account_config):
64 | self.config = config
65 | self.transaction_wait = w3_config.get(Keys.transaction_wait, 60)
66 | self.w3 = self.set_up_web3(w3_config)
67 | if account_config is None:
68 | self.account = None
69 | else:
70 | self.account = self._set_up_account(account_config)
71 |
72 | def add_variables(self, store):
73 | for name, variable_config in self.config.items():
74 | store.add(name, self._create_variable(variable_config))
75 | store.add(Keys.web3, self.w3)
76 | if self.account:
77 | store.add(Keys.account, self.account)
78 |
79 | def set_up_web3(self, config):
80 | address = config[Keys.address]
81 |
82 | adapter = HTTPAdapter(
83 | pool_connections=20, pool_maxsize=20, max_retries=10 # noqa: WPS432
84 | ) # noqa: WPS432
85 | session = Session()
86 | session.mount("http://", adapter)
87 | session.mount("https://", adapter)
88 |
89 | w3 = Web3(Web3.HTTPProvider(address, session=session))
90 | setup_gas_strategy(w3, self.transaction_wait)
91 | if not w3.isConnected():
92 | raise ConnectionError("Web3 is not connected")
93 |
94 | logging.getLogger().info(f"Connected to Node {address}")
95 | logging.getLogger().info(f"Current block is {w3.eth.blockNumber}")
96 | return w3
97 |
98 | def set_up_uniswap(self, config):
99 | return self._set_up_contracts(config, UniswapFactory)
100 |
101 | def set_up_balancer(self, config):
102 | return self._set_up_contracts(config, BalancerFactory)
103 |
104 | def set_up_token(self, config):
105 | return self._set_up_contracts(config, Weth)
106 |
107 | def set_up_router(self, config):
108 | return self._set_up_contracts(config, UniswapV2Router)
109 |
110 | def _create_variable(self, variable_config): # noqa: WPS321
111 | variable_type = variable_config[Keys.type_key]
112 | if variable_type == "UniswapFactory":
113 | return self.set_up_uniswap(variable_config)
114 | if variable_type == "BalancerFactory":
115 | return self.set_up_balancer(variable_config)
116 | if variable_type == "Weth":
117 | return self.set_up_token(variable_config)
118 | if variable_type == "UniswapV2Router":
119 | return self.set_up_router(variable_config)
120 | if variable_type == "float":
121 | return float(variable_config[Keys.value])
122 | if variable_type == "int":
123 | return int(variable_config[Keys.value])
124 | if variable_type == "str":
125 | return str(variable_config[Keys.value])
126 | raise TypeError(f"No rule for creating variable of type {variable_type}")
127 |
128 | def _set_up_contracts(self, config, contract, **kwargs):
129 | factory = ContractFactory(self.w3, contract, self.transaction_wait * 2)
130 | if Keys.network in config:
131 | return factory.load_contract(
132 | network=to_network(config[Keys.network]), account=self.account
133 | )
134 | return factory.load_contract(address=config[Keys.address], account=self.account)
135 |
136 | def _set_up_account(self, account_config):
137 | with open(account_config[Keys.path], "r") as config_file:
138 | config = json.load(config_file)
139 | return Account.from_key(config[Keys.key])
140 |
141 |
142 | class SettingsParser(object):
143 | """Settings Parser is used to configure Arbie."""
144 |
145 | def __init__(self, config):
146 | self.config = config
147 |
148 | def setup_store(self):
149 | store = self.store()
150 | if Keys.variables in self.config:
151 | self._add_variables(store)
152 | return store
153 |
154 | def store(self):
155 | if Keys.store in self.config:
156 | store_config = self.config[Keys.store]
157 | redis_state = RedisState(store_config[Keys.address])
158 | return Store(redis_state)
159 | return Store()
160 |
161 | def action_tree(self, store):
162 | if Keys.action_tree in self.config:
163 | return self._setup_action_tree(store, self.config[Keys.action_tree])
164 |
165 | def _setup_action_tree(self, store, config):
166 | tree = ActionTree.create(config[Keys.actions], store)
167 | if Keys.event not in config:
168 | return tree
169 |
170 | events = config[Keys.event]
171 | if not isinstance(events, list):
172 | events = [events]
173 | for event in events:
174 | tree.register_event(event)
175 | return tree
176 |
177 | def _add_variables(self, store):
178 | account_conf = None
179 | if Keys.account in self.config:
180 | account_conf = self.config[Keys.account]
181 |
182 | variable_parser = VariableParser(
183 | self.config[Keys.variables], self.config[Keys.web3], account_conf
184 | )
185 | variable_parser.add_variables(store)
186 |
--------------------------------------------------------------------------------
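
A hedged sketch of the configuration shape that SettingsParser consumes, pieced together from the keys read above. The node URL, key-file path, and choice of variables are placeholders; instantiating the variables requires a reachable node and a contract_addresses.json entry for the named contracts.

from Arbie.settings_parser import SettingsParser

config = {
    "web3": {
        "address": "http://localhost:8545",  # placeholder JSON-RPC endpoint
        "transaction_wait": 60,
    },
    "variables": {
        "weth": {"type": "Weth", "network": "mainnet"},
        "uniswap_factory": {"type": "UniswapFactory", "network": "mainnet"},
        "min_profit": {"type": "float", "value": 0.3},
    },
    "account": {"path": "test_account.json"},  # placeholder json file containing a "key" field
}

parser = SettingsParser(config)
# setup_store() connects to the node above and adds each variable to the store,
# so it only succeeds against a live endpoint.
store = parser.setup_store()
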
/Arbie/resources/contracts/uniswap/UniswapV2Pair.json:
--------------------------------------------------------------------------------
1 | {"abi":[{"inputs":[],"payable":false,"stateMutability":"nonpayable","type":"constructor"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"owner","type":"address"},{"indexed":true,"internalType":"address","name":"spender","type":"address"},{"indexed":false,"internalType":"uint256","name":"value","type":"uint256"}],"name":"Approval","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"sender","type":"address"},{"indexed":false,"internalType":"uint256","name":"amount0","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount1","type":"uint256"},{"indexed":true,"internalType":"address","name":"to","type":"address"}],"name":"Burn","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"sender","type":"address"},{"indexed":false,"internalType":"uint256","name":"amount0","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount1","type":"uint256"}],"name":"Mint","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"sender","type":"address"},{"indexed":false,"internalType":"uint256","name":"amount0In","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount1In","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount0Out","type":"uint256"},{"indexed":false,"internalType":"uint256","name":"amount1Out","type":"uint256"},{"indexed":true,"internalType":"address","name":"to","type":"address"}],"name":"Swap","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"uint112","name":"reserve0","type":"uint112"},{"indexed":false,"internalType":"uint112","name":"reserve1","type":"uint112"}],"name":"Sync","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"from","type":"address"},{"indexed":true,"internalType":"address","name":"to","type":"address"},{"indexed":false,"internalType":"uint256","name":"value","type":"uint256"}],"name":"Transfer","type":"event"},{"constant":true,"inputs":[],"name":"DOMAIN_SEPARATOR","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"MINIMUM_LIQUIDITY","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"PERMIT_TYPEHASH","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"internalType":"address","name":"","type":"address"},{"internalType":"address","name":"","type":"address"}],"name":"allowance","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"value","type":"uint256"}],"name":"approve","outputs":[{"internalType":"bool","name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"balanceOf","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"to","type":"address"}],"name":"burn","outputs":[{"internalType":"uint256","name":"amount0","typ
e":"uint256"},{"internalType":"uint256","name":"amount1","type":"uint256"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"decimals","outputs":[{"internalType":"uint8","name":"","type":"uint8"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"factory","outputs":[{"internalType":"address","name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"getReserves","outputs":[{"internalType":"uint112","name":"_reserve0","type":"uint112"},{"internalType":"uint112","name":"_reserve1","type":"uint112"},{"internalType":"uint32","name":"_blockTimestampLast","type":"uint32"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"_token0","type":"address"},{"internalType":"address","name":"_token1","type":"address"}],"name":"initialize","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"kLast","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"to","type":"address"}],"name":"mint","outputs":[{"internalType":"uint256","name":"liquidity","type":"uint256"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"name","outputs":[{"internalType":"string","name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"nonces","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"owner","type":"address"},{"internalType":"address","name":"spender","type":"address"},{"internalType":"uint256","name":"value","type":"uint256"},{"internalType":"uint256","name":"deadline","type":"uint256"},{"internalType":"uint8","name":"v","type":"uint8"},{"internalType":"bytes32","name":"r","type":"bytes32"},{"internalType":"bytes32","name":"s","type":"bytes32"}],"name":"permit","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"price0CumulativeLast","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"price1CumulativeLast","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"to","type":"address"}],"name":"skim","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"internalType":"uint256","name":"amount0Out","type":"uint256"},{"internalType":"uint256","name":"amount1Out","type":"uint256"},{"internalType":"address","name":"to","type":"address"},{"internalType":"bytes","name":"data","type":"bytes"}],"name":"swap","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"symbol","outputs":[{"internalType":"string","name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[],"name":"sync","outputs":[],"payable":false,"stateMutability":"nonpaya
ble","type":"function"},{"constant":true,"inputs":[],"name":"token0","outputs":[{"internalType":"address","name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"token1","outputs":[{"internalType":"address","name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"totalSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"value","type":"uint256"}],"name":"transfer","outputs":[{"internalType":"bool","name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":false,"inputs":[{"internalType":"address","name":"from","type":"address"},{"internalType":"address","name":"to","type":"address"},{"internalType":"uint256","name":"value","type":"uint256"}],"name":"transferFrom","outputs":[{"internalType":"bool","name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"}]}
2 |
--------------------------------------------------------------------------------
/Arbie/Contracts/contract.py:
--------------------------------------------------------------------------------
1 | """Utility functions for interacting with smart contracts."""
2 |
3 | import json
4 | from enum import Enum
5 | from typing import Tuple
6 |
7 | from eth_account import Account
8 | from pkg_resources import resource_string
9 |
10 | from Arbie.async_helpers import run_async
11 | from Arbie.Variables import BigNumber
12 |
13 |
14 | class Network(Enum):
15 | mainnet = 0
16 | kovan = 1
17 | ropsten = 2
18 |
19 |
20 | def _get_tx_params(w3, address, value=None, gas=None):
21 | """Get generic transaction parameters."""
22 | params = {
23 | "from": address,
24 | "nonce": w3.eth.getTransactionCount(address),
25 | }
26 | if value:
27 | params["value"] = value
28 | if gas:
29 | params["gas"] = gas
30 | return params
31 |
32 |
33 | def _wait_for_tx(w3, tx_hash, timeout):
34 | return w3.eth.waitForTransactionReceipt(tx_hash, timeout) # noqa: WPS432
35 |
36 |
37 | def transact(
38 | w3, address: str, transaction, timeout, value=None, gas=48814000
39 | ): # noqa: WPS432
40 | """Transact a transaction and return transaction receipt."""
41 | tx_params = _get_tx_params(w3, address, value, gas)
42 | tx_hash = transaction.transact(tx_params)
43 | return _wait_for_tx(w3, tx_hash, timeout)
44 |
45 |
46 | def signed_transaction(w3, user_account: Account, transaction, timeout, value, gas):
47 | tx_params = _get_tx_params(w3, user_account.address, value, gas)
48 |
49 | signed_txn = Account.sign_transaction(
50 | transaction.buildTransaction(tx_params), private_key=user_account.key
51 | )
52 | tx_hash = w3.eth.sendRawTransaction(signed_txn.rawTransaction)
53 | return _wait_for_tx(w3, tx_hash, timeout)
54 |
55 |
56 | class Contract(object):
57 | """Base class for contracts."""
58 |
59 | def __init__(
60 | self,
61 | w3,
62 | contract,
63 | timeout,
64 | owner_address: str = None,
65 | user_account: Account = None,
66 | ):
67 | self.w3 = w3
68 | self.owner_address = owner_address
69 | self.contract = contract
70 | self.user_account = user_account
71 | self.timeout = timeout
72 |
73 | def get_address(self) -> str:
74 | return self.contract.address
75 |
76 | def set_owner_address(self, address):
77 | if self.user_account is None:
78 | self.owner_address = address
79 | else:
80 | raise PermissionError(
81 | "A user account is set, update that instead of owner address."
82 | )
83 |
84 | def set_account(self, account):
85 | self.user_account = account
86 | self.owner_address = None
87 |
88 | def _transact(self, transaction, value=None, gas=None):
89 | if self.user_account is None:
90 | return transact(
91 | w3=self.w3,
92 | address=self.owner_address,
93 | transaction=transaction,
94 | timeout=self.timeout,
95 | value=value,
96 | gas=gas,
97 | )
98 | return signed_transaction(
99 | w3=self.w3,
100 | user_account=self.user_account,
101 | transaction=transaction,
102 | timeout=self.timeout,
103 | value=value,
104 | gas=gas,
105 | )
106 |
107 | def _transact_info(
108 | self, transaction, value=None, gas=None, dry_run=False
109 | ) -> Tuple[bool, float]:
110 | if gas is None:
111 | gas = self._estimate_gas(transaction)
112 | gas_amount = self.w3.eth.generateGasPrice()
113 |
114 | if gas_amount:
115 | gas_cost = BigNumber.from_value(gas * gas_amount).to_number()
116 | else:
117 | # No gas strategy specified
118 | gas_cost = None
119 |
120 | if dry_run:
121 | return False, gas_cost
122 | return self._transact(transaction, value, gas).status, gas_cost
123 |
124 | def _transact_status_and_contract(self, transaction) -> Tuple[bool, str]:
125 | tx_receipt = self._transact(transaction)
126 | return tx_receipt.status, tx_receipt.logs[1].address
127 |
128 | def _get_account(self):
129 | if self.user_account is None:
130 | return self.owner_address
131 | return self.user_account.address
132 |
133 | def _estimate_gas(self, transaction):
134 | tx_params = _get_tx_params(self.w3, self._get_account())
135 | built_transaction = transaction.buildTransaction(tx_params)
136 | # When estimating gas, the "gas" key must not be in the transaction
137 | built_transaction.pop("gas", None)
138 | return self.w3.eth.estimateGas(built_transaction) + 1
139 |
140 | async def _call_async(self, function):
141 | return await run_async(function.call)
142 |
143 |
144 | class ContractFactory(object):
145 | def __init__(self, w3, factory_class: Contract, timeout=120):
146 | self.w3 = w3
147 | self.timeout = timeout
148 |
149 | if factory_class.name is None or factory_class.protocol is None:
150 | raise ValueError(f"{factory_class} does not contain default parameters")
151 | self.factory_class = factory_class
152 |
153 | def load_contract(self, owner_address: str = None, **kwargs) -> Contract:
154 | """Load contract require address or network to be passed in kwargs."""
155 | address = self._read_address(**kwargs)
156 | contract = self._load_contract(address)
157 | user_account = self._read_account(**kwargs)
158 | return self.factory_class(
159 | self.w3,
160 | timeout=self.timeout,
161 | owner_address=owner_address,
162 | user_account=user_account,
163 | contract=contract,
164 | )
165 |
166 | def deploy_contract(self, owner_address: str, *args) -> Contract:
167 | contract_address = self._deploy_contract(owner_address, *args)
168 | contract = self._load_contract(contract_address)
169 | return self.factory_class(
170 | self.w3,
171 | timeout=self.timeout,
172 | owner_address=owner_address,
173 | contract=contract,
174 | )
175 |
176 | def _deploy_contract(self, deploy_address: str, *args) -> str:
177 | """Deploy contract and pass on args to contract abi constructor."""
178 | contract = self.w3.eth.contract(
179 | abi=self._read_abi(), bytecode=self._read_bytecode()
180 | )
181 |
182 | transaction = contract.constructor(*args)
183 | tx_receipt = transact(self.w3, deploy_address, transaction, self.timeout)
184 |
185 | return tx_receipt.contractAddress
186 |
187 | def _read_resource(self, path: str, filename: str) -> str:
188 | if path is None:
189 | path = ""
190 | else:
191 | path = ".{0}".format(path)
192 |
193 | file_path = "Arbie.resources.contracts{0}".format(path)
194 | return resource_string(file_path, filename).decode("utf-8")
195 |
196 | def _get_address(self, network: Network):
197 | json_data = json.loads(self._read_resource(None, "contract_addresses.json"))
198 |
199 | return json_data[self.factory_class.protocol][self.factory_class.name][
200 | network.name
201 | ]
202 |
203 | def _read_abi(self):
204 | return self._read_json("abi")
205 |
206 | def _read_bytecode(self):
207 | return self._read_json("bytecode")
208 |
209 | def _read_json(self, key):
210 | filename = f"{self.factory_class.name}.json"
211 | json_data = self._read_resource(self.factory_class.protocol, filename)
212 | return json.loads(json_data)[key]
213 |
214 | def _read_address(self, **kwargs):
215 | if "address" in kwargs:
216 | return kwargs.get("address")
217 | if "network" in kwargs:
218 | return self._get_address(kwargs.get("network"))
219 |
220 | raise NameError("kwargs does not contain network or address")
221 |
222 | def _read_account(self, **kwargs):
223 | if "account" in kwargs:
224 | return kwargs.get("account")
225 |
226 | def _load_contract(self, address: str):
227 | return self.w3.eth.contract(address=address, abi=self._read_abi())
228 |
--------------------------------------------------------------------------------
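
A minimal sketch of loading a contract through ContractFactory, using the same imports that settings_parser.py relies on. The endpoint URL is a placeholder, and it assumes the bundled contract_addresses.json has a mainnet entry for Weth; loading the contract object is local, so no transaction is sent.

from web3 import Web3

from Arbie.Contracts import ContractFactory, Weth
from Arbie.Contracts.contract import Network

w3 = Web3(Web3.HTTPProvider("http://localhost:8545"))  # placeholder endpoint

factory = ContractFactory(w3, Weth, timeout=120)

# Resolve the address from the bundled contract_addresses.json ...
weth = factory.load_contract(network=Network.mainnet)
# ... or pass an explicit address instead:
# weth = factory.load_contract(address="0x...")

print(weth.get_address())
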