... (0 , SAI) |-> Px ...
179 | Int 0))
182 |
183 |
184 |
185 |
186 | rule
187 | exec(Address:ETHAddress is given vault Vault:ETHAddress) => .K ...
188 | ... Vault |-> (_ => Address) ...
189 | ... .List => ListItem(Address is given vault Vault)
190 |
191 | rule
192 | exec(Address:ETHAddress is given vault Vault:ETHAddress) => FAIL ...
193 | V
194 | ... .List => ListItem(Address is given vault Vault)
195 | requires notBool(Vault in keys(V))
196 |
197 | endmodule
198 |
199 |
200 |
--------------------------------------------------------------------------------
/cff-models/mcd_maker.k:
--------------------------------------------------------------------------------
1 | require "uniswapv2.k"
2 |
3 | module MAKER-SYNTAX
4 | imports UNISWAPV2-SYNTAX
5 |
6 | syntax MakerSubTx ::= Int "in" ETHAddress "collateral locked"
7 | | Int "in" ETHAddress "collateral freed"
8 | | Int "in" ETHAddress "debt wiped"
9 | | Int "in" ETHAddress "debt drawn"
10 |
11 |
12 | syntax MakerTX ::= "open vault" ETHAddress "for collateral" ETHAddress
13 | | MakerSubTx "and" MakerSubTx "from vault" ETHAddress
14 | | ETHAddress "bites vault" ETHAddress
15 | | Int "increment in stability fees for" ETHAddress
16 | | Int "in" ETHAddress "and" Int "in" ETHAddress "transferred from" ETHAddress "to" ETHAddress
17 |
18 | syntax ETHTransaction ::= MakerTX
19 | syntax ETHAddress ::= "DAI"
20 |
21 | endmodule
22 |
23 | module MAKER
24 | imports MAKER-SYNTAX
25 | imports UNISWAPV2
26 |
27 |
28 | rule
29 | exec(_:Int in CAddress:ETHAddress collateral locked and _:MakerSubTx from vault Vault:ETHAddress)
30 | ...
31 |
32 | V => V[Vault <- CAddress]
33 | S => S[(Vault in CAddress) <- 0:Int][(Vault in DAI) <- 0:Int]
34 | ... .List => ListItem(open vault Vault for collateral CAddress)
35 | requires notBool(Vault in keys(V))
36 |
37 |
38 | rule
39 | exec(_:Int in CAddress:ETHAddress collateral freed and _:MakerSubTx from vault Vault:ETHAddress)
40 | ...
41 |
42 | V => V[Vault <- CAddress]
43 | S => S[(Vault in CAddress) <- 0:Int][(Vault in DAI) <- 0:Int]
44 | ... .List => ListItem(open vault Vault for collateral CAddress)
45 | requires notBool(Vault in keys(V))
46 |
47 |
48 | rule
49 | exec(CAmount:Int in CAddress:ETHAddress collateral locked and DAmount:Int in DAI debt wiped from vault Vault:ETHAddress) =>
50 | Vault in CAddress gets CAmount ~>
51 | Vault in DAI gets (0 -Int DAmount)
52 | ...
53 |
54 | ... Vault |-> CAddress ...
55 | ... (Vault in DAI) |-> TotalDebt ...
56 | ... .List => ListItem(CAmount in CAddress collateral locked and DAmount in DAI debt wiped from vault Vault)
57 | requires (TotalDebt >=Int DAmount)
58 |
59 |
60 | rule
61 | exec(CAmount:Int in CAddress:ETHAddress collateral locked and DAmount:Int in DAI debt drawn from vault Vault:ETHAddress) =>
62 | Vault in CAddress gets CAmount ~>
63 | Vault in DAI gets DAmount
64 | ...
65 |
66 | ... CAddress |-> Rate ...
67 | ... Vault |-> CAddress ...
68 | ... (CAddress , DAI) |-> Px ...
69 | ... (Vault in CAddress) |-> TotalCollateral (Vault in DAI) |-> TotalDebt ...
70 | ... .List => ListItem(CAmount in CAddress collateral locked and DAmount in DAI debt drawn from vault Vault)
71 | requires (TotalCollateral +Int CAmount) *Int Px *Int 100 *Int 1000000000000000000000000000 >=Int 150 *Int Rate *Int (TotalDebt +Int DAmount) // vault must stay at least 150% collateralized; Rate is scaled by 10^27 (ray)
72 |
73 |
74 | rule
75 | exec(CAmount:Int in CAddress:ETHAddress collateral locked and DAmount:Int in DAI debt drawn from vault Vault:ETHAddress) =>
76 | Vault in CAddress gets CAmount ~>
77 | Vault in DAI gets DAmount
78 | ...
79 |
80 | ... Vault |-> CAddress ...
81 | P:Map
82 | ... .List => ListItem(CAmount in CAddress collateral locked and DAmount in DAI debt drawn from vault Vault)
83 | requires notBool((CAddress , DAI) in keys(P))
84 |
85 |
86 | rule
87 | exec(CAmount:Int in CAddress:ETHAddress collateral freed and DAmount:Int in DAI debt wiped from vault Vault:ETHAddress) =>
88 | Vault in CAddress gets (0 -Int CAmount) ~>
89 | Vault in DAI gets (0 -Int DAmount)
90 | ...
91 |
92 | ... CAddress |-> Rate ...
93 | ... Vault |-> CAddress ...
94 | ... (CAddress , DAI) |-> Px ...
95 | ... (Vault in CAddress) |-> TotalCollateral (Vault in DAI) |-> TotalDebt ...
96 | ... .List => ListItem(CAmount in CAddress collateral freed and DAmount in DAI debt wiped from vault Vault)
97 | requires ((TotalCollateral -Int CAmount) *Int Px *Int 100 *Int 1000000000000000000000000000 >=Int 150 *Int Rate *Int (TotalDebt -Int DAmount)) andBool (TotalDebt >=Int DAmount)
98 |
99 | rule
100 | exec(CAmount:Int in CAddress:ETHAddress collateral freed and DAmount:Int in DAI debt wiped from vault Vault:ETHAddress) =>
101 | Vault in CAddress gets (0 -Int CAmount) ~>
102 | Vault in DAI gets (0 -Int DAmount)
103 | ...
104 |
105 | ... Vault |-> CAddress ...
106 | P:Map
107 | ... .List => ListItem(CAmount in CAddress collateral freed and DAmount in DAI debt wiped from vault Vault)
108 | requires notBool((CAddress , DAI) in keys(P))
109 |
110 |
111 |
112 | rule
113 | exec(CAmount:Int in CAddress:ETHAddress collateral freed and DAmount:Int in DAI debt drawn from vault Vault:ETHAddress) =>
114 | Vault in CAddress gets (0 -Int CAmount) ~>
115 | Vault in DAI gets DAmount
116 | ...
117 |
118 | ... CAddress |-> Rate ...
119 | ... Vault |-> CAddress ...
120 | ... (CAddress , DAI) |-> Px ...
121 | ... (Vault in CAddress) |-> TotalCollateral (Vault in DAI) |-> TotalDebt ...
122 | ... .List => ListItem(CAmount in CAddress collateral freed and DAmount in DAI debt drawn from vault Vault)
123 | requires (TotalCollateral -Int CAmount) *Int Px *Int 100 *Int 1000000000000000000000000000 >=Int 150 *Int Rate *Int (TotalDebt +Int DAmount)
124 |
125 |
126 | rule
127 | exec(CAmount:Int in CAddress:ETHAddress collateral freed and DAmount:Int in DAI debt drawn from vault Vault:ETHAddress) =>
128 | Vault in CAddress gets (0 -Int CAmount) ~>
129 | Vault in DAI gets DAmount
130 | ...
131 |
132 | ... Vault |-> CAddress ...
133 | P:Map
134 | ... .List => ListItem(CAmount in CAddress collateral freed and DAmount in DAI debt drawn from vault Vault)
135 | requires notBool((CAddress , DAI) in keys(P))
136 |
137 |
138 | // TODO: use a specific amount; sometimes a fat-finger error limits the amount
139 | rule
140 | exec(Address:ETHAddress bites vault Vault:ETHAddress) =>
141 | Vault in DAI gets (0 -Int TotalDebt) ~>
142 | Vault in CAddress gets 0 -Int TotalCollateral ~>
143 | Address in DAI gets (0 -Int TotalDebt *Int Rate /Int 1000000000000000000000000000) ~>
144 | Address in CAddress gets TotalCollateral // win the reverse auction
145 | // Address in CAddress gets (0 -Int ((90 *Int TotalDebt *Int Rate) /Int (Px *Int 100 *Int 1000000000000000000000000000)) ) //10% discount, abstraction for auction
146 | ...
147 |
148 | ... CAddress |-> Rate ...
149 | ... Vault |-> CAddress ...
150 | ... (CAddress, DAI) |-> Px ...
151 | ... (Vault in CAddress) |-> TotalCollateral (Vault in DAI) |-> TotalDebt ...
152 | ... .List => ListItem(Address bites vault Vault)
153 | requires (TotalCollateral *Int Px *Int 100 *Int 1000000000000000000000000000 <Int 150 *Int Rate *Int TotalDebt) andBool (TotalDebt >Int 0) // vault is undercollateralized (below 150%) and has outstanding debt
154 |
155 |
156 |
157 | rule
158 | exec(CAmount:Int in CAddress:ETHAddress and DAmount:Int in DAI transferred from SrcVault:ETHAddress to DstVault:ETHAddress) =>
159 | SrcVault in CAddress gets (0 -Int CAmount) ~>
160 | SrcVault in DAI gets (0 -Int DAmount) ~>
161 | DstVault in CAddress gets CAmount ~>
162 | DstVault in DAI gets DAmount
163 | ...
164 |
165 | ... SrcVault |-> CAddress DstVault |-> CAddress ...
166 | ... .List => ListItem(CAmount in CAddress and DAmount in DAI transferred from SrcVault to DstVault)
167 |
168 |
169 | rule
170 | exec(_:Int increment in stability fees for CAddress:ETHAddress) ...
171 | R => R[CAddress <- 1000000000000000000000000000:Int] //10**27
172 | requires notBool(CAddress in keys(R))
173 |
174 | rule
175 | exec(Delta:Int increment in stability fees for CAddress:ETHAddress) => .K ...
176 | ... CAddress |-> (OldRate => (OldRate +Int Delta)) ...
177 | ... .List => ListItem(Delta increment in stability fees for CAddress)
178 |
179 |
180 |
181 | endmodule
182 |
183 |
184 |
--------------------------------------------------------------------------------
/cff-models/mev.k:
--------------------------------------------------------------------------------
1 | require "uniswapv2.k"
2 | require "mcd_maker.k"
3 |
4 | module MEV-SYNTAX
5 | imports UNISWAPV2-SYNTAX
6 | imports MAKER-SYNTAX
7 | endmodule
8 |
9 | module MEV
10 | imports UNISWAPV2
11 | imports MAKER
12 | endmodule
13 |
--------------------------------------------------------------------------------
/cff-models/simple.k:
--------------------------------------------------------------------------------
1 |
2 | require "uniswap.k"
3 | require "uniswapv2.k"
4 |
5 | module SIMPLE
6 |
7 | imports UNISWAP
8 | imports UNISWAPV2
9 |
10 | claim
11 |
12 | User adds 2000 BBT and 100 0 of liquidity; // UniswapV2
13 |
14 | => .K
15 |
16 | .Map => ?S:Map
17 | .List => ?_
18 | ensures ({?S[UniswapV2 in BBT]}:>Int >=Int 0 ) andBool ({?S[UniswapV2 in 0]}:>Int >=Int 0 )
19 |
20 | endmodule
21 |
22 |
--------------------------------------------------------------------------------
/cff-models/uniswap.k:
--------------------------------------------------------------------------------
1 | require "execution.k"
2 |
3 | module UNISWAP-SYNTAX
4 | imports EXECUTION-SYNTAX
5 |
6 | syntax UniswapTX ::= ETHAddress "in" ETHAddress "swaps" Int "input" "for" ETHAddress
7 | | ETHAddress "in" ETHAddress "swaps" Int "for" ETHAddress "output"
8 | | ETHAddress "adds" Int "tokens and" Int "eth of liquidity to" ETHAddress
9 | | ETHAddress "removes" Int "tokens and" Int "eth of liquidity from" ETHAddress
10 | syntax ETHTransaction ::= UniswapTX
11 | syntax ETHAddress ::= "Uniswap"
12 | syntax CurrencyPair ::= ETHAddress "," ETHAddress
13 | syntax InternalCmd ::= "GetPrice" ETHAddress ETHAddress // Oracle query
14 | syntax Statement ::= InternalCmd
15 | endmodule
16 |
17 | module UNISWAP
18 | imports UNISWAP-SYNTAX
19 | imports EXECUTION
20 |
21 | rule exec(Address:ETHAddress in TokenIn:ETHAddress swaps TradeAmount:Int input for TokenOut:ETHAddress fee GasFee:Int) =>
22 | Address in TokenIn gets 0 -Int TradeAmount ~>
23 | Address in TokenOut gets (997 *Int TradeAmount *Int USwapBalanceOut) /Int (1000 *Int USwapBalanceIn +Int 997 *Int TradeAmount) ~>
24 | Uniswap in TokenIn gets TradeAmount ~>
25 | Uniswap in TokenOut gets 0 -Int (997 *Int TradeAmount *Int USwapBalanceOut) /Int (1000 *Int USwapBalanceIn +Int 997 *Int TradeAmount) ~>
26 | Address in 0 gets 0 -Int GasFee
27 | ...
28 |
29 | ... (Uniswap in TokenOut) |-> USwapBalanceOut (Uniswap in TokenIn) |-> USwapBalanceIn ...
30 | ... .List => ListItem(Address in TokenIn swaps TradeAmount input for TokenOut fee GasFee)
31 | // requires (UserBalance >=Int TradeAmount)
32 |
33 | rule exec(Address:ETHAddress in TokenIn:ETHAddress swaps TradeAmount:Int for TokenOut:ETHAddress output fee GasFee:Int) =>
34 | Address in TokenIn gets 0 -Int ((1000 *Int USwapBalanceIn *Int TradeAmount) /Int (997 *Int (USwapBalanceOut -Int TradeAmount)) +Int 1) ~>
35 | Address in TokenOut gets TradeAmount ~>
36 | Uniswap in TokenIn gets ((1000 *Int USwapBalanceIn *Int TradeAmount) /Int (997 *Int (USwapBalanceOut -Int TradeAmount)) +Int 1) ~>
37 | Uniswap in TokenOut gets 0 -Int TradeAmount ~>
38 | Address in 0 gets 0 -Int GasFee
39 | ...
40 |
41 | ... (Uniswap in TokenOut) |-> USwapBalanceOut (Uniswap in TokenIn) |-> USwapBalanceIn ...
42 | ... .List => ListItem(Address in TokenIn swaps TradeAmount for TokenOut output fee GasFee)
43 | // requires (UserBalance >=Int TradeAmount)
44 |
45 | // todo accurate pricing rules
46 | // todo fees
47 | rule exec(LiquidityProvider:ETHAddress adds TokenAmount:Int tokens and ETHAmount:Int eth of liquidity to Token:ETHAddress) =>
48 | Uniswap in Token gets TokenAmount ~> Uniswap in 0 gets ETHAmount
49 | ...
50 |
51 | ... .List => ListItem(LiquidityProvider adds TokenAmount tokens and ETHAmount eth of liquidity to Token)
52 |
53 | rule exec(LiquidityProvider:ETHAddress removes TokenAmount:Int tokens and ETHAmount:Int eth of liquidity from Token:ETHAddress) => Uniswap in Token gets 0 -Int TokenAmount ~> Uniswap in 0 gets 0 -Int ETHAmount
54 | ...
55 |
56 | ... .List => ListItem(LiquidityProvider removes TokenAmount tokens and ETHAmount eth of liquidity from Token)
57 |
58 | // rule GetPrice Token0 Token1 => .K ...
59 | // ... .List => ListItem(GetPrice Token0 Token1)
60 | // ... (Uniswap in Token0) |-> Qty0 (Uniswap in Token1) |-> Qty1 ...
61 | // Px => Px[(Token0 , Token1) <- (Qty1 /Int Qty0)][(Token1 , Token0) <- (Qty0 /Int Qty1)]
62 |
63 | // insertion rule
64 | //rule .K
65 | // .Set => SetItem(40 adds 1000 tokens and 1000 eth of liquidity to 100)
66 | // X => X -Int 1
67 | // requires(X >Int 0)
68 |
69 | endmodule
70 |
--------------------------------------------------------------------------------
/cff-models/uniswapv2.k:
--------------------------------------------------------------------------------
1 | require "execution.k"
2 |
3 | module UNISWAPV2-SYNTAX
4 | imports EXECUTION-SYNTAX
5 | syntax UniswapV2TX ::= ETHAddress "swaps for" ETHAddress "by providing" Int ETHAddress "and" Int ETHAddress "with change" Int
6 | | ETHAddress "adds" Int ETHAddress "and" Int ETHAddress "of liquidity"
7 | | ETHAddress "removes" Int ETHAddress "and" Int ETHAddress "of liquidity"
8 | syntax ETHTransaction ::= UniswapV2TX
9 | syntax ETHAddress ::= "UniswapV2"
10 | syntax CurrencyPair ::= ETHAddress "," ETHAddress
11 | syntax InternalCmd ::= "GetPrice" ETHAddress ETHAddress // Oracle query
12 | syntax Statement ::= InternalCmd
13 | endmodule
14 |
15 | module UNISWAPV2
16 | imports UNISWAPV2-SYNTAX
17 | imports EXECUTION
18 |
19 | rule exec(Address:ETHAddress swaps for TokenOut:ETHAddress by providing AmountInTokenIn:Int TokenIn:ETHAddress and AmountInTokenOut:Int TokenOut:ETHAddress with change AmountOutTokenIn:Int fee GasFee:Int) =>
20 | Address in TokenIn gets 0 -Int AmountInTokenIn ~>
21 | UniswapV2 in TokenIn gets AmountInTokenIn ~>
22 | Address in TokenOut gets 0 -Int AmountInTokenOut ~>
23 | UniswapV2 in TokenOut gets AmountInTokenOut ~>
24 | Address in TokenIn gets AmountOutTokenIn ~>
25 | UniswapV2 in TokenIn gets 0 -Int AmountOutTokenIn ~>
26 | Address in TokenOut gets (((997 *Int AmountInTokenIn -Int 1000 *Int AmountOutTokenIn) *Int USwapV2BalanceOut) /Int (1000 *Int (USwapV2BalanceIn -Int AmountOutTokenIn) +Int 997 *Int AmountInTokenIn)) +Int ((AmountInTokenOut *Int 997) /Int (1000)) ~>
27 | UniswapV2 in TokenOut gets 0 -Int ( (((997 *Int AmountInTokenIn -Int 1000 *Int AmountOutTokenIn) *Int USwapV2BalanceOut) /Int (1000 *Int (USwapV2BalanceIn -Int AmountOutTokenIn) +Int 997 *Int AmountInTokenIn)) +Int ((AmountInTokenOut *Int 997) /Int (1000)) ) ~>
28 | Address in 0 gets 0 -Int GasFee ~>
29 | GetPrice TokenIn TokenOut
30 | ...
31 |
32 | ... (UniswapV2 in TokenOut) |-> USwapV2BalanceOut (UniswapV2 in TokenIn) |-> USwapV2BalanceIn ...
33 | ... .List => ListItem(Address swaps for TokenOut by providing AmountInTokenIn TokenIn and AmountInTokenOut TokenOut with change AmountOutTokenIn fee GasFee)
34 | // requires (UserBalance >=Int TradeAmount)
35 |
36 | // todo accurate pricing rules
37 | // todo fees
38 | rule exec(LiquidityProvider:ETHAddress adds Amount0:Int Token0:ETHAddress and Amount1:Int Token1:ETHAddress of liquidity) =>
39 | UniswapV2 in Token0 gets Amount0 ~> UniswapV2 in Token1 gets Amount1 ~>
40 | GetPrice Token0 Token1
41 | ...
42 |
43 | ... .List => ListItem(LiquidityProvider adds Amount0 Token0 and Amount1 Token1 of liquidity)
44 |
45 | rule exec(LiquidityProvider:ETHAddress removes Amount0:Int Token0:ETHAddress and Amount1:Int Token1:ETHAddress of liquidity) =>
46 | UniswapV2 in Token0 gets 0 -Int Amount0 ~> UniswapV2 in Token1 gets 0 -Int Amount1 ~>
47 | GetPrice Token0 Token1
48 | ...
49 |
50 | ... .List => ListItem(LiquidityProvider removes Amount0 Token0 and Amount1 Token1 of liquidity)
51 |
52 | rule GetPrice Token0 Token1 => .K ...
53 | ... .List => ListItem(GetPrice Token0 Token1)
54 | ... (UniswapV2 in Token0) |-> Qty0 (UniswapV2 in Token1) |-> Qty1 ...
55 | Px => Px[(Token0 , Token1) <- (Qty1 /Int Qty0)][(Token1 , Token0) <- (Qty0 /Int Qty1)]
56 |
57 |
58 | endmodule
59 |
--------------------------------------------------------------------------------
/cff-models/vault.k:
--------------------------------------------------------------------------------
1 | require "airdrop.k"
2 |
3 | module VAULT-SYNTAX
4 | imports AIRDROP-SYNTAX
5 | syntax VaultTx ::= ETHAddress "deposits" NFTList
6 | | ETHAddress "withdraws" NFTList
7 | syntax ETHTransaction ::= VaultTx
8 | syntax ETHAddress ::= "BAYCToken" | "Vault"
9 | endmodule
10 |
11 | module VAULT
12 | imports VAULT-SYNTAX
13 | imports AIRDROP
14 |
15 | rule exec(Player:ETHAddress deposits NFT:Int fee GasFee:Int) =>
16 | Player in BAYCToken gets -1 // fungible token
17 | ...
18 |
19 | S => S[(Vault in NFT) <- 1:Int][(Player in NFT) <- 0:Int]
20 | ... .List => ListItem(Player deposits NFT fee GasFee)
21 |
22 | rule exec(Player:ETHAddress deposits NFT:Int , NFTs:NFTList fee GasFee:Int) =>
23 | Player in BAYCToken gets -1 ~> // fungible token
24 | Player deposits NFTs fee GasFee
25 | ...
26 |
27 | S => S[(Vault in NFT) <- 1:Int][(Player in NFT) <- 0:Int]
28 | ... .List => ListItem(Player deposits NFT fee GasFee)
29 |
30 | rule exec(Player:ETHAddress withdraws NFT:Int fee GasFee:Int) =>
31 | Player in BAYCToken gets 1 // fungible token
32 | ...
33 |
34 | S => S[(Vault in NFT) <- 0:Int][(Player in NFT) <- 1:Int]
35 | ... .List => ListItem(Player withdraws NFT fee GasFee)
36 |
37 | rule exec(Player:ETHAddress withdraws NFT:Int , NFTs:NFTList fee GasFee:Int) =>
38 | Player in BAYCToken gets 1 ~> // fungible token
39 | Player withdraws NFTs fee GasFee
40 | ...
41 |
42 | S => S[(Vault in NFT) <- 0:Int][(Player in NFT) <- 1:Int]
43 | ... .List => ListItem(Player withdraws NFT fee GasFee)
44 |
45 |
46 |
47 |
48 | endmodule
49 |
--------------------------------------------------------------------------------
/cff_model_equivalence/uniswap_model_check.py:
--------------------------------------------------------------------------------
1 | from sympy import *
2 | from sympy.parsing.sympy_parser import parse_expr
3 | from sympy.unify.usympy import *
4 |
5 |
6 | def can_unify(eq1, eq2, substitutions):
7 | # process equations into symbolic python library sympy
8 | # expand into standard polynomial form
9 | try:
10 | for substitution in substitutions:
11 | eq1 = eq1.replace(substitution[0], substitution[1])
12 | eq2 = eq2.replace(substitution[0], substitution[1])
13 | print("Attempting to unify", eq1, "\n", eq2)
14 | eq1 = expand(parse_expr(eq1))
15 | eq2 = expand(parse_expr(eq2))
16 | print("Attempting to unify", eq1, eq2)
17 | print("SUBSTITUTION SUCCESSFUL", next(unify(eq1, eq2, variables=eq1.free_symbols)))
18 | return True
19 | except Exception: # parsing or unification failed: the expressions do not unify
20 | return False
21 |
22 |
23 | # decomposition of formal paths, sourced from https://github.com/runtimeverification/verified-smart-contracts/blob/master/uniswap/results/ethToTokenSwapInput.txt line 374
24 | # (to find, grep for the only line with Status: SUCCESS)
25 | formalverification_path_ethToTokenSwapInput = "msgvalue * 997 * token_reserve / ((selfbalance - msgvalue) * 1000 + msgvalue * 997)"
26 | # CFF model return value, sourced from our file uniswap.k
27 | cff_ethToTokenSwapInput = "(997 * TradeAmount * USwapBalanceOut) / (1000 * USwapBalanceIn + 997 * TradeAmount)"
28 |
29 | # we may have to make manual variable substitutions to account for differences in execution (by inspection)
30 | # in the Uniswap EVM code, the EVM adds the ETH value of the transaction to the Uniswap balance before executing logic
31 | # USwapBalanceIn in our spec represents the balance *before* this call, so we must perform the subtraction for equivalence
32 | # (this is the kind of manual reasoning required to prove specs equivalent)
33 | substitutions = [("(selfbalance - msgvalue)", "USwapBalanceIn")]
34 |
35 |
36 | # this will fail, due to execution-path differences between the CFF model and the bytecode; we manually validate and specify the substitution next
37 | print(can_unify(cff_ethToTokenSwapInput, formalverification_path_ethToTokenSwapInput, []))
38 | # this will now succeed
39 | print(can_unify(cff_ethToTokenSwapInput, formalverification_path_ethToTokenSwapInput, substitutions))
40 |
41 |
42 | # decomposition of formal paths, sourced from https://github.com/runtimeverification/verified-smart-contracts/blob/master/uniswap/results/ethToTokenSwapOutput.txt
43 | # (to find, grep for the only line with Status: SUCCESS)
44 | formalverification_path_ethToTokenSwapOutput = "((selfbalance - msgvalue) * token_bought * 1000) / ((token_reserve - token_bought) * 997) + 1"
45 | # CFF model return value, sourced from our file uniswap.k
46 | cff_ethToTokenSwapOutput = "((1000 * USwapBalanceIn * TradeAmount) / (997 * (USwapBalanceOut - TradeAmount)) + 1)"
47 |
48 | # now, we do the same check for ethToTokenSwapOutput
49 | print(can_unify(cff_ethToTokenSwapOutput, formalverification_path_ethToTokenSwapOutput, []))
50 | print(can_unify(cff_ethToTokenSwapOutput, formalverification_path_ethToTokenSwapOutput, substitutions))
51 |
--------------------------------------------------------------------------------
/data-scripts/active_region.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import os
3 |
4 | input_dir = sys.argv[1]
5 | output_dir = sys.argv[2]
6 |
7 | for filename in os.listdir(input_dir):
8 | fin = open(os.path.join(input_dir,filename), 'r')
9 | outfile = os.path.join(output_dir, 'txcount_' + filename)
10 |
11 | block_to_numTx = {}
12 |
13 | lines = fin.readlines()
14 |
15 | fout = open(outfile, 'w')
16 | fout.write("Block,TxCount\n")
17 | for i in range(0,len(lines), 2):
18 | block_num = lines[i].strip().split(" ")[-1]
19 | if block_num not in block_to_numTx:
20 | block_to_numTx[block_num] = 0
21 | block_to_numTx[block_num] += 1
22 |
23 |
24 | for block in block_to_numTx:
25 | fout.write(str(block) + "," + str(block_to_numTx[block]) + "\n")
26 |
27 | fout.close()
28 |
--------------------------------------------------------------------------------
/data-scripts/calculate_outstanding_lptokens.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import csv, os
3 | import pandas as pd
4 | import logging
5 | from exchanges import topics_from_text
6 | from collections import defaultdict
7 |
8 | parser = argparse.ArgumentParser(description='Get UniswapV2 Reserves')
9 | parser.add_argument(
10 | '-v', '--verbose',
11 | help="Be verbose",
12 | action="store_const", dest="loglevel", const=logging.INFO,
13 | default=logging.WARNING
14 | )
15 |
16 | parser.add_argument(
17 | '-e', '--exchange',
18 | help="sushiswap/uniswapv2",
19 | default='sushiswap'
20 | )
21 |
22 |
23 | args = parser.parse_args()
24 | logging.basicConfig(level=args.loglevel, format='%(message)s')
25 |
26 | logger = logging.getLogger(__name__)
27 |
28 | exchange_name = args.exchange
29 |
30 | uniswapv2_logs = 'latest-data/all_logs_uniswapv2.csv'
31 | # sushiswap_logs = 'latest-data/all_logs_sushiswap.csv'
32 | sushiswap_logs = 'latest-data/sushiswap_eth_usdc_logs.csv'
33 |
34 | exchange_logs = {'uniswapv2' : uniswapv2_logs, 'sushiswap' : sushiswap_logs}
35 |
36 | logsdict = csv.DictReader(open(exchange_logs[exchange_name]), delimiter=',',
37 | quotechar='"', quoting=csv.QUOTE_MINIMAL)
38 |
39 | block_to_supply = defaultdict(lambda : defaultdict(lambda: 0))
40 | address_to_supply = defaultdict(lambda: 0)
41 | #Interested in only Transfer events
42 | interested_topics = ['0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef']
43 |
44 |
45 | parsed = 0
46 | for log in logsdict:
47 | topics = topics_from_text(log['topics'])
48 | if topics[0] not in interested_topics:
49 | continue
50 | txhash = log['transaction_hash']
51 | block_number = log['block_number']
52 | address = log['address']
53 | data = log['data']
54 | data = data[2:] # strip 0x from hex
55 | action_requested = None
56 |
57 | if topics[0] == interested_topics[0]:
58 | # transfer
59 | from_address = topics[1]
60 | to_address = topics[2]
61 | if int(from_address, 16) == 0:
62 | #minting lp tokens
63 | block_num = int(block_number)
64 | value = int(data, 16)
65 | address_to_supply[address] += value
66 | block_to_supply[block_num][address] = address_to_supply[address]
67 | parsed += 1
68 | elif int(to_address, 16) == 0:
69 | # burning lp tokens
70 | block_num = int(block_number)
71 | value = int(data, 16)
72 | address_to_supply[address] -= value
73 | block_to_supply[block_num][address] = address_to_supply[address]
74 | parsed += 1
75 | if (parsed % 10000 == 0):
76 | logger.info("Parsed %d" %(parsed))
77 |
78 |
79 | filepath = 'latest-data/%s-lptokens.csv' % (exchange_name)
80 |
81 | logger.info("Writing to %s" % (filepath))
82 |
83 | fout = open(filepath, 'w')
84 | fout.write('Block,Address,Supply\n')
85 | for block_num in block_to_supply:
86 | for address in block_to_supply[block_num]:
87 | supply = block_to_supply[block_num][address]
88 | fout.write("%d,%s,%d\n" % (block_num, address, supply))
89 |
90 |
--------------------------------------------------------------------------------
/data-scripts/configure.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # AMM
4 |
5 | # get data
6 |
7 | python3 get_uniswapv2_pairs.py
8 | python3 get_uniswapv2_relayers.py
9 | python3 get_uswapv2_logs.py
10 |
11 | python3 get_bq_relayers.py
12 | python3 get_uswap_logs.py
13 |
14 | #process data
15 |
16 | mkdir -p latest-data/uniswapv1-processed
17 | mkdir -p latest-data/uniswapv2-processed
18 | mkdir -p latest-data/sushiswap-processed
19 |
20 |
21 | python3 uniswap_trades.py &> uniswap_trades.nohup
22 |
23 | python3 get_top_uniswapv2_pairs.py
24 | python3 uniswapv2_trades.py sushiswap &> sushiswap_trades.nohup
25 | python3 uniswapv2_trades.py uniswapv2 &> uniswapv2_trades.nohup
26 | python3 uniswapv2_reserves.py -e sushiswap &> sushiswap_reserves.nohup
27 | python3 uniswapv2_reserves.py -e uniswapv2 &> uniswapv2_reserves.nohup
28 |
29 | mkdir -p latest-data/active-region/sushiswap
30 | mkdir -p latest-data/active-region/uniswapv2
31 | mkdir -p latest-data/active-region/uniswapv1
32 |
33 | python3 active_region.py latest-data/uniswapv1-processed/ latest-data/active-region/uniswapv1/
34 | python3 active_region.py latest-data/sushiswap-processed/ latest-data/active-region/sushiswap/
35 | python3 active_region.py latest-data/uniswapv2-processed/ latest-data/active-region/uniswapv2/
36 |
37 |
--------------------------------------------------------------------------------
/data-scripts/exchanges.py:
--------------------------------------------------------------------------------
1 | import json
2 | from persistence import persist_to_file
3 | import web3
4 | from web3 import Web3
5 | from eth_abi import decode_abi
6 |
7 | ERC20_ABI = json.loads('[{"constant":true,"inputs":[],"name":"name","outputs":[{"name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"_spender","type":"address"},{"name":"_value","type":"uint256"}],"name":"approve","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"totalSupply","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"_from","type":"address"},{"name":"_to","type":"address"},{"name":"_value","type":"uint256"}],"name":"transferFrom","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"decimals","outputs":[{"name":"","type":"uint8"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[{"name":"_owner","type":"address"}],"name":"balanceOf","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"symbol","outputs":[{"name":"","type":"string"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"_to","type":"address"},{"name":"_value","type":"uint256"}],"name":"transfer","outputs":[{"name":"","type":"bool"}],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[{"name":"_owner","type":"address"},{"name":"_spender","type":"address"}],"name":"allowance","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"anonymous":false,"inputs":[{"indexed":true,"name":"_from","type":"address"},{"indexed":true,"name":"_to","type":"address"},{"indexed":false,"name":"_value","type":"uint256"}],"name":"Transfer","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"_owner","type":"address"},{"indexed":true,"name":"_spender","type":"address"},{"indexed":false,"name":"_value","type":"uint256"}],"name":"Approval","type":"event"}]') # noqa: 501
8 | UNISWAP_ABI = json.loads('[{"name": "TokenPurchase", "inputs": [{"type": "address", "name": "buyer", "indexed": true}, {"type": "uint256", "name": "eth_sold", "indexed": true}, {"type": "uint256", "name": "tokens_bought", "indexed": true}], "anonymous": false, "type": "event"}, {"name": "EthPurchase", "inputs": [{"type": "address", "name": "buyer", "indexed": true}, {"type": "uint256", "name": "tokens_sold", "indexed": true}, {"type": "uint256", "name": "eth_bought", "indexed": true}], "anonymous": false, "type": "event"}, {"name": "AddLiquidity", "inputs": [{"type": "address", "name": "provider", "indexed": true}, {"type": "uint256", "name": "eth_amount", "indexed": true}, {"type": "uint256", "name": "token_amount", "indexed": true}], "anonymous": false, "type": "event"}, {"name": "RemoveLiquidity", "inputs": [{"type": "address", "name": "provider", "indexed": true}, {"type": "uint256", "name": "eth_amount", "indexed": true}, {"type": "uint256", "name": "token_amount", "indexed": true}], "anonymous": false, "type": "event"}, {"name": "Transfer", "inputs": [{"type": "address", "name": "_from", "indexed": true}, {"type": "address", "name": "_to", "indexed": true}, {"type": "uint256", "name": "_value", "indexed": false}], "anonymous": false, "type": "event"}, {"name": "Approval", "inputs": [{"type": "address", "name": "_owner", "indexed": true}, {"type": "address", "name": "_spender", "indexed": true}, {"type": "uint256", "name": "_value", "indexed": false}], "anonymous": false, "type": "event"}, {"name": "setup", "outputs": [], "inputs": [{"type": "address", "name": "token_addr"}], "constant": false, "payable": false, "type": "function", "gas": 175875}, {"name": "addLiquidity", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [{"type": "uint256", "name": "min_liquidity"}, {"type": "uint256", "name": "max_tokens"}, {"type": "uint256", "name": "deadline"}], "constant": false, "payable": true, "type": "function", "gas": 82616}, {"name": "removeLiquidity", "outputs": [{"type": "uint256", "name": "out"}, {"type": "uint256", "name": "out"}], "inputs": [{"type": "uint256", "name": "amount"}, {"type": "uint256", "name": "min_eth"}, {"type": "uint256", "name": "min_tokens"}, {"type": "uint256", "name": "deadline"}], "constant": false, "payable": false, "type": "function", "gas": 116814}, {"name": "__default__", "outputs": [], "inputs": [], "constant": false, "payable": true, "type": "function"}, {"name": "ethToTokenSwapInput", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [{"type": "uint256", "name": "min_tokens"}, {"type": "uint256", "name": "deadline"}], "constant": false, "payable": true, "type": "function", "gas": 12757}, {"name": "ethToTokenTransferInput", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [{"type": "uint256", "name": "min_tokens"}, {"type": "uint256", "name": "deadline"}, {"type": "address", "name": "recipient"}], "constant": false, "payable": true, "type": "function", "gas": 12965}, {"name": "ethToTokenSwapOutput", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [{"type": "uint256", "name": "tokens_bought"}, {"type": "uint256", "name": "deadline"}], "constant": false, "payable": true, "type": "function", "gas": 50463}, {"name": "ethToTokenTransferOutput", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [{"type": "uint256", "name": "tokens_bought"}, {"type": "uint256", "name": "deadline"}, {"type": "address", "name": "recipient"}], "constant": false, "payable": true, "type": "function", "gas": 50671}, {"name": 
"tokenToEthSwapInput", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [{"type": "uint256", "name": "tokens_sold"}, {"type": "uint256", "name": "min_eth"}, {"type": "uint256", "name": "deadline"}], "constant": false, "payable": false, "type": "function", "gas": 47503}, {"name": "tokenToEthTransferInput", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [{"type": "uint256", "name": "tokens_sold"}, {"type": "uint256", "name": "min_eth"}, {"type": "uint256", "name": "deadline"}, {"type": "address", "name": "recipient"}], "constant": false, "payable": false, "type": "function", "gas": 47712}, {"name": "tokenToEthSwapOutput", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [{"type": "uint256", "name": "eth_bought"}, {"type": "uint256", "name": "max_tokens"}, {"type": "uint256", "name": "deadline"}], "constant": false, "payable": false, "type": "function", "gas": 50175}, {"name": "tokenToEthTransferOutput", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [{"type": "uint256", "name": "eth_bought"}, {"type": "uint256", "name": "max_tokens"}, {"type": "uint256", "name": "deadline"}, {"type": "address", "name": "recipient"}], "constant": false, "payable": false, "type": "function", "gas": 50384}, {"name": "tokenToTokenSwapInput", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [{"type": "uint256", "name": "tokens_sold"}, {"type": "uint256", "name": "min_tokens_bought"}, {"type": "uint256", "name": "min_eth_bought"}, {"type": "uint256", "name": "deadline"}, {"type": "address", "name": "token_addr"}], "constant": false, "payable": false, "type": "function", "gas": 51007}, {"name": "tokenToTokenTransferInput", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [{"type": "uint256", "name": "tokens_sold"}, {"type": "uint256", "name": "min_tokens_bought"}, {"type": "uint256", "name": "min_eth_bought"}, {"type": "uint256", "name": "deadline"}, {"type": "address", "name": "recipient"}, {"type": "address", "name": "token_addr"}], "constant": false, "payable": false, "type": "function", "gas": 51098}, {"name": "tokenToTokenSwapOutput", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [{"type": "uint256", "name": "tokens_bought"}, {"type": "uint256", "name": "max_tokens_sold"}, {"type": "uint256", "name": "max_eth_sold"}, {"type": "uint256", "name": "deadline"}, {"type": "address", "name": "token_addr"}], "constant": false, "payable": false, "type": "function", "gas": 54928}, {"name": "tokenToTokenTransferOutput", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [{"type": "uint256", "name": "tokens_bought"}, {"type": "uint256", "name": "max_tokens_sold"}, {"type": "uint256", "name": "max_eth_sold"}, {"type": "uint256", "name": "deadline"}, {"type": "address", "name": "recipient"}, {"type": "address", "name": "token_addr"}], "constant": false, "payable": false, "type": "function", "gas": 55019}, {"name": "tokenToExchangeSwapInput", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [{"type": "uint256", "name": "tokens_sold"}, {"type": "uint256", "name": "min_tokens_bought"}, {"type": "uint256", "name": "min_eth_bought"}, {"type": "uint256", "name": "deadline"}, {"type": "address", "name": "exchange_addr"}], "constant": false, "payable": false, "type": "function", "gas": 49342}, {"name": "tokenToExchangeTransferInput", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [{"type": "uint256", "name": "tokens_sold"}, {"type": "uint256", "name": "min_tokens_bought"}, {"type": "uint256", "name": "min_eth_bought"}, 
{"type": "uint256", "name": "deadline"}, {"type": "address", "name": "recipient"}, {"type": "address", "name": "exchange_addr"}], "constant": false, "payable": false, "type": "function", "gas": 49532}, {"name": "tokenToExchangeSwapOutput", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [{"type": "uint256", "name": "tokens_bought"}, {"type": "uint256", "name": "max_tokens_sold"}, {"type": "uint256", "name": "max_eth_sold"}, {"type": "uint256", "name": "deadline"}, {"type": "address", "name": "exchange_addr"}], "constant": false, "payable": false, "type": "function", "gas": 53233}, {"name": "tokenToExchangeTransferOutput", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [{"type": "uint256", "name": "tokens_bought"}, {"type": "uint256", "name": "max_tokens_sold"}, {"type": "uint256", "name": "max_eth_sold"}, {"type": "uint256", "name": "deadline"}, {"type": "address", "name": "recipient"}, {"type": "address", "name": "exchange_addr"}], "constant": false, "payable": false, "type": "function", "gas": 53423}, {"name": "getEthToTokenInputPrice", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [{"type": "uint256", "name": "eth_sold"}], "constant": true, "payable": false, "type": "function", "gas": 5542}, {"name": "getEthToTokenOutputPrice", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [{"type": "uint256", "name": "tokens_bought"}], "constant": true, "payable": false, "type": "function", "gas": 6872}, {"name": "getTokenToEthInputPrice", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [{"type": "uint256", "name": "tokens_sold"}], "constant": true, "payable": false, "type": "function", "gas": 5637}, {"name": "getTokenToEthOutputPrice", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [{"type": "uint256", "name": "eth_bought"}], "constant": true, "payable": false, "type": "function", "gas": 6897}, {"name": "tokenAddress", "outputs": [{"type": "address", "name": "out"}], "inputs": [], "constant": true, "payable": false, "type": "function", "gas": 1413}, {"name": "factoryAddress", "outputs": [{"type": "address", "name": "out"}], "inputs": [], "constant": true, "payable": false, "type": "function", "gas": 1443}, {"name": "balanceOf", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [{"type": "address", "name": "_owner"}], "constant": true, "payable": false, "type": "function", "gas": 1645}, {"name": "transfer", "outputs": [{"type": "bool", "name": "out"}], "inputs": [{"type": "address", "name": "_to"}, {"type": "uint256", "name": "_value"}], "constant": false, "payable": false, "type": "function", "gas": 75034}, {"name": "transferFrom", "outputs": [{"type": "bool", "name": "out"}], "inputs": [{"type": "address", "name": "_from"}, {"type": "address", "name": "_to"}, {"type": "uint256", "name": "_value"}], "constant": false, "payable": false, "type": "function", "gas": 110907}, {"name": "approve", "outputs": [{"type": "bool", "name": "out"}], "inputs": [{"type": "address", "name": "_spender"}, {"type": "uint256", "name": "_value"}], "constant": false, "payable": false, "type": "function", "gas": 38769}, {"name": "allowance", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [{"type": "address", "name": "_owner"}, {"type": "address", "name": "_spender"}], "constant": true, "payable": false, "type": "function", "gas": 1925}, {"name": "name", "outputs": [{"type": "bytes32", "name": "out"}], "inputs": [], "constant": true, "payable": false, "type": "function", "gas": 1623}, {"name": "symbol", "outputs": [{"type": "bytes32", 
"name": "out"}], "inputs": [], "constant": true, "payable": false, "type": "function", "gas": 1653}, {"name": "decimals", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [], "constant": true, "payable": false, "type": "function", "gas": 1683}, {"name": "totalSupply", "outputs": [{"type": "uint256", "name": "out"}], "inputs": [], "constant": true, "payable": false, "type": "function", "gas": 1713}]')
9 |
10 | my_provider = Web3.HTTPProvider('https://mainnet.infura.io/v3/c534d76d934f40498f6d6113a46c6ab3')
11 | w3 = Web3(my_provider)
12 |
13 |
14 | """1 IDEX 57849 55.5056% 0x2a0c0dbecc7e4d658f48e01e3fa353f44050c208
15 | 2 DEx.top 11628 11.1570% 0x7600977eb9effa627d6bd0da2e5be35e11566341
16 | 3 Ether Delta 6596 6.3288% 0x8d12a197cb00d4747a1fe03395095ce2a5cc6819
17 | 4 Bancor Network 6569 6.3029% ???
18 | 5 DDEX 5146 4.9375% 0x12459c951127e0c374ff9105dda097662a027093
19 | 6 Token Store 3750 3.5981% 0x1ce7ae555139c5ef5a57cc8d814a867ee6ee33d8
20 | 7 Star Bit 3448 3.3083% 0x12459c951127e0c374ff9105dda097662a027093
21 | 8 Kyber Network 2550 2.4467% ?????
22 | 9 Joyso 2205 2.1157% 0x04f062809b244e37e7fdc21d9409469c989c2342
23 | 10 Oasis Dex 1865 1.7894% 0x12459c951127e0c374ff9105dda097662a027093
24 | 11 Radar Relay 1303 1.2502% 0x12459c951127e0c374ff9105dda097662a027093
25 | 12 Paradex 820 0.7868% 0x12459c951127e0c374ff9105dda097662a027093
26 | 13 Airswap 243 0.2332% 0x8fd3121013a07c57f0d69646e86e7a4880b467b7
27 | 14 TokenJar 106 0.1017% 0x12459c951127e0c374ff9105dda097662a027093
28 | 15 The Ocean 91 0.0873% 0x12459c951127e0c374ff9105dda097662a027093
29 | 16 Erc dEX 25 0.0240% 0x12459c951127e0c374ff9105dda097662a027093
30 | 17 Enclaves 22 0.0211% 0xed06d46ffb309128c4458a270c99c824dc127f5d
31 | 18 Shark Relay 6 0.0058% 0x12459c951127e0c374ff9105dda097662a027093
32 | 19 Bamboo Relay 0 0.0000%
33 | 20 IDT Exchange 0 0.0000%
34 | 21 Tokenlon 0 0.0000%
35 |
36 | Source: dexwatch Thu Sep 20 3:48PM EST
37 | """
38 |
39 |
40 | dex_list = ["0x2a0c0dbecc7e4d658f48e01e3fa353f44050c208", # IDEX
41 | "0x7600977eb9effa627d6bd0da2e5be35e11566341", # DEx.top
42 | "0x8d12a197cb00d4747a1fe03395095ce2a5cc6819", # Etherdelta (done)
43 | "0x12459c951127e0c374ff9105dda097662a027093", # 0x v1 (done)
44 | "0x4f833a24e1f95d70f028921e27040ca56e09ab0b", # 0x v2 (done)
45 | "0x1ce7ae555139c5ef5a57cc8d814a867ee6ee33d8", # Token Store (done)
46 | "0x04f062809b244e37e7fdc21d9409469c989c2342", # Joyso
47 | "0x8fd3121013a07c57f0d69646e86e7a4880b467b7", # Airswap
48 | "0xed06d46ffb309128c4458a270c99c824dc127f5d", # Enclaves
49 | ]
50 |
51 |
52 | #bancor_relayers = open('latest-data/bancor_relayers').read().strip().splitlines()
53 | #kyber_relayers = open('latest-data/kyber_relayers').read().strip().splitlines()
54 | bancor_relayers = []
55 | kyber_relayers = []
56 | uniswap_relayers = []
57 | #uniswap_relayers = open('latest-data/uniswap_relayers').read().strip().splitlines()
58 |
59 |
60 | dex_list = dex_list + bancor_relayers + kyber_relayers + uniswap_relayers
61 |
62 | def topics_from_text(raw_text):
63 | return json.loads(raw_text.replace('\'', '\"'))
64 |
65 | def parse_address(raw_hex):
66 | """ Extract address from lowest hex bits, ignoring junk. """
67 | return raw_hex[-40:]
68 |
69 | def parse_amount(raw_hex):
70 | return int(raw_hex, 16)
71 |
72 | @persist_to_file('uniswap.dat')
73 | def get_uniswap_token(address):
74 | token_addr = address
75 | erc20 = w3.eth.contract(address=Web3.toChecksumAddress('0x' + address), abi=UNISWAP_ABI)
76 | try:
77 | token_addr = erc20.functions.tokenAddress().call()
78 | except web3.exceptions.BadFunctionCallOutput: # todo handle chainsync errors?
79 | pass
80 | except web3.exceptions.ContractLogicError:
81 | print("web3.exceptions.ContractLogicError", address)
82 | return token_addr.lower().replace("0x", "")
83 |
84 | @persist_to_file('decimals.dat')
85 | def get_decimals_for(address):
86 | if int(address, 16) == 0 or int(address, 16) == 1364068194842176056990105843868530818345537040110:
87 | return 18
88 | erc20 = w3.eth.contract(address=Web3.toChecksumAddress('0x' + address), abi=ERC20_ABI)
89 | try:
90 | decimals = int(erc20.functions.decimals().call())
91 | except web3.exceptions.BadFunctionCallOutput: # todo handle chainsync errors?
92 | return 0
93 | return decimals
94 |
95 | @persist_to_file('labels.dat')
96 | def get_node_label_for(address):
97 | if int(address, 16) == 0 or int(address, 16) == 1364068194842176056990105843868530818345537040110 or address.lower() == "c0829421c1d260bd3cb3e0f06cfe2d52db2ce315" or address.lower() == "c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2" or address.lower() == "2956356cd2a2bf3202f771f50d3d14a367b48070":
98 | return ("ETH", "ETH")
99 | erc20 = w3.eth.contract(address=Web3.toChecksumAddress('0x' + address), abi=ERC20_ABI)
100 | name = "Unknown"
101 | symbol = "Unknown"
102 | try:
103 | name = erc20.functions.name().call()
104 | except OverflowError:
105 | pass
106 | except web3.exceptions.BadFunctionCallOutput:
107 | pass
108 | try:
109 | symbol = erc20.functions.symbol().call()
110 | except OverflowError:
111 | pass
112 | except web3.exceptions.BadFunctionCallOutput:
113 | pass
114 | return (symbol, "%s (%s)\n0x%s" % (name, symbol, address))
115 |
116 | def parse_bancor(topics, data, address):
117 | tokenget_addr = parse_address(topics[1])
118 | tokenget,tokenget_label = get_node_label_for(tokenget_addr)
119 | amountget = (parse_amount(data[0:64]) / (10 ** get_decimals_for(tokenget_addr)))
120 | tokengive_addr = parse_address(topics[2])
121 | tokengive,tokengive_label = get_node_label_for(tokengive_addr)
122 | amountgive = (parse_amount(data[64:128]) / (10 ** get_decimals_for(tokengive_addr)))
123 | assert(len(data) == 192)
124 | return (tokenget_addr, tokenget_label, tokenget, amountget, tokengive_addr, tokengive_label, tokengive, amountgive)
125 |
126 | def parse_etherdelta_clone(topics, data, address, data_length):
127 | tokenget_addr = parse_address(data[0:64])
128 | tokenget,tokenget_label = get_node_label_for(tokenget_addr)
129 | amountget = (parse_amount(data[64:128]) / (10 ** get_decimals_for(tokenget_addr)))
130 | tokengive_addr = parse_address(data[128:192])
131 | tokengive,tokengive_label = get_node_label_for(tokengive_addr)
132 | amountgive = (parse_amount(data[192:256]) / (10 ** get_decimals_for(tokengive_addr)))
133 | assert(len(data) == data_length)
134 | return (tokenget_addr, tokenget_label, tokenget, amountget, tokengive_addr, tokengive_label, tokengive, amountgive)
135 |
136 | parse_etherdelta = lambda topics, data, address : parse_etherdelta_clone(topics, data, address, 384)
137 | parse_tokenstore = lambda topics, data, address : parse_etherdelta_clone(topics, data, address, 448)
138 |
139 | def parse_0x(topics, data, address):
140 | tokenget_addr = parse_address(data[128:192])
141 | tokenget,tokenget_label = get_node_label_for(tokenget_addr)
142 | amountget = (parse_amount(data[256:320]) / (10 ** get_decimals_for(tokenget_addr)))
143 | tokengive_addr = parse_address(data[64:128])
144 | tokengive,tokengive_label = get_node_label_for(tokengive_addr)
145 | amountgive = (parse_amount(data[192:256]) / (10 ** get_decimals_for(tokengive_addr)))
146 | assert(len(data) == 512)
147 | return (tokenget_addr, tokenget_label, tokenget, amountget, tokengive_addr, tokengive_label, tokengive, amountgive)
148 |
149 | def parse_0x_v2(topics, data, address):
150 | abi_data = decode_abi(['address', 'address', 'uint256', 'uint256', 'uint256', 'uint256', 'bytes', 'bytes'], bytes(Web3.toBytes(hexstr=data)))
151 | tokenget_addr = parse_address(Web3.toHex(abi_data[-1])[2:])
152 | tokenget,tokenget_label = get_node_label_for(tokenget_addr)
153 | amountget = (parse_amount(data[192:256]) / (10 ** get_decimals_for(tokenget_addr)))
154 | tokengive_addr = parse_address(Web3.toHex(abi_data[-2])[2:])
155 | tokengive,tokengive_label = get_node_label_for(tokengive_addr)
156 | amountgive = (parse_amount(data[128:192]) / (10 ** get_decimals_for(tokengive_addr)))
157 | assert(len(data) >= 896)
158 | return (tokenget_addr, tokenget_label, tokenget, amountget, tokengive_addr, tokengive_label, tokengive, amountgive)
159 |
160 | def parse_kyber(topics, data, address):
161 | abi_data = decode_abi(['address', 'address', 'uint256', 'uint256', 'address', 'uint256', 'address', 'address', 'bytes'], bytes(Web3.toBytes(hexstr=data)))
162 | tokenget_addr = parse_address(abi_data[0])
163 | tokenget,tokenget_label = get_node_label_for(tokenget_addr)
164 | amountget = (abi_data[2] / (10 ** get_decimals_for(tokenget_addr)))
165 | tokengive_addr = parse_address(abi_data[1])
166 | tokengive,tokengive_label = get_node_label_for(tokengive_addr)
167 | amountgive = (abi_data[3] / (10 ** get_decimals_for(tokengive_addr)))
168 | assert(len(data) >= 512)
169 | return (tokenget_addr, tokenget_label, tokenget, amountget, tokengive_addr, tokengive_label, tokengive, amountgive)
170 |
171 | def parse_uniswap_tokenpurchase(topics, data, address):
172 | abi_data = decode_abi(['uint256', 'address', 'uint256', 'uint256'], Web3.toBytes(hexstr="".join([x.replace("0x", "") for x in topics])))
173 | tokenget_addr = get_uniswap_token(address[2:])
174 | tokenget,tokenget_label = get_node_label_for(tokenget_addr)
175 | amountget = (abi_data[-1] / (10 ** get_decimals_for(tokenget_addr)))
176 | tokengive_addr = "0"*40 # (eth given by definition)
177 | tokengive,tokengive_label = get_node_label_for(tokengive_addr)
178 | amountgive = (abi_data[-2] / (10 ** get_decimals_for(tokengive_addr)))
179 | assert(len(topics) == 4)
180 | return (tokenget_addr, tokenget_label, tokenget, amountget, tokengive_addr, tokengive_label, tokengive, amountgive, abi_data[-1], abi_data[-2])
181 |
182 | def parse_uniswap_ethpurchase(topics, data, address):
183 | abi_data = decode_abi(['uint256', 'address', 'uint256', 'uint256'], Web3.toBytes(hexstr="".join([x.replace("0x", "") for x in topics])))
184 | tokenget_addr = "0"*40 # (eth gotten by definition)
185 | tokenget,tokenget_label = get_node_label_for(tokenget_addr)
186 | amountget = (abi_data[-1] / (10 ** get_decimals_for(tokenget_addr)))
187 | tokengive_addr = get_uniswap_token(address[2:])
188 | tokengive,tokengive_label = get_node_label_for(tokengive_addr)
189 | amountgive = (abi_data[-2] / (10 ** get_decimals_for(tokengive_addr)))
190 | assert(len(topics) == 4)
191 | return (tokenget_addr, tokenget_label, tokenget, amountget, tokengive_addr, tokengive_label, tokengive, amountgive, abi_data[-1], abi_data[-2])
192 |
193 |
194 | def get_trade_data_from_log_item(topics, data, address):
195 | exchange = None
196 | parser = None
197 | if address == '0x12459c951127e0c374ff9105dda097662a027093': # 0x v1
198 | if topics[0] == '0x0d0b9391970d9a25552f37d436d2aae2925e2bfe1b2a923754bada030c498cb3':
199 | exchange = "0x v1"
200 | parser = parse_0x
201 | elif address == '0x4f833a24e1f95d70f028921e27040ca56e09ab0b': # 0x v2
202 | if topics[0] == '0x0bcc4c97732e47d9946f229edb95f5b6323f601300e4690de719993f3c371129':
203 | exchange = "0x v2"
204 | parser = parse_0x_v2
205 | elif address == '0x8d12a197cb00d4747a1fe03395095ce2a5cc6819': # etherdelta
206 | # TODO handle clones
207 | if topics[0] == '0x6effdda786735d5033bfad5f53e5131abcced9e52be6c507b62d639685fbed6d': # trade log event
208 | exchange = "Etherdelta"
209 | parser = parse_etherdelta
210 | elif address == '0x1ce7ae555139c5ef5a57cc8d814a867ee6ee33d8': # Tokenstore
211 | if topics[0] == '0x3314c351c2a2a45771640a1442b843167a4da29bd543612311c031bbfb4ffa98':
212 | exchange = "Tokenstore"
213 | parser = parse_tokenstore
214 | elif address in bancor_relayers:
215 | if topics[0] == '0x276856b36cbc45526a0ba64f44611557a2a8b68662c5388e9fe6d72e86e1c8cb':
216 | exchange = "Bancor"
217 | parser = parse_bancor
218 | elif address in kyber_relayers:
219 | if topics[0] == '0xd30ca399cb43507ecec6a629a35cf45eb98cda550c27696dcb0d8c4a3873ce6c':
220 | exchange = "Kyber"
221 | parser = parse_kyber
222 | elif address in uniswap_relayers:
223 | if topics[0] == '0x7f4091b46c33e918a0f3aa42307641d17bb67029427a5369e54b353984238705':
224 | exchange = "Uniswap"
225 | parser = parse_uniswap_ethpurchase
226 | if topics[0] == '0xcd60aa75dea3072fbc07ae6d7d856b5dc5f4eee88854f5b4abf7b680ef8bc50f' and len(topics) == 4: # (ZRXcoin has same event; eg https://etherscan.io/tx/0x3d774851984b665b6db16d8bbf7a138520c76db923599fc8929b29edd384db7b#eventlog)
227 | exchange = "Uniswap"
228 | parser = parse_uniswap_tokenpurchase
229 | else:
230 | print("PARSING FAILED", address)
231 | #else:
232 | # # parsing failed
233 | # return None
234 |
235 | if not parser:
236 | # no logs to parse
237 | return []
238 |
239 | # 1 log generated; return it
240 | (tokenget_addr, tokenget_label, tokenget, amountget, tokengive_addr, tokengive_label, tokengive, amountgive, rawget, rawgive) = parser(topics, data, address)
241 | return [(tokenget_addr, tokenget_label, tokenget, amountget, tokengive_addr, tokengive_label, tokengive, amountgive, exchange, rawget, rawgive)]
242 |
--------------------------------------------------------------------------------
/data-scripts/get_token_names.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | from exchanges import get_node_label_for
3 |
4 | #df = pd.read_csv('../sushiswap_mev.csv')
5 | df = pd.read_csv('../uniswapv1_mev.csv')
6 | name_file = 'data/token_names.csv'
7 | name_dict = pd.read_csv(name_file).to_dict('list')
8 |
9 | print(name_dict)
10 |
11 | #for token in set(df.token0).union(set(df.token1)):
12 | for token in set(df.token0).union(set(df.token1)):
13 | token_address = str(hex(int(token)))[2:]
14 | token_address = '0'*(40-len(token_address)) + token_address
15 | symbol = get_node_label_for(token_address)[0]
16 | name_dict['name'].append(symbol)
17 | name_dict['address'].append(token)
18 | name_df = pd.DataFrame(name_dict).drop_duplicates()
19 | name_df.to_csv(name_file, index=False)
20 |
--------------------------------------------------------------------------------
/data-scripts/get_top_uniswapv2_pairs.py:
--------------------------------------------------------------------------------
1 | sushiswap_factory = "0xc0aee478e3658e2610c5f7a4a2e1777ce9e4f2ac"
2 | uniswapv2_fctory = "0x5c69bee701ef814a2b6a3edd4b1652cb9cc5aa6f"
3 |
4 | import pandas as pd
5 | import csv
6 | from exchanges import parse_address
7 | token_counts = {}
8 | pairs_to_tokens = {}
9 |
10 |
11 | df = pd.read_csv('latest-data/all_logs_uniswapv2_factory.csv')
12 | for _, row in df.iterrows():
13 | topics = row['topics'][1:-1]
14 | data = row['data'][2:]
15 | topics = topics.replace("'","").replace(" ", "").split(',')
16 | if len(topics) != 3:
17 | print(topics)
18 | else:
19 | address = parse_address(row['address'])
20 | token0_addr = parse_address(topics[1])
21 | token1_addr = parse_address(topics[2])
22 | if (address, token0_addr) not in token_counts:
23 | token_counts[(address, token0_addr)] = 0
24 | if (address, token1_addr) not in token_counts:
25 | token_counts[(address, token1_addr)] = 0
26 | token_counts[(address, token0_addr)] += 1
27 | token_counts[(address, token1_addr)] += 1
28 | pair_address = data[24:64]
29 | pairs_to_tokens[(address, pair_address)] = (token0_addr, token1_addr)
30 |
31 |
32 | uniswapv2_top_tokens = 'latest-data/uniswapv2_top_tokens.csv'
33 | uniswapv2_pairs = 'latest-data/uniswapv2_pairs.csv'
34 |
35 | with open(uniswapv2_top_tokens, 'w') as csvfile:
36 | spamwriter = csv.writer(csvfile, delimiter=',',
37 | quotechar='"', quoting=csv.QUOTE_MINIMAL)
38 |
39 | spamwriter.writerow("exchange,token,num_pairs".split(","))
40 | for address in token_counts:
41 | spamwriter.writerow(['0x'+address[0], '0x'+address[1], token_counts[(address)]])
42 |
43 | with open(uniswapv2_pairs, 'w') as csvfile:
44 | spamwriter = csv.writer(csvfile, delimiter=',',
45 | quotechar='"', quoting=csv.QUOTE_MINIMAL)
46 |
47 | spamwriter.writerow("exchange,pair,token0,token1".split(","))
48 | for address in pairs_to_tokens:
49 | spamwriter.writerow(['0x'+address[0], '0x'+address[1], '0x'+pairs_to_tokens[address][0] , '0x'+pairs_to_tokens[address][1]])
50 |
--------------------------------------------------------------------------------
/data-scripts/get_uniswapv2_pairs.py:
--------------------------------------------------------------------------------
1 | #0x0d3648bd0f6ba80134a33ba9275ac585d9d315f0ad8355cddefde31afa28d0e9
2 |
3 | #0xc0aee478e3658e2610c5f7a4a2e1777ce9e4f2ac Sushiswap factory
4 | #0x5c69bee701ef814a2b6a3edd4b1652cb9cc5aa6f Uniswapv2 factory
5 |
6 | import csv, os
7 | from google.cloud import bigquery
8 |
9 | os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "bq.json"
10 | client = bigquery.Client()
11 |
12 |
13 | event_signature = "0x0d3648bd0f6ba80134a33ba9275ac585d9d315f0ad8355cddefde31afa28d0e9" #PairCreated from V2 factory
14 |
15 | query = """SELECT log_index,transaction_hash,address,data,topics,logs.block_timestamp,logs.block_number FROM `bigquery-public-data.crypto_ethereum.logs` AS logs JOIN UNNEST(topics) AS topic WHERE topic IN UNNEST(@topics) ORDER BY block_number ASC"""
16 |
17 |
18 | topics = set([event_signature])
19 | aqp = bigquery.ArrayQueryParameter('topics', 'STRING', topics)
20 | query_params = [aqp]
21 | job_config = bigquery.QueryJobConfig()
22 | job_config.query_parameters = query_params
23 | query_job = client.query(
24 | query,
25 | # Location must match that of the dataset(s) referenced in the query.
26 | location='US',
27 | job_config=job_config) # API request - starts the query
28 |
29 |
30 | with open('latest-data/all_logs_uniswapv2_factory.csv', 'w') as csvfile:
31 | spamwriter = csv.writer(csvfile, delimiter=',',
32 | quotechar='"', quoting=csv.QUOTE_MINIMAL)
33 |
34 | spamwriter.writerow("log_index,transaction_hash,address,data,topics,block_timestamp,block_number".split(","))
35 | for item in query_job:
36 | spamwriter.writerow(item)
37 |
38 | assert query_job.state == 'DONE'
39 | print("[database fetcher] Wrote all logs")
40 |
41 |
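The hard-coded event_signature above is the keccak-256 hash of the PairCreated event signature. A quick sanity-check sketch, assuming the web3 package is available:

    from web3 import Web3  # assumption: web3.py is installed

    sig = "PairCreated(address,address,address,uint256)"
    assert Web3.keccak(text=sig) == bytes.fromhex(
        "0d3648bd0f6ba80134a33ba9275ac585d9d315f0ad8355cddefde31afa28d0e9")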
--------------------------------------------------------------------------------
/data-scripts/get_uniswapv2_relayers.py:
--------------------------------------------------------------------------------
1 |
2 | sushiswap_factory = "0xc0aee478e3658e2610c5f7a4a2e1777ce9e4f2ac"
3 | uniswapv2_factory = "0x5c69bee701ef814a2b6a3edd4b1652cb9cc5aa6f"
4 |
5 | import pandas as pd
6 | from exchanges import parse_address
7 | token_counts = {}
8 | sushiswap_pairs = set()
9 | uniswapv2_pairs = set()
10 |
11 |
12 | df = pd.read_csv('latest-data/all_logs_uniswapv2_factory.csv')
13 | for _, row in df.iterrows():
14 | topics = row['topics'][1:-1]
15 | data = row['data'][2:]
16 | topics = topics.replace("'","").replace(" ", "").split(',')
17 | if len(topics) != 3:
18 | print(topics)
19 | else:
20 | token0_addr = parse_address(topics[1])
21 | token1_addr = parse_address(topics[2])
22 | if token0_addr not in token_counts:
23 | token_counts[token0_addr] = 0
24 | if token1_addr not in token_counts:
25 | token_counts[token1_addr] = 0
26 | token_counts[token0_addr] += 1
27 | token_counts[token1_addr] += 1
28 | if row['address'] == sushiswap_factory :
29 | pair_address = data[24:64]
30 | sushiswap_pairs.add(pair_address)
31 | print(pair_address)
32 |         elif row['address'] == uniswapv2_factory:
33 | pair_address = data[24:64]
34 | uniswapv2_pairs.add(pair_address)
35 | print(pair_address)
36 |
37 |
38 | sushiswap_file = 'latest-data/sushiswap_relayers'
39 | f_sushiswap = open(sushiswap_file, 'w')
40 | for pair_address in sushiswap_pairs:
41 | f_sushiswap.write('0x'+ pair_address + '\n')
42 |
43 | uniswapv2_file = 'latest-data/uniswapv2_relayers'
44 | f_uniswapv2 = open(uniswapv2_file, 'w')
45 | for pair_address in uniswapv2_pairs:
46 | f_uniswapv2.write('0x' + pair_address + '\n')
47 |
--------------------------------------------------------------------------------
/data-scripts/get_uswap_logs.py:
--------------------------------------------------------------------------------
1 | import csv, os
2 | from google.cloud import bigquery
3 | from exchanges import uniswap_relayers
4 |
5 | FIELDS_TO_GRAB = 'block_number,transaction_hash,to_address,from_address,address,num_logs,gas,gas_price,receipt_gas_used,input,transaction_index'
6 |
7 | os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "etharbskey.json"
8 | client = bigquery.Client()
9 |
10 |
11 | query = """SELECT log_index,transaction_hash,logs.transaction_index,address,data,topics,logs.block_timestamp,logs.block_number,gas,gas_price,receipt_gas_used FROM
12 | `bigquery-public-data.crypto_ethereum.logs` AS logs
13 | JOIN `bigquery-public-data.crypto_ethereum.transactions` AS transactions ON logs.transaction_hash = transactions.hash
14 | WHERE
15 | logs.address in UNNEST(@uniswap_relayers) ORDER BY block_number ASC, transaction_index ASC"""
16 |
17 | aqp = bigquery.ArrayQueryParameter('uniswap_relayers', 'STRING', uniswap_relayers)
18 | query_params = [aqp]
19 | job_config = bigquery.QueryJobConfig()
20 | job_config.query_parameters = query_params
21 | query_job = client.query(
22 | query,
23 | # Location must match that of the dataset(s) referenced in the query.
24 | location='US',
25 | job_config=job_config) # API request - starts the query
26 |
27 |
28 | with open('latest-data/all_logs_uniswap.csv', 'w') as csvfile:
29 | spamwriter = csv.writer(csvfile, delimiter=',',
30 | quotechar='"', quoting=csv.QUOTE_MINIMAL)
31 |
32 | spamwriter.writerow("log_index,transaction_hash,transaction_index,address,data,topics,block_timestamp,block_number,gas,gas_price,receipt_gas_used".split(","))
33 | for item in query_job:
34 | spamwriter.writerow(item)
35 |
36 | assert query_job.state == 'DONE'
37 | print("[database fetcher] Wrote all logs")
38 |
39 |
--------------------------------------------------------------------------------
/data-scripts/get_uswapv2_logs.py:
--------------------------------------------------------------------------------
1 | import csv, os
2 | from google.cloud import bigquery
3 |
4 |
5 | os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "bq.json"
6 | client = bigquery.Client()
7 |
8 |
9 | uniswapv2_relayers = open('latest-data/uniswapv2_relayers').read().strip().splitlines()
10 | sushiswap_relayers = open('latest-data/sushiswap_relayers').read().strip().splitlines()
11 |
12 |
13 | query = """SELECT log_index,transaction_hash,logs.transaction_index,address,data,topics,logs.block_timestamp,logs.block_number,gas,gas_price,receipt_gas_used FROM
14 | `bigquery-public-data.crypto_ethereum.logs` AS logs
15 | JOIN `bigquery-public-data.crypto_ethereum.transactions` AS transactions ON logs.transaction_hash = transactions.hash
16 | WHERE
17 | logs.address in UNNEST(@relayers) ORDER BY block_number ASC, transaction_index ASC"""
18 |
19 | for exchange_relayers in (('sushiswap', sushiswap_relayers), ('uniswapv2', uniswapv2_relayers)):
20 | aqp = bigquery.ArrayQueryParameter('relayers', 'STRING', exchange_relayers[1])
21 | query_params = [aqp]
22 | job_config = bigquery.QueryJobConfig()
23 | job_config.query_parameters = query_params
24 | query_job = client.query(
25 | query,
26 | # Location must match that of the dataset(s) referenced in the query.
27 | location='US',
28 | job_config=job_config) # API request - starts the query
29 | with open('latest-data/all_logs_%s.csv' % (exchange_relayers[0]), 'w') as csvfile:
30 | spamwriter = csv.writer(csvfile, delimiter=',',
31 | quotechar='"', quoting=csv.QUOTE_MINIMAL)
32 |
33 | spamwriter.writerow("log_index,transaction_hash,transaction_index,address,data,topics,block_timestamp,block_number,gas,gas_price,receipt_gas_used".split(","))
34 | for item in query_job:
35 | spamwriter.writerow(item)
36 |
37 | assert query_job.state == 'DONE'
38 | print("Wrote all logs for %s" %(exchange_relayers[0]) )
39 |
40 |
--------------------------------------------------------------------------------
/data-scripts/persistence.py:
--------------------------------------------------------------------------------
1 | # https://stackoverflow.com/questions/16463582/memoize-to-disk-python-persistent-memoization
2 |
3 | import json
4 |
5 | def persist_to_file(file_name):
6 |
7 | def decorator(original_func):
8 |
9 | try:
10 | cache = json.load(open(file_name, 'r'))
11 | except (IOError, ValueError):
12 | cache = {}
13 |
14 | def new_func(param):
15 | if param not in cache:
16 | cache[param] = original_func(param)
17 | json.dump(cache, open(file_name, 'w'))
18 | return cache[param]
19 |
20 | return new_func
21 |
22 | return decorator
23 |
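Usage sketch for persist_to_file (the cache file name here is arbitrary): the decorator memoizes a single-argument function and mirrors the cache to a JSON file, so results survive across runs; because the cache round-trips through JSON, arguments are effectively stored as string keys.

    @persist_to_file("label_cache.json")    # arbitrary cache file
    def label_for(address):
        return "TOKEN-" + address[-4:]      # stand-in for an expensive lookup

    label_for("0xa478c2975ab1ea89e8196811f51a7b7ade33eb11")  # computed, then persisted
    label_for("0xa478c2975ab1ea89e8196811f51a7b7ade33eb11")  # served from the cache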
--------------------------------------------------------------------------------
/data-scripts/uniswapv1_reserves.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import csv, os
3 | import pandas as pd
4 | import logging
5 | from uniswapv1 import UniswapV1
6 | from collections import defaultdict
7 | import glob
8 |
9 | #fin = open('data-scripts/latest-data/uniswapv1-processed/89d24a6b4ccb1b6faa2625fe562bdd9a23260359.csv', 'r')
10 |
11 | filenames = glob.glob('latest-data/uniswapv1-processed/*.csv')
12 | fout = open('latest-data/uniswapv1-reserves.csv', 'w')
13 | fout.write('Block,Address,Token0,Token1,Reserve0,Reserve1\n')
14 | exchange_acc = 'UniswapV1'
15 |
16 | for filename in filenames:
17 | print("processing {} ...".format(filename))
18 | pair_address = filename.split('/')[-1].split('.')[0]
19 | fin = open(filename, 'r')
20 |
21 | block_to_reserves = defaultdict(lambda : {})
22 |
23 | uniswapv1 = UniswapV1()
24 |
25 | block_number = 0
26 | token = int(str(pair_address), 16)
27 | for line in fin.readlines():
28 | if 'block' in line:
29 | words = line.split()
30 | block_number = int(words[-1])
31 | else:
32 | uniswapv1.process(line)
33 | balances = uniswapv1.config()
34 | block_to_reserves[block_number][token] = balances[exchange_acc][str(token)]
35 | block_to_reserves[block_number]['0'] = balances[exchange_acc]['0']
36 |
37 | for block_num in block_to_reserves:
38 | token_reserve = block_to_reserves[block_num][token]
39 | eth_reserve = block_to_reserves[block_num]['0']
40 | fout.write("%d,%s,%d,%d,%d,%d\n" % (block_num, pair_address, token, 0, token_reserve, eth_reserve))
41 |
42 |
43 |
44 |
--------------------------------------------------------------------------------
/data-scripts/uniswapv2_reserves.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import csv, os
3 | import pandas as pd
4 | import logging
5 | from exchanges import topics_from_text
6 |
7 | parser = argparse.ArgumentParser(description='Get UniswapV2 Reserves')
8 | parser.add_argument(
9 | '-v', '--verbose',
10 | help="Be verbose",
11 | action="store_const", dest="loglevel", const=logging.INFO,
12 | default=logging.WARNING
13 | )
14 |
15 | parser.add_argument(
16 | '-e', '--exchange',
17 | help="sushiswap/uniswapv2",
18 | default='sushiswap'
19 | )
20 |
21 |
22 | args = parser.parse_args()
23 | logging.basicConfig(level=args.loglevel, format='%(message)s')
24 |
25 | logger = logging.getLogger(__name__)
26 |
27 | exchange_name = args.exchange
28 |
29 | uniswapv2_logs = 'latest-data/all_logs_uniswapv2.csv'
30 | sushiswap_logs = 'latest-data/all_logs_sushiswap.csv'
31 |
32 | exchange_logs = {'uniswapv2' : uniswapv2_logs, 'sushiswap' : sushiswap_logs}
33 |
34 | uniswapv2_pairs = pd.read_csv('latest-data/uniswapv2_pairs.csv').set_index('pair')
35 |
36 | logsdict = csv.DictReader(open(exchange_logs[exchange_name]), delimiter=',',
37 | quotechar='"', quoting=csv.QUOTE_MINIMAL)
38 |
39 | logs = {}
40 | block_to_reserves = {}
41 | txhashes = []
42 | events= []
43 | tx_to_block = {}
44 |
45 | #Interested in only Sync events
46 | interested_topics = ['0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1']
47 |
48 | for log in logsdict:
49 | topics = topics_from_text(log['topics'])
50 | if topics[0] not in interested_topics:
51 | continue
52 | hash = log['transaction_hash']
53 | if not hash in logs:
54 | logs[hash] = []
55 | txhashes.append(hash)
56 | logs[hash].append((log['address'], log['data'], topics, log['gas_price'], log['receipt_gas_used'], log['block_number']))
57 | tx_to_block[hash] = log['block_number']
58 |
59 | parsed = 0
60 |
61 | for txhash in txhashes:
62 | for logitem in logs[txhash]:
63 | address = logitem[0]
64 | data = logitem[1]
65 | topics = logitem[2]
66 | data = data[2:] # strip 0x from hex
67 | action_requested = None
68 | if topics[0] == interested_topics[0]:
69 | # sync reserves
70 | reserve0= int(str(data[:64]), 16)
71 | reserve1= int(str(data[64:128]), 16)
72 | block_num = int(tx_to_block[txhash])
73 | if block_num not in block_to_reserves:
74 | block_to_reserves[block_num] = {}
75 | block_to_reserves[block_num][address] = (reserve0, reserve1)
76 | parsed += 1
77 | if (parsed % 10000 == 0):
78 | logger.info("Parsed %d" %(parsed))
79 |
80 |
81 | filepath = 'latest-data/%s-reserves.csv' % (exchange_name)
82 |
83 | logger.info("Writing to %s" % (filepath))
84 |
85 | fout = open(filepath, 'w')
86 | fout.write('Block,Address,Token0,Token1,Reserve0,Reserve1\n')
87 | for block_num in block_to_reserves:
88 | for address in block_to_reserves[block_num]:
89 | token0 = int(str(uniswapv2_pairs.loc[address].token0), 16)
90 | token1 = int(str(uniswapv2_pairs.loc[address].token1), 16)
91 | reserve0 = block_to_reserves[block_num][address][0]
92 | reserve1 = block_to_reserves[block_num][address][1]
93 | fout.write("%d,%s,%d,%d,%d,%d\n" % (block_num, address, token0, token1, reserve0, reserve1))
94 |
95 |
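Note on the Sync decoding above: the event's data field is two 32-byte words holding reserve0 and reserve1, which is what the [:64] and [64:128] slices pull apart. A sketch with made-up reserves:

    data = "0x" + format(1500 * 10**6, "064x") + format(2 * 10**18, "064x")  # made-up payload
    data = data[2:]                    # strip 0x, as in the script
    reserve0 = int(data[:64], 16)      # 1500000000          (word 0)
    reserve1 = int(data[64:128], 16)   # 2000000000000000000 (word 1)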
--------------------------------------------------------------------------------
/data-scripts/uniswapv2_trades.py:
--------------------------------------------------------------------------------
1 | import csv, os
2 | import pandas as pd
3 | import logging
4 | import sys
5 |
6 | logging.basicConfig(level=logging.INFO, format='%(message)s')
7 | logger = logging.getLogger(__name__)
8 |
9 |
10 |
11 | from exchanges import get_trade_data_from_log_item, get_uniswap_token, topics_from_text
12 |
13 | exchange_name = sys.argv[1]
14 |
15 | uniswapv2_logs = 'latest-data/all_logs_uniswapv2.csv'
16 | sushiswap_logs = 'latest-data/all_logs_sushiswap.csv'
17 |
18 | exchange_logs = {'uniswapv2' : uniswapv2_logs, 'sushiswap' : sushiswap_logs}
19 | outputdir = 'latest-data/' + exchange_name + '-processed'
20 |
21 | uniswapv2_pairs = pd.read_csv('latest-data/uniswapv2_pairs.csv').set_index('pair')
22 |
23 | logsdict = csv.DictReader(open(exchange_logs[exchange_name]), delimiter=',',
24 | quotechar='"', quoting=csv.QUOTE_MINIMAL)
25 |
26 | # logs sorted by block number and then transaction indices (all logs from same txhash are consecutive)
27 |
28 | events_by_address = {}
29 |
30 | #Interested in only Mint, Burn and Swap events
31 | interested_topics = ['0x4c209b5fc8ad50758f13e2e1088ba56a560dff690a1c6fef26394f4c03821c4f','0xdccd412f0b1252819cb1fd330b93224ca42612892bb3f4f789976e6d81936496','0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822']
32 |
33 | parsed = 0
34 | for log in logsdict:
35 | topics = topics_from_text(log['topics'])
36 | if topics[0] not in interested_topics:
37 | continue
38 | txhash = log['transaction_hash']
39 | block_number = log['block_number']
40 | address = log['address']
41 | data = log['data']
42 | gas_price = int(log['gas_price'])
43 | receipt_gas_used = int(log['receipt_gas_used'])
44 | data = data[2:] # strip 0x from hex
45 | token0 = int(str(uniswapv2_pairs.loc[address].token0), 16)
46 | token1 = int(str(uniswapv2_pairs.loc[address].token1), 16)
47 | action_requested = None
48 | if topics[0] == interested_topics[0]:
49 | # add liquidity
50 | if len(topics) < 2:
51 |             logger.warning("%s %s %s %s", address, txhash, topics, data)
52 | continue
53 | provider = int(str(topics[1]), 16)
54 | amount0 = int(str(data[:64]), 16)
55 | amount1 = int(str(data[64:]), 16)
56 | action_requested = "%d adds %d %d and %d %d of liquidity;" % (provider, amount0, token0, amount1, token1)
57 | elif topics[0] == interested_topics[1]:
58 | # remove liquidity
59 | if len(topics) < 3:
60 |             logger.warning("%s %s %s %s", address, txhash, topics, data)
61 | continue
62 | remover = int(str(topics[1]), 16) #msg.sender, because to can be to another contract
63 | amount0 = int(str(data[:64]), 16)
64 | amount1 = int(str(data[64:]), 16)
65 | action_requested = "%d removes %d %d and %d %d of liquidity;" % (remover, amount0, token0, amount1, token1)
66 | elif topics[0] == interested_topics[2]:
67 | if len(topics) < 3:
68 |             logger.warning("%s %s %s %s", address, txhash, topics, data)
69 | continue
70 | trader = int(str(topics[1]), 16) #msg.sender, because to can be to another contract
71 | amount0_in = int(str(data[:64]), 16)
72 | amount1_in = int(str(data[64:128]), 16)
73 | amount0_out = int(str(data[128:192]), 16)
74 | amount1_out = int(str(data[192:256]), 16)
75 |
76 | # exactly one input amount zero and exactly one output amount zero is the most common
77 | # one input amount 0 and both output amounts non zero is common
78 | # both input amounts non zero and exactly one output amount non zero is rare ~ 100ppm
79 |
80 | if (amount1_out == 0):
81 | # address swaps for Y by providing x X and y Y with change x X
82 | action_requested = "%d swaps for %d by providing %d %d and %d %d with change %d fee %d ;" % (
83 | trader, token0, amount1_in, token1, amount0_in, token0 , amount1_out, gas_price * receipt_gas_used)
84 | elif (amount0_out == 0):
85 | action_requested = "%d swaps for %d by providing %d %d and %d %d with change %d fee %d ;" % (
86 | trader, token1, amount0_in, token0, amount1_in, token1 , amount0_out, gas_price * receipt_gas_used)
87 | elif (amount1_in == 0):
88 | action_requested = "%d swaps for %d by providing %d %d and %d %d with change %d fee %d ;" % (
89 | trader, token1, amount0_in, token0, amount1_in, token1 , amount0_out, gas_price * receipt_gas_used)
90 | elif (amount0_in == 0):
91 | action_requested = "%d swaps for %d by providing %d %d and %d %d with change %d fee %d ;" % (
92 | trader, token0, amount1_in, token1, amount0_in, token0 , amount1_out, gas_price * receipt_gas_used)
93 | else:
94 |             logger.warning("%s %s %s %s", address, txhash, topics, data)
95 | continue
96 |
97 | if action_requested is not None:
98 | if not (address) in events_by_address:
99 | events_by_address[address] = []
100 | event = "// transaction %s block %s\n%s" % (txhash, block_number, action_requested)
101 | events_by_address[address].append(event)
102 | parsed += 1
103 | if (parsed % 10000 == 0):
104 | logger.info("Parsed %d" %(parsed))
105 |
106 | for address in events_by_address:
107 | filepath = '%s/%s.csv' % (outputdir, address)
108 | open(filepath, 'w').write("\n".join(events_by_address[address]) + '\n')
109 | logger.info("Written %s" % (filepath))
110 |
111 | logger.info("Done...")
112 |
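Note on the Swap decoding above: the data field carries four 32-byte words (amount0In, amount1In, amount0Out, amount1Out), and the four branches classify which side was paid in and which was paid out. A made-up example that lands in the amount0_out == 0 branch (the trader sends token0 and receives token1):

    data = (format(10**18, "064x") + format(0, "064x") +        # made-up amounts
            format(0, "064x") + format(3000 * 10**6, "064x"))
    amount0_in  = int(data[:64], 16)      # 10**18 of token0 in
    amount1_in  = int(data[64:128], 16)   # 0
    amount0_out = int(data[128:192], 16)  # 0
    amount1_out = int(data[192:256], 16)  # 3000 * 10**6 of token1 out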
--------------------------------------------------------------------------------
/data-scripts/validate_lptokens.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import sys
3 | import json
4 |
5 | DEBUG = True
6 | id = 0
7 | def storage(address, index, block):
8 | global id
9 | data = {}
10 | data['jsonrpc'] = '2.0'
11 | data['method'] = 'eth_getStorageAt'
12 | data['params'] = [address, index, block]
13 | data['id'] = id
14 | id += 1
15 | r = requests.post('http://localhost:8545', json=data)
16 | response = json.loads(r.content)
17 | if DEBUG:
18 | # print(data)
19 | print(response['result'])
20 | if int(response['result'], 16) == int('0x6e71edae12b1b97f4d1f60370fef10105fa2faae0126114a169c64845d6126c9',16):
21 | print('voila!!!')
22 |
23 | def call(address, block):
24 | global id
25 | data = {}
26 | data['jsonrpc'] = '2.0'
27 | data['method'] = 'eth_call'
28 | data['params'] = [{"to":address, "data":"0x0"}, block]
29 | data['id'] = id
30 | id += 1
31 | r = requests.post('http://localhost:8545', json=data)
32 | response = json.loads(r.content)
33 | if DEBUG:
34 | # print(data)
35 | print(response)
36 |
37 | address = '0x397ff1542f962076d0bfe58ea045ffa2d347aca0'
38 | block_number = 13770000
39 | block = hex(block_number+1)
40 |
41 | for index in range(0,100):
42 | storage(address, hex(index), block)
43 |
44 | # call(address, block)
--------------------------------------------------------------------------------
/execution_times.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | TIMEFORMAT=%R,%U,%S
3 | WORK_DIR=${HOME}/github-mev
4 | RESULTS_FILE=$WORK_DIR/results/uniswapv2_execution_time.csv
5 | echo TxCount,RealTime,UserTime,SysTime,Filename > $RESULTS_FILE
6 | for file in `ls -rS $WORK_DIR/data-scripts/latest-data/uniswapv2-processed/0x*.csv`
7 | do
8 | lines=`wc -l $file | awk '{print $1}'`
9 | num_tx=$(( $lines / 2 ))
10 | num_sampled=`grep "^$num_tx," $RESULTS_FILE | wc -l`
11 | if [ $num_sampled -lt 2 ]
12 | then
13 | runtime=$({ time krun $file ; } 2>&1 >/dev/null)
14 | echo $num_tx,$runtime,$file >> $RESULTS_FILE
15 | fi
16 | done
17 |
--------------------------------------------------------------------------------
/find_mev.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | from subprocess import Popen, PIPE
4 | import re
5 | from pathlib import Path
6 |
7 | MAKER_DATA = """
8 | exec(276203969674087029088707267242881295499809928317 opens vault 155042);
9 | exec(276203969674087029088707267242881295499809928317 locks 9051654598458972160 collateral to vault 155042);
10 | exec(276203969674087029088707267242881295499809928317 draws 800000000000000000000 debt from vault 155042);
11 | """
12 |
13 |
14 | BLANK_SPEC = """module BOUND
15 | imports MEV
16 | rule
17 | 0 in 0 gets 0;
18 | 0 in SAI gets 0;
19 | %s
20 | 0 bites vault %s;
21 | => ?X
22 |
23 | ( (Uniswap in 0) |-> 0 (Uniswap in SAI) |-> 0 ) =>?S:Map
24 | .Set => ?_:Set
25 | .List => ?_
26 | .Map => ?_
27 | .Map => ?_
28 | ensures ( ({?S[0 in SAI]}:>Int <=Int 0) andBool ?X ==K DONE) orBool (?X ==K FAIL)
29 | endmodule
30 | """
31 |
32 | def find_mev_cdp(program, spec_file, outfile, starting_value, end_value):
33 | global BLANK_SPEC
34 | for v in range(starting_value, end_value+1):
35 | output = ""
36 | spec = BLANK_SPEC % (program + MAKER_DATA, v)
37 | Path(os.path.dirname(spec_file)).mkdir(parents=True, exist_ok=True)
38 | open(spec_file, "w").write(spec)
39 | print("Starting proof..." + str(v))
40 | sys.stdout.flush()
41 | pipe = Popen("kprove -v --debug --default-claim-type all-path --z3-impl-timeout 500 " + spec_file, shell=True, stdout=PIPE, stderr=PIPE)
42 | output = pipe.stdout.read() + pipe.stderr.read()
43 | output = str(output, "utf-8")
44 | print(output)
45 | if "#True" not in output:
46 | print("MEV FOUND!")
47 | print("Writing MEV configuration to", outfile, "...")
48 | open(outfile, "w").write(output)
49 | return
50 |
51 | def main():
52 | PROGRAM = open('data/' + sys.argv[1]).read()
53 | spec_file = sys.argv[1]+'/bound.k'
54 | outfile = 'output/'+sys.argv[1]+'.out'
55 | find_mev_cdp(PROGRAM, spec_file, outfile, 155042, 155042)
56 |
57 | if __name__ == '__main__':
58 | main()
59 |
--------------------------------------------------------------------------------
/find_mev_kprove_uniswapv2.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | from subprocess import Popen, PIPE
4 | import re
5 | from pathlib import Path
6 |
7 | def get_claim(addresses, lower_balance_bounds, upper_balance_bounds, tokens):
8 |     lower_bound_claim = "{{?S[{address} in {token}]}}:>Int >=Int {bound}"
9 |     upper_bound_claim = "{{?S[{address} in {token}]}}:>Int <=Int {bound}"
10 |     component_claims = []
11 |     for address in addresses:
12 |         #upper bound on both tokens
13 |         component_claims.append("((({{?S[{address} in {token0}]}}:>Int <=Int {xbound}) orBool ({{?S[{address} in {token1}]}}:>Int <Int {ybound})) andBool (({{?S[{address} in {token0}]}}:>Int <Int {xbound}) orBool ({{?S[{address} in {token1}]}}:>Int <=Int {ybound})))".format(address=address, token0=tokens[0], token1=tokens[1], xbound=upper_balance_bounds[address][tokens[0]], ybound=upper_balance_bounds[address][tokens[1]]))
14 |         #lower bound on both tokens
15 |         component_claims.append("((({{?S[{address} in {token0}]}}:>Int >=Int {xbound}) orBool ({{?S[{address} in {token1}]}}:>Int >Int {ybound})) andBool (({{?S[{address} in {token0}]}}:>Int >Int {xbound}) orBool ({{?S[{address} in {token1}]}}:>Int >=Int {ybound})))".format(address=address, token0=tokens[0], token1=tokens[1], xbound=lower_balance_bounds[address][tokens[0]], ybound=lower_balance_bounds[address][tokens[1]]))
16 |
17 | claim = " andBool ".join(component_claims)
18 | return claim
19 |
20 |
21 | def reordering_mev(program, spec_file, outfile, acc, tokens, balances, pre_price, post_price):
22 | BLANK_SPEC = """module BOUND
23 | imports MEV
24 | rule
25 | {acc} in {token0} gets {balance0} ;
26 | {acc} in {token1} gets {balance1} ;
27 | {transactions}
28 | => ?X
29 |
30 | .Map =>?S:Map
31 | .Set => ?_:Set
32 | .List => ?_
33 | .Map => ?_:Map
34 | .Map => ?_
35 | ensures ( {claim} andBool (?X ==K DONE) ) orBool (?X ==K FAIL)
36 | endmodule
37 | """
38 | #({{?P[({token0}, {token1})]}}:>Int >=Int {limit0}) andBool (({{?P[({token1}, {token0})]}}:>Int >=Int {limit1}))
39 | #limit0=post_price[0], limit1=post_price[1])
40 |
41 | program = program.strip()
42 |
43 | addresses = set()
44 | all_transactions = program.split('\n')
45 | print(all_transactions)
46 | for i in range(1, len(all_transactions), 2):
47 | chunks = all_transactions[i].split()
48 | print(chunks)
49 | addresses.add(chunks[0])
50 |
51 | print(addresses)
52 |
53 | lower_balance_bounds = {}
54 | upper_balance_bounds = {}
55 |
56 | MAX = 99999999999999999999999999999999
57 | MIN = -99999999999999999999999999999999
58 |
59 | for address in addresses:
60 | lower_balance_bounds[address] = {tokens[0] : MAX, tokens[1] : MAX}
61 | upper_balance_bounds[address] = {tokens[0] : MIN, tokens[1] : MIN}
62 |
63 | claim = get_claim(addresses, lower_balance_bounds, upper_balance_bounds, tokens)
64 | print(claim)
65 |
66 | spec = BLANK_SPEC.format(acc=acc, token0=tokens[0], token1=tokens[1], balance0=balances[0], balance1=balances[1],transactions=program, claim=claim)
67 | output = ""
68 | Path(os.path.dirname(spec_file)).mkdir(parents=True, exist_ok=True)
69 | print("Writing spec to", spec_file)
70 | open(spec_file, "w").write(spec)
71 | print("Starting proof..." )
72 | sys.stdout.flush()
73 | pipe = Popen("kprove --default-claim-type all-path " + spec_file, shell=True, stdout=PIPE, stderr=PIPE)
74 | output = pipe.stdout.read() + pipe.stderr.read()
75 | output = str(output, "utf-8")
76 | print(output)
77 | if "#True" not in output:
78 | print("MEV FOUND!")
79 | print("Writing MEV configuration to", outfile, "...")
80 | open(outfile, "w").write(output)
81 | else:
82 | print("MEV NOT FOUND!")
83 | print("Writing MEV configuration to", outfile, "...")
84 | open(outfile, "w").write("MEV NOT FOUND!")
85 |
86 | def main():
87 | PROGRAM = open('data/' + sys.argv[1]).read()
88 | spec_file = sys.argv[1]+'/bound.k'
89 | outfile = 'output/'+sys.argv[1]+'.out'
90 | find_mev_cdp(PROGRAM, spec_file, outfile, 155042, 155042)
91 |
92 | if __name__ == '__main__':
93 | main()
94 |
--------------------------------------------------------------------------------
/find_mev_krun_maker.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | from subprocess import Popen, PIPE
4 | import re
5 | from pathlib import Path
6 | import itertools
7 | import random
8 |
9 | def all_orderings(all_transactions):
10 | ret = list(itertools.permutations(all_transactions))
11 | random.shuffle(ret)
12 | print("Num all reorderings ", len(ret))
13 | ret = [x for x in ret if valid_ordering(x)]
14 | print("Num valid reorderings", len(ret))
15 | return ret
16 |
17 | def valid_ordering(transaction_ordering):
18 | for transaction in transaction_ordering:
19 | if 'locks' in transaction:
20 | return True
21 | elif 'draws' in transaction:
22 | return False
23 | return True
24 |
25 | def reordering_mev(program, program_file, outfile, acc, tokens, balances, pre_price, post_price, pair_address, maker_prologue, maker_epilogue):
26 |
27 | program = program.strip()
28 |
29 | addresses = set()
30 | transactions = program.split('\n')
31 | all_transactions = [transaction for transaction in transactions if not transaction.strip().startswith('//')]
32 | #print(all_transactions)
33 | for i in range(0, len(all_transactions)):
34 | chunks = all_transactions[i].split()
35 | #print(chunks)
36 | addresses.add(chunks[0])
37 |
38 | #print(addresses)
39 |
40 | lower_balance_bounds = {}
41 | upper_balance_bounds = {}
42 |
43 | MAX = 99999999999999999999999999999999
44 | MIN = -99999999999999999999999999999999
45 |
46 | for address in addresses:
47 | lower_balance_bounds[address] = {tokens[0] : MAX, tokens[1] : MAX}
48 | upper_balance_bounds[address] = {tokens[0] : MIN, tokens[1] : MIN}
49 |
50 | PROLOGUE = """{acc} in {token0} gets {balance0} ;
51 | {acc} in {token1} gets {balance1} ;
52 | """.format(acc=acc, token0=tokens[0], token1=tokens[1], balance0=balances[0], balance1=balances[1])
53 |
54 | PROLOGUE = PROLOGUE + maker_prologue + '\n'
55 |
56 | path_num = 0
57 | for transaction_ordering in all_orderings(all_transactions):
58 | output = ""
59 | Path(os.path.dirname(program_file)).mkdir(parents=True, exist_ok=True)
60 | #print("Writing program to", program_file)
61 | open(program_file, "w").write(PROLOGUE + '\n'.join(transaction_ordering) + maker_epilogue)
62 | sys.stdout.flush()
63 | pipe = Popen("krun " + program_file, shell=True, stdout=PIPE, stderr=PIPE)
64 | output = pipe.stdout.read() + pipe.stderr.read()
65 | output = str(output, "utf-8")
66 | outfilename = outfile+str(path_num)
67 | print("Writing output to", outfilename, "...")
68 | open(outfilename, "w").write(output)
69 | path_num += 1
70 |
71 | def main():
72 | PROGRAM = open('data/' + sys.argv[1]).read()
73 | program_file = sys.argv[1]+'/bound.k'
74 | outfile = 'output/'+sys.argv[1]+'.out'
75 | find_mev_cdp(PROGRAM, program_file, outfile, 155042, 155042)
76 |
77 | if __name__ == '__main__':
78 | main()
79 |
--------------------------------------------------------------------------------
/find_mev_krun_mcd.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | from subprocess import Popen, PIPE
4 | import re
5 | from pathlib import Path
6 | import itertools
7 | import random
8 | import concurrent.futures
9 |
10 | def all_orderings(all_transactions):
11 | num_transactions = len(all_transactions)
12 | if num_transactions < 10:
13 | ret = list(itertools.permutations(all_transactions))
14 | random.shuffle(ret)
15 | else:
16 | ret = []
17 | for i in range(400000):
18 | ret.append(random.sample(all_transactions, num_transactions))
19 |
20 | print("Num all reorderings ", len(ret))
21 | return ret
22 |
23 |
24 | def valid_ordering(transaction_ordering):
25 | '''
26 | for transaction in transaction_ordering:
27 | if 'locks' in transaction:
28 | return True
29 | elif 'draws' in transaction:
30 | return False
31 | '''
32 | return True
33 |
34 | def reordering_mev(program, program_file, outfile, acc, pair_address, maker_prologue, maker_epilogue, num_workers):
35 |
36 | num_workers = int(num_workers)
37 | program = program.strip()
38 |
39 |
40 | # addresses = set()
41 | transactions = program.split('\n')
42 | all_transactions = [transaction for transaction in transactions if not transaction.strip().startswith('//')]
43 | '''
44 | print(all_transactions)
45 | for i in range(0, len(all_transactions)):
46 | chunks = all_transactions[i].split()
47 | #print(chunks)
48 | addresses.add(chunks[0])
49 |
50 | #print(addresses)
51 |
52 | lower_balance_bounds = {}
53 | upper_balance_bounds = {}
54 |
55 | MAX = 99999999999999999999999999999999
56 | MIN = -99999999999999999999999999999999
57 |
58 | for address in addresses:
59 | lower_balance_bounds[address] = {tokens[0] : MAX, tokens[1] : MAX}
60 | upper_balance_bounds[address] = {tokens[0] : MIN, tokens[1] : MIN}
61 | '''
62 |
63 | PROLOGUE = maker_prologue + '\n'
64 | path_num = 0
65 |
66 |
67 | with concurrent.futures.ThreadPoolExecutor(max_workers=num_workers) as executor:
68 | for transaction_ordering in all_orderings(all_transactions):
69 | executor.submit(process_tx_order, transaction_ordering, program_file, PROLOGUE, maker_epilogue, outfile, path_num)
70 | path_num += 1
71 |
72 |
73 |
74 | def process_tx_order(transaction_ordering, program_file, prologue, maker_epilogue, outfile, path_num):
75 | output = ""
76 | Path(os.path.dirname(program_file)).mkdir(parents=True, exist_ok=True)
77 | #print("Writing program to", program_file)
78 | f = open(program_file + str(path_num), "w")
79 | f.write(prologue + '\n'.join(transaction_ordering) + maker_epilogue)
80 | f.flush()
81 | os.fsync(f.fileno())
82 | f.close()
83 | # sys.stdout.flush()
84 | # pipe = Popen("krun " + program_file, shell=True, stdout=PIPE, stderr=PIPE)
85 | # output = pipe.stdout.read() + pipe.stderr.read()
86 | # output = str(output, "utf-8")
87 | # outfilename = outfile+str(path_num)
88 | # print("Writing output to", outfilename, "...")
89 | # f = open(outfilename, "w")
90 | # f.write(output)
91 | # f.close()
92 |
--------------------------------------------------------------------------------
/find_mev_krun_uniswapv2.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | from subprocess import Popen, PIPE
4 | import re
5 | from pathlib import Path
6 | import itertools
7 | import random
8 |
9 | def all_orderings(all_transactions):
10 | ret = list(itertools.permutations(all_transactions))
11 | random.shuffle(ret)
12 | return ret
13 |
14 | def reordering_mev(program, program_file, outfile, acc, tokens, balances, pre_price, post_price, pair_address, blocknum, convergence):
15 |
16 | program = program.strip()
17 |
18 | addresses = set()
19 | transactions = program.split('\n')
20 | all_transactions = [transaction for transaction in transactions if not transaction.strip().startswith('//')]
21 | print(all_transactions)
22 | for i in range(0, len(all_transactions)):
23 | chunks = all_transactions[i].split()
24 | print(chunks)
25 | addresses.add(chunks[0])
26 |
27 | print(addresses)
28 |
29 | lower_balance_bounds = {}
30 | upper_balance_bounds = {}
31 |
32 | MAX = 99999999999999999999999999999999
33 | MIN = -99999999999999999999999999999999
34 |
35 | for address in addresses:
36 | lower_balance_bounds[address] = {tokens[0] : MAX, tokens[1] : MAX}
37 | upper_balance_bounds[address] = {tokens[0] : MIN, tokens[1] : MIN}
38 |
39 | PROLOGUE = """{acc} in {token0} gets {balance0} ;
40 | {acc} in {token1} gets {balance1} ;
41 | """.format(acc=acc, token0=tokens[0], token1=tokens[1], balance0=balances[0], balance1=balances[1])
42 | path_num = 0
43 | for transaction_ordering in all_orderings(all_transactions):
44 | output = ""
45 | Path(os.path.dirname(program_file)).mkdir(parents=True, exist_ok=True)
46 | #print("Writing program to", program_file)
47 | open(program_file, "w").write(PROLOGUE + '\n'.join(transaction_ordering))
48 | sys.stdout.flush()
49 | pipe = Popen("krun " + program_file, shell=True, stdout=PIPE, stderr=PIPE)
50 | output = pipe.stdout.read() + pipe.stderr.read()
51 | output = str(output, "utf-8")
52 | outfilename = outfile+str(path_num)
53 | print("Writing output to", outfilename, "...")
54 | open(outfilename, "w").write(output)
55 | path_num += 1
56 |
57 | def main():
58 | PROGRAM = open('data/' + sys.argv[1]).read()
59 | program_file = sys.argv[1]+'/bound.k'
60 | outfile = 'output/'+sys.argv[1]+'.out'
61 | find_mev_cdp(PROGRAM, program_file, outfile, 155042, 155042)
62 |
63 | if __name__ == '__main__':
64 | main()
65 |
--------------------------------------------------------------------------------
/find_mev_uniswapv1.py:
--------------------------------------------------------------------------------
1 | import os,sys
2 | from pathlib import Path
3 | from collections import defaultdict
4 | from uniswapv1 import UniswapV1
5 | import itertools
6 | import logging
7 | import random
8 |
9 | def all_orderings(all_transactions):
10 | num_transactions = len(all_transactions)
11 | if num_transactions < 10:
12 | ret = list(itertools.permutations(all_transactions))
13 | random.shuffle(ret)
14 | return ret
15 | else:
16 | ret = []
17 | for i in range(400000):
18 | ret.append(random.sample(all_transactions, num_transactions))
19 | return ret
20 |
21 | def default_to_regular(d):
22 | if isinstance(d, defaultdict):
23 | d = {k: default_to_regular(v) for k, v in d.items()}
24 | return d
25 |
26 |
27 | def transaction_to_hash(data, transactions):
28 | metadata = []
29 | transactions = transactions.split('\n')
30 | for transaction in transactions:
31 | for idx in range(len(data)):
32 | if transaction in data[idx]:
33 | metadata.append(data[idx-1].split()[2])
34 | break
35 | return ','.join(metadata)
36 |
37 |
38 | def reordering_mev(program, program_file, outfile, exchange_acc, tokens, balances, pair_address, prices, block, convergence):
39 |
40 | program = program.strip()
41 |
42 | transactions = program.split('\n')
43 | all_transactions = [transaction.strip() for transaction in transactions if not transaction.strip().startswith('//')]
44 | logging.info(all_transactions)
45 |
46 | token0 = tokens[0]
47 | token1 = tokens[1]
48 |
49 | lower_bounds = defaultdict(lambda : defaultdict(lambda: 99999999999999999999999999999999))
50 | upper_bounds = defaultdict(lambda : defaultdict(lambda: -99999999999999999999999999999999))
51 |
52 | lower_bound_paths = defaultdict(lambda : ('', {}))
53 | upper_bound_paths = defaultdict(lambda: ('', {}))
54 |
55 | Path(os.path.dirname(program_file)).mkdir(parents=True, exist_ok=True)
56 | path_to_mev = {}
57 |
58 | path_num = 0
59 | for transaction_ordering in all_orderings(all_transactions):
60 | u = UniswapV1({tokens[0] : balances[0], tokens[1] : balances[1]}, exchange_acc)
61 | for transaction in transaction_ordering:
62 | u.process(transaction)
63 | token_balances = u.config()
64 | mev = 0
65 | for acc in token_balances:
66 | if acc == exchange_acc:
67 | continue
68 | balance0 = token_balances[acc][token0]
69 | balance1 = token_balances[acc][token1]
70 | total_balance = balance0 * prices[token0] + balance1 * prices[token1]
71 | if total_balance < lower_bounds[acc][token0]:
72 | lower_bounds[acc][token0] = total_balance
73 | lower_bound_paths[acc] = ('\n'.join(transaction_ordering), token_balances)
74 | if total_balance > upper_bounds[acc][token0]:
75 | upper_bounds[acc][token0] = total_balance
76 | upper_bound_paths[acc] = ('\n'.join(transaction_ordering), token_balances)
77 | extortion = upper_bounds[acc][token0] - lower_bounds[acc][token0]
78 | #mev += extortion
79 | mev = max(mev, extortion)
80 | path_num += 1
81 | path_to_mev[path_num] = mev
82 |
83 | sorted_items = sorted(path_to_mev.items())
84 | #print("Writing hill climbing data to {} ...".format(program_file))
85 | if convergence:
86 | fout = open(program_file, 'w')
87 | fout.write('pathnum,mev\n')
88 | fout.write('\n'.join(["{},{}".format(path_num, mev) for path_num, mev in sorted_items]))
89 | fout.close()
90 |
91 | mev = 0
92 | argmax_acc = 0
93 | for acc in lower_bounds:
94 | extortion = upper_bounds[acc][token0] - lower_bounds[acc][token0]
95 | # mev += extortion
96 | if extortion >= mev :
97 | mev = extortion
98 | argmax_acc = acc
99 |
100 | return mev, transaction_to_hash(transactions, default_to_regular(upper_bound_paths[argmax_acc][0])), transaction_to_hash(transactions, default_to_regular(lower_bound_paths[argmax_acc][0]))
101 |
102 | # print(upper_bound_paths)
103 | # print(lower_bound_paths)
104 |
--------------------------------------------------------------------------------
/find_mev_uniswapv2.py:
--------------------------------------------------------------------------------
1 | import os,sys
2 | from pathlib import Path
3 | from collections import defaultdict
4 | from uniswapv2 import UniswapV2
5 | import itertools
6 | import logging
7 | import random
8 |
9 | def all_orderings(all_transactions):
10 | num_transactions = len(all_transactions)
11 | if num_transactions < 10:
12 | ret = list(itertools.permutations(all_transactions))
13 | random.shuffle(ret)
14 | return ret
15 | else:
16 | ret = []
17 | for i in range(400000):
18 | ret.append(random.sample(all_transactions, num_transactions))
19 | return ret
20 |
21 | def default_to_regular(d):
22 | if isinstance(d, defaultdict):
23 | d = {k: default_to_regular(v) for k, v in d.items()}
24 | return d
25 |
26 |
27 | def transaction_to_hash(data, transactions):
28 | metadata = []
29 | transactions = transactions.split('\n')
30 | for transaction in transactions:
31 | for idx in range(len(data)):
32 | if transaction in data[idx]:
33 | metadata.append(data[idx-1].split()[2])
34 | break
35 | return ','.join(metadata)
36 |
37 |
38 | def reordering_mev(program, program_file, outfile, exchange_acc, tokens, balances, pair_address, prices, block, convergence):
39 |
40 | program = program.strip()
41 |
42 | transactions = program.split('\n')
43 | all_transactions = [transaction.strip() for transaction in transactions if not transaction.strip().startswith('//')]
44 | logging.info(all_transactions)
45 |
46 | token0 = tokens[0]
47 | token1 = tokens[1]
48 |
49 | lower_bounds = defaultdict(lambda : defaultdict(lambda: 99999999999999999999999999999999))
50 | upper_bounds = defaultdict(lambda : defaultdict(lambda: -99999999999999999999999999999999))
51 |
52 | lower_bound_paths = defaultdict(lambda : ('', {}))
53 | upper_bound_paths = defaultdict(lambda: ('', {}))
54 |
55 | Path(os.path.dirname(program_file)).mkdir(parents=True, exist_ok=True)
56 | path_to_mev = {}
57 |
58 | path_num = 0
59 | for transaction_ordering in all_orderings(all_transactions):
60 | u = UniswapV2({tokens[0] : balances[0], tokens[1] : balances[1]}, exchange_acc)
61 | for transaction in transaction_ordering:
62 | u.process(transaction)
63 | token_balances = u.config()
64 | mev = 0
65 | for acc in token_balances:
66 | if acc == exchange_acc:
67 | continue
68 | balance0 = token_balances[acc][token0]
69 | balance1 = token_balances[acc][token1]
70 | total_balance = balance0 * prices[token0] + balance1 * prices[token1]
71 | if total_balance < lower_bounds[acc][token0]:
72 | lower_bounds[acc][token0] = total_balance
73 | lower_bound_paths[acc] = ('\n'.join(transaction_ordering), token_balances)
74 | if total_balance > upper_bounds[acc][token0]:
75 | upper_bounds[acc][token0] = total_balance
76 | upper_bound_paths[acc] = ('\n'.join(transaction_ordering), token_balances)
77 | extortion = upper_bounds[acc][token0] - lower_bounds[acc][token0]
78 | #mev += extortion
79 | mev = max(mev, extortion)
80 | path_num += 1
81 | path_to_mev[path_num] = mev
82 |
83 | sorted_items = sorted(path_to_mev.items())
84 | #print("Writing hill climbing data to {} ...".format(program_file))
85 | if convergence:
86 | fout = open(program_file, 'w')
87 | fout.write('pathnum,mev\n')
88 | fout.write('\n'.join(["{},{}".format(path_num, mev) for path_num, mev in sorted_items]))
89 | fout.close()
90 |
91 | mev = 0
92 | argmax_acc = 0
93 | for acc in lower_bounds:
94 | extortion = upper_bounds[acc][token0] - lower_bounds[acc][token0]
95 | # mev += extortion
96 | if extortion >= mev :
97 | mev = extortion
98 | argmax_acc = acc
99 |
100 | return mev, transaction_to_hash(transactions, default_to_regular(upper_bound_paths[argmax_acc][0])), transaction_to_hash(transactions, default_to_regular(lower_bound_paths[argmax_acc][0]))
101 |
102 | # print(upper_bound_paths)
103 | # print(lower_bound_paths)
104 |
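The MEV measure returned here is, per account, the spread between the best-case and worst-case final portfolio value (balances priced into a common unit) across the sampled orderings, maximized over accounts. In isolation, with made-up final values for one account under three orderings:

    values = [1_000_000, 1_037_500, 998_200]   # made-up per-ordering portfolio values
    spread = max(values) - min(values)         # 39_300: best case minus worst case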
--------------------------------------------------------------------------------
/find_risky_cdps.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | from functools import reduce
3 |
4 |
5 | exchange_name = 'uniswapv2'
6 | collateral_type = '31358499851466632982272067240987752480060719095994161751935692443478204088320'
7 | pair_address = '0xa478c2975ab1ea89e8196811f51a7b7ade33eb11'
8 |
9 | reserves = pd.read_csv('data-scripts/latest-data/%s-reserves.csv' % (exchange_name))
10 | reserves.Block = reserves.Block.astype(int)
11 | reserves = reserves[reserves.Address == pair_address]
12 |
13 | start_block = reserves.Block.min()
14 | end_block = reserves.Block.max()
15 |
16 |
17 | cdp_states = pd.read_csv('maker-data/mcd/latest-data/cdp_states.csv')
18 | cdp_states = cdp_states[cdp_states.Collateral_type == collateral_type]
19 | cdp_states.Block = cdp_states.Block.astype(int)
20 | cdp_states['tx_count'] = cdp_states.groupby('CDP').cumcount()
21 |
22 | rates = pd.read_csv('maker-data/mcd/latest-data/maker_fees.csv')
23 | rates.Fees = rates.Fees.astype(float)
24 | rates.Block = rates.Block.astype(int)
25 |
26 | oracle_prices = pd.read_csv('maker-data/mcd/latest-data/spot_prices.csv')
27 | oracle_prices = oracle_prices[oracle_prices.CollateralType == collateral_type]
28 | oracle_prices.Block = oracle_prices.Block.astype(int)
29 | oracle_prices.SpotPrice = oracle_prices.SpotPrice.astype(float)
30 |
31 |
32 | #filter before merging
33 | rates = rates[(rates.Block <= end_block) & (rates.Block >= start_block)]
34 | cdp_states = cdp_states[(cdp_states.Block <= end_block) & (cdp_states.Block >= start_block)]
35 | oracle_prices = oracle_prices[(oracle_prices.Block <= end_block) & (oracle_prices.Block >= start_block)]
36 |
37 |
38 | rates.set_index('Block', inplace=True)
39 | cdp_states.set_index('Block', inplace=True)
40 | reserves.set_index('Block', inplace=True)
41 | oracle_prices.set_index('Block', inplace=True)
42 |
43 |
44 | dfs = [cdp_states, reserves, rates, oracle_prices]
45 |
46 | df_merged = reduce(lambda left,right: pd.merge(left,right,on=['Block'], how='outer'), dfs)
47 | df_merged = df_merged.reset_index().sort_values('Block', kind='mergesort') #mergesort for stable sort
48 | df_merged = df_merged.fillna(method='ffill').dropna()
49 |
50 | df_merged.Collateral = (df_merged.Collateral.astype(float) / 10**18)
51 |
52 | df_merged.Debt = (df_merged.Debt.astype(float) / 10**18)
53 | df_merged['Tab'] = df_merged.Debt * df_merged.Fees / 10**27
54 | df_merged.Reserve0 = df_merged.Reserve0.astype(float)
55 | df_merged.Reserve1 = df_merged.Reserve1.astype(float)
56 |
57 | df_merged['Uniswap_price'] = df_merged['Reserve0'] / df_merged['Reserve1']
58 |
59 | df = df_merged[['Block', 'Tab', 'Collateral', 'Uniswap_price', 'Reserve0', 'Reserve1', 'SpotPrice', 'Debt', 'CDP', 'tx_count']]
60 |
61 | #filter out CDPs without debt
62 | df = df[df.Tab > 0]
63 |
64 | df['Uniswap_ratio'] = ( (df.Collateral * df.Uniswap_price) / (df.Tab) )
65 | df['Oracle_ratio'] = ( (df.Collateral * df.SpotPrice) / (df.Tab * 10**27) ) * 1.5
66 |
67 | df = df.sort_values('Uniswap_ratio')
68 | fd = df[df.Debt > 0]
69 | filtered = fd[(fd.Tab > 300) & (fd.Oracle_ratio > 1.5) & (fd.Uniswap_ratio > 1.5)].drop_duplicates('CDP')
70 | filtered2 = fd[(fd.Tab > 300) & (fd.Oracle_ratio > 1.5)].drop_duplicates('CDP')
71 |
72 | filtered.to_csv('insertion_targets.csv', index=False)
73 |
74 | filtered2.to_csv('easy_targets.csv', index=False)
75 |
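The 1.5 cut-offs above encode the 150% collateralization requirement the filters assume: a CDP counts as adequately collateralized when its collateral, valued at the relevant price, covers at least 1.5x its outstanding debt (Tab). A worked example with made-up numbers:

    collateral, price, tab = 10.0, 2000.0, 12000.0   # made-up position: 10 units at 2000 DAI each, 12000 DAI owed
    ratio = collateral * price / tab                 # 1.666..., above 1.5, so not a liquidation candidate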
--------------------------------------------------------------------------------
/find_uniswap_bounds.py:
--------------------------------------------------------------------------------
1 | from kutils import find_integer_bound
2 |
3 | UNISWAP_LOWER_BOUND = "{?S[Uniswap in 0]}:>Int >=Int %d"
4 | UNISWAP_UPPER_BOUND = "{?S[Uniswap in 0]}:>Int <=Int %d"
5 | PROGRAM = """Uniswap in 0 gets 0;
6 | Uniswap in 1454383474624795085458277788004692202315323288702 gets 0;
7 | // transaction 0x003c5d067ee03836a4374f4d5c3466e0d8328f62496b5dd1cbdf4d6db6aeacfe
8 | 572342420797838882173629907578269659270010768697 adds 100000000000000000000 tokens and 20000000000000000 eth of liquidity to 1454383474624795085458277788004692202315323288702;
9 | // transaction 0xd54a6243159e4068cb50aebf4645b117bd6bfd627cf7819734c48bd984d5f4e8
10 | 616870745215506062750269120831072350349526031960 in 0 swaps 295147905179352825856 input for 1454383474624795085458277788004692202315323288702 fee 478923078075;
11 | // transaction 0x3a2fd182d016977a87d33ecf67e81d930e07f47d623320a41028f5bbf869db32
12 | 368839096625159992408953185476375976377428775247 in 1454383474624795085458277788004692202315323288702 swaps 120443408692820097171972 input for 0 fee 1177002069144;
13 | // transaction 0x0adb278e7096e67aca0bdb32f9794322a7b8be5835fe21ef0aa012e85b2e77dc
14 | 616870745215506062750269120831072350349526031960 adds 83290999999999977725 tokens and 125401513679990865 eth of liquidity to 1454383474624795085458277788004692202315323288702;
15 | // transaction 0xb0cece3304dc2563fbb2d17736c751b8b441f04683e529c0a251799b7286f6b8
16 | 368839096625159992408953185476375976377428775247 in 1454383474624795085458277788004692202315323288702 swaps 2419684155471892146710118 input for 0 fee 1008871952048;"""
17 |
18 | find_integer_bound(PROGRAM, "uniswap_lower_bound.out", UNISWAP_LOWER_BOUND, 999999999999999999999999999999999)
19 | find_integer_bound(PROGRAM, "uniswap_upper_bound.out", UNISWAP_UPPER_BOUND, 0)
20 |
--------------------------------------------------------------------------------
/kutils.py:
--------------------------------------------------------------------------------
1 | from subprocess import Popen, PIPE
2 | import re, random, os
3 |
4 | BLANK_SPEC = """module BOUND
5 | imports MEV
6 | rule
7 | %s
8 | => ?X
9 |
10 | .Map =>?S:Map
11 | .Set => ?_:Set
12 | .List => ?_
13 | .Map => ?_
14 | .Map => ?_
15 | 1 => ?_
16 |
17 | ensures (?X ==K DONE andBool(%s)) orBool (?X ==K FAIL)
18 | endmodule
19 | """
20 |
21 | def find_integer_bound(program, outfile, bound_clause, starting_value):
22 | bound = starting_value
23 | previous_output = ""
24 | output = ""
25 | while True:
26 | spec = BLANK_SPEC % (program, bound_clause % (bound))
27 | open("bound.k", "w").write(spec)
28 | print("Starting proof...")
29 | pipe = Popen("kprove -v --debug --default-claim-type all-path --z3-impl-timeout 500 bound.k", shell=True, stdout=PIPE, stderr=PIPE)
30 | output = pipe.stdout.read() + pipe.stderr.read()
31 | output = str(output, "utf-8")
32 | if "#True" in output:
33 | print("BOUND FOUND!", bound)
34 | print("Writing best configuration to", outfile, "...")
35 | print(previous_output)
36 | open(outfile, "w").write(previous_output)
37 | break
38 | output = output[output.find(""):]
39 | output = output[:output.find("")+15]
40 | print("Found new bound", output)
41 | if not "Uniswap in 0 |-> " in output and "" in output:
42 | bound = 0
43 | else:
44 | bound = int(output.split("Uniswap in 0 |-> ")[1].split(" ")[0])
45 | print("-" * 15, "\nBETTER BOUND:", bound)
46 | previous_output = output
47 | print("All done :)")
48 |
49 |
50 | def get_final_configuration(program):
51 | os.system('kompile mev.k --backend llvm') # use llvm backend for faster execution
52 | open('torun.mev', 'w').write(program)
53 | pipe = Popen("krun torun.mev", shell=True, stdout=PIPE, stderr=PIPE)
54 | output = pipe.stdout.read() + pipe.stderr.read()
55 | output = str(output, "utf-8")
56 |
57 | print("K OUTPUT")
58 | print(output)
59 |
60 |
61 |
--------------------------------------------------------------------------------
/maker-data/actions.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import pandas as pd
3 | import requests
4 | from gql import gql, Client
5 | from gql.transport.requests import RequestsHTTPTransport
6 | import json
7 | import matplotlib.pyplot as plt
8 | import argparse
9 |
10 |
11 | parser = argparse.ArgumentParser(description='Fetch Maker Data')
12 |
13 | parser.add_argument(
14 | '-v', '--verbose',
15 | help="Be verbose",
16 | action="store_const", dest="loglevel", const=logging.INFO,
17 | default=logging.WARNING
18 | )
19 |
20 |
21 | args = parser.parse_args()
22 |
23 | logging.basicConfig(level=args.loglevel, format='%(message)s')
24 | logger = logging.getLogger(__name__)
25 |
26 |
27 | CDPS_URL = 'https://mkr.tools/api/v1/cdps'
28 | BLOCKS_URL = 'https://mkr.tools/api/v1/blocks'
29 | GRAPHQL_URL='https://sai-mainnet.makerfoundation.com/v1'
30 |
31 | time_checkpoints = ["2015-01-01", "2019-04-01"]
32 | #time_checkpoints = ["2018-01-01", "2019-04-01", "2019-07-01", "2019-10-01", "2020-01-01", "2020-04-01", "2020-10-15"]
33 |
34 | def get_filter_criterion(start_time, end_time):
35 | return '{{time: {{lessThan: "{end_time}", greaterThanOrEqualTo: "{start_time}"}}}}'.format(start_time=start_time, end_time=end_time)
36 |
37 | def fetch_data_from_web(filter_criterion):
38 | sample_transport=RequestsHTTPTransport(
39 | url=GRAPHQL_URL,
40 | retries=3,
41 | )
42 |
43 | client = Client(
44 | transport=sample_transport,
45 | fetch_schema_from_transport=True,
46 | )
47 |
48 | # (filter: {{ratio: {{lessThan: "{unsafe_ratio}"}}, art: {{greaterThanOrEqualTo: "{min_debt}"}}}})
49 | # allCupActs (filter: {{time: {{lessThan: "2020-03-16", greaterThan: "2020-03-03"}}}}) # actions_data.txt
50 | query_string = """
51 | query {{
52 | allCupActs (filter: {filter_criterion}) {{
53 | totalCount
54 | nodes {{
55 | id
56 | act
57 | arg
58 | lad
59 | art
60 | block
61 | deleted
62 | ink
63 | ire
64 | pip
65 | tab
66 | ratio
67 | time
68 | tx
69 | }}
70 | }}
71 | }}
72 | """.format(filter_criterion=filter_criterion)
73 | logger.info(query_string)
74 |
75 | query = gql(query_string)
76 |
77 | return client.execute(query)
78 |
79 |
80 | def analyse(data):
81 | pass
82 |
83 | def format_for_mev(action):
84 | if action['act'] == 'BITE':
85 | return
86 | owner = int(action['lad'], 16)
87 | if owner == 0:
88 | return
89 | print("// transaction {}".format(action['tx']))
90 | id = action['id']
91 | if action['act'] == 'OPEN':
92 | owner = int(action['lad'], 16)
93 | print("{} opens vault {};".format(owner, id))
94 | elif action['act'] == 'LOCK':
95 | print("{} locks {} collateral to vault {};".format(owner, int(float(action['arg']) * 1e18), id))
96 | elif action['act'] == 'DRAW':
97 | print("{} draws {} debt from vault {};".format(owner, int(float(action['arg']) * 1e18), id))
98 | elif action['act'] == 'WIPE':
99 | print("{} wipes {} debt from vault {};".format(owner, int(float(action['arg']) * 1e18), id))
100 | elif action['act'] == 'FREE':
101 | print("{} frees {} collateral from vault {};".format(owner, int(float(action['arg']) * 1e18), id))
102 | elif action['act'] == 'GIVE':
103 | print("{} is given vault {};".format(int(action['arg'], 16), id))
104 |
105 |
106 | for i in range(len(time_checkpoints) - 1):
107 | start_time = time_checkpoints[i]
108 | end_time = time_checkpoints[i+1]
109 |
110 | logger.info("Fetching from {} to {} ...".format(start_time, end_time) )
111 |
112 | filter_criterion = get_filter_criterion(start_time, end_time)
113 | data = fetch_data_from_web(filter_criterion)
114 | #print(data)
115 | filename='actions-data-{start_time}-{end_time}.txt'.format(start_time=start_time, end_time=end_time)
116 |
117 | logger.info("Writing to {} ...".format(filename) )
118 |
119 | with open(filename, 'w') as outfile:
120 | json.dump(data, outfile)
121 | '''
122 |
123 | data = fetch_data_from_file('all_actions_data.txt')
124 | '''
125 |
126 | '''
127 | df = pd.DataFrame(data['allCupActs']['nodes'])
128 |
129 | numeric_cols = ['ratio', 'art', 'ink', 'ire', 'tab', 'pip', 'block', 'id']
130 | for col in numeric_cols:
131 | df[col] = pd.to_numeric(df[col])
132 |
133 | df.sort_values('block', inplace=True)
134 | '''
135 | #df.apply(format_for_mev, axis=1)
136 |
--------------------------------------------------------------------------------
/maker-data/mcd/calc_cdp_state.py:
--------------------------------------------------------------------------------
1 | import json
2 | import csv, os
3 | import pandas as pd
4 | import logging
5 | from collections import defaultdict
6 |
7 | logging.basicConfig(level=logging.INFO, format='%(message)s')
8 | logger = logging.getLogger(__name__)
9 |
10 | maker_logs = 'latest-data/all_logs_maker.csv'
11 |
12 | logsdict = csv.DictReader(open(maker_logs), delimiter=',',
13 | quotechar='"', quoting=csv.QUOTE_MINIMAL)
14 |
15 | logs = {}
16 | events_by_collateral = {}
17 | txhashes = []
18 | events= []
19 | tx_to_block = {}
20 | cdp_to_state = defaultdict(lambda : [0,0])
21 | debt_type = 611382286831621467233887798921843936019654057231
22 |
23 | #Interested in only frob, Bite, fold, fork
24 | interested_topics = ['0x76088703', '0xa716da86', '0xb65337df', '0x870c616d']
25 |
26 | def hex_to_int(raw_hex, bits=256):
27 | val = int(raw_hex, 16)
28 | # check MSB
29 | if (val & (1 << (bits - 1))) != 0:
30 | val = val - (1 << bits)
31 | return val
32 |
33 |
34 | for log in logsdict:
35 | topics = json.loads(log['topics'].replace('\'', '\"'))
36 | if topics[0][:10] not in interested_topics:
37 | continue
38 | hash = log['transaction_hash']
39 | if not hash in logs:
40 | logs[hash] = []
41 | txhashes.append(hash)
42 | logs[hash].append((log['address'], log['data'], topics, log['gas_price'], log['receipt_gas_used'], log['block_number']))
43 | tx_to_block[hash] = log['block_number']
44 |
45 | parsed = 0
46 | rate = 10**27
47 | liquidated_cdps = set()
48 |
49 | fout = open('latest-data/cdp_states.csv', 'w')
50 | fout.write('Block,CDP,Collateral_type,Collateral,Debt\n')
51 | for txhash in txhashes:
52 | modified_cdps = set()
53 | block = tx_to_block[txhash]
54 | for logitem in logs[txhash]:
55 | address = logitem[0]
56 | data = logitem[1]
57 | topics = logitem[2]
58 | data = data[2:] # strip 0x from hex
59 | collateral_type = ''
60 | action_requested = None
61 | if topics[0][:10] == interested_topics[0]:
62 | # cdp manipulation
63 | collateral_type = int(str(data[136:200]), 16)
64 | cdp_handler = int(str(data[200:264]), 16)
65 | collateral_amount = hex_to_int(str(data[392:456]))
66 | debt_amount = hex_to_int(str(data[456:520]))
67 | cdp_to_state[cdp_handler][0] = cdp_to_state[cdp_handler][0] + collateral_amount
68 | cdp_to_state[cdp_handler][1] = cdp_to_state[cdp_handler][1] + debt_amount
69 | modified_cdps.add((cdp_handler, collateral_type))
70 |
71 | elif topics[0][:10] == interested_topics[1]:
72 | # Liquidate CDP
73 | if len(topics) < 3:
74 |                 logger.warning("%s %s %s %s", address, txhash, topics, data)
75 | continue
76 | cdp_handler = int(str(topics[2]), 16)
77 | liquidated_cdps.add(cdp_handler)
78 |
79 |
80 | elif topics[0][:10] == interested_topics[3]:
81 | # cdp fungibility
82 | collateral_type = int(str(data[136:200]), 16)
83 | src_cdp = int(str(data[200:264]), 16)
84 | dst_cdp = int(str(data[264:328]), 16)
85 | collateral_amount = hex_to_int(str(data[328:392]))
86 | debt_amount = hex_to_int(str(data[392:456]))
87 | cdp_to_state[src_cdp][0] = cdp_to_state[src_cdp][0] - collateral_amount
88 | cdp_to_state[src_cdp][1] = cdp_to_state[src_cdp][1] - debt_amount
89 | cdp_to_state[dst_cdp][0] = cdp_to_state[dst_cdp][0] + collateral_amount
90 | cdp_to_state[dst_cdp][1] = cdp_to_state[dst_cdp][1] + debt_amount
91 | modified_cdps.add((src_cdp, collateral_type))
92 | modified_cdps.add((dst_cdp, collateral_type))
93 |
94 | if (parsed % 10000 == 0):
95 | logger.info("Parsed %d" %(parsed))
96 |
97 |     parsed += 1
98 |
99 |     # skip liquidated CDPs: the MCD semantics doesn't handle bites well
100 | for (cdp_handler, collateral_type) in modified_cdps:
101 | if cdp_handler not in liquidated_cdps:
102 | action_requested = "{block},{cdp},{collateral_type},{collateral},{debt}\n".format(block=block,cdp=cdp_handler,collateral_type=collateral_type,collateral=cdp_to_state[cdp_handler][0],debt=cdp_to_state[cdp_handler][1])
103 | fout.write(action_requested)
104 |
105 | fout.close()
106 |
--------------------------------------------------------------------------------
/maker-data/mcd/configure.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # MCD
4 |
5 | mkdir -p latest-data/maker-processed
6 |
7 | # get data
8 |
9 | python3 get_core_maker_logs.py
10 |
11 | # process data
12 |
13 | python3 parse_maker_logs.py
14 |
15 | python3 calc_cdp_state.py
16 |
17 | python3 maker_fees.py
18 |
19 | python3 maker_spot_prices.py
20 |
--------------------------------------------------------------------------------
/maker-data/mcd/get_core_maker_logs.py:
--------------------------------------------------------------------------------
1 | import json
2 | import csv, os, sys
3 | from google.cloud import bigquery
4 |
5 | os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "bq.json"
6 | client = bigquery.Client()
7 |
8 | contract_addresses = json.load(open('changelog.json'))
9 | core_contracts = [contract_addresses[x].lower() for x in ['MCD_VAT', 'MCD_CAT', 'MCD_SPOT']]
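   | # Vat (core accounting), Cat (liquidations) and Spot (price feeds), resolved from the MCD changelog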
10 |
11 | query = """SELECT log_index,transaction_hash,logs.transaction_index,address,data,topics,logs.block_timestamp,logs.block_number,gas,gas_price,receipt_gas_used FROM
12 | `bigquery-public-data.crypto_ethereum.logs` AS logs
13 | JOIN `bigquery-public-data.crypto_ethereum.transactions` AS transactions ON logs.transaction_hash = transactions.hash
14 | WHERE
15 | logs.address in UNNEST(@contracts) ORDER BY block_number ASC, transaction_index ASC"""
16 |
17 | aqp = bigquery.ArrayQueryParameter('contracts', 'STRING', core_contracts)
18 | query_params = [aqp]
19 | job_config = bigquery.QueryJobConfig()
20 | job_config.query_parameters = query_params
21 | query_job = client.query(
22 | query,
23 | # Location must match that of the dataset(s) referenced in the query.
24 | location='US',
25 | job_config=job_config) # API request - starts the query
26 |
27 | with open('latest-data/all_logs_maker.csv', 'w') as csvfile:
28 | spamwriter = csv.writer(csvfile, delimiter=',',
29 | quotechar='"', quoting=csv.QUOTE_MINIMAL)
30 |
31 | spamwriter.writerow("log_index,transaction_hash,transaction_index,address,data,topics,block_timestamp,block_number,gas,gas_price,receipt_gas_used".split(","))
32 | for item in query_job:
33 | spamwriter.writerow(item)
34 |
35 | assert query_job.state == 'DONE'
36 | print("Wrote all logs" )
37 |
38 |
39 |
--------------------------------------------------------------------------------
/maker-data/mcd/maker_fees.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from collections import defaultdict
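   | # per-collateral action file written by parse_maker_logs.py; the long number is the
   | # collateral type id (collateral_type in run_mcd_experiments.py)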
3 | f = open('latest-data/maker-processed/31358499851466632982272067240987752480060719095994161751935692443478204088320.csv', 'r')
4 |
5 |
6 | block_to_fees = defaultdict(int)
7 | block_number = 0
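   | # Maker rates are RAY (27-decimal) fixed point, so 10**27 is a cumulative rate of 1.0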
8 | cumfees = 10**27
9 |
10 | fout = open('latest-data/maker_fees.csv', 'w')
11 |
12 | for line in f.readlines():
13 | line = line.replace(';','').strip()
14 | if 'block' in line:
15 | words = line.split()
16 | block_number = int(words[-1])
17 | elif 'increment' in line:
18 | words = line.split()
19 | increment = int(words[0])
20 | cumfees += increment
21 | block_to_fees[block_number] = cumfees
22 |
23 | fout.write('Block,Fees\n')
24 | for block in sorted(block_to_fees):
25 | fout.write("%d,%d\n" %(block, block_to_fees[block]))
26 | fout.close()
27 |
--------------------------------------------------------------------------------
/maker-data/mcd/maker_spot_prices.py:
--------------------------------------------------------------------------------
1 | import json
2 | import csv, os
3 | import pandas as pd
4 | import logging
5 |
6 | logging.basicConfig(level=logging.INFO, format='%(message)s')
7 | logger = logging.getLogger(__name__)
8 |
9 | maker_logs = 'latest-data/all_logs_maker.csv'
10 |
11 | outfile = 'latest-data/spot_prices.csv'
12 |
13 | logsdict = csv.DictReader(open(maker_logs), delimiter=',',
14 | quotechar='"', quoting=csv.QUOTE_MINIMAL)
15 |
16 | logs = {}
17 | events_by_collateral = {}
18 | txhashes = []
19 | events= []
20 | tx_to_block = {}
21 |
22 | #Interested in only file
23 | interested_topics = ['0x1a0b287e']
24 | what = '0x73706f74'
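   | # 0x73706f74 is ASCII "spot": keep only file() calls that update an ilk's spot price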
25 |
26 | for log in logsdict:
27 | topics = json.loads(log['topics'].replace('\'', '\"'))
28 | if topics[0][:10] not in interested_topics:
29 | continue
30 | hash = log['transaction_hash']
31 | if not hash in logs:
32 | logs[hash] = []
33 | txhashes.append(hash)
34 | logs[hash].append((log['address'], log['data'], topics, log['gas_price'], log['receipt_gas_used'], log['block_number']))
35 | tx_to_block[hash] = log['block_number']
36 |
37 | parsed = 0
38 |
39 | for txhash in txhashes:
40 | for logitem in logs[txhash]:
41 | address = logitem[0]
42 | data = logitem[1]
43 | topics = logitem[2]
44 | data = data[2:] # strip 0x from hex
45 | action_requested = None
46 | collateral_type = ''
47 | if topics[0][:10] == interested_topics[0] and topics[2][:10] == what:
48 | # oracle update
49 | collateral_type = int(str(data[136:200]), 16)
50 | spot_price = int(str(data[264:328]), 16)
51 | action_requested = "%d,%s,%d" % (collateral_type, tx_to_block[txhash], spot_price)
52 | events.append(action_requested)
53 | parsed += 1
54 | if (parsed % 10000 == 0):
55 | logger.info("Parsed %d" %(parsed))
56 |
57 | logger.info("Writing All...")
58 | fout = open(outfile, 'w')
59 | fout.write("CollateralType,Block,SpotPrice\n")
60 | fout.write("\n".join(events) + '\n')
61 | fout.close()
62 |
--------------------------------------------------------------------------------
/maker-data/mcd/parse_maker_logs.py:
--------------------------------------------------------------------------------
1 | import json
2 | import csv, os
3 | import pandas as pd
4 | import logging
5 |
6 | logging.basicConfig(level=logging.INFO, format='%(message)s')
7 | logger = logging.getLogger(__name__)
8 |
9 | maker_logs = 'latest-data/all_logs_maker.csv'
10 |
11 | outputdir = 'latest-data/maker-processed'
12 |
13 | logsdict = csv.DictReader(open(maker_logs), delimiter=',',
14 | quotechar='"', quoting=csv.QUOTE_MINIMAL)
15 |
16 | logs = {}
17 | events_by_collateral = {}
18 | txhashes = []
19 | events= []
20 | tx_to_block = {}
21 | debt_type = 611382286831621467233887798921843936019654057231
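   | # decimal form of the DAI debt identifier; matches dai_token in run_mcd_experiments.py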
22 |
23 | #Interested in only frob, Bite, fold, fork
24 | interested_topics = ['0x76088703', '0xa716da86', '0xb65337df', '0x870c616d']
25 |
26 | def hex_to_int(raw_hex, bits=256):
27 | val = int(raw_hex, 16)
28 | # check MSB
29 | if (val & (1 << (bits - 1))) != 0:
30 | val = val - (1 << bits)
31 | return val
32 |
33 |
34 | for log in logsdict:
35 | topics = json.loads(log['topics'].replace('\'', '\"'))
36 | if topics[0][:10] not in interested_topics:
37 | continue
38 | hash = log['transaction_hash']
39 | if not hash in logs:
40 | logs[hash] = []
41 | txhashes.append(hash)
42 | logs[hash].append((log['address'], log['data'], topics, log['gas_price'], log['receipt_gas_used'], log['block_number']))
43 | tx_to_block[hash] = log['block_number']
44 |
45 | parsed = 0
46 |
47 | for txhash in txhashes:
48 | for logitem in logs[txhash]:
49 | address = logitem[0]
50 | data = logitem[1]
51 | topics = logitem[2]
52 | data = data[2:] # strip 0x from hex
53 | action_requested = None
54 | collateral_type = ''
55 | if topics[0][:10] == interested_topics[0]:
56 | # cdp manipulation
57 | collateral_type = int(str(data[136:200]), 16)
58 | cdp_handler = int(str(data[200:264]), 16)
59 | collateral_amount = hex_to_int(str(data[392:456]))
60 | debt_amount = hex_to_int(str(data[456:520]))
61 | if collateral_amount >= 0:
62 | subaction1 = "%d in %d collateral locked" % (collateral_amount, collateral_type)
63 | else:
64 | subaction1 = "%d in %d collateral freed" % (0 - collateral_amount, collateral_type)
65 | if debt_amount < 0:
66 | subaction2 = "%d in %d debt wiped" % (0 - debt_amount, debt_type)
67 | else:
68 | subaction2 = "%d in %d debt drawn" % (debt_amount, debt_type)
69 | action_requested = "%s and %s from vault %d ;" % (subaction1, subaction2, cdp_handler)
70 | elif topics[0][:10] == interested_topics[1]:
71 | # Liquidate CDP
72 | if len(topics) < 3:
73 |                 logger.warning("%s %s %s %s", address, txhash, topics, data)
74 | continue
75 | collateral_type = int(str(topics[1]), 16)
76 | cdp_handler = int(str(topics[2]), 16)
77 | liquidator = int(str(data[192:256]), 16)
78 |
79 | action_requested = "%d bites vault %d ;" % (liquidator, cdp_handler)
80 |
81 | elif topics[0][:10] == interested_topics[2]:
82 | collateral_type = int(str(data[136:200]), 16)
83 | rate_inc = hex_to_int(str(data[264:328]))
84 | action_requested = "%d increment in stability fees for %d ;" % (rate_inc, collateral_type)
85 |
86 | elif topics[0][:10] == interested_topics[3]:
87 | # cdp fungibility
88 | collateral_type = int(str(data[136:200]), 16)
89 | src_cdp = int(str(data[200:264]), 16)
90 | dst_cdp = int(str(data[264:328]), 16)
91 | collateral_amount = hex_to_int(str(data[328:392]))
92 | debt_amount = hex_to_int(str(data[392:456]))
93 | action_requested = "%d in %d and %d in %d transferred from %d to %d ;" %(collateral_amount, collateral_type, debt_amount, debt_type, src_cdp, dst_cdp)
94 |
95 | if action_requested is not None:
96 | if not (collateral_type) in events_by_collateral:
97 | events_by_collateral[collateral_type] = []
98 | event = "// transaction %s block %s\n%s" % (txhash, tx_to_block[txhash], action_requested)
99 | events_by_collateral[collateral_type].append(event)
100 | events.append(event)
101 | parsed += 1
102 | if (parsed % 10000 == 0):
103 | logger.info("Parsed %d" %(parsed))
104 |
105 | for collateral in events_by_collateral:
106 | filepath = '%s/%s.csv' % (outputdir, collateral)
107 | open(filepath, 'w').write("\n".join(events_by_collateral[collateral]) + '\n')
108 | logger.info("Written %s" % (filepath))
109 |
110 | logger.info("Writing All...")
111 | open('%s/all.csv' % (outputdir), 'w').write("\n".join(events) + '\n')
112 |
--------------------------------------------------------------------------------
/mcd_mev_from_logs.py:
--------------------------------------------------------------------------------
1 | import glob
2 | from collections import defaultdict
3 | import matplotlib.pyplot as plt
4 |
5 | run_ids = set()
6 | for filename in glob.glob('run-output/*'):
7 | id = filename[:filename.find('.out')]
8 | run_ids.add(id)
9 | mev = defaultdict(lambda : 0)
10 | block_to_mev = defaultdict(lambda : 0)
11 | block_to_count = defaultdict(lambda : 0)
12 | for id in run_ids:
13 | print(id)
14 | temp = id[:id.find('0x') - 1][-12 :]
15 | block = int(temp[temp.find('-') + 1 : ])
16 | for run_instance in glob.glob(id+'*'):
17 | lines = open(run_instance, 'r').readlines()
18 | for line in lines:
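   |             # krun state cells look like `<acct> in <token> |-> <balance>`; pick out account 0's
   |             # DAI balance (address 0 is the miner-inserted actor in these experiments)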
19 |             if '|->' in line and ' 0 in DAI' in line:
20 | liquidated_debt = int(line.split()[-1])
21 | # mev is half the liquidation debt
22 | mev[id] = max(mev[id], liquidated_debt / 2)
23 | if id in mev:
24 | block_to_mev[block] += mev[id]
25 | block_to_count[block] += 1
26 |
27 | print(run_ids - set(mev.keys()))
28 | print(mev)
29 | print(block_to_mev)
30 | print(block_to_count)
31 |
--------------------------------------------------------------------------------
/mev_from_logs.py:
--------------------------------------------------------------------------------
1 | import glob
2 | import argparse
3 | from collections import defaultdict
4 | import logging
5 |
6 |
7 | parser = argparse.ArgumentParser(description='Find MEV from krun logs')
8 |
9 | parser.add_argument(
10 | '-v', '--verbose',
11 | help="Be verbose",
12 | action="store_const", dest="loglevel", const=logging.INFO,
13 | default=logging.WARNING
14 | )
15 |
16 | parser.add_argument(
17 | '-e', '--exchange',
18 | help="sushiswap/uniswapv2",
19 | default='uniswapv2'
20 | )
21 |
22 |
23 | parser.add_argument(
24 | '-b', '--block',
25 | help="Block number to find MEV in",
26 | default='11006503'
27 | )
28 |
29 | parser.add_argument(
30 | '-a', '--address',
31 | help="pair address",
32 | default='0xa2107fa5b38d9bbd2c461d6edf11b11a50f6b974'
33 |
34 | )
35 |
36 | args = parser.parse_args()
37 | logging.basicConfig(level=args.loglevel, format='%(message)s')
38 |
39 | logger = logging.getLogger(__name__)
40 |
41 |
42 | if args.exchange == 'uniswapv2':
43 | exchange_acc = 'UniswapV2'
44 | elif args.exchange == 'sushiswap':
45 | exchange_acc = 'Sushiswap'
46 |
47 |
48 | log_filenames = glob.glob('output/%s-%s.out*' % (args.block, args.address) )
49 | token0 = '1097077688018008265106216665536940668749033598146'
50 | token1 = '464057641162257223597913127019930606481545201354'
51 |
52 | lower_bounds = defaultdict(lambda : {token0 : 99999999999999999999999999999999})
53 | upper_bounds = defaultdict(lambda : {token0 : -99999999999999999999999999999999})
54 |
55 | for log_filename in log_filenames:
56 | balances = {}
57 | f = open(log_filename, 'r')
58 | for line in f.readlines():
59 | if '|->' in line:
60 | chunks = line.strip().split()
61 | acc = chunks[0]
62 | token = chunks[2]
63 | balance = chunks[4]
64 | if acc not in balances:
65 | balances[acc] = {}
66 | balances[acc][token] = int(balance)
67 | for acc in balances:
68 | if acc == exchange_acc:
69 | continue
70 | balance0 = balances[acc][token0]
71 | balance1 = balances[acc][token1]
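   |         # value the account's token1 holdings in token0 at the pool's current reserve ratio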
72 | total_balance = balance0 + (balances[exchange_acc][token0] * balance1) / balances[exchange_acc][token1]
73 | lower_bounds[acc][token0] = min(lower_bounds[acc][token0], total_balance)
74 | upper_bounds[acc][token0] = max(upper_bounds[acc][token0], total_balance)
75 |
76 |
77 | for acc in lower_bounds:
78 | print(acc)
79 | print(upper_bounds[acc][token0] - lower_bounds[acc][token0])
80 |
--------------------------------------------------------------------------------
/parse_output.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | out = open('out2').read()
4 | states = out.count("#Or")
5 | print("Found %d states." % (states))
6 |
7 | max_amt = -1
8 |
9 | for line in out.splitlines():
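   |     # `0 in 0 |-> N` is account 0's balance of token 0 (the fee token); the index check keeps only cells starting at column 8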
10 | if "0 in 0 |-" in line and line.index("0 in 0 |-") == 8:
11 | amt = int(line.split()[-1])
12 | max_amt = amt if amt > max_amt else max_amt
13 | print(amt)
14 |
15 | print("miner makes at most %d" % (max_amt))
16 | os.system("grep -C 20 '0 in 0 |-> %d' out" % (max_amt))
17 |
--------------------------------------------------------------------------------
/plot_scripts/plot_convergence.py:
--------------------------------------------------------------------------------
1 | import matplotlib
2 | matplotlib.use('Agg')
3 | import pandas as pd
4 | import argparse
5 | from matplotlib import pyplot as plt
6 | import numpy as np
7 | from cycler import cycler
8 | import glob #KB
9 | import sys
10 |
11 | exchange_name = sys.argv[1]
12 | if exchange_name == 'uniswapv1':
13 | exchange = 'Uniswap V1'
14 | elif exchange_name == 'uniswapv2':
15 | exchange = 'Uniswap V2'
16 | elif exchange_name == 'sushiswap':
17 | exchange = 'Sushiswap'
18 |
19 | filenames = glob.glob(exchange_name + "/*")
20 |
21 | dataframes = []
22 |
23 | # for filename in filenames:
24 | # df = pd.read_csv(filename)
25 | # df['mev'] = df['mev'] / df['mev'].max() * 100
26 | # df['pathfrac'] = df['pathnum'] / df['pathnum'].max() * 100
27 | # x = []
28 | # y = []
29 | # for index, row in df.iterrows():
30 | # x = x + [row['pathfrac']]
31 | # y = y + [row['mev']]
32 | # dataframes.append((x,y))
33 |
34 |
35 |
36 |
37 | # for (x,y) in dataframes:
38 | # plt.plot(x,y)
39 | # plt.savefig('convergence_random.pdf')
40 |
41 |
42 | max_path = 0
43 |
44 | for filename in filenames:
45 | try:
46 | df = pd.read_csv(filename)
47 | except pd.errors.EmptyDataError:
48 | continue
49 | try:
50 | if df['mev'].max() < 1:
51 | continue
52 | df['mev'] = df['mev'] / df['mev'].max() * 100
53 | except TypeError:
54 | print(filename,df)
55 | continue
56 | max_path = max(max_path, df['pathnum'].max())
57 | df['pathfrac'] = df['pathnum'] / df['pathnum'].max() * 100
58 | df['pathfrac'] = df['pathfrac'].round(6)
59 |
60 | df.set_index('pathfrac', inplace=True)
61 | df = df.drop(columns='pathnum')
62 | #df.set_index('pathnum', inplace=True)
63 | dataframes.append(df)
64 |
65 | df = pd.concat(dataframes,axis=1,sort=False).fillna(method='ffill').fillna(0)
66 |
67 | '''
68 | x = list(range(max_path))
69 | df['min'] = df.min(axis=1)
70 | df['med'] = df.median(axis=1)
71 | df['first'] = df.quantile(0.25, axis=1)
72 | df['third'] = df.quantile(0.75, axis=1)
73 | df['max'] = df.max(axis=1)
74 |
75 | # for num in range(max_path):
76 | # x += [num]
77 | # y += df.groupby(df.index)
78 |
79 | colors=['#377eb8','#ff7f00','#4daf4a','#f781bf','#a65628','#984ea3','#999999','#e41a1c','#dede00']
80 | all_y = [('first', 'Q1',1) , ('med', 'Median',2), ('third', 'Q3',1), ('max', 'Max',1)]
81 | fig, ax = plt.subplots()
82 | custom_cycler = (cycler(color=['#377eb8','#ff7f00','#4daf4a','#f781bf','#a65628']) +
83 | cycler(linestyle=['solid', 'dotted', 'dashed','dashdot', 'solid']))
84 |
85 | ax.set_prop_cycle(custom_cycler)
86 | df.plot(y='min', logx=True, label='Min', ax=ax)
87 |
88 | for col, lbl,lw in all_y:
89 | df.plot(y=col, logx=True, label=lbl, ax = ax, lw=lw)
90 |
91 | plt.xlabel('Percentage of Paths Explored')
92 | plt.ylabel('MEV Convergence Percentile')
93 | plt.title(exchange + ' MEV Convergence')
94 | # plt.xlim(0, 100)
95 | plt.ylim(-5, 105)
96 | #df.plot(logx=True)
97 | plt.savefig('convergence-'+exchange_name+'.pdf')
98 | '''
99 |
--------------------------------------------------------------------------------
/plot_scripts/plot_execution_times.py:
--------------------------------------------------------------------------------
1 | import matplotlib
2 | matplotlib.use('Agg')
3 | from matplotlib import pyplot as plt
4 | import sys
5 | import pandas as pd
6 | import numpy as np
7 | import math
8 |
9 |
10 | df = pd.read_csv(sys.argv[1])
11 | df['CPUTime'] = df['UserTime'] + df['SysTime']
12 | df['logCPUTime'] = np.log10(df['CPUTime'])
13 | df['logTxCount'] = np.log10(df['TxCount'])
14 |
15 | x = []
16 | y = []
17 | z = []
18 | df_copy = df[0:0]
19 |
20 |
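   | # below 20k transactions keep up to 2 raw samples per bucket; above that (second loop) plot per-bucket averages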
21 | space = np.linspace(0,20000,20)
22 | for index in range(1,len(space)):
23 | df2 = df[(df['TxCount'] < space[index]) & (df['TxCount'] >= math.floor(space[index-1]))]
24 | count = df2.count()[0]
25 | if count != 0:
26 | p = df2.sample(min(2, count))
27 |         for _, row in p.iterrows():
28 | x = x + [row['TxCount']]
29 | y = y + [row['CPUTime']]
30 | z = z + [row['RealTime']]
31 |
32 |
33 | space = np.linspace(20000,120000,500)
34 | for index in range(1,len(space)):
35 | df2 = df[(df['TxCount'] < space[index]) & (df['TxCount'] >= math.floor(space[index-1]))]
36 | count = df2.count()[0]
37 | if count != 0:
38 | avg_cpu = (1.0 * df2['CPUTime'].sum()) / count
39 | avg_tx = (df2['TxCount'].sum()) // count
40 | avg_real = (1.0 * df2['RealTime'].sum()) / count
41 |
42 | x = x + [avg_tx]
43 | y = y + [avg_cpu]
44 | z = z + [avg_real]
45 |
46 |
47 | fig = plt.figure()
48 | plt.xlabel('Transaction Count')
49 | plt.ylabel('Time (in seconds)')
50 | # plt.xlim(0.5, 150000)
51 | # plt.ylim(10, 15000)
52 | # plt.gca().set_aspect('equal', adjustable='box')
53 |
54 | plt.scatter(x,y, marker='.', color='#377eb8', label="CPU Time") #7293CB'
55 | plt.scatter(x,z, marker='+', linewidth=0.5, color='#ff7f00', label="Real Time") #D35E60
56 | plt.legend(loc="upper left")
57 |
58 | # fig, ax1 = plt.subplots()
59 | # ax2 = ax1.twinx()
60 | # ax1.set_xlabel('Transaction Count')
61 | # ax1.set_ylabel('CPU Time (in seconds)')
62 |
63 | # ax2.set_ylabel('Real Time (in seconds)')
64 |
65 |
66 | # ax1.scatter(x,y, marker='.', color='b')
67 | # ax2.scatter(x,z, marker='.', color='g')
68 | plt.savefig('updated-execution_times.pdf',bbox_inches='tight')
69 |
--------------------------------------------------------------------------------
/plot_scripts/plot_mev.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import numpy as np
3 | import matplotlib.pyplot as plt
4 | from cycler import cycler
5 | import sys
6 |
7 | #exchange_name = 'uniswapv2'
8 | #exchange_name = 'uniswapv1'
9 | #exchange_name = 'sushiswap'
10 |
11 | exchange_name = sys.argv[1]
12 |
13 | if exchange_name == 'uniswapv2':
14 | token_file = 'token_names.csv'
15 | exchange_acc = 'UniswapV2'
16 | elif exchange_name == 'sushiswap':
17 | token_file = 'token_names.csv'
18 | exchange_acc = 'Sushiswap'
19 | elif exchange_name == 'uniswapv1':
20 | exchange_acc = 'UniswapV1'
21 | token_file = 'v1_token_names.csv'
22 |
23 |
24 | # Full MEV
25 | df = pd.read_csv(exchange_name + '_mev.csv')
26 | token_names = pd.read_csv(token_file).set_index('address').T.to_dict('records')[0]
27 | print(token_names)
28 | df['name0'] = df['token0'].map(token_names)
29 | df['name1'] = df['token1'].map(token_names)
30 |
31 | if exchange_name == 'uniswapv1':
32 | df['name0'] = 'ETH'
33 |
34 | df['pair_name'] = df['name0'] + '/' + df['name1']
35 |
36 | max_mev = df.sort_values('mev', ascending=False).drop_duplicates(['pair'])
37 |
38 | max_mev['mev'] = max_mev['mev'] / 10**18 #KB
39 |
40 | print(max_mev[['pair', 'block', 'mev', 'pair_name']])
41 |
42 | # APPROX MEV
43 | df2 = pd.read_csv(exchange_name + '_approx_mev.csv')
44 | df2['name0'] = df2['token0'].map(token_names)
45 | df2['name1'] = df2['token1'].map(token_names)
46 |
47 | if exchange_name == 'uniswapv1':
48 | df2['name0'] = 'ETH'
49 |
50 |
51 | df2['pair_name'] = df2['name0'] + '/' + df2['name1']
52 |
53 | approx_max_mev = df2.sort_values('mev', ascending=False).drop_duplicates(['pair'])
54 |
55 | approx_max_mev['mev'] = approx_max_mev['mev'] / 10**18 #KB
56 |
57 | print(approx_max_mev[['pair', 'block', 'mev', 'pair_name']])
58 |
59 |
60 | # plot
61 | ax = approx_max_mev.plot('pair_name', 'mev', kind='bar', color='#ff7f00', label='Intractable Blocks', position=0, width=0.4, hatch='\\', edgecolor='gray')
62 | ax.set_ylabel('MEV (in ETH)')
63 | ax.set_xlabel('Pair')
64 | max_mev.plot('pair_name', 'mev', kind='bar', ax=ax, color='#377eb8', label='Tractable Blocks', position=1, width=0.4)
65 |
66 | plt.xticks(rotation=20)
67 | plt.title(exchange_acc + ' MEV ')
68 | plt.savefig(exchange_name + '-mev.pdf')
69 |
--------------------------------------------------------------------------------
/process_data.py:
--------------------------------------------------------------------------------
1 | import matplotlib
2 | matplotlib.use('Agg')
3 | from matplotlib import pyplot as plt
4 | import sys
5 | from uniswap import Uniswap
6 | from copy import deepcopy
7 |
8 | fin = open(sys.argv[1], 'r')
9 | fout = open('data/uniswap_data_cut_'+sys.argv[2] + '_' +sys.argv[3], 'w')
10 |
11 | prices = {}
12 | lines = []
13 |
14 | start_point = False
15 | block_number = 0
16 |
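   | # price of token '0' quoted in SAI, from the pool's reserve ratio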
17 | def get_price(cfg):
18 | return cfg['SAI']/ cfg['0']
19 |
20 | def bootstrapped_data(f, cfg, rows):
21 | f.write("Uniswap in 0 gets {:.0f};\n".format(cfg['0']))
22 | f.write("Uniswap in SAI gets {:.0f};\n".format(cfg['SAI']))
23 | f.write("GetPrice SAI 0;\n")
24 | for row in rows:
25 | f.write(row)
26 |
27 | uniswap = Uniswap()
28 | line_num = 0
29 |
30 | for line in fin.readlines():
31 | line_num += 1
32 | if 'block' in line:
33 | words = line.split()
34 | block_number = int(words[-1])
35 | if block_number > int(sys.argv[3]):
36 | break
37 |
38 | uniswap.process(line)
39 |
40 | if block_number < int(sys.argv[2]):
41 | config = deepcopy(uniswap.config()) #arghhhhh!
42 | elif block_number <= int(sys.argv[3]):
43 | try:
44 | prices[block_number] = get_price(uniswap.config())
45 | except ZeroDivisionError:
46 | pass
47 | lines.append(line)
48 |
49 | '''
50 | for line in fin.readlines():
51 | line_num += 1
52 | if line_num < int(sys.argv[2]):
53 | uniswap.process(line)
54 | config = uniswap.config()
55 | elif line_num <= int(sys.argv[3]):
56 | fout.write(line)
57 | '''
58 |
59 | print("Uniswap in 0", "{:.1f}".format(config['0']))
60 | print("Uniswap in SAI", "{:.1f}".format(config['SAI']))
61 | print(get_price(config))
62 |
63 | bootstrapped_data(fout, config, lines)
64 |
65 | plt.plot(list(prices.keys()), list(prices.values()))
66 | plt.savefig('img/sai_prices.png')
67 |
--------------------------------------------------------------------------------
/proofs/execution.k:
--------------------------------------------------------------------------------
1 | module EXECUTION-SYNTAX
2 | imports DOMAINS-SYNTAX
3 |
4 | syntax Executable ::= "exec(" ETHTransaction ")"
5 | syntax ETHTransaction ::= ETHTransaction "fee" Int
6 | | ETHTransaction "block" Int
7 |
8 | syntax ETHAddress ::= Int | "ETH" | "BBT" | "Miner" | "User"
9 | syntax BalanceEntry ::= ETHAddress "in" ETHAddress
10 |
11 | syntax Statement ::= ETHAddress "in" ETHAddress "gets" Int // set token balance
12 | | ETHAddress "in" ETHAddress "gives" Int // set token balance
13 | | ETHTransaction
14 | | "DONE" | "FAIL"
15 | | Executable
16 |
17 | syntax Block ::= Statement ";"
18 | | Block Statement ";"
19 | endmodule
20 |
21 | module EXECUTION
22 | imports EXECUTION-SYNTAX
23 | imports DOMAINS
24 |
25 | configuration
26 | $PGM:Block // Instructions left to execute
27 | .Set // Available transactions for reordering
28 | .Map // System state; mapping of all (Address in Token) to Balance (all type Integer)
29 | .List
30 | .Map
31 | .Map // Mapping of Maker Vault to Owner
32 | .Map // Stability fees for Maker
33 | 0 |-> 0
34 | 1 // Number of transactions a miner can insert
35 |
36 | // Boring stuff; setup state before miner can execute. Setup S and aggregate available txs.
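   | // the first rule creates a missing (Address in Token) balance entry at 0; the second then applies the credit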
37 | rule Address:ETHAddress in Token:ETHAddress gets _:Int ...
38 | B => B[(Address in Token) <- 0:Int] requires notBool((Address in Token) in keys(B))
39 |
40 | rule Address:ETHAddress in Token:ETHAddress gets Amount:Int => .K ...
41 | ... (Address in Token) |-> (OldBalance => (Amount +Int OldBalance)) ...
42 | //requires (Amount >=Int (0 -Int OldBalance))
43 |
44 | rule Address:ETHAddress in Token:ETHAddress gives Amount:Int => Address in Token gets (0 -Int Amount) ...
45 |
46 |
47 | // rule TX:ETHTransaction => .K ...
48 | // ... .Set => SetItem(TX)
49 |
50 | // rule .K => exec(TX)
51 | // ... SetItem(TX:ETHTransaction) => .Set ...
52 |
53 | rule TX:ETHTransaction => exec(TX) ...
54 |
55 | // TX Censoring
56 | // rule _:ETHTransaction => .K ...
57 |
58 | rule B:Block S:Statement; => B ~> S ...
59 | rule S:Statement; => S ...
60 |
61 | // execution ends when nothing left to execute or txs to re-order
62 | rule
63 | .K => DONE
64 | .Set
65 |
66 | endmodule
67 |
--------------------------------------------------------------------------------
/proofs/govattack.k:
--------------------------------------------------------------------------------
1 | require "flashloan.k"
2 |
3 | module GOVATTACK
4 |
5 | imports FLASHLOAN
6 |
7 | rule
8 | RandomUser votes for 0 with stake Y ETH fee 0;
9 | RandomUser votes against 0 with stake Z ETH fee 0;
10 | User borrows X ETH calls into User votes for 0 with stake X ETH fee 0 returns X ETH fee 0;
11 | User claims reward for 0 fee 0;
12 |
13 | => .K
14 |
15 | (Pool in ETH) |-> X:Int => ?S:Map
16 | 0 |-> 0 => ?T:Map
17 | .List => ?_
18 | // requires (Alpha >Int 0) andBool (Alpha Int <=Int 0 ) andBool ({?S[Pool in ETH]}:>Int >=Int 0 )
20 |
21 | endmodule
22 |
23 |
--------------------------------------------------------------------------------
/proofs/instability.k:
--------------------------------------------------------------------------------
1 | require "proof.k"
2 |
3 | module INSTABILITY
4 |
5 | imports UNISWAPV2
6 |
7 | rule
8 |
9 | On UniswapV2 697323163401596485410334513241460920685086001293 swaps for 1097077688018008265106216665536940668749033598146 by providing 1300000000000000000000 1096451400262405796991039590211805051831004063880 and 0 1097077688018008265106216665536940668749033598146 with change 0 fee 1767957155464 ;
10 | On Sushiswap Miner swaps for 1097077688018008265106216665536940668749033598146 by providing Alpha:Int 1096451400262405796991039590211805051831004063880 and 0 1097077688018008265106216665536940668749033598146 with change 0 fee 0 ;
11 | On UniswapV2 Miner swaps for Alpha 1096451400262405796991039590211805051831004063880 by providing 1097077688018008265106216665536940668749033598146 fee 0 ;
12 |
13 | => .K
14 |
15 | (Sushiswap in 1096451400262405796991039590211805051831004063880) |-> 107495485843438764484770 (Sushiswap in 1097077688018008265106216665536940668749033598146) |-> 49835502094518088853633 (UniswapV2 in 1096451400262405796991039590211805051831004063880) |-> 5945498629669852264883 (UniswapV2 in 1097077688018008265106216665536940668749033598146) |-> 2615599823603823616442 => ?S:Map
16 | .List => ?_
17 | requires (Alpha >Int 0) andBool (Alpha Int 0) andBool (Y >Int 0) //10**22
19 | ensures ({?S[Miner in 1096451400262405796991039590211805051831004063880]}:>Int <=Int 0 ) andBool ({?S[Miner in 1097077688018008265106216665536940668749033598146]}:>Int <=Int 0 )
20 |
21 | endmodule
22 |
--------------------------------------------------------------------------------
/proofs/proof.k:
--------------------------------------------------------------------------------
1 | require "uniswapv2.k"
2 | //require "maker.k"
3 |
4 | module PROOF-SYNTAX
5 | imports UNISWAPV2-SYNTAX
6 | // import MAKER-SYNTAX
7 | endmodule
8 |
9 | module PROOF
10 | imports UNISWAPV2-SYNTAX
11 | endmodule
12 |
--------------------------------------------------------------------------------
/proofs/run_proof.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | block=$1
3 | tx=$2
4 | uniswapv2_address=$3
5 | sushiswap_address=$4
6 |
7 | #sushiswap_tx=`sed "/$tx/q" ~/github-mev/data-scripts/latest-data/sushiswap-processed/"$sushiswap_address".csv | grep -v "^//" | sed -e 's/^/On Sushiswap /'`
8 |
9 | sushiswap_tx=`grep -A1 "block $block" ~/github-mev/data-scripts/latest-data/sushiswap-processed/"$sushiswap_address".csv | sed "/$tx/q" | grep -v "^//" | sed -e 's/^/On Sushiswap /'`
10 |
11 | uniswapv2_tx=`grep -A1 "block $block" ~/github-mev/data-scripts/latest-data/uniswapv2-processed/"$uniswapv2_address".csv | sed "/$tx/q" | grep -v "^//" | sed -e 's/^/On UniswapV2 /'`
12 |
13 | sushiswap_state=`sed "/$block/q" ~/github-mev/data-scripts/latest-data/sushiswap-reserves.csv | grep $sushiswap_address | tail -n 1`
14 |
15 | uniswapv2_state=`sed "/$block/q" ~/github-mev/data-scripts/latest-data/uniswapv2-reserves.csv | grep $uniswapv2_address | tail -n 1`
16 |
17 |
18 | sushiswap_token0=`echo $sushiswap_state | cut -d, -f3 `
19 | sushiswap_token1=`echo $sushiswap_state | cut -d, -f4 `
20 | sushiswap_amount0=`echo $sushiswap_state | cut -d, -f5 `
21 | sushiswap_amount1=`echo $sushiswap_state | cut -d, -f6 `
22 |
23 | uniswapv2_token0=`echo $uniswapv2_state | cut -d, -f3 `
24 | uniswapv2_token1=`echo $uniswapv2_state | cut -d, -f4 `
25 | uniswapv2_amount0=`echo $uniswapv2_state | cut -d, -f5 `
26 | uniswapv2_amount1=`echo $uniswapv2_state | cut -d, -f6 `
27 |
28 | echo 'require "proof.k"
29 |
30 | module INSTABILITY
31 |
32 | imports UNISWAPV2
33 |
34 | claim ' > instability.k
35 |
36 | echo $sushiswap_tx >> instability.k
37 | echo $uniswapv2_tx >> instability.k
38 |
39 | echo " On UniswapV2 Miner swaps for 1097077688018008265106216665536940668749033598146 by providing Alpha:Int 1096451400262405796991039590211805051831004063880 and 0 1097077688018008265106216665536940668749033598146 with change 0 fee 0 ;
40 | On Sushiswap Miner swaps for Alpha 1096451400262405796991039590211805051831004063880 by providing 1097077688018008265106216665536940668749033598146 fee 0 ;
41 |
42 | => .K
43 |
44 | (Sushiswap in $sushiswap_token0) |-> $sushiswap_amount0 (Sushiswap in $sushiswap_token1) |-> $sushiswap_amount1 (UniswapV2 in $uniswapv2_token0) |-> $uniswapv2_amount0 (UniswapV2 in $uniswapv2_token1) |-> $uniswapv2_amount1 => ?S:Map
45 | .List => ?_
46 | requires (Alpha >Int 0) andBool (Alpha Int <=Int 0 ) andBool ({?S[Miner in 1097077688018008265106216665536940668749033598146]}:>Int <=Int 0 )
48 |
49 | endmodule" >> instability.k
50 |
51 |
52 |
53 |
--------------------------------------------------------------------------------
/proofs/toy.k:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/pdaian/mev/34690d8ead9323fe62c2c77efcec6e16aac8f8ab/proofs/toy.k
--------------------------------------------------------------------------------
/proofs/uniswapv2.k:
--------------------------------------------------------------------------------
1 | require "execution.k"
2 |
3 | module UNISWAPV2-SYNTAX
4 | imports EXECUTION-SYNTAX
5 | syntax UniswapV2TX ::= "On" Exchange ETHAddress "swaps for" ETHAddress "by providing" Int ETHAddress "and" Int ETHAddress "with change" Int
6 | | "On" Exchange ETHAddress "swaps for" Int ETHAddress "by providing" ETHAddress
7 | | "On" Exchange ETHAddress "adds" Int ETHAddress "and" Int ETHAddress "of liquidity"
8 | | "On" Exchange ETHAddress "removes" Int ETHAddress "and" Int ETHAddress "of liquidity"
9 | syntax ETHTransaction ::= UniswapV2TX
10 | syntax Exchange ::= "UniswapV2" | "Sushiswap"
11 | syntax ETHAddress ::= Exchange
12 | syntax CurrencyPair ::= ETHAddress "," ETHAddress
13 | syntax InternalCmd ::= "GetPrice" ETHAddress ETHAddress // Oracle query
14 | syntax Statement ::= InternalCmd
15 | endmodule
16 |
17 | module UNISWAPV2
18 | imports UNISWAPV2-SYNTAX
19 | imports EXECUTION
20 |
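   | // Swap rules: the output amount follows the constant-product formula with the 0.3% fee factor 997/1000
   | // (cf. Uniswap V2's getAmountOut / getAmountIn)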
21 | rule exec(On Amm:Exchange Address:ETHAddress swaps for TokenOut:ETHAddress by providing AmountInTokenIn:Int TokenIn:ETHAddress and AmountInTokenOut:Int TokenOut:ETHAddress with change AmountOutTokenIn:Int fee GasFee:Int) =>
22 | Address in TokenIn gets 0 -Int AmountInTokenIn ~>
23 | Amm in TokenIn gets AmountInTokenIn ~>
24 | Address in TokenOut gets 0 -Int AmountInTokenOut ~>
25 | Amm in TokenOut gets AmountInTokenOut ~>
26 | Address in TokenIn gets AmountOutTokenIn ~>
27 | Amm in TokenIn gets 0 -Int AmountOutTokenIn ~>
28 | Address in TokenOut gets (((997 *Int AmountInTokenIn -Int 1000 *Int AmountOutTokenIn) *Int USwapV2BalanceOut) /Int (1000 *Int (USwapV2BalanceIn -Int AmountOutTokenIn) +Int 997 *Int AmountInTokenIn)) +Int ((AmountInTokenOut *Int 997) /Int (1000)) ~>
29 | Amm in TokenOut gets 0 -Int ( (((997 *Int AmountInTokenIn -Int 1000 *Int AmountOutTokenIn) *Int USwapV2BalanceOut) /Int (1000 *Int (USwapV2BalanceIn -Int AmountOutTokenIn) +Int 997 *Int AmountInTokenIn)) +Int ((AmountInTokenOut *Int 997) /Int (1000)) ) ~>
30 | Address in 0 gets 0 -Int GasFee
31 | //GetPrice TokenIn TokenOut
32 | ...
33 |
34 | ... (Amm in TokenOut) |-> USwapV2BalanceOut (Amm in TokenIn) |-> USwapV2BalanceIn ...
35 | ... .List => ListItem(On Amm Address swaps for TokenOut by providing AmountInTokenIn TokenIn and AmountInTokenOut TokenOut with change AmountOutTokenIn fee GasFee)
36 | // requires (UserBalance >=Int TradeAmount)
37 |
38 |
39 |
40 | rule exec(On Amm:Exchange Address:ETHAddress swaps for AmountOutTokenOut:Int TokenOut:ETHAddress by providing TokenIn:ETHAddress fee GasFee:Int) =>
41 | Address in TokenOut gets AmountOutTokenOut ~>
42 | Amm in TokenOut gets 0 -Int AmountOutTokenOut ~>
43 | Address in TokenIn gets 0 -Int ((1000 *Int USwapV2BalanceIn *Int AmountOutTokenOut) /Int (997 *Int (USwapV2BalanceOut -Int AmountOutTokenOut )) +Int 1) ~>
44 | Amm in TokenIn gets ((1000 *Int USwapV2BalanceIn *Int AmountOutTokenOut) /Int (997 *Int (USwapV2BalanceOut -Int AmountOutTokenOut )) +Int 1) ~>
45 | Address in 0 gets 0 -Int GasFee
46 | //GetPrice TokenIn TokenOut
47 | ...
48 |
49 | ... (Amm in TokenOut) |-> USwapV2BalanceOut (Amm in TokenIn) |-> USwapV2BalanceIn ...
50 | ... .List => ListItem(On Amm Address swaps for AmountOutTokenOut TokenOut by providing TokenIn fee GasFee)
51 | // requires (UserBalance >=Int TradeAmount)
52 |
53 |
54 |
55 |
56 | rule exec(On Amm:Exchange Address:ETHAddress swaps for TokenOut:ETHAddress by providing AmountInTokenIn:Int TokenIn:ETHAddress and AmountInTokenOut:Int TokenOut:ETHAddress with change AmountOutTokenIn:Int fee GasFee:Int) => FAIL ...
57 | S
58 | _:Set => .Set
59 | ... .List => ListItem(On Amm Address swaps for TokenOut by providing AmountInTokenIn TokenIn and AmountInTokenOut TokenOut with change AmountOutTokenIn fee GasFee)
60 | requires notBool (((Amm in TokenOut) in keys(S)) andBool ((Amm in TokenIn) in keys(S)) )
61 |
62 |
63 |
64 | // todo accurate pricing rules
65 | // todo fees
66 | rule exec(On Amm:Exchange LiquidityProvider:ETHAddress adds Amount0:Int Token0:ETHAddress and Amount1:Int Token1:ETHAddress of liquidity) =>
67 | Amm in Token0 gets Amount0 ~> Amm in Token1 gets Amount1
68 | //GetPrice Token0 Token1
69 | ...
70 |
71 | ... .List => ListItem(On Amm LiquidityProvider adds Amount0 Token0 and Amount1 Token1 of liquidity)
72 |
73 | rule exec(On Amm:Exchange LiquidityProvider:ETHAddress removes Amount0:Int Token0:ETHAddress and Amount1:Int Token1:ETHAddress of liquidity) =>
74 | Amm in Token0 gets 0 -Int Amount0 ~> Amm in Token1 gets 0 -Int Amount1
75 | //GetPrice Token0 Token1
76 | ...
77 |
78 | ... .List => ListItem(On Amm LiquidityProvider removes Amount0 Token0 and Amount1 Token1 of liquidity)
79 |
80 | rule GetPrice Token0 Token1 => .K ...
81 | ... .List => ListItem(GetPrice Token0 Token1)
82 | ... (Amm in Token0) |-> Qty0 (Amm in Token1) |-> Qty1 ...
83 | Px => Px[(Token0 , Token1) <- (Qty1 /Int Qty0)][(Token1 , Token0) <- (Qty0 /Int Qty1)]
84 |
85 |
86 | endmodule
87 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | matplotlib
2 | pandas
3 | numpy
4 | cycler
5 | google-cloud-bigquery
6 |
--------------------------------------------------------------------------------
/run_all_experiments.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | ./run_tractable_experiments.sh sushiswap > sushiswap_mev.csv 2> sushiswap_mev.err &
4 | ./run_intractable_experiments.sh sushiswap > sushiswap_approx_mev.csv 2> sushiswap_approx_mev.err &
5 | wait
6 | ./run_intractable_experiments.sh uniswapv2 > uniswapv2_approx_mev.csv 2> uniswapv2_approx_mev.err &
7 | ./run_tractable_experiments.sh uniswapv2 > uniswapv2_mev.csv 2> uniswapv2_mev.err &
8 |
--------------------------------------------------------------------------------
/run_all_random_experiments.sh:
--------------------------------------------------------------------------------
1 | exchange=$1
2 | for line in `cat boundary_blocks`
3 | do
4 | start_block=`echo $line | cut -d, -f1`
5 | end_block=`echo $line | cut -d, -f2`
6 | month=`echo $line | cut -d, -f3`
7 | cmd="./run_random_experiments.sh $exchange $month $start_block $end_block"
8 | echo $cmd
9 | eval $cmd
10 | done
--------------------------------------------------------------------------------
/run_convergence_experiments.sh:
--------------------------------------------------------------------------------
1 | python3 run_uniswapv2_experiments.py -a 0xa2107fa5b38d9bbd2c461d6edf11b11a50f6b974 -b 10950202
2 | python3 run_uniswapv2_experiments.py -a 0xd3d2e2692501a5c9ca623199d38826e513033a17 -b 10984435
3 | python3 run_uniswapv2_experiments.py -a 0x2fdbadf3c4d5a8666bc06645b8358ab803996e28 -b 10986514
4 | python3 run_uniswapv2_experiments.py -a 0xc5be99a02c6857f9eac67bbce58df5572498f40c -b 10786519
5 | python3 run_uniswapv2_experiments.py -a 0xd90a1ba0cbaaaabfdc6c814cdf1611306a26e1f8 -b 10687813
6 | python3 run_uniswapv2_experiments.py -a 0x8175362afbeee32afb22d05adc0bbd08de32f5ae -b 10912377
7 | python3 run_uniswapv2_experiments.py -a 0xce84867c3c02b05dc570d0135103d3fb9cc19433 -b 10841486
8 | python3 run_uniswapv2_experiments.py -a 0x0d4a11d5eeaac28ec3f61d100daf4d40471f1852 -b 11005879
9 | python3 run_uniswapv2_experiments.py -a 0xa478c2975ab1ea89e8196811f51a7b7ade33eb11 -b 10954173
10 | python3 run_uniswapv2_experiments.py -a 0xb4e16d0168e52d35cacd2c6185b44281ec28c9dc -b 10993020
11 |
--------------------------------------------------------------------------------
/run_intractable_experiments.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | exchange_name=$1
3 | cmds_file=intractable_cmds_all
4 | rm -f $cmds_file
5 |
6 | waitforjobs() {
7 | while test $(jobs -p | wc -w) -ge "$1"; do wait -n; done
8 | }
9 |
10 | echo exchange,pair,token0,token1,block,numtransactions,mev
11 |
12 | for file in `find data-scripts/latest-data/$exchange_name-processed/ -type f -exec wc -l {} + | sort -rn | tr -s ' ' | cut -d' ' -f3 | grep 0x | head -n 10`
13 | do
14 | temp=${file%.csv}
15 | address=${temp##*/}
16 | for block in `sort -rt, -k2 -n data-scripts/latest-data/active-region/$exchange_name/txcount_$address.csv | head -n 30 | cut -f1 -d,`
17 | do
18 | cmd="python3 run_uniswapv2_experiments.py -b $block -a $address -e $exchange_name &"
19 | echo $cmd >> $cmds_file
20 | waitforjobs 15
21 | eval $cmd
22 | done
23 | #wait
24 | done
25 |
--------------------------------------------------------------------------------
/run_liquidated_mcd_experiments.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | num_cores=40
3 | function limited_parallel {
4 | while [ `jobs | wc -l` -ge $num_cores ]
5 | do
6 | sleep 5
7 | done
8 | }
9 | for vault in `grep bite maker-data/mcd/latest-data/maker-processed/31358499851466632982272067240987752480060719095994161751935692443478204088320.csv | cut -d' ' -f 4`
10 | do
11 | limited_parallel
12 | block=`grep -B 1 "bites vault $vault" maker-data/mcd/latest-data/maker-processed/31358499851466632982272067240987752480060719095994161751935692443478204088320.csv | head -n 1 | cut -d' ' -f 5`
13 | cmd="python3 run_mcd_experiments.py -sb $block -eb $block -cdp $vault &"
14 | echo $cmd
15 | eval $cmd
16 | done
17 |
--------------------------------------------------------------------------------
/run_maker_experiments.py:
--------------------------------------------------------------------------------
1 | from subprocess import Popen, PIPE
2 | import matplotlib
3 | matplotlib.use('Agg')
4 | from matplotlib import pyplot as plt
5 | import sys, os
6 | from copy import deepcopy
7 | import pandas as pd
8 | import argparse
9 | import logging
10 | #from find_mev_krun_uniswapv2 import reordering_mev
11 | #from find_mev_uniswapv1 import reordering_mev
12 | from find_mev_krun_maker import reordering_mev
13 |
14 | parser = argparse.ArgumentParser(description='Run Maker experiments')
15 |
16 | parser.add_argument(
17 | '-v', '--verbose',
18 | help="Be verbose",
19 | action="store_const", dest="loglevel", const=logging.INFO,
20 | default=logging.WARNING
21 | )
22 |
23 | parser.add_argument(
24 | '-e', '--exchange',
25 | help="uniswapv1",
26 | default='uniswapv1'
27 | )
28 |
29 |
30 | parser.add_argument(
31 | '-sb', '--start_block',
32 | help="Block number to find MEV in",
33 | required=True
34 | )
35 |
36 | parser.add_argument(
37 | '-eb', '--end_block',
38 | help="Block number to find MEV in",
39 | required=True
40 | )
41 |
42 | parser.add_argument(
43 | '-a', '--address',
44 | help="pair address",
45 | default='89d24a6b4ccb1b6faa2625fe562bdd9a23260359'
46 |
47 | )
48 |
49 | parser.add_argument(
50 | '-cdp', '--cdp',
51 | help="CDP id",
52 | required=True
53 | )
54 |
55 | sai_token_address = '89d24a6b4ccb1b6faa2625fe562bdd9a23260359'
56 | sai_token = '786821374916005576892310737142965798721793950553'
57 |
58 |
59 | args = parser.parse_args()
60 | logging.basicConfig(level=args.loglevel, format='%(message)s')
61 |
62 | logger = logging.getLogger(__name__)
63 |
64 | logger.info('Block : %s', args.start_block)
65 | logger.info('Block : %s', args.end_block)
66 |
67 | exchange_name = args.exchange
68 |
69 | reserves = pd.read_csv('data-scripts/latest-data/%s-reserves.csv' % (exchange_name))
70 | #uniswapv2_pairs = pd.read_csv('data-scripts/latest-data/data/uniswapv2_pairs.csv').set_index('pair')
71 |
72 | # TODO : check if exists
73 | transactions_filepath = 'data-scripts/latest-data/' + exchange_name + '-processed/' + args.address + '.csv'
74 | transactions = ''
75 | for block in range(int(args.start_block), int(args.end_block) + 1):
76 | block_str = str(block)
77 | pipe = Popen('grep -A 1 "block ' + block_str + '" ' + transactions_filepath, shell=True, stdout=PIPE, stderr=PIPE)
78 | component_transactions = pipe.stdout.read() + pipe.stderr.read()
79 | component_transactions = str(component_transactions, "utf-8")
80 | transactions = transactions + component_transactions
81 |
82 | maker_transactions_filepath = 'maker-data/maker_data.txt'
83 | pipe = Popen('grep "vault ' + args.cdp + ';" ' + maker_transactions_filepath, shell=True, stdout=PIPE, stderr=PIPE)
84 | maker_transactions = pipe.stdout.read() + pipe.stderr.read()
85 | maker_transactions = str(maker_transactions, "utf-8")
86 | transactions = transactions + maker_transactions
87 |
88 |
89 | #post_reserve = reserves[(reserves.Address == args.address) & (reserves.Block == int(args.end_block))]
90 | #post_price = (int(post_reserve.Reserve1) // int(post_reserve.Reserve0) , int(post_reserve.Reserve0) // int(post_reserve.Reserve1) )
91 | post_price = (0, 0) #TODO : handle properly
92 |
93 | balances = (0,0)
94 |
95 | pre_reserve = reserves[(reserves.Address == args.address) & (reserves.Block < int(args.start_block))]
96 | tokens = (str(pre_reserve.iloc[0]['Token0']).replace(sai_token, 'SAI'), str(pre_reserve.iloc[0]['Token1']).replace(sai_token, 'SAI'))
97 | if len(pre_reserve) < 1:
98 | pre_price = (0,0) # TODO : subtle issue wrt MEV here
99 | else:
100 | pre_reserve = pre_reserve.iloc[-1]
101 | pre_price = (int(pre_reserve.Reserve1) // int(pre_reserve.Reserve0) , int(pre_reserve.Reserve0) // int(pre_reserve.Reserve1) )
102 | balances = (int(pre_reserve.Reserve0), int(pre_reserve.Reserve1))
103 |
104 | logger.info(pre_reserve)
105 |
106 | if exchange_name == 'uniswapv1':
107 | acc = 'Uniswap'
108 |
109 | identifier = args.cdp + '-' + args.start_block + '-' + args.end_block + '-' + args.address
110 |
111 | spec_file = 'experiments-maker-'+ exchange_name+'/' + identifier + '/bound.k'
112 | outfile = 'output/'+ identifier +'.out'
113 |
114 |
115 |
116 |
117 | transactions = transactions.split('\n')
118 |
119 | maker_prologue = '\n'.join(filter(lambda x: 'opens' in x, transactions))
120 |
121 | transactions = '\n'.join([transaction for transaction in transactions if 'opens' not in transaction])
122 |
123 | maker_epilogue = '\n0 bites vault {} ;'.format(args.cdp)
124 |
125 | # replace address w/ semantics keyword
126 | transactions = transactions.replace(sai_token, 'SAI')
127 |
128 | logger.info(maker_prologue)
129 |
130 | logger.info(transactions)
131 |
132 | logger.info(maker_epilogue)
133 |
134 | reordering_mev(transactions, spec_file, outfile, acc, tokens, balances, pre_price, post_price, args.address, maker_prologue, maker_epilogue)
135 |
--------------------------------------------------------------------------------
/run_mcd_experiments.py:
--------------------------------------------------------------------------------
1 | from subprocess import Popen, PIPE
2 | import matplotlib
3 | matplotlib.use('Agg')
4 | from matplotlib import pyplot as plt
5 | import sys, os
6 | from copy import deepcopy
7 | import pandas as pd
8 | import argparse
9 | import logging
10 | #from find_mev_krun_uniswapv2 import reordering_mev
11 | #from find_mev_uniswapv1 import reordering_mev
12 | from find_mev_krun_mcd import reordering_mev
13 | import time
14 |
15 | start_time = time.time()
16 |
17 | parser = argparse.ArgumentParser(description='Run MCD experiments')
18 |
19 | parser.add_argument(
20 | '-v', '--verbose',
21 | help="Be verbose",
22 | action="store_const", dest="loglevel", const=logging.INFO,
23 | default=logging.WARNING
24 | )
25 |
26 | parser.add_argument(
27 | '-e', '--exchange',
28 | help="uniswapv2",
29 | default='uniswapv2'
30 | )
31 |
32 |
33 | parser.add_argument(
34 | '-sb', '--start_block',
35 | help="Block number to find MEV in",
36 | required=True
37 | )
38 |
39 | parser.add_argument(
40 | '-eb', '--end_block',
41 | help="Block number to find MEV in",
42 | required=True
43 | )
44 | parser.add_argument(
45 | '-n', '--num_workers',
46 | help="Number of threads to use",
47 | required=True
48 | )
49 |
50 | parser.add_argument(
51 | '-a', '--address',
52 | help="pair address",
53 | default='0xa478c2975ab1ea89e8196811f51a7b7ade33eb11'
54 |
55 | )
56 |
57 | parser.add_argument(
58 | '-cdp', '--cdp',
59 | help="CDP id",
60 | required=True
61 | )
62 |
63 |
64 | #sai_token_address = '89d24a6b4ccb1b6faa2625fe562bdd9a23260359'
65 | dai_token_address = 'a478c2975ab1ea89e8196811f51a7b7ade33eb11'
66 | #sai_token = '786821374916005576892310737142965798721793950553'
67 | dai_token = '611382286831621467233887798921843936019654057231'
68 |
69 | collateral_type = '31358499851466632982272067240987752480060719095994161751935692443478204088320'
70 | collateral_token = '1097077688018008265106216665536940668749033598146'
71 |
72 | args = parser.parse_args()
73 | logging.basicConfig(level=args.loglevel, format='%(message)s')
74 |
75 | logger = logging.getLogger(__name__)
76 |
77 | logger.info('Block : %s', args.start_block)
78 | logger.info('Block : %s', args.end_block)
79 |
80 | exchange_name = args.exchange
81 |
82 | reserves = pd.read_csv('data-scripts/latest-data/%s-reserves.csv' % (exchange_name))
83 |
84 | mcd_fees_data = pd.read_csv('maker-data/mcd/latest-data/maker_fees.csv')
85 |
86 | if exchange_name == 'uniswapv2':
87 | acc = 'UniswapV2'
88 |
89 | def get_mcd_rate(given_block):
90 | pre_fees_data = mcd_fees_data[mcd_fees_data.Block < int(given_block)]
91 | if len(pre_fees_data) < 1:
92 | pre_fees = 10**27
93 | else:
94 | pre_fees_data = pre_fees_data.iloc[-1]
95 | pre_fees = pre_fees_data.Fees
96 | return int(pre_fees)
97 |
98 | def get_uniswap_reserves(given_block):
99 | balances = {collateral_token:0, 'DAI':0}
100 |
101 | pre_reserve = reserves[(reserves.Address == args.address) & (reserves.Block < int(given_block))]
102 | if len(pre_reserve) < 1:
103 | return balances
104 | else:
105 | tokens = (str(pre_reserve.iloc[0]['Token0']).replace(dai_token, 'DAI'), str(pre_reserve.iloc[0]['Token1']).replace(dai_token, 'DAI'))
106 | pre_reserve = pre_reserve.iloc[-1]
107 | return {tokens[0] : int(pre_reserve.Reserve0), tokens[1]: int(pre_reserve.Reserve1)}
108 |
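   | # render a signed balance delta in the semantics' syntax: non-negative -> "gets", negative -> "gives"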
109 | def kint(x):
110 | if x >= 0:
111 | return "gets %d" % (x)
112 | else:
113 | return "gives %d" % (0-x)
114 |
115 |
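   | # replay the vault's history before start_block: ahead of each recorded action, resync the AMM
   | # reserves and the cumulative stability fee so the initial K state matches the chain at that block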
116 | def get_mcd_prologue(mcd_transactions, start_block):
117 | mcd_transactions = mcd_transactions.split('\n')
118 | mcd_prologue = ''
119 | rate = 10**27
120 | balances = {collateral_token: 0, 'DAI': 0}
121 | curr_block = 0
122 | for transaction in mcd_transactions:
123 | transaction = transaction.strip()
124 | if 'block' in transaction:
125 | curr_block = int(transaction.split()[-1])
126 | if curr_block >= start_block:
127 | break
128 | elif 'vault' in transaction:
129 | curr_balances = get_uniswap_reserves(curr_block)
130 | curr_rate = get_mcd_rate(curr_block)
131 | mcd_prologue += """{acc} in {token0} {balance0} ;
132 | {acc} in {token1} {balance1} ;
133 | {fee_inc} increment in stability fees for {token1} ;
134 | {tx}
135 | """.format(acc=acc, token0='DAI', token1=collateral_token, balance0=kint(curr_balances['DAI'] - balances['DAI']), balance1=kint(curr_balances[collateral_token]-balances[collateral_token]),tx=transaction, fee_inc = curr_rate - rate)
136 | balances = curr_balances
137 | rate = curr_rate
138 | curr_balances = get_uniswap_reserves(start_block)
139 | curr_rate = get_mcd_rate(start_block)
140 | mcd_prologue += """{acc} in {token0} {balance0} ;
141 | {acc} in {token1} {balance1} ;
142 | GetPrice {token0} {token1} ;
143 | {fee_inc} increment in stability fees for {token1} ;
144 | """.format(acc=acc, token0='DAI', token1=collateral_token, balance0=kint(curr_balances['DAI'] - balances['DAI']), balance1=kint(curr_balances[collateral_token]-balances[collateral_token]), fee_inc = curr_rate - rate)
145 | balances = curr_balances
146 | rate = curr_rate
147 | return mcd_prologue
148 |
149 |
150 | # TODO : check if exists
151 | transactions_filepath = 'data-scripts/latest-data/' + exchange_name + '-processed/' + args.address + '.csv'
152 | amm_transactions = ''
153 | for block in range(int(args.start_block), int(args.end_block) + 1):
154 | block_str = str(block)
155 | pipe = Popen('grep -A 1 "block ' + block_str + '" ' + transactions_filepath, shell=True, stdout=PIPE, stderr=PIPE)
156 | component_transactions = pipe.stdout.read() + pipe.stderr.read()
157 | component_transactions = str(component_transactions, "utf-8")
158 | amm_transactions = amm_transactions + component_transactions
159 |
160 | #maker_transactions_filepath = 'maker-data/maker_data.txt'
161 | mcd_transactions_filepath = 'maker-data/mcd/latest-data/maker-processed/%s.csv' % (collateral_type)
162 |
163 | pipe = Popen('grep -B 1 "vault ' + args.cdp + '" ' + mcd_transactions_filepath, shell=True, stdout=PIPE, stderr=PIPE)
164 | mcd_transactions = pipe.stdout.read() + pipe.stderr.read()
165 | mcd_transactions = str(mcd_transactions, "utf-8")
166 |
167 |
168 |
169 | identifier = args.cdp + '-' + args.start_block + '-' + args.end_block + '-' + args.address
170 |
171 | spec_file = 'experiments-mcd-'+ exchange_name+'/' + identifier + '/bound.k'
172 | outfile = 'run-output/'+ identifier +'.out'
173 |
174 |
175 |
176 | # replace address w/ semantics keyword
177 | amm_transactions = amm_transactions.replace(dai_token, 'DAI').strip()
178 | mcd_transactions = mcd_transactions.replace(dai_token, 'DAI').replace(collateral_type, collateral_token).strip()
179 |
180 |
181 | mcd_prologue = get_mcd_prologue(mcd_transactions, int(args.start_block))
182 |
183 | mcd_epilogue = ''
184 |
185 | transactions = amm_transactions + '\n0 bites vault {} ;'.format(args.cdp)
186 |
187 | logger.info(mcd_prologue)
188 |
189 | logger.info(transactions)
190 |
191 | logger.info(mcd_epilogue)
192 |
193 | reordering_time = time.time()
194 |
195 | reordering_mev(transactions, spec_file, outfile, acc, args.address, mcd_prologue, mcd_epilogue, args.num_workers)
196 |
197 | end_time = time.time()
198 |
199 | print("{},{},{}".format(args.num_workers, end_time-start_time, end_time-reordering_time))
--------------------------------------------------------------------------------
/run_parallel_exp.py:
--------------------------------------------------------------------------------
1 | import os, time
2 |
3 | for run in range(3):
4 | for i in [96, 81, 64, 49, 36, 25, 16, 11, 7, 5, 4, 3]:
5 | print("Running %d %d" % (i, run))
6 | os.system("rm -rf run-output")
7 | os.system("mkdir run-output")
8 | cmd = "python3 run_mcd_experiments.py -sb 10245709 -eb 10245722 -cdp 1424800597226405082122559358241526008447780662102 -n %d >> results " % (i)
9 | print(cmd)
10 | os.system(cmd)
11 | time.sleep(5)
12 |
--------------------------------------------------------------------------------
/run_parallel_exp.sh:
--------------------------------------------------------------------------------
1 | runs=3
2 | start=10245709
3 | end=10245722
4 | cdp=1424800597226405082122559358241526008447780662102
5 | python3 run_mcd_experiments.py -sb $start -eb $end -cdp $cdp -n 96
6 | waitforjobs() {
7 | while test $(jobs -p | wc -w) -ge "$1"; do wait -n; done
8 | }
9 | for run in $(seq $runs)
10 | do
11 | for i in {96,81,64,49,36,25,16,11,7,5,4,3}
12 | do
13 | starttime=`date +%s`
14 | for file in experiments-mcd-uniswapv2/$cdp-$start-$end-0xa478c2975ab1ea89e8196811f51a7b7ade33eb11/bound.k*
15 | do
16 | krun $file > /dev/null &
17 | waitforjobs $i
18 | done
19 | endtime=`date +%s`
20 | delta=$(( $endtime - $starttime ))
21 | echo $run,$i,$delta
22 | done
23 | done
24 |
25 |
--------------------------------------------------------------------------------
/run_random_experiments.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | exchange_name=$1
3 | month=$2
4 | cmds_file=random_cmds_all_${exchange_name}_${month}
5 | rm -f $cmds_file
6 | results_file=validate_random_${exchange_name}_${month}
7 | rm -f $results_file
8 |
9 | waitforjobs() {
10 | while test $(jobs -p | wc -w) -ge "$1"; do wait -n; done
11 | }
12 |
13 |
14 | echo exchange,pair,token0,token1,block,numtransactions,mev
15 |
16 |
17 |
18 | for sample in `python3 get_random_blocks.py -m $month -e $exchange_name -n 1000`
19 | do
20 | date=`echo $sample | cut -d, -f1 `
21 | block=`echo $sample | cut -d, -f2 `
22 | relayers=`echo $sample | awk -F "," '{$1=$2=$3=""; print $0}'`
23 | cmd="python3 run_uniswapv2_experiments.py -b $block -e $exchange_name -d $date -a "
24 | for relayer in `echo $relayers`
25 | do
26 | cmd="${cmd} ${relayer}"
27 | done
28 | cmd="${cmd} -p ${results_file} &"
29 | echo $cmd
30 | echo $cmd >> $cmds_file
31 | waitforjobs 25
32 | eval $cmd
33 | done
34 |
--------------------------------------------------------------------------------
/run_tractable_experiments.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | exchange_name=$1
3 | rm -r /tmp/plot_mev/$exchange_name/
4 | mkdir -p /tmp/plot_mev/$exchange_name/
5 | cmds_file=tractable_cmds_all
6 | rm -f $cmds_file
7 |
8 | waitforjobs() {
9 | while test $(jobs -p | wc -w) -ge "$1"; do wait -n; done
10 | }
11 |
12 |
13 | echo exchange,pair,token0,token1,block,numtransactions,mev
14 |
15 | for file in `find data-scripts/latest-data/$exchange_name-processed/ -type f -exec wc -l {} + | sort -rn | tr -s ' ' | cut -d' ' -f3 | grep 0x | head -n 10`
16 | do
17 | temp=${file%.csv}
18 | address=${temp##*/}
19 | for block in `sort -rt, -k2 -n data-scripts/latest-data/active-region/$exchange_name/txcount_$address.csv | grep ,[0-9]$ | head -n 30 | cut -f1 -d,`
20 | do
21 | cmd="python3 run_uniswapv2_experiments.py -b $block -a $address -e $exchange_name -p validate_$exchange_name &"
22 | echo $cmd >> $cmds_file
23 | waitforjobs 20
24 | eval $cmd
25 | # cp experiments/$block-$address/bound.k /tmp/plot_mev/$exchange_name/convergence-$block-$address.csv
26 |
27 | done
28 | done
29 |
--------------------------------------------------------------------------------
/run_uniswapv1_experiments.py:
--------------------------------------------------------------------------------
1 | from subprocess import Popen, PIPE
2 | import matplotlib
3 | matplotlib.use('Agg')
4 | from matplotlib import pyplot as plt
5 | import sys, os
6 | from copy import deepcopy
7 | import pandas as pd
8 | import argparse
9 | import logging
10 | #from find_mev_krun_uniswapv2 import reordering_mev
11 | from find_mev_uniswapv1 import reordering_mev
12 |
13 | # price in eth
14 | def get_price(token, reserves, block):
15 | weth = '0'
16 | if token == weth:
17 | return 1.0
18 | pre_reserve = reserves[(reserves.Token0 == token) & (reserves.Token1 == weth) & (reserves.Block < int(block))]
19 | # pre_reserve = pre_reserve.iloc[-1]
20 | if len(pre_reserve) > 0:
21 | return (int(pre_reserve.iloc[-1].Reserve1) + 0.0) / (int(pre_reserve.iloc[-1].Reserve0))
22 | pre_reserve = reserves[(reserves.Token0 == weth) & (reserves.Token1 == token) & (reserves.Block < int(block))]
23 | # pre_reserve = pre_reserve.iloc[-1]
24 | if len(pre_reserve) > 0:
25 | return (int(pre_reserve.iloc[-1].Reserve0) + 0.0) / (int(pre_reserve.iloc[-1].Reserve1))
26 | return None
27 |
28 |
29 | parser = argparse.ArgumentParser(description='Run UniswapV1 experiments')
30 |
31 | parser.add_argument(
32 | '-v', '--verbose',
33 | help="Be verbose",
34 | action="store_const", dest="loglevel", const=logging.INFO,
35 | default=logging.WARNING
36 | )
37 |
38 | parser.add_argument(
39 | '-e', '--exchange',
40 | help="uniswapv1",
41 | default='uniswapv1'
42 | )
43 |
44 |
45 | parser.add_argument(
46 | '-b', '--block',
47 | help="Block number to find MEV in",
48 | required=True
49 | )
50 |
51 | parser.add_argument(
52 | '-d', '--date',
53 | help="Date",
54 |     default=""
55 | )
56 |
57 | parser.add_argument(
58 | '-a', '--address',
59 | nargs='+',
60 | help="pair address",
61 | required=True
62 |
63 | )
64 |
65 | parser.add_argument(
66 | '-c', '--convergence',
67 |     help="collect convergence data",
68 | action="store_true"
69 | )
70 |
71 | parser.add_argument(
72 | '-p', '--paths',
73 | help="collect paths data to validate",
74 | default=""
75 | )
76 |
77 |
78 | args = parser.parse_args()
79 | logging.basicConfig(level=args.loglevel, format='%(message)s')
80 |
81 | logger = logging.getLogger(__name__)
82 |
83 | logger.info('Block : %s', args.block)
84 |
85 | exchange_name = args.exchange
86 |
87 | addresses = set(args.address)
88 |
89 | date = args.date
90 | month = date[:7]
91 |
92 |
93 | reserves = pd.read_csv('data-scripts/latest-data/%s-reserves.csv' % (exchange_name), dtype={'Token1': object, 'Token0': object})
94 | #uniswapv2_pairs = pd.read_csv('data-scripts/latest-data/data/uniswapv2_pairs.csv').set_index('pair')
95 |
96 | balances = {}
97 | tokens = {}
98 | prices = {}
99 |
100 | for address in addresses:
101 | balances[address] = (0,0)
102 | address_reserves = reserves[(reserves.Address == address)]
103 | pre_reserve = address_reserves[(address_reserves.Block < int(args.block))]
104 | if len(pre_reserve) > 0:
105 | pre_reserve = pre_reserve.iloc[-1]
106 | balances[address] = (int(pre_reserve.Reserve0), int(pre_reserve.Reserve1))
107 | token0 = address_reserves.iloc[0].Token0
108 | token1 = address_reserves.iloc[0].Token1
109 | tokens[address] = (token0, token1)
110 | prices[token0] = get_price(token0, reserves, args.block)
111 | prices[token1] = get_price(token1, reserves, args.block)
112 | if prices[token0] is None or prices[token1] is None:
113 | logger.warning("unknown prices for %s", address)
114 | sys.exit(1)
115 |
116 | logger.info(tokens)
117 | logger.info(balances)
118 |
119 |
120 | if exchange_name == 'uniswapv1':
121 | acc = 'UniswapV1'
122 |
123 | identifier = args.block + '-' + '-'.join([address[:8] for address in addresses])
124 |
125 | spec_file = 'experiments/' + identifier + '/bound.k'
126 | outfile = 'output/'+ identifier +'.out'
127 |
128 | transactions = {}
129 |
130 | # if date != "":
131 | # transactions_filepath = 'data-scripts/latest-data/' + exchange_name + '-indexed/' + date + '.csv'
132 | # pipe = Popen('grep -A 1 "block ' + args.block + '" ' + transactions_filepath, shell=True, stdout=PIPE, stderr=PIPE)
133 | # transactions = associate_address(str(pipe.stdout.read() + pipe.stderr.read(), "utf-8"), tokens)
134 | # else:
135 | # for address in addresses:
136 | # transactions_filepath = 'data-scripts/latest-data/' + exchange_name + '-processed/' + address + '.csv'
137 | # pipe = Popen('grep -A 1 "block ' + args.block + '" ' + transactions_filepath, shell=True, stdout=PIPE, stderr=PIPE)
138 | # transactions[address] = str(pipe.stdout.read() + pipe.stderr.read(), "utf-8")
139 |
140 | for address in addresses:
141 | transactions_filepath = 'data-scripts/latest-data/' + exchange_name + '-processed/' + address + '.csv'
142 | pipe = Popen('grep -A 1 "block ' + args.block + '" ' + transactions_filepath, shell=True, stdout=PIPE, stderr=PIPE)
143 | transactions[address] = str(pipe.stdout.read() + pipe.stderr.read(), "utf-8")
144 |
145 |
146 | logger.info(transactions)
147 |
148 |
149 | # TODO : check if exists
150 | # transactions_filepath = 'data-scripts/latest-data/' + exchange_name + '-processed/' + args.address + '.csv'
151 |
152 | # pipe = Popen('grep -A 1 "block ' + args.block + '" ' + transactions_filepath, shell=True, stdout=PIPE, stderr=PIPE)
153 | # transactions = pipe.stdout.read() + pipe.stderr.read()
154 | # transactions = str(transactions, "utf-8")
155 |
156 | total_mev = 0
157 | tx_ordering_u = []
158 | tx_ordering_l = []
159 |
160 | for address in addresses:
161 | mev, u, l = reordering_mev(transactions[address], spec_file, outfile, acc, tokens[address], balances[address], address, prices, args.block, args.convergence)
162 | total_mev += mev
163 | tx_ordering_u.append(u)
164 | tx_ordering_l.append(l)
165 |
166 | path_filename = args.paths
167 |
168 | if path_filename != '':
169 | path_f = open(path_filename, 'a')
170 | path_f.write('{},{},{},{},{}\n'.format(args.block, total_mev, '1', acc, ','.join(tx_ordering_u)))
171 | path_f.write('{},{},{},{},{}\n'.format(args.block, total_mev, '0', acc, ','.join(tx_ordering_l)))
172 | path_f.close()
173 |
174 |
--------------------------------------------------------------------------------
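Note: get_price above derives a token's ETH price from the most recent reserves snapshot before the target block. Below is a toy worked example with a made-up reserves table; the token ids '42' and '7' and all numbers are hypothetical, and '0' stands for WETH as in the script.

import pandas as pd

# Hypothetical reserves table; '0' is the WETH id used by the UniswapV1 script.
reserves = pd.DataFrame({
    'Token0':   ['42', '0'],
    'Token1':   ['0',  '7'],
    'Reserve0': [1000, 500],
    'Reserve1': [2000, 250],
    'Block':    [99,   100],
})

# With get_price from the script in scope:
# get_price('42', reserves, 101)  -> 2000 / 1000 = 2.0  (token '42' is Token0, price = Reserve1/Reserve0)
# get_price('7',  reserves, 101)  -> 500 / 250  = 2.0   (WETH is Token0, so price = Reserve0/Reserve1)
# get_price('9',  reserves, 101)  -> None               (no pair against WETH before block 101)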
/run_uniswapv1_experiments.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | exchange_name=uniswapv1
3 | type=$1
4 |
5 | waitforjobs() {
6 | while test $(jobs -p | wc -w) -ge "$1"; do wait -n; done
7 | }
8 |
9 | echo exchange,pair,token0,token1,block,numtransactions,mev
10 | if [ "$type" = intractable ]; then
11 | for file in `ls -S data-scripts/latest-data/$exchange_name-processed/* | head -n 10`
12 |
13 | do
14 | temp=${file%.csv}
15 | address=${temp##*/}
16 | for block in `sort -rt, -k2 -n data-scripts/latest-data/active-region/$exchange_name/txcount_$address.csv | head -n 30 | cut -f1 -d,`
17 | do
18 | cmd="python3 run_uniswapv1_experiments.py -b $block -a $address -e $exchange_name &"
19 | waitforjobs 20
20 | eval $cmd
21 | done
22 | done
23 |
24 | else
25 | rm -r /tmp/plot_mev/$exchange_name/
26 | mkdir -p /tmp/plot_mev/$exchange_name/
27 | for file in `ls -S data-scripts/latest-data/$exchange_name-processed/* | head -n 10`
28 | do
29 | temp=${file%.csv}
30 | address=${temp##*/}
31 | for block in `sort -rt, -k2 -n data-scripts/latest-data/active-region/$exchange_name/txcount_$address.csv | grep ,[0-9]$ | head -n 30 | cut -f1 -d,`
32 | do
33 | cmd="python3 run_uniswapv1_experiments.py -b $block -a $address -e $exchange_name -c &"
34 | waitforjobs 20
35 | eval $cmd
36 | cp experiments-uniswapv1/$block-$address/bound.k /tmp/plot_mev/$exchange_name/convergence-$block-$address.csv
37 | done
38 | done
39 | fi
40 |
41 |
--------------------------------------------------------------------------------
/run_uniswapv2_experiments.py:
--------------------------------------------------------------------------------
1 | from subprocess import Popen, PIPE
2 | import matplotlib
3 | matplotlib.use('Agg')
4 | from matplotlib import pyplot as plt
5 | import sys, os
6 | from copy import deepcopy
7 | import pandas as pd
8 | import argparse
9 | import logging
10 | #from find_mev_krun_uniswapv2 import reordering_mev
11 | from find_mev_uniswapv2 import reordering_mev
12 | from collections import defaultdict
13 |
14 | # price in eth
15 | def get_price(token, reserves, block):
16 | weth = '1097077688018008265106216665536940668749033598146'
17 | if token == weth:
18 | return 1.0
19 | pre_reserve = reserves[(reserves.Token0 == token) & (reserves.Token1 == weth) & (reserves.Block < int(block))]
20 | # pre_reserve = pre_reserve.iloc[-1]
21 | if len(pre_reserve) > 0:
22 | return (int(pre_reserve.iloc[-1].Reserve1) + 0.0) / (int(pre_reserve.iloc[-1].Reserve0))
23 | pre_reserve = reserves[(reserves.Token0 == weth) & (reserves.Token1 == token) & (reserves.Block < int(block))]
24 | # pre_reserve = pre_reserve.iloc[-1]
25 | if len(pre_reserve) > 0:
26 | return (int(pre_reserve.iloc[-1].Reserve0) + 0.0) / (int(pre_reserve.iloc[-1].Reserve1))
27 | return None
28 |
29 | def get_address_from_tx(transaction, tokens):
30 | # print(transaction)
31 | for address in tokens:
32 | token0 = tokens[address][0]
33 | token1 = tokens[address][1]
34 | if token0 in transaction and token1 in transaction:
35 | # print(address)
36 | return address
37 | return None
38 |
39 | def associate_address(data, tokens):
40 | ret = {}
41 | transactions = data.split('\n')
42 | for idx in range(1, len(transactions), 2):
43 | address = get_address_from_tx(transactions[idx], tokens)
44 | if address is None:
45 | continue
46 | if address not in ret:
47 | ret[address] = ''
48 | ret[address] = ret[address] + transactions[idx-1] + '\n' + transactions[idx] + '\n'
49 | return ret
50 |
51 |
52 |
53 |
54 | parser = argparse.ArgumentParser(description='Run UniswapV2 experiments')
55 |
56 | parser.add_argument(
57 | '-v', '--verbose',
58 | help="Be verbose",
59 | action="store_const", dest="loglevel", const=logging.INFO,
60 | default=logging.WARNING
61 | )
62 |
63 | parser.add_argument(
64 | '-e', '--exchange',
65 | help="sushiswap/uniswapv2",
66 | default='uniswapv2'
67 | )
68 |
69 |
70 | parser.add_argument(
71 | '-b', '--block',
72 | help="Block number to find MEV in",
73 | required=True
74 | )
75 |
76 | parser.add_argument(
77 | '-d', '--date',
78 | help="Date",
79 |     default=""
80 | )
81 |
82 | parser.add_argument(
83 | '-a', '--address',
84 | nargs='+',
85 | help="pair address",
86 | required=True
87 |
88 | )
89 |
90 | parser.add_argument(
91 | '-c', '--convergence',
92 |     help="collect convergence data",
93 | action="store_true"
94 | )
95 |
96 | parser.add_argument(
97 | '-p', '--paths',
98 | help="collect paths data to validate",
99 | default=""
100 | )
101 |
102 |
103 |
104 | args = parser.parse_args()
105 | logging.basicConfig(level=args.loglevel, format='%(message)s')
106 |
107 | logger = logging.getLogger(__name__)
108 |
109 | logger.info('Block : %s', args.block)
110 |
111 | exchange_name = args.exchange
112 |
113 | addresses = set(args.address)
114 |
115 | date = args.date
116 | month = date[:7]
117 |
118 | reserves = pd.read_csv('data-scripts/latest-data/{}-reserves-segmented/{}'.format(exchange_name, month))
119 | #uniswapv2_pairs = pd.read_csv('data-scripts/latest-data/data/uniswapv2_pairs.csv').set_index('pair')
120 |
121 | balances = {}
122 | tokens = {}
123 | prices = {}
124 |
125 | for address in addresses:
126 | balances[address] = (0,0)
127 | address_reserves = reserves[(reserves.Address == address)]
128 | pre_reserve = address_reserves[(address_reserves.Block < int(args.block))]
129 | if len(pre_reserve) > 0:
130 | pre_reserve = pre_reserve.iloc[-1]
131 | balances[address] = (int(pre_reserve.Reserve0), int(pre_reserve.Reserve1))
132 | token0 = address_reserves.iloc[0].Token0
133 | token1 = address_reserves.iloc[0].Token1
134 | tokens[address] = (token0, token1)
135 | prices[token0] = get_price(token0, reserves, args.block)
136 | prices[token1] = get_price(token1, reserves, args.block)
137 | if prices[token0] is None or prices[token1] is None:
138 | logger.warning("unknown prices for %s", address)
139 | sys.exit(1)
140 |
141 | logger.info(tokens)
142 | logger.info(balances)
143 |
144 | if exchange_name == 'uniswapv2':
145 | acc = 'UniswapV2'
146 | elif exchange_name == 'sushiswap':
147 | acc = 'Sushiswap'
148 |
149 | identifier = args.block + '-' + '-'.join([address[:8] for address in addresses])
150 |
151 | spec_file = 'experiments/' + identifier + '/bound.k'
152 | outfile = 'output/'+ identifier +'.out'
153 |
154 | # TODO : check if exists
155 | transactions = {}
156 |
157 | if date != "":
158 | transactions_filepath = 'data-scripts/latest-data/' + exchange_name + '-indexed/' + date + '.csv'
159 | pipe = Popen('grep -A 1 "block ' + args.block + '" ' + transactions_filepath, shell=True, stdout=PIPE, stderr=PIPE)
160 | transactions = associate_address(str(pipe.stdout.read() + pipe.stderr.read(), "utf-8"), tokens)
161 | else:
162 | for address in addresses:
163 | transactions_filepath = 'data-scripts/latest-data/' + exchange_name + '-processed/' + address + '.csv'
164 | pipe = Popen('grep -A 1 "block ' + args.block + '" ' + transactions_filepath, shell=True, stdout=PIPE, stderr=PIPE)
165 | transactions[address] = str(pipe.stdout.read() + pipe.stderr.read(), "utf-8")
166 |
167 | logger.info(transactions)
168 |
169 | total_mev = 0
170 | tx_ordering_u = []
171 | tx_ordering_l = []
172 |
173 | for address in addresses:
174 | mev, u, l = reordering_mev(transactions[address], spec_file, outfile, acc, tokens[address], balances[address], address, prices, args.block, args.convergence)
175 | total_mev += mev
176 | tx_ordering_u.append(u)
177 | tx_ordering_l.append(l)
178 |
179 | path_filename = args.paths
180 |
181 | if path_filename != '':
182 | path_f = open(path_filename, 'a')
183 | path_f.write('{},{},{},{},{}\n'.format(args.block, total_mev, '1', acc, ','.join(tx_ordering_u)))
184 | path_f.write('{},{},{},{},{}\n'.format(args.block, total_mev, '0', acc, ','.join(tx_ordering_l)))
185 | path_f.close()
186 |
187 | # print(acc, pair_address, token0, token1, block, len(all_transactions), mev, sep=',')
188 |
189 |
--------------------------------------------------------------------------------
/tests/boundmaker.k:
--------------------------------------------------------------------------------
1 | module BOUNDMAKER
2 | imports MEV
3 | rule
4 | 0 in 0 gets 0;
5 | 0 in SAI gets 0;
6 | 1 opens vault 101;
7 | 1 locks 5 collateral to vault 101;
8 | 2 is given vault 101;
9 | 2 draws 500 debt from vault 101;
10 | 0 bites vault 101 ;
11 | 1 adds 200 tokens and 1 eth of liquidity to SAI;
12 | 1 adds 100 tokens and 2 eth of liquidity to SAI;
13 | => ?X
14 |
15 | .Map => ?S:Map
16 | .Set => ?_:Set
17 | .List => ?_
18 | .Map => ?_
19 | .Map => ?_
20 | ensures ( ({?S[0 in SAI]}:>Int <=Int 0) andBool ?X ==K DONE) orBool (?X ==K FAIL)
21 |
22 | // notBool((?Y ==K .K) andBool (?M ==K .Set)) orBool ({?S[0 in SAI]}:>Int <=Int 0 )
23 | // orBool (notBool ((0 , SAI) in keys(?P)))
24 | // (notBool(?X ==K .K) andBool notBool(?Y ==K .K)) orBool (?X ==K .K andBool ?Y ==K .K andBool ({?S[0 in SAI]}:>Int <=Int 0 ))
25 | // ( notBool(?X ==K DONE)) orBool
26 | endmodule
27 |
--------------------------------------------------------------------------------
/tests/mevverification.k:
--------------------------------------------------------------------------------
1 | module MEVVERIFICATION
2 | imports MEV
3 | rule
4 | // transaction 0x003c5d067ee03836a4374f4d5c3466e0d8328f62496b5dd1cbdf4d6db6aeacfe
5 | 572342420797838882173629907578269659270010768697 adds 100000000000000000000 tokens and 20000000000000000 eth of liquidity to 1454383474624795085458277788004692202315323288702;
6 | // transaction 0xd54a6243159e4068cb50aebf4645b117bd6bfd627cf7819734c48bd984d5f4e8
7 | 616870745215506062750269120831072350349526031960 in 0 swaps 295147905179352825856 input for 1454383474624795085458277788004692202315323288702 fee 478923078075;
8 | // transaction 0x3a2fd182d016977a87d33ecf67e81d930e07f47d623320a41028f5bbf869db32
9 | 368839096625159992408953185476375976377428775247 in 1454383474624795085458277788004692202315323288702 swaps 120443408692820097171972 input for 0 fee 1177002069144;
10 | // transaction 0x0adb278e7096e67aca0bdb32f9794322a7b8be5835fe21ef0aa012e85b2e77dc
11 | 616870745215506062750269120831072350349526031960 adds 83290999999999977725 tokens and 125401513679990865 eth of liquidity to 1454383474624795085458277788004692202315323288702;
12 | // transaction 0xb0cece3304dc2563fbb2d17736c751b8b441f04683e529c0a251799b7286f6b8
13 | 368839096625159992408953185476375976377428775247 in 1454383474624795085458277788004692202315323288702 swaps 2419684155471892146710118 input for 0 fee 1008871952048;
14 | => .
15 |
16 | .Map => ?S:Map
17 | .Set => .Set
18 | .List => ?A2
19 | ensures {?S[Uniswap in 0]}:>Int >=Int 10556631155968
20 | endmodule
21 |
--------------------------------------------------------------------------------
/tests/test:
--------------------------------------------------------------------------------
1 | // initial balances before block
2 | 0 in 0 gets 50000000000000000000000000000000000000000000;
3 | 0 in 1 gets 100000000000000000000000000000000000000000000;
4 | 1 in 0 gets 100000000000000000000000000000000000000000000;
5 | 2 in 1 gets 200000000000000000000000000000000000000000000;
6 | // setup 100 balances; special address 100
7 | 100 in 0 gets 100000000000000000000000;
8 | 100 in 1 gets 500000000000000000000000;
9 |
10 | // transactions performed during interval (reorgable)
11 | 0 in 0 swaps 50000000000000000000000 input for 0 fee 0;
12 | 2 in 1 swaps 100000000000000000000000 for 0 output fee 0;
13 | 2 in 0 swaps 50000000000000000000000 for 1 output fee 0;
14 | 2 in 1 swaps 10000000000000000000000 for 0 output fee 0;
15 | 0 in 1 swaps 20000000000000000000000 for 0 output fee 0;
16 |
17 |
--------------------------------------------------------------------------------
/tests/test2:
--------------------------------------------------------------------------------
1 | // transaction 0x003c5d067ee03836a4374f4d5c3466e0d8328f62496b5dd1cbdf4d6db6aeacfe
2 | 572342420797838882173629907578269659270010768697 adds 100000000000000000000 tokens and 20000000000000000 eth of liquidity to 1454383474624795085458277788004692202315323288702;
3 | // transaction 0xd54a6243159e4068cb50aebf4645b117bd6bfd627cf7819734c48bd984d5f4e8
4 | 616870745215506062750269120831072350349526031960 intoken 0 swaps 295147905179352825856 input for 1454383474624795085458277788004692202315323288702 fee 478923078075;
5 | // transaction 0x3a2fd182d016977a87d33ecf67e81d930e07f47d623320a41028f5bbf869db32
6 | 368839096625159992408953185476375976377428775247 intoken 1454383474624795085458277788004692202315323288702 swaps 120443408692820097171972 input for 0 fee 1177002069144;
7 | // transaction 0x0adb278e7096e67aca0bdb32f9794322a7b8be5835fe21ef0aa012e85b2e77dc
8 | 616870745215506062750269120831072350349526031960 adds 83290999999999977725 tokens and 125401513679990865 eth of liquidity to 1454383474624795085458277788004692202315323288702;
9 | // transaction 0xb0cece3304dc2563fbb2d17736c751b8b441f04683e529c0a251799b7286f6b8
10 | 368839096625159992408953185476375976377428775247 intoken 1454383474624795085458277788004692202315323288702 swaps 2419684155471892146710118 input for 0 fee 1008871952048;
11 | // transaction 0x3f39349f363e3c5d32ea8a41d230edc4f403be2d5dafbd77cd7341ac9efd51c1
12 | 592749341890664977079655364897901372299776479799 intoken 0 swaps 37510569624339969668 input for 1454383474624795085458277788004692202315323288702 fee 428381323231;
13 | // transaction 0xaac35533dbbe6fa61c3aea568c4b48270086df2cb6f2ad93bf3a89ac51639a80
14 | 264847443905096214789120002031549113956393441556 intoken 0 swaps 21123726544996 input for 1454383474624795085458277788004692202315323288702 fee 827791114500;
15 | // transaction 0xaa4324dfbfa1ec1dec75cebeb30b4c9eb52ffec38ecb247ad7776bccad7534a7
16 | 2141306974950481735552225921960368587143761772 intoken 1454383474624795085458277788004692202315323288702 swaps 408643260412421831993104 input for 0 fee 383010674373;
17 | // transaction 0xebbd436b5c11b0c664acae816f2d116d022941b884bfd16fd9f67d75daf01ace
18 | 731289521199623785935383605947824058451768761790 intoken 1454383474624795085458277788004692202315323288702 swaps 1587408508048504338605904 input for 0 fee 2404091720112;
19 | // transaction 0x1895e9c6a5a44dd24c478040377794db2925073294e7048819bd8bab34408b09
20 | 254671187440071431237291880541617283359307276950 intoken 0 swaps 18446744073709551616 input for 1454383474624795085458277788004692202315323288702 fee 2278371825731;
21 | // transaction 0x0bb881e6f400d9dfa1a34b05af1ce1db3bcfb3bd3dc7dbcc7c905fc33cf128d4
22 | 796311812842238625998407404168727967009026153573 intoken 0 swaps 9901270465449784646 input for 1454383474624795085458277788004692202315323288702 fee 488380412630;
23 | formtemplate
24 |
--------------------------------------------------------------------------------
/tests/test5:
--------------------------------------------------------------------------------
1 | //transaction 0xa5b60e6a104c0c37722ed064f248b6fbaa29e8a7ec6e5730b581bb94264604a5
2 | 1165019065021560315215868796733804035767899207773 adds 5981000000000000000000 tokens and 10000000000000000 eth of liquidity to 1451244131477511989369547772603850172508351625871 block 10;
3 | // transaction 0xf70b88737e42ad0320e9ed214bbd945e07ad3ecca2dc45aa58091d8badf785f5
4 | 1165019065021560315215868796733804035767899207773 in 1451244131477511989369547772603850172508351625871 swaps 9999999000000000000000000 input for 0 fee 499711527574 block 12;
--------------------------------------------------------------------------------
/tests/test6:
--------------------------------------------------------------------------------
1 | 0 in 100 gets 10;
2 |
--------------------------------------------------------------------------------
/tests/test_maker:
--------------------------------------------------------------------------------
1 | 0 in 0 gets 0;
2 | 0 in SAI gets 0;
3 | 1 in 0 gets 100000000;
4 | Uniswap in 0 gets 5;
5 | Uniswap in SAI gets 500;
6 | GetPrice 0 SAI;
7 | 1 opens vault 2;
8 | 1 locks 6 collateral to vault 2;
9 | 1 draws 6 debt from vault 2;
10 | 1 in 0 swaps 40 input for SAI fee 5;
11 | 0 bites vault 2;
12 |
--------------------------------------------------------------------------------
/tests/test_maker2:
--------------------------------------------------------------------------------
1 | 0 in 0 gets 0;
2 | 0 in SAI gets 0;
3 | 1 opens vault 101;
4 | 1 locks 5 collateral to vault 101;
5 | 2 is given vault 101;
6 | 2 draws 500 debt from vault 101;
7 | 0 bites vault 101 ;
8 | 1 adds 200 tokens and 1 eth of liquidity to SAI;
9 | 1 adds 100 tokens and 2 eth of liquidity to SAI;
10 |
--------------------------------------------------------------------------------
/tests/testlq:
--------------------------------------------------------------------------------
1 | 411498817949579190580624588476262637924693753695 adds 150000000 tokens and 76000000000000000 eth of liquidity to 1000808495181554617867411332362082075473764097814;
2 |
--------------------------------------------------------------------------------
/uniswap.py:
--------------------------------------------------------------------------------
1 | import re
2 | from collections import defaultdict
3 |
4 |
5 | class Uniswap:
6 | def __init__(self):
7 | self.token_balances = defaultdict(lambda : 0)
8 |
9 | def process(self, tx):
10 | tx = tx.replace(';', '')
11 |
12 | if 'adds' in tx:
13 | self.add_liquidity(tx)
14 | elif 'removes' in tx:
15 | self.remove_liquidity(tx)
16 | elif 'input' in tx:
17 | self.input_swap(tx)
18 | elif 'output' in tx:
19 | self.output_swap(tx)
20 | elif tx.startswith('//'):
21 | pass
22 | else:
23 | print("ILLEGAL ", tx)
24 |
25 | def add_liquidity(self, tx):
26 | vals = re.match(r'(.*) adds (.*) tokens and (.*) eth of liquidity to (.*)', tx)
27 | self.token_balances['0'] += int(vals.group(3))
28 | self.token_balances[vals.group(4)] += int(vals.group(2))
29 |
30 |
31 | def remove_liquidity(self, tx):
32 | vals = re.match(r'(.*) removes (.*) tokens and (.*) eth of liquidity from (.*)', tx)
33 | self.token_balances['0'] -= int(vals.group(3))
34 | self.token_balances[vals.group(4)] -= int(vals.group(2))
35 |
36 | def input_swap(self, tx):
37 | vals = re.match(r'(.*) in (.*) swaps (.*) input for (.*) fee (.*)', tx)
38 | self.token_balances[vals.group(4)] -= ((997 * int(vals.group(3)) * self.token_balances[vals.group(4)]) / (1000 * self.token_balances[vals.group(2)] + 997 * int(vals.group(3)) ))
39 | self.token_balances[vals.group(2)] += int(vals.group(3))
40 |
41 | def output_swap(self, tx):
42 | vals = re.match(r'(.*) in (.*) swaps (.*) for (.*) output fee (.*)', tx)
43 | self.token_balances[vals.group(2)] += ((1000 * int(vals.group(3)) * self.token_balances[vals.group(2)]) / (997 * self.token_balances[vals.group(4)] - int(vals.group(3)) ) + 1)
44 | self.token_balances[vals.group(4)] -= int(vals.group(3))
45 |
46 | def config(self):
47 | return self.token_balances
48 |
--------------------------------------------------------------------------------
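Note: a minimal usage sketch of the Uniswap model above, replaying two transaction strings in the same DSL as tests/test_maker and tests/test_maker2 (the amounts are taken from those test files; the import assumes the class above is importable from uniswap.py).

from uniswap import Uniswap  # assuming uniswap.py above is on the path

u = Uniswap()
u.process('1 adds 200 tokens and 1 eth of liquidity to SAI;')   # pool now holds 1 eth ('0') and 200 SAI
u.process('1 in 0 swaps 40 input for SAI fee 5;')               # eth-in swap against the SAI pool
print(dict(u.config()))  # final eth ('0') and SAI balances held by the pool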
/uniswapv2.py:
--------------------------------------------------------------------------------
1 | import re
2 | from collections import defaultdict
3 |
4 |
5 | class UniswapV2:
6 | def __init__(self, balances={}, exchange_name='UniswapV2'):
7 | self.exchange_name = exchange_name
8 | self.token_balances = defaultdict(lambda : defaultdict(lambda : 0))
9 | self.token_balances[self.exchange_name] = balances
10 |
11 | def process(self, tx):
12 | tx = tx.replace(';', '').strip()
13 |
14 | if 'adds' in tx:
15 | self.add_liquidity(tx)
16 | elif 'removes' in tx:
17 | self.remove_liquidity(tx)
18 | elif 'swaps' in tx:
19 | self.swap(tx)
20 | elif tx.startswith('//'):
21 | pass
22 | else:
23 | print("ILLEGAL ", tx)
24 |
25 | def add_liquidity(self, tx):
26 | vals = re.match(r'(.*) adds (.*) (.*) and (.*) (.*) of liquidity', tx)
27 | token0 = vals.group(3)
28 | token1 = vals.group(5)
29 | amount0 = int(vals.group(2))
30 | amount1 = int(vals.group(4))
31 | address = vals.group(1)
32 | self.token_balances[self.exchange_name][token0] += amount0
33 | self.token_balances[self.exchange_name][token1] += amount1
34 | self.token_balances[address][token0] -= amount0
35 | self.token_balances[address][token1] -= amount1
36 |
37 |
38 | def remove_liquidity(self, tx):
39 | vals = re.match(r'(.*) removes (.*) (.*) and (.*) (.*) of liquidity', tx)
40 | token0 = vals.group(3)
41 | token1 = vals.group(5)
42 | amount0 = int(vals.group(2))
43 | amount1 = int(vals.group(4))
44 | address = vals.group(1)
45 | self.token_balances[self.exchange_name][token0] -= amount0
46 | self.token_balances[self.exchange_name][token1] -= amount1
47 | self.token_balances[address][token0] += amount0
48 | self.token_balances[address][token1] += amount1
49 |
50 |
51 | def swap(self, tx):
52 | vals = re.match(r'(.*) swaps for (.*) by providing (.*) (.*) and (.*) (.*) with change (.*) fee (.*)', tx)
53 | address = vals.group(1)
54 | token_in = vals.group(4)
55 | token_out = vals.group(6)
56 | amount_in_token_in = int(vals.group(3))
57 | amount_in_token_out = int(vals.group(5))
58 | amount_out_token_in = int(vals.group(7))
59 |
60 | amount_out_token_out = (((997 * amount_in_token_in - 1000 * amount_out_token_in) * self.token_balances[self.exchange_name][token_out]) // (1000 * (self.token_balances[self.exchange_name][token_in] - amount_out_token_in) + 997 * amount_in_token_in)) + ((amount_in_token_out * 997) // (1000))
61 |
62 | self.token_balances[self.exchange_name][token_in] += amount_in_token_in - amount_out_token_in
63 | self.token_balances[self.exchange_name][token_out] += amount_in_token_out - amount_out_token_out
64 | self.token_balances[address][token_in] += amount_out_token_in - amount_in_token_in
65 | self.token_balances[address][token_out] += amount_out_token_out - amount_in_token_out
66 |
67 |
68 | def config(self):
69 | return self.token_balances
70 |
--------------------------------------------------------------------------------
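Note: a minimal usage sketch of the UniswapV2 model above, with a toy pool and a made-up transaction in the exact format the swap regex expects; the token names 'T0'/'T1', the address '42', and all amounts are hypothetical.

from uniswapv2 import UniswapV2  # assuming uniswapv2.py above is on the path

amm = UniswapV2(balances={'T0': 10000, 'T1': 10000})
amm.process('42 swaps for T1 by providing 1000 T0 and 0 T1 with change 0 fee 0;')
print(dict(amm.config()['UniswapV2']))  # pool reserves after the constant-product swap
print(dict(amm.config()['42']))         # trader 42's net token deltas (negative T0, positive T1)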
/validation/validate.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import sys
3 | import json
4 |
5 | DEBUG = False
6 |
7 | def check_response(response):
8 | if 'result' not in response:
9 | print(response)
10 | return -2
11 | for result in response['result']['results']:
12 | if 'error' in result:
13 | print(response['id'], result)
14 | return -1
15 | return 0
16 |
17 |
18 | def simulate(block, txs, id):
19 | data = {}
20 | data['jsonrpc'] = '2.0'
21 | data['method'] = 'eth_callBundle'
22 | data['params'] = [txs, block]
23 | data['id'] = id
24 | r = requests.post('http://localhost:8545', json=data)
25 | response = json.loads(r.content)
26 | if DEBUG:
27 | print(data)
28 | print(response)
29 | return check_response(response)
30 |
31 | def total_mev(blocks, attacks):
32 | ret = 0.0
33 | for block in blocks:
34 | ret += attacks[block]['mev']
35 | return ret
36 |
37 | filename = sys.argv[1]
38 |
39 | attacks = {}
40 |
41 | f = open(filename, 'r')
42 | for line in f.readlines():
43 | tokens = line.strip().split(',')
44 | block = hex(int(tokens[0]) - 1)
45 | transactions = [x for x in tokens if x.startswith('0x')]
46 | if len(transactions) == 0:
47 | continue
48 | if block not in attacks:
49 | attacks[block] = {}
50 | attacks[block]['mev'] = float(tokens[1])
51 | if tokens[2] == '1':
52 | attacks[block]['upper'] = transactions
53 | elif tokens[2] == '0':
54 | attacks[block]['lower'] = transactions
55 | # if len(attacks) == 6:
56 | # break
57 |
58 | total = []
59 | tried = []
60 | valid = []
61 | pre_checked = []
62 |
63 | for block in attacks:
64 | total.append(block)
65 | if len(attacks[block]['upper']) == len(set(attacks[block]['upper'])):
66 | # no duplicate transactions
67 | tried.append(block)
68 | u = simulate(block, attacks[block]['upper'], block)
69 | l = simulate(block, attacks[block]['lower'], block)
70 | if u!=-2 and l!=-2:
71 | pre_checked.append(block)
72 | if u==0 and l==0:
73 | valid.append(block)
74 | print("Total: {}, Tried: {}, Valid: {}, Pre-Checked: {}".format(len(total), len(tried), len(valid), len(pre_checked)))
75 | print(json.dumps([total, tried, valid, pre_checked]))
76 | print("Total: {}, Tried: {}, Valid: {}, Pre-Checked: {}".format(len(total), len(tried), len(valid), len(pre_checked)))
77 | print("Total mev: {}, Tried mev: {}, Valid mev: {}, Pre-Checked mev: {}".format(total_mev(total, attacks), total_mev(tried, attacks), total_mev(valid, attacks), total_mev(pre_checked, attacks)))
78 |
79 |
--------------------------------------------------------------------------------
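Note: for reference, a sketch of the JSON-RPC request that simulate() in validate.py builds. The transaction list and block number are placeholders, and this assumes a locally running node that accepts eth_callBundle with the same two-element params shape the script uses.

# Hypothetical bundle-simulation payload mirroring simulate() above.
payload = {
    'jsonrpc': '2.0',
    'method': 'eth_callBundle',
    'params': [['0x<tx-1>', '0x<tx-2>'], hex(10245708)],  # [transactions, block]; both placeholders
    'id': hex(10245708),
}
# import requests, json
# response = json.loads(requests.post('http://localhost:8545', json=payload).content)
# check_response(response)  # 0 = all results ok, -1 = a tx errored, -2 = no 'result' in the reply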