├── .dockerignore
├── .envrc
├── .github
│   └── workflows
│       ├── build_next_release_cryptobot.yml
│       ├── build_tag_cryptobot_img.yaml
│       ├── creates_new_tag.yaml
│       └── pr_docker_tests.yaml
├── .gitignore
├── .hookz.yaml
├── .mypy.ini
├── .python-version
├── COPYING
├── Dockerfile
├── Dockerfile.tests
├── README.md
├── app.py
├── cache
│   └── .empty
├── configs
│   └── .empty
├── control
│   └── .empty
├── cryptobot.jpg
├── examples
│   ├── BuyDropSellRecoveryStrategy.yaml
│   ├── BuyMoonSellRecoveryStrategy.yaml
│   ├── BuyOnGrowthTrendAfterDropStrategy.yaml
│   ├── automated-backtesting.yaml
│   ├── backtesting.yaml
│   ├── secrets.yaml
│   ├── template.yaml
│   └── testnet.example.yaml
├── klines_caching_service.py
├── lib
│   ├── __init__.py
│   ├── bot.py
│   ├── coin.py
│   └── helpers.py
├── log
│   └── .empty
├── price_log_service.py
├── pyproject.toml
├── requirements-dev.txt
├── requirements.txt
├── results
│   └── .empty
├── run
├── secrets
│   └── .empty
├── state
│   └── .empty
├── strategies
│   ├── BuyDropSellRecoveryStrategy.py
│   ├── BuyDropSellRecoveryStrategyWhenBTCisDown.py
│   ├── BuyDropSellRecoveryStrategyWhenBTCisUp.py
│   ├── BuyMoonSellRecoveryStrategy.py
│   ├── BuyOnGrowthTrendAfterDropStrategy.py
│   ├── BuyOnRecoveryAfterDropDuringGrowthTrendStrategy.py
│   ├── BuyOnRecoveryAfterDropFromAverageStrategy.py
│   └── __init__.py
├── tests
│   ├── BuyDropSellRecoveryStrategy.yaml
│   ├── BuyDropSellRecoveryStrategyWhenBTCisDown.yaml
│   ├── BuyDropSellRecoveryStrategyWhenBTCisUp.yaml
│   ├── BuyMoonSellRecoveryStrategy.yaml
│   ├── BuyOnGrowthTrendAfterDropStrategy.yaml
│   ├── BuyOnRecoveryAfterDropDuringGrowthTrendStrategy.yaml
│   ├── BuyOnRecoveryAfterDropFromAverageStrategy.yaml
│   ├── __init__.py
│   ├── config.yaml
│   ├── fake.yaml
│   ├── index.json.gz
│   ├── index_v2.json.gz
│   ├── price.log.gz
│   ├── prove-backtesting.yaml
│   ├── pytest.ini
│   ├── test_bot.py
│   ├── test_klines_caching_service.py
│   └── test_prove_backtesting.py
├── tmp
│   ├── .empty
│   └── .gitignore
└── utils
    ├── README.md
    ├── __init__.py
    ├── best_runs.py
    ├── config-endpoint-service.py
    ├── config-endpoint-service.sh
    ├── dedup-logs.py
    ├── migrate_cache_files.py
    ├── prove-backtesting.py
    ├── prove-backtesting.sh
    ├── pull_klines.py
    └── split_klines_into_symbol_logs.py
/.dockerignore:
--------------------------------------------------------------------------------
1 | cache/*
2 | configs/*
3 | .git
4 | klines_caching_service
5 | log/*
6 | .mypy_cache/*
7 | __pycache__
8 | __pycache__/*
9 | results/*
10 | secrets/*
11 | state/*
12 | .tags
13 | tags
14 | .venv
15 | .venv/*
16 | .git
17 |
--------------------------------------------------------------------------------
/.envrc:
--------------------------------------------------------------------------------
1 | if [ -e .envrc.local ]; then
2 | source .envrc.local
3 | fi
4 |
--------------------------------------------------------------------------------
/.github/workflows/build_next_release_cryptobot.yml:
--------------------------------------------------------------------------------
1 | name: docker build cryptobot:next_release
2 | on:
3 | push:
4 | branches:
5 | - "next_release"
6 |
7 | jobs:
8 | push_to_registry:
9 | name: Push Docker image to GitHub Container Registry
10 | runs-on: self-hosted
11 | permissions:
12 | packages: write
13 | contents: write
14 | steps:
15 | - name: Check out the repo
16 | uses: actions/checkout@v2
17 | with:
18 | fetch-depth: 0
19 |
20 | - name: Login to GitHub Container Registry
21 | uses: docker/login-action@v1
22 | with:
23 | registry: ghcr.io
24 | username: ${{ github.actor }}
25 | password: ${{ secrets.GITHUB_TOKEN }}
26 |
27 | - name: docker build
28 | run:
29 | ./run build TAG=next_release
30 |
31 | - name: docker push
32 | run:
33 | docker push ghcr.io/azulinho/cryptobot:next_release
34 |
35 |
--------------------------------------------------------------------------------
/.github/workflows/build_tag_cryptobot_img.yaml:
--------------------------------------------------------------------------------
1 | name: docker build cryptobot:TAG
2 | on:
3 | push:
4 | tags:
5 | - "*"
6 | jobs:
7 | push_to_registry:
8 | name: Push Docker image to GitHub Container Registry
9 | runs-on: self-hosted
10 | permissions:
11 | packages: write
12 | contents: write
13 | steps:
14 | - name: Check out the repo
15 | uses: actions/checkout@v2
16 | with:
17 | fetch-depth: 0
18 |
19 | - name: Login to GitHub Container Registry
20 | uses: docker/login-action@v1
21 | with:
22 | registry: ghcr.io
23 | username: ${{ github.actor }}
24 | password: ${{ secrets.GITHUB_TOKEN }}
25 |
26 | - name: pull latest base image
27 | run:
28 | docker pull ubuntu:focal
29 |
30 | - name: docker build latest
31 | run: |
32 | ./run build TAG=latest
33 | docker tag ghcr.io/azulinho/cryptobot:latest ghcr.io/azulinho/cryptobot:${{ github.ref_name }}
34 |
35 | - name: docker push latest and tag
36 | run: |
37 | docker push ghcr.io/azulinho/cryptobot:latest
38 | docker push ghcr.io/azulinho/cryptobot:${{ github.ref_name }}
39 |
40 | - name: sets python version to pyston
41 | run: |
42 | echo pyston-2.3.5 > .python-version
43 |
44 | - name: docker build pyston
45 | run:
46 | ./run build TAG=pyston
47 |
48 | - name: docker push pyston
49 | run:
50 | docker push ghcr.io/azulinho/cryptobot:pyston
51 |
52 | - name: sets python version to pypy
53 | run: |
54 | echo pypy3.9-7.3.11 > .python-version
55 |
56 | - name: docker build pypy
57 | run:
58 | ./run build TAG=pypy
59 |
60 | - name: docker push pypy
61 | run:
62 | docker push ghcr.io/azulinho/cryptobot:pypy
63 |
--------------------------------------------------------------------------------
/.github/workflows/creates_new_tag.yaml:
--------------------------------------------------------------------------------
1 | name: Tag new release
2 | on:
3 | push:
4 | branches: master
5 | jobs:
6 | tag_and_release:
7 | name: Create and push new tag and release
8 | runs-on: ubuntu-latest
9 | permissions:
10 | packages: write
11 | contents: write
12 | steps:
13 | - name: Check out the repo
14 | uses: actions/checkout@v2
15 | with:
16 | fetch-depth: 0
17 |
18 | - name: Bump version and push tag
19 | id: tag_version
20 | uses: mathieudutour/github-tag-action@v5.6
21 | with:
22 | github_token: ${{ secrets.PAT_REPO }}
23 |
24 | - name: Create a GitHub release
25 | uses: ncipollo/release-action@v1
26 | with:
27 | tag: ${{ steps.tag_version.outputs.new_tag }}
28 | name: Release ${{ steps.tag_version.outputs.new_tag }}
29 | body: ${{ steps.tag_version.outputs.changelog }}
30 |
--------------------------------------------------------------------------------
/.github/workflows/pr_docker_tests.yaml:
--------------------------------------------------------------------------------
1 | name: Run PR docker based tests
2 | on: pull_request
3 |
4 | jobs:
5 | pr_docker_tests:
6 | name: run ci_pr_docker_tests TAG=pr
7 | runs-on: self-hosted
8 | env:
9 | ACTIONS_ALLOW_UNSECURE_COMMANDS: true
10 | TAG: pr
11 | steps:
12 | - name: Check out the repo
13 | uses: actions/checkout@v2
14 |
15 | - name: Login to GitHub Container Registry
16 | uses: docker/login-action@v1
17 | with:
18 | registry: ghcr.io
19 | username: ${{ github.actor }}
20 | password: ${{ secrets.GITHUB_TOKEN }}
21 |
22 | - name: ./run github_actions_ci_pr_tests
23 | run: |
24 | set -ex
25 | ./run github_actions_ci_pr_docker_tests TAG=pr
26 |
27 | - name: ./run tests
28 | run: |
29 | set -ex
30 | ./run tests
31 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .log
2 | *.log
3 | __pycache__/
4 | config.py
5 | .coins.pickle
6 | venv
7 | .*.pickle
8 | log/*
9 | testlogs/
10 | tickers/*
11 | configs/*
12 | tickers/all.txt tickers/binance-alts-eth.txt tickers/binance-bnb.top8.txt tickers/binance-bnb.txt tickers/binance-btc.top8.txt tickers/binance-btc.txt tickers/binance-eth.top8.txt tickers/cfx.txt tickers/dotbnb.txt tickers/icp.txt tickers/tickers.bnbeth.txt tickers/tickers.btc.txt tickers/tickers.BTC.txt tickers/tickers.dot.txt tickers/tickers.eth.txt tickers/tickers.highvol.safe.txt tickers/tickers.highvol.txt tickers/tickers.keepusdt.txt tickers/tickers.maticbnb.txt tickers/tickers.matic.txt tickers/tickers.next9.txt tickers/tickers.sol.txt tickers/tickers.top2.txt tickers/tickers.top4.txt tickers/tickers.top8.txt tickers/tickers.top9.txt tickers/tickers.txt tickers/tickers.txt.72 tickers/top3bnb.txt tickers/top5usdt.txt
13 | secrets/*
14 | state/*
15 | cache/*
16 | results/*
17 | .mypy_cache
18 | __pycache__
19 | .venv
20 | strategies/Local*
21 | *.~
22 | *~
23 | *.prof
24 | control/*
25 | .tests*.txt
26 | lastfewdays.log.gz
27 | .envrc.local
28 | *.inotifywait
29 |
--------------------------------------------------------------------------------
/.hookz.yaml:
--------------------------------------------------------------------------------
1 | # run with:
2 | # mkdir .git/hooks
3 | # hookz initialize --verbose-output
4 | # hookz reset --verbose-output
5 | #
6 | hooks:
7 | - type: pre-commit
8 | actions:
9 | - name: "run pre-commit-checks"
10 | exec: "bash"
11 | args: ["./run", "tests"]
12 | - type: pre-rebase
13 | actions:
14 | - name: "run pre-commit-checks"
15 | exec: "bash"
16 | args: ["./run", "tests"]
17 |
--------------------------------------------------------------------------------
/.mypy.ini:
--------------------------------------------------------------------------------
1 | [mypy]
2 | disable_error_code = annotation-unchecked
3 |
4 | [mypy-binance.*]
5 | ignore_missing_imports = True
6 | [mypy-requests.*]
7 | ignore_missing_imports = True
8 | [mypy-neotermcolor.*]
9 | ignore_missing_imports = True
10 | [mypy-lz4.*]
11 | ignore_missing_imports = True
12 | [mypy-epdb.*]
13 | ignore_missing_imports = True
14 | [mypy-udatetime.*]
15 | ignore_missing_imports = True
16 | [mypy-yaml.*]
17 | ignore_missing_imports = True
18 | [mypy-faster_fifo.*]
19 | ignore_missing_imports = True
20 | [mypy-faster_fifo_reduction.*]
21 | ignore_missing_imports = True
22 | [mypy-tenacity]
23 | implicit_reexport = True
24 | [mypy-lib.helpers]
25 | implicit_reexport = True
26 |
--------------------------------------------------------------------------------
/.python-version:
--------------------------------------------------------------------------------
1 | 3.11.1
2 |
--------------------------------------------------------------------------------
/COPYING:
--------------------------------------------------------------------------------
1 | GNU GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 | Preamble
9 |
10 | The GNU General Public License is a free, copyleft license for
11 | software and other kinds of works.
12 |
13 | The licenses for most software and other practical works are designed
14 | to take away your freedom to share and change the works. By contrast,
15 | the GNU General Public License is intended to guarantee your freedom to
16 | share and change all versions of a program--to make sure it remains free
17 | software for all its users. We, the Free Software Foundation, use the
18 | GNU General Public License for most of our software; it applies also to
19 | any other work released this way by its authors. You can apply it to
20 | your programs, too.
21 |
22 | When we speak of free software, we are referring to freedom, not
23 | price. Our General Public Licenses are designed to make sure that you
24 | have the freedom to distribute copies of free software (and charge for
25 | them if you wish), that you receive source code or can get it if you
26 | want it, that you can change the software or use pieces of it in new
27 | free programs, and that you know you can do these things.
28 |
29 | To protect your rights, we need to prevent others from denying you
30 | these rights or asking you to surrender the rights. Therefore, you have
31 | certain responsibilities if you distribute copies of the software, or if
32 | you modify it: responsibilities to respect the freedom of others.
33 |
34 | For example, if you distribute copies of such a program, whether
35 | gratis or for a fee, you must pass on to the recipients the same
36 | freedoms that you received. You must make sure that they, too, receive
37 | or can get the source code. And you must show them these terms so they
38 | know their rights.
39 |
40 | Developers that use the GNU GPL protect your rights with two steps:
41 | (1) assert copyright on the software, and (2) offer you this License
42 | giving you legal permission to copy, distribute and/or modify it.
43 |
44 | For the developers' and authors' protection, the GPL clearly explains
45 | that there is no warranty for this free software. For both users' and
46 | authors' sake, the GPL requires that modified versions be marked as
47 | changed, so that their problems will not be attributed erroneously to
48 | authors of previous versions.
49 |
50 | Some devices are designed to deny users access to install or run
51 | modified versions of the software inside them, although the manufacturer
52 | can do so. This is fundamentally incompatible with the aim of
53 | protecting users' freedom to change the software. The systematic
54 | pattern of such abuse occurs in the area of products for individuals to
55 | use, which is precisely where it is most unacceptable. Therefore, we
56 | have designed this version of the GPL to prohibit the practice for those
57 | products. If such problems arise substantially in other domains, we
58 | stand ready to extend this provision to those domains in future versions
59 | of the GPL, as needed to protect the freedom of users.
60 |
61 | Finally, every program is threatened constantly by software patents.
62 | States should not allow patents to restrict development and use of
63 | software on general-purpose computers, but in those that do, we wish to
64 | avoid the special danger that patents applied to a free program could
65 | make it effectively proprietary. To prevent this, the GPL assures that
66 | patents cannot be used to render the program non-free.
67 |
68 | The precise terms and conditions for copying, distribution and
69 | modification follow.
70 |
71 | TERMS AND CONDITIONS
72 |
73 | 0. Definitions.
74 |
75 | "This License" refers to version 3 of the GNU General Public License.
76 |
77 | "Copyright" also means copyright-like laws that apply to other kinds of
78 | works, such as semiconductor masks.
79 |
80 | "The Program" refers to any copyrightable work licensed under this
81 | License. Each licensee is addressed as "you". "Licensees" and
82 | "recipients" may be individuals or organizations.
83 |
84 | To "modify" a work means to copy from or adapt all or part of the work
85 | in a fashion requiring copyright permission, other than the making of an
86 | exact copy. The resulting work is called a "modified version" of the
87 | earlier work or a work "based on" the earlier work.
88 |
89 | A "covered work" means either the unmodified Program or a work based
90 | on the Program.
91 |
92 | To "propagate" a work means to do anything with it that, without
93 | permission, would make you directly or secondarily liable for
94 | infringement under applicable copyright law, except executing it on a
95 | computer or modifying a private copy. Propagation includes copying,
96 | distribution (with or without modification), making available to the
97 | public, and in some countries other activities as well.
98 |
99 | To "convey" a work means any kind of propagation that enables other
100 | parties to make or receive copies. Mere interaction with a user through
101 | a computer network, with no transfer of a copy, is not conveying.
102 |
103 | An interactive user interface displays "Appropriate Legal Notices"
104 | to the extent that it includes a convenient and prominently visible
105 | feature that (1) displays an appropriate copyright notice, and (2)
106 | tells the user that there is no warranty for the work (except to the
107 | extent that warranties are provided), that licensees may convey the
108 | work under this License, and how to view a copy of this License. If
109 | the interface presents a list of user commands or options, such as a
110 | menu, a prominent item in the list meets this criterion.
111 |
112 | 1. Source Code.
113 |
114 | The "source code" for a work means the preferred form of the work
115 | for making modifications to it. "Object code" means any non-source
116 | form of a work.
117 |
118 | A "Standard Interface" means an interface that either is an official
119 | standard defined by a recognized standards body, or, in the case of
120 | interfaces specified for a particular programming language, one that
121 | is widely used among developers working in that language.
122 |
123 | The "System Libraries" of an executable work include anything, other
124 | than the work as a whole, that (a) is included in the normal form of
125 | packaging a Major Component, but which is not part of that Major
126 | Component, and (b) serves only to enable use of the work with that
127 | Major Component, or to implement a Standard Interface for which an
128 | implementation is available to the public in source code form. A
129 | "Major Component", in this context, means a major essential component
130 | (kernel, window system, and so on) of the specific operating system
131 | (if any) on which the executable work runs, or a compiler used to
132 | produce the work, or an object code interpreter used to run it.
133 |
134 | The "Corresponding Source" for a work in object code form means all
135 | the source code needed to generate, install, and (for an executable
136 | work) run the object code and to modify the work, including scripts to
137 | control those activities. However, it does not include the work's
138 | System Libraries, or general-purpose tools or generally available free
139 | programs which are used unmodified in performing those activities but
140 | which are not part of the work. For example, Corresponding Source
141 | includes interface definition files associated with source files for
142 | the work, and the source code for shared libraries and dynamically
143 | linked subprograms that the work is specifically designed to require,
144 | such as by intimate data communication or control flow between those
145 | subprograms and other parts of the work.
146 |
147 | The Corresponding Source need not include anything that users
148 | can regenerate automatically from other parts of the Corresponding
149 | Source.
150 |
151 | The Corresponding Source for a work in source code form is that
152 | same work.
153 |
154 | 2. Basic Permissions.
155 |
156 | All rights granted under this License are granted for the term of
157 | copyright on the Program, and are irrevocable provided the stated
158 | conditions are met. This License explicitly affirms your unlimited
159 | permission to run the unmodified Program. The output from running a
160 | covered work is covered by this License only if the output, given its
161 | content, constitutes a covered work. This License acknowledges your
162 | rights of fair use or other equivalent, as provided by copyright law.
163 |
164 | You may make, run and propagate covered works that you do not
165 | convey, without conditions so long as your license otherwise remains
166 | in force. You may convey covered works to others for the sole purpose
167 | of having them make modifications exclusively for you, or provide you
168 | with facilities for running those works, provided that you comply with
169 | the terms of this License in conveying all material for which you do
170 | not control copyright. Those thus making or running the covered works
171 | for you must do so exclusively on your behalf, under your direction
172 | and control, on terms that prohibit them from making any copies of
173 | your copyrighted material outside their relationship with you.
174 |
175 | Conveying under any other circumstances is permitted solely under
176 | the conditions stated below. Sublicensing is not allowed; section 10
177 | makes it unnecessary.
178 |
179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
180 |
181 | No covered work shall be deemed part of an effective technological
182 | measure under any applicable law fulfilling obligations under article
183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or
184 | similar laws prohibiting or restricting circumvention of such
185 | measures.
186 |
187 | When you convey a covered work, you waive any legal power to forbid
188 | circumvention of technological measures to the extent such circumvention
189 | is effected by exercising rights under this License with respect to
190 | the covered work, and you disclaim any intention to limit operation or
191 | modification of the work as a means of enforcing, against the work's
192 | users, your or third parties' legal rights to forbid circumvention of
193 | technological measures.
194 |
195 | 4. Conveying Verbatim Copies.
196 |
197 | You may convey verbatim copies of the Program's source code as you
198 | receive it, in any medium, provided that you conspicuously and
199 | appropriately publish on each copy an appropriate copyright notice;
200 | keep intact all notices stating that this License and any
201 | non-permissive terms added in accord with section 7 apply to the code;
202 | keep intact all notices of the absence of any warranty; and give all
203 | recipients a copy of this License along with the Program.
204 |
205 | You may charge any price or no price for each copy that you convey,
206 | and you may offer support or warranty protection for a fee.
207 |
208 | 5. Conveying Modified Source Versions.
209 |
210 | You may convey a work based on the Program, or the modifications to
211 | produce it from the Program, in the form of source code under the
212 | terms of section 4, provided that you also meet all of these conditions:
213 |
214 | a) The work must carry prominent notices stating that you modified
215 | it, and giving a relevant date.
216 |
217 | b) The work must carry prominent notices stating that it is
218 | released under this License and any conditions added under section
219 | 7. This requirement modifies the requirement in section 4 to
220 | "keep intact all notices".
221 |
222 | c) You must license the entire work, as a whole, under this
223 | License to anyone who comes into possession of a copy. This
224 | License will therefore apply, along with any applicable section 7
225 | additional terms, to the whole of the work, and all its parts,
226 | regardless of how they are packaged. This License gives no
227 | permission to license the work in any other way, but it does not
228 | invalidate such permission if you have separately received it.
229 |
230 | d) If the work has interactive user interfaces, each must display
231 | Appropriate Legal Notices; however, if the Program has interactive
232 | interfaces that do not display Appropriate Legal Notices, your
233 | work need not make them do so.
234 |
235 | A compilation of a covered work with other separate and independent
236 | works, which are not by their nature extensions of the covered work,
237 | and which are not combined with it such as to form a larger program,
238 | in or on a volume of a storage or distribution medium, is called an
239 | "aggregate" if the compilation and its resulting copyright are not
240 | used to limit the access or legal rights of the compilation's users
241 | beyond what the individual works permit. Inclusion of a covered work
242 | in an aggregate does not cause this License to apply to the other
243 | parts of the aggregate.
244 |
245 | 6. Conveying Non-Source Forms.
246 |
247 | You may convey a covered work in object code form under the terms
248 | of sections 4 and 5, provided that you also convey the
249 | machine-readable Corresponding Source under the terms of this License,
250 | in one of these ways:
251 |
252 | a) Convey the object code in, or embodied in, a physical product
253 | (including a physical distribution medium), accompanied by the
254 | Corresponding Source fixed on a durable physical medium
255 | customarily used for software interchange.
256 |
257 | b) Convey the object code in, or embodied in, a physical product
258 | (including a physical distribution medium), accompanied by a
259 | written offer, valid for at least three years and valid for as
260 | long as you offer spare parts or customer support for that product
261 | model, to give anyone who possesses the object code either (1) a
262 | copy of the Corresponding Source for all the software in the
263 | product that is covered by this License, on a durable physical
264 | medium customarily used for software interchange, for a price no
265 | more than your reasonable cost of physically performing this
266 | conveying of source, or (2) access to copy the
267 | Corresponding Source from a network server at no charge.
268 |
269 | c) Convey individual copies of the object code with a copy of the
270 | written offer to provide the Corresponding Source. This
271 | alternative is allowed only occasionally and noncommercially, and
272 | only if you received the object code with such an offer, in accord
273 | with subsection 6b.
274 |
275 | d) Convey the object code by offering access from a designated
276 | place (gratis or for a charge), and offer equivalent access to the
277 | Corresponding Source in the same way through the same place at no
278 | further charge. You need not require recipients to copy the
279 | Corresponding Source along with the object code. If the place to
280 | copy the object code is a network server, the Corresponding Source
281 | may be on a different server (operated by you or a third party)
282 | that supports equivalent copying facilities, provided you maintain
283 | clear directions next to the object code saying where to find the
284 | Corresponding Source. Regardless of what server hosts the
285 | Corresponding Source, you remain obligated to ensure that it is
286 | available for as long as needed to satisfy these requirements.
287 |
288 | e) Convey the object code using peer-to-peer transmission, provided
289 | you inform other peers where the object code and Corresponding
290 | Source of the work are being offered to the general public at no
291 | charge under subsection 6d.
292 |
293 | A separable portion of the object code, whose source code is excluded
294 | from the Corresponding Source as a System Library, need not be
295 | included in conveying the object code work.
296 |
297 | A "User Product" is either (1) a "consumer product", which means any
298 | tangible personal property which is normally used for personal, family,
299 | or household purposes, or (2) anything designed or sold for incorporation
300 | into a dwelling. In determining whether a product is a consumer product,
301 | doubtful cases shall be resolved in favor of coverage. For a particular
302 | product received by a particular user, "normally used" refers to a
303 | typical or common use of that class of product, regardless of the status
304 | of the particular user or of the way in which the particular user
305 | actually uses, or expects or is expected to use, the product. A product
306 | is a consumer product regardless of whether the product has substantial
307 | commercial, industrial or non-consumer uses, unless such uses represent
308 | the only significant mode of use of the product.
309 |
310 | "Installation Information" for a User Product means any methods,
311 | procedures, authorization keys, or other information required to install
312 | and execute modified versions of a covered work in that User Product from
313 | a modified version of its Corresponding Source. The information must
314 | suffice to ensure that the continued functioning of the modified object
315 | code is in no case prevented or interfered with solely because
316 | modification has been made.
317 |
318 | If you convey an object code work under this section in, or with, or
319 | specifically for use in, a User Product, and the conveying occurs as
320 | part of a transaction in which the right of possession and use of the
321 | User Product is transferred to the recipient in perpetuity or for a
322 | fixed term (regardless of how the transaction is characterized), the
323 | Corresponding Source conveyed under this section must be accompanied
324 | by the Installation Information. But this requirement does not apply
325 | if neither you nor any third party retains the ability to install
326 | modified object code on the User Product (for example, the work has
327 | been installed in ROM).
328 |
329 | The requirement to provide Installation Information does not include a
330 | requirement to continue to provide support service, warranty, or updates
331 | for a work that has been modified or installed by the recipient, or for
332 | the User Product in which it has been modified or installed. Access to a
333 | network may be denied when the modification itself materially and
334 | adversely affects the operation of the network or violates the rules and
335 | protocols for communication across the network.
336 |
337 | Corresponding Source conveyed, and Installation Information provided,
338 | in accord with this section must be in a format that is publicly
339 | documented (and with an implementation available to the public in
340 | source code form), and must require no special password or key for
341 | unpacking, reading or copying.
342 |
343 | 7. Additional Terms.
344 |
345 | "Additional permissions" are terms that supplement the terms of this
346 | License by making exceptions from one or more of its conditions.
347 | Additional permissions that are applicable to the entire Program shall
348 | be treated as though they were included in this License, to the extent
349 | that they are valid under applicable law. If additional permissions
350 | apply only to part of the Program, that part may be used separately
351 | under those permissions, but the entire Program remains governed by
352 | this License without regard to the additional permissions.
353 |
354 | When you convey a copy of a covered work, you may at your option
355 | remove any additional permissions from that copy, or from any part of
356 | it. (Additional permissions may be written to require their own
357 | removal in certain cases when you modify the work.) You may place
358 | additional permissions on material, added by you to a covered work,
359 | for which you have or can give appropriate copyright permission.
360 |
361 | Notwithstanding any other provision of this License, for material you
362 | add to a covered work, you may (if authorized by the copyright holders of
363 | that material) supplement the terms of this License with terms:
364 |
365 | a) Disclaiming warranty or limiting liability differently from the
366 | terms of sections 15 and 16 of this License; or
367 |
368 | b) Requiring preservation of specified reasonable legal notices or
369 | author attributions in that material or in the Appropriate Legal
370 | Notices displayed by works containing it; or
371 |
372 | c) Prohibiting misrepresentation of the origin of that material, or
373 | requiring that modified versions of such material be marked in
374 | reasonable ways as different from the original version; or
375 |
376 | d) Limiting the use for publicity purposes of names of licensors or
377 | authors of the material; or
378 |
379 | e) Declining to grant rights under trademark law for use of some
380 | trade names, trademarks, or service marks; or
381 |
382 | f) Requiring indemnification of licensors and authors of that
383 | material by anyone who conveys the material (or modified versions of
384 | it) with contractual assumptions of liability to the recipient, for
385 | any liability that these contractual assumptions directly impose on
386 | those licensors and authors.
387 |
388 | All other non-permissive additional terms are considered "further
389 | restrictions" within the meaning of section 10. If the Program as you
390 | received it, or any part of it, contains a notice stating that it is
391 | governed by this License along with a term that is a further
392 | restriction, you may remove that term. If a license document contains
393 | a further restriction but permits relicensing or conveying under this
394 | License, you may add to a covered work material governed by the terms
395 | of that license document, provided that the further restriction does
396 | not survive such relicensing or conveying.
397 |
398 | If you add terms to a covered work in accord with this section, you
399 | must place, in the relevant source files, a statement of the
400 | additional terms that apply to those files, or a notice indicating
401 | where to find the applicable terms.
402 |
403 | Additional terms, permissive or non-permissive, may be stated in the
404 | form of a separately written license, or stated as exceptions;
405 | the above requirements apply either way.
406 |
407 | 8. Termination.
408 |
409 | You may not propagate or modify a covered work except as expressly
410 | provided under this License. Any attempt otherwise to propagate or
411 | modify it is void, and will automatically terminate your rights under
412 | this License (including any patent licenses granted under the third
413 | paragraph of section 11).
414 |
415 | However, if you cease all violation of this License, then your
416 | license from a particular copyright holder is reinstated (a)
417 | provisionally, unless and until the copyright holder explicitly and
418 | finally terminates your license, and (b) permanently, if the copyright
419 | holder fails to notify you of the violation by some reasonable means
420 | prior to 60 days after the cessation.
421 |
422 | Moreover, your license from a particular copyright holder is
423 | reinstated permanently if the copyright holder notifies you of the
424 | violation by some reasonable means, this is the first time you have
425 | received notice of violation of this License (for any work) from that
426 | copyright holder, and you cure the violation prior to 30 days after
427 | your receipt of the notice.
428 |
429 | Termination of your rights under this section does not terminate the
430 | licenses of parties who have received copies or rights from you under
431 | this License. If your rights have been terminated and not permanently
432 | reinstated, you do not qualify to receive new licenses for the same
433 | material under section 10.
434 |
435 | 9. Acceptance Not Required for Having Copies.
436 |
437 | You are not required to accept this License in order to receive or
438 | run a copy of the Program. Ancillary propagation of a covered work
439 | occurring solely as a consequence of using peer-to-peer transmission
440 | to receive a copy likewise does not require acceptance. However,
441 | nothing other than this License grants you permission to propagate or
442 | modify any covered work. These actions infringe copyright if you do
443 | not accept this License. Therefore, by modifying or propagating a
444 | covered work, you indicate your acceptance of this License to do so.
445 |
446 | 10. Automatic Licensing of Downstream Recipients.
447 |
448 | Each time you convey a covered work, the recipient automatically
449 | receives a license from the original licensors, to run, modify and
450 | propagate that work, subject to this License. You are not responsible
451 | for enforcing compliance by third parties with this License.
452 |
453 | An "entity transaction" is a transaction transferring control of an
454 | organization, or substantially all assets of one, or subdividing an
455 | organization, or merging organizations. If propagation of a covered
456 | work results from an entity transaction, each party to that
457 | transaction who receives a copy of the work also receives whatever
458 | licenses to the work the party's predecessor in interest had or could
459 | give under the previous paragraph, plus a right to possession of the
460 | Corresponding Source of the work from the predecessor in interest, if
461 | the predecessor has it or can get it with reasonable efforts.
462 |
463 | You may not impose any further restrictions on the exercise of the
464 | rights granted or affirmed under this License. For example, you may
465 | not impose a license fee, royalty, or other charge for exercise of
466 | rights granted under this License, and you may not initiate litigation
467 | (including a cross-claim or counterclaim in a lawsuit) alleging that
468 | any patent claim is infringed by making, using, selling, offering for
469 | sale, or importing the Program or any portion of it.
470 |
471 | 11. Patents.
472 |
473 | A "contributor" is a copyright holder who authorizes use under this
474 | License of the Program or a work on which the Program is based. The
475 | work thus licensed is called the contributor's "contributor version".
476 |
477 | A contributor's "essential patent claims" are all patent claims
478 | owned or controlled by the contributor, whether already acquired or
479 | hereafter acquired, that would be infringed by some manner, permitted
480 | by this License, of making, using, or selling its contributor version,
481 | but do not include claims that would be infringed only as a
482 | consequence of further modification of the contributor version. For
483 | purposes of this definition, "control" includes the right to grant
484 | patent sublicenses in a manner consistent with the requirements of
485 | this License.
486 |
487 | Each contributor grants you a non-exclusive, worldwide, royalty-free
488 | patent license under the contributor's essential patent claims, to
489 | make, use, sell, offer for sale, import and otherwise run, modify and
490 | propagate the contents of its contributor version.
491 |
492 | In the following three paragraphs, a "patent license" is any express
493 | agreement or commitment, however denominated, not to enforce a patent
494 | (such as an express permission to practice a patent or covenant not to
495 | sue for patent infringement). To "grant" such a patent license to a
496 | party means to make such an agreement or commitment not to enforce a
497 | patent against the party.
498 |
499 | If you convey a covered work, knowingly relying on a patent license,
500 | and the Corresponding Source of the work is not available for anyone
501 | to copy, free of charge and under the terms of this License, through a
502 | publicly available network server or other readily accessible means,
503 | then you must either (1) cause the Corresponding Source to be so
504 | available, or (2) arrange to deprive yourself of the benefit of the
505 | patent license for this particular work, or (3) arrange, in a manner
506 | consistent with the requirements of this License, to extend the patent
507 | license to downstream recipients. "Knowingly relying" means you have
508 | actual knowledge that, but for the patent license, your conveying the
509 | covered work in a country, or your recipient's use of the covered work
510 | in a country, would infringe one or more identifiable patents in that
511 | country that you have reason to believe are valid.
512 |
513 | If, pursuant to or in connection with a single transaction or
514 | arrangement, you convey, or propagate by procuring conveyance of, a
515 | covered work, and grant a patent license to some of the parties
516 | receiving the covered work authorizing them to use, propagate, modify
517 | or convey a specific copy of the covered work, then the patent license
518 | you grant is automatically extended to all recipients of the covered
519 | work and works based on it.
520 |
521 | A patent license is "discriminatory" if it does not include within
522 | the scope of its coverage, prohibits the exercise of, or is
523 | conditioned on the non-exercise of one or more of the rights that are
524 | specifically granted under this License. You may not convey a covered
525 | work if you are a party to an arrangement with a third party that is
526 | in the business of distributing software, under which you make payment
527 | to the third party based on the extent of your activity of conveying
528 | the work, and under which the third party grants, to any of the
529 | parties who would receive the covered work from you, a discriminatory
530 | patent license (a) in connection with copies of the covered work
531 | conveyed by you (or copies made from those copies), or (b) primarily
532 | for and in connection with specific products or compilations that
533 | contain the covered work, unless you entered into that arrangement,
534 | or that patent license was granted, prior to 28 March 2007.
535 |
536 | Nothing in this License shall be construed as excluding or limiting
537 | any implied license or other defenses to infringement that may
538 | otherwise be available to you under applicable patent law.
539 |
540 | 12. No Surrender of Others' Freedom.
541 |
542 | If conditions are imposed on you (whether by court order, agreement or
543 | otherwise) that contradict the conditions of this License, they do not
544 | excuse you from the conditions of this License. If you cannot convey a
545 | covered work so as to satisfy simultaneously your obligations under this
546 | License and any other pertinent obligations, then as a consequence you may
547 | not convey it at all. For example, if you agree to terms that obligate you
548 | to collect a royalty for further conveying from those to whom you convey
549 | the Program, the only way you could satisfy both those terms and this
550 | License would be to refrain entirely from conveying the Program.
551 |
552 | 13. Use with the GNU Affero General Public License.
553 |
554 | Notwithstanding any other provision of this License, you have
555 | permission to link or combine any covered work with a work licensed
556 | under version 3 of the GNU Affero General Public License into a single
557 | combined work, and to convey the resulting work. The terms of this
558 | License will continue to apply to the part which is the covered work,
559 | but the special requirements of the GNU Affero General Public License,
560 | section 13, concerning interaction through a network will apply to the
561 | combination as such.
562 |
563 | 14. Revised Versions of this License.
564 |
565 | The Free Software Foundation may publish revised and/or new versions of
566 | the GNU General Public License from time to time. Such new versions will
567 | be similar in spirit to the present version, but may differ in detail to
568 | address new problems or concerns.
569 |
570 | Each version is given a distinguishing version number. If the
571 | Program specifies that a certain numbered version of the GNU General
572 | Public License "or any later version" applies to it, you have the
573 | option of following the terms and conditions either of that numbered
574 | version or of any later version published by the Free Software
575 | Foundation. If the Program does not specify a version number of the
576 | GNU General Public License, you may choose any version ever published
577 | by the Free Software Foundation.
578 |
579 | If the Program specifies that a proxy can decide which future
580 | versions of the GNU General Public License can be used, that proxy's
581 | public statement of acceptance of a version permanently authorizes you
582 | to choose that version for the Program.
583 |
584 | Later license versions may give you additional or different
585 | permissions. However, no additional obligations are imposed on any
586 | author or copyright holder as a result of your choosing to follow a
587 | later version.
588 |
589 | 15. Disclaimer of Warranty.
590 |
591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
599 |
600 | 16. Limitation of Liability.
601 |
602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
610 | SUCH DAMAGES.
611 |
612 | 17. Interpretation of Sections 15 and 16.
613 |
614 | If the disclaimer of warranty and limitation of liability provided
615 | above cannot be given local legal effect according to their terms,
616 | reviewing courts shall apply local law that most closely approximates
617 | an absolute waiver of all civil liability in connection with the
618 | Program, unless a warranty or assumption of liability accompanies a
619 | copy of the Program in return for a fee.
620 |
621 | END OF TERMS AND CONDITIONS
622 |
623 | How to Apply These Terms to Your New Programs
624 |
625 | If you develop a new program, and you want it to be of the greatest
626 | possible use to the public, the best way to achieve this is to make it
627 | free software which everyone can redistribute and change under these terms.
628 |
629 | To do so, attach the following notices to the program. It is safest
630 | to attach them to the start of each source file to most effectively
631 | state the exclusion of warranty; and each file should have at least
632 | the "copyright" line and a pointer to where the full notice is found.
633 |
634 | <one line to give the program's name and a brief idea of what it does.>
635 | Copyright (C) <year>  <name of author>
636 |
637 | This program is free software: you can redistribute it and/or modify
638 | it under the terms of the GNU General Public License as published by
639 | the Free Software Foundation, either version 3 of the License, or
640 | (at your option) any later version.
641 |
642 | This program is distributed in the hope that it will be useful,
643 | but WITHOUT ANY WARRANTY; without even the implied warranty of
644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
645 | GNU General Public License for more details.
646 |
647 | You should have received a copy of the GNU General Public License
648 | along with this program. If not, see <https://www.gnu.org/licenses/>.
649 |
650 | Also add information on how to contact you by electronic and paper mail.
651 |
652 | If the program does terminal interaction, make it output a short
653 | notice like this when it starts in an interactive mode:
654 |
655 | <program>  Copyright (C) <year>  <name of author>
656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
657 | This is free software, and you are welcome to redistribute it
658 | under certain conditions; type `show c' for details.
659 |
660 | The hypothetical commands `show w' and `show c' should show the appropriate
661 | parts of the General Public License. Of course, your program's commands
662 | might be different; for a GUI interface, you would use an "about box".
663 |
664 | You should also get your employer (if you work as a programmer) or school,
665 | if any, to sign a "copyright disclaimer" for the program, if necessary.
666 | For more information on this, and how to apply and follow the GNU GPL, see
667 | <https://www.gnu.org/licenses/>.
668 |
669 | The GNU General Public License does not permit incorporating your program
670 | into proprietary programs. If your program is a subroutine library, you
671 | may consider it more useful to permit linking proprietary applications with
672 | the library. If this is what you want to do, use the GNU Lesser General
673 | Public License instead of this License. But first, please read
674 | <https://www.gnu.org/licenses/why-not-lgpl.html>.
675 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:focal
2 | ENV DEBIAN_FRONTEND noninteractive
3 | RUN apt-get update && \
4 | apt-get install -yq eatmydata
5 | RUN eatmydata apt-get install -yq --no-install-recommends \
6 | make \
7 | build-essential \
8 | libssl-dev \
9 | zlib1g-dev \
10 | libbz2-dev \
11 | libisal-dev \
12 | libisal2 \
13 | libreadline-dev \
14 | libsqlite3-dev \
15 | wget \
16 | curl \
17 | llvm \
18 | libncursesw5-dev \
19 | xz-utils \
20 | tk-dev \
21 | libxml2-dev \
22 | libxmlsec1-dev \
23 | libffi-dev \
24 | liblzma-dev \
25 | git \
26 | ca-certificates \
27 | cargo \
28 | gzip \
29 | pigz \
30 | bzip2 \
31 | pbzip2 \
32 | autoconf \
33 | automake \
34 | shtool \
35 | coreutils \
36 | autogen \
37 | libtool \
38 | shtool \
39 | nasm && \
40 | apt-get clean autoclean && \
41 | apt-get autoremove --yes && \
42 | rm -rf /var/lib/apt/lists/*
43 |
44 |
45 | RUN useradd -d /cryptobot -u 1001 -ms /bin/bash cryptobot
46 | RUN cd /tmp \
47 | && eatmydata wget http://prdownloads.sourceforge.net/ta-lib/ta-lib-0.4.0-src.tar.gz \
48 | && eatmydata tar xf ta-lib-0.4.0-src.tar.gz \
49 | && cd ta-lib \
50 | && eatmydata ./configure --prefix=/usr \
51 | && eatmydata make \
52 | && eatmydata make install \
53 | && rm -rf /tmp/ta-lib*
54 | USER cryptobot
55 | ENV HOME /cryptobot
56 | WORKDIR /cryptobot
57 | COPY .python-version .
58 | RUN curl https://pyenv.run | eatmydata bash
59 | ENV PYENV_ROOT="$HOME/.pyenv"
60 | ENV PATH="$PYENV_ROOT/bin:$PYENV_ROOT/shims/:$PATH"
61 | RUN CONFIGURE_OPTS="--enable-shared --enable-optimizations --with-lto --with-pgo" eatmydata pyenv install \
62 | && rm -f /tmp/python-build*.log
63 | RUN eatmydata python -m venv /cryptobot/.venv
64 | COPY requirements.txt .
65 | RUN eatmydata /cryptobot/.venv/bin/pip install --upgrade pip setuptools wheel
66 | # pyenv is failing to compile isal without setting C_INCLUDE_PATH
67 | RUN eatmydata /cryptobot/.venv/bin/pip install -r requirements.txt && \
68 | rm -rf /tmp/*
69 |
70 | COPY lib/ lib/
71 | COPY utils/__init__.py utils/__init__.py
72 | COPY utils/pull_klines.py utils/pull_klines.py
73 | COPY utils/config-endpoint-service.py utils/config-endpoint-service.py
74 | COPY utils/config-endpoint-service.sh utils/config-endpoint-service.sh
75 | COPY klines_caching_service.py klines_caching_service.py
76 | COPY price_log_service.py price_log_service.py
77 | COPY app.py .
78 | COPY utils/prove-backtesting.sh utils/prove-backtesting.sh
79 | COPY utils/prove-backtesting.py utils/prove-backtesting.py
80 |
81 |
--------------------------------------------------------------------------------
/Dockerfile.tests:
--------------------------------------------------------------------------------
1 | FROM local:tests
2 | RUN mkdir log cache tmp
3 | ADD requirements-dev.txt /cryptobot/requirements-dev.txt
4 | RUN /cryptobot/.venv/bin/pip install -r requirements-dev.txt
5 | ADD .mypy.ini /cryptobot/
6 | ADD pyproject.toml /cryptobot/
7 | ADD tests/ /cryptobot/tests/
8 | ADD strategies/Buy* /cryptobot/strategies/
9 |
10 | RUN /cryptobot/.venv/bin/black --check \
11 | app.py \
12 | klines_caching_service.py \
13 | price_log_service.py \
14 | strategies/ \
15 | lib/ \
16 | tests/ \
17 | utils/
18 |
19 | RUN ls strategies/*.py \
20 | |grep -v Local \
21 | | xargs /cryptobot/.venv/bin/pylint \
22 | app.py \
23 | klines_caching_service.py \
24 | price_log_service.py \
25 | lib/*.py \
26 | utils/*.py
27 |
28 | RUN ls strategies/*.py \
29 | |grep -v Local \
30 | | xargs /cryptobot/.venv/bin/mypy \
31 | app.py \
32 | klines_caching_service.py \
33 | price_log_service.py \
34 | lib/*.py \
35 | utils/*.py
36 |
37 | RUN /cryptobot/.venv/bin/pytest \
38 | --quiet -W ignore --disable-pytest-warnings tests/
39 |
--------------------------------------------------------------------------------
/app.py:
--------------------------------------------------------------------------------
1 | """ CryptoBot for Binance """
2 |
3 | import argparse
4 | import importlib
5 | import json
6 | import logging
7 | import sys
8 | import threading
9 | from os import getpid, unlink
10 | from os.path import exists
11 | from typing import Any
12 |
13 | import colorlog
14 | import epdb
15 | import yaml
16 | from binance.client import Client
17 |
18 | # allow migration from old pickle format to new format
19 | # old pickle contains app.Bot, app.Coin
20 | from lib.bot import Bot # pylint: disable=unused-import
21 | from lib.coin import Coin # pylint: disable=unused-import
22 | from lib.helpers import cached_binance_client
23 |
24 |
25 | def control_center() -> None:
26 | """pdb remote endpoint"""
27 | while True:
28 | try:
29 | epdb.serve(port=5555)
30 | except Exception: # pylint: disable=broad-except
31 | pass
32 |
33 |
34 | if __name__ == "__main__":
35 | parser = argparse.ArgumentParser()
36 | parser.add_argument("-c", "--config", help="config.yaml file")
37 | parser.add_argument("-s", "--secrets", help="secrets.yaml file")
38 | parser.add_argument(
39 | "-m", "--mode", help='bot mode ["live", "backtesting", "testnet"]'
40 | )
41 | parser.add_argument(
42 | "-ld", "--logs-dir", help="logs directory", default="log"
43 | )
44 | args = parser.parse_args()
45 |
46 | with open(args.config, encoding="utf-8") as _f:
47 | cfg = yaml.safe_load(_f.read())
48 | with open(args.secrets, encoding="utf-8") as _f:
49 | secrets = yaml.safe_load(_f.read())
50 | cfg["MODE"] = args.mode
51 |
52 | PID = getpid()
53 | c_handler = colorlog.StreamHandler(sys.stdout)
54 | c_handler.setFormatter(
55 | colorlog.ColoredFormatter(
56 | "%(log_color)s[%(levelname)s] %(message)s",
57 | log_colors={
58 | "WARNING": "yellow",
59 | "ERROR": "red",
60 | "CRITICAL": "red,bg_white",
61 | },
62 | )
63 | )
64 | c_handler.setLevel(logging.INFO)
65 |
66 | if cfg["DEBUG"]:
67 | f_handler = logging.FileHandler(f"{args.logs_dir}/debug.log")
68 | f_handler.setLevel(logging.DEBUG)
69 |
70 | logging.basicConfig(
71 | level=logging.DEBUG,
72 | format=" ".join(
73 | [
74 | "(%(asctime)s)",
75 | f"({PID})",
76 | "(%(lineno)d)",
77 | "(%(funcName)s)",
78 | "[%(levelname)s]",
79 | "%(message)s",
80 | ]
81 | ),
82 | handlers=[f_handler, c_handler],
83 | datefmt="%Y-%m-%d %H:%M:%S",
84 | )
85 | else:
86 | logging.basicConfig(
87 | level=logging.INFO,
88 | handlers=[c_handler],
89 | )
90 |
91 | if args.mode == "backtesting":
92 | client = cached_binance_client(
93 | secrets["ACCESS_KEY"], secrets["SECRET_KEY"]
94 | )
95 | else:
96 | client = Client(secrets["ACCESS_KEY"], secrets["SECRET_KEY"])
97 |
98 | module = importlib.import_module(f"strategies.{cfg['STRATEGY']}")
99 | Strategy = getattr(module, "Strategy")
100 |
101 | bot: Any = Strategy(client, args.config, cfg)
102 |
103 | logging.info(
104 | f"running in {bot.mode} mode with "
105 | + f"{json.dumps(args.config, indent=4)}"
106 | )
107 |
108 | # clean up any stale control/STOP files
109 | if exists("control/STOP"):
110 | unlink("control/STOP")
111 |
112 | if bot.mode in ["testnet", "live"]:
113 | # start command-control-center (ipdb on port 5555)
114 | t = threading.Thread(target=control_center)
115 | t.daemon = True
116 | t.start()
117 |
118 | if bot.mode == "backtesting":
119 | bot.backtesting()
120 |
121 | if bot.mode == "logmode":
122 | bot.logmode()
123 |
124 | if bot.mode == "testnet":
125 | bot.client.API_URL = "https://testnet.binance.vision/api"
126 | bot.run()
127 |
128 | if bot.mode == "live":
129 | bot.run()
130 |
131 | bot.print_final_balance_report()
132 |
--------------------------------------------------------------------------------
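Note on the strategy plug-in mechanism in app.py above: the script imports strategies.<STRATEGY> by name and instantiates whatever class that module exposes as "Strategy", then dispatches on bot.mode. The skeleton below is only an illustrative sketch of that contract, not one of the shipped strategies; the attribute and method names shown are the ones app.py actually touches, while the real implementations under strategies/ build their behaviour on lib/bot.py.

# Hypothetical no-op skeleton of the interface app.py expects (sketch only).
class Strategy:
    def __init__(self, client, config_file, cfg):
        self.client = client          # binance.client.Client (or the cached client in backtesting)
        self.config_file = config_file
        self.cfg = cfg
        self.mode = cfg["MODE"]       # set by app.py from the --mode flag

    def run(self): ...                        # live/testnet trading loop
    def backtesting(self): ...                # replay recorded PRICE_LOGS
    def logmode(self): ...                    # record prices only, no trading
    def print_final_balance_report(self): ...

--------------------------------------------------------------------------------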
/cache/.empty:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azulinho/cryptobot/ecddc8c6f7144cfcbc7ecb92159f8c08167500a4/cache/.empty
--------------------------------------------------------------------------------
/configs/.empty:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azulinho/cryptobot/ecddc8c6f7144cfcbc7ecb92159f8c08167500a4/configs/.empty
--------------------------------------------------------------------------------
/control/.empty:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azulinho/cryptobot/ecddc8c6f7144cfcbc7ecb92159f8c08167500a4/control/.empty
--------------------------------------------------------------------------------
/cryptobot.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azulinho/cryptobot/ecddc8c6f7144cfcbc7ecb92159f8c08167500a4/cryptobot.jpg
--------------------------------------------------------------------------------
/examples/BuyDropSellRecoveryStrategy.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | PAUSE_FOR: 1
3 | INITIAL_INVESTMENT: 100
4 | MAX_COINS: 2
5 | PAIRING: USDT
6 | CLEAR_COIN_STATS_AT_BOOT: True
7 | CLEAR_COIN_STATS_AT_SALE: True
8 | DEBUG: False
9 | TRADING_FEE: 0.1
10 | SELL_AS_SOON_IT_DROPS: False
11 |
12 | # STRATEGY: BuyMoonSellRecoveryStrategy
13 | # STRATEGY: BuyOnGrowthTrendAfterDropStrategy
14 | # STRATEGY: BuyOnRecoveryAfterDowntrendPeriodStrategy
15 | STRATEGY: BuyDropSellRecoveryStrategy
16 |
17 | ANCHORS: &defaults
18 | SOFT_LIMIT_HOLDING_TIME: 3600
19 | HARD_LIMIT_HOLDING_TIME: 7200
20 | BUY_AT_PERCENTAGE: -5.0
21 | SELL_AT_PERCENTAGE: +3
22 | STOP_LOSS_AT_PERCENTAGE: -10
23 | TRAIL_TARGET_SELL_PERCENTAGE: -0.5
24 | TRAIL_RECOVERY_PERCENTAGE: +1.0
25 | NAUGHTY_TIMEOUT: 28800
26 | KLINES_TREND_PERIOD: 0d # unused
27 | KLINES_SLICE_PERCENTAGE_CHANGE: +0 # unused
28 |
29 |
30 | TICKERS:
31 | BTCUSDT:
32 | <<: *defaults
33 |
34 | ETHUSDT:
35 | <<: *defaults
36 |
37 | BNBUSDT:
38 | <<: *defaults
39 |
40 | DOTUSDT:
41 | <<: *defaults
42 |
43 | ADAUSDT:
44 | <<: *defaults
45 | BUY_AT_PERCENTAGE: -9.0
46 | SELL_AT_PERCENTAGE: +5
47 | STOP_LOSS_AT_PERCENTAGE: -9
48 | TRAIL_TARGET_SELL_PERCENTAGE: -1.0
49 | TRAIL_RECOVERY_PERCENTAGE: +2.5
50 |
51 |
52 | PRICE_LOGS: []
53 |
--------------------------------------------------------------------------------
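The TICKERS section above leans on YAML anchors and merge keys: ANCHORS defines the &defaults block once and every ticker pulls it in with `<<: *defaults`, overriding only the keys it needs (as ADAUSDT does). A minimal sketch of how that resolves once the file is parsed with PyYAML; the configs/ path is an assumption about where the example gets copied before a run:

# minimal sketch: how the ANCHORS/<<: *defaults merge resolves when a
# config like the one above is parsed. The path below is an assumption
# (the run script copies example configs into configs/ before running).
import yaml

with open("configs/BuyDropSellRecoveryStrategy.yaml", encoding="utf-8") as f:
    cfg = yaml.safe_load(f.read())

# BTCUSDT carries the shared defaults unchanged ...
assert cfg["TICKERS"]["BTCUSDT"]["BUY_AT_PERCENTAGE"] == -5.0
# ... while ADAUSDT overrides a few keys and inherits the rest
assert cfg["TICKERS"]["ADAUSDT"]["BUY_AT_PERCENTAGE"] == -9.0
assert cfg["TICKERS"]["ADAUSDT"]["NAUGHTY_TIMEOUT"] == 28800

--------------------------------------------------------------------------------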
/examples/BuyMoonSellRecoveryStrategy.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | PAUSE_FOR: 7200
3 | INITIAL_INVESTMENT: 100
4 | MAX_COINS: 2
5 | PAIRING: USDT
6 | CLEAR_COIN_STATS_AT_BOOT: True
7 | CLEAR_COIN_STATS_AT_SALE: True
8 | DEBUG: False
9 | TRADING_FEE: 0.1
10 | SELL_AS_SOON_IT_DROPS: False
11 |
12 | STRATEGY: BuyMoonSellRecoveryStrategy
13 | #STRATEGY: BuyOnGrowthTrendAfterDropStrategy
14 | #STRATEGY: BuyOnRecoveryAfterDowntrendPeriodStrategy
15 | #STRATEGY: BuyDropSellRecoveryStrategy
16 |
17 | ANCHORS: &defaults
18 | SOFT_LIMIT_HOLDING_TIME: 48
19 | HARD_LIMIT_HOLDING_TIME: 96
20 | BUY_AT_PERCENTAGE: +2
21 | SELL_AT_PERCENTAGE: +5.0
22 | STOP_LOSS_AT_PERCENTAGE: -9
23 | TRAIL_TARGET_SELL_PERCENTAGE: -1.0
24 | TRAIL_RECOVERY_PERCENTAGE: +0.0
25 | NAUGHTY_TIMEOUT: 4
26 | KLINES_TREND_PERIOD: 0d # unused
27 | KLINES_SLICE_PERCENTAGE_CHANGE: +0 # unused
28 |
29 |
30 | TICKERS:
31 | BTCUSDT:
32 | <<: *defaults
33 |
34 | ETHUSDT:
35 | <<: *defaults
36 |
37 | BNBUSDT:
38 | <<: *defaults
39 |
40 | DOTUSDT:
41 | <<: *defaults
42 |
43 | ADAUSDT:
44 | <<: *defaults
45 | BUY_AT_PERCENTAGE: +1.0
46 | SELL_AT_PERCENTAGE: +5
47 | STOP_LOSS_AT_PERCENTAGE: -9
48 | TRAIL_TARGET_SELL_PERCENTAGE: -1.0
49 | TRAIL_RECOVERY_PERCENTAGE: +2.5
50 |
51 |
52 | PRICE_LOGS: []
53 |
--------------------------------------------------------------------------------
/examples/BuyOnGrowthTrendAfterDropStrategy.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | PAUSE_FOR: 1
3 | INITIAL_INVESTMENT: 100
4 | MAX_COINS: 2
5 | PAIRING: USDT
6 | CLEAR_COIN_STATS_AT_BOOT: True
7 | CLEAR_COIN_STATS_AT_SALE: True
8 | DEBUG: False
9 | TRADING_FEE: 0.1
10 | SELL_AS_SOON_IT_DROPS: False
11 |
12 | # STRATEGY: BuyMoonSellRecoveryStrategy
13 | STRATEGY: BuyOnGrowthTrendAfterDropStrategy
14 | # STRATEGY: BuyOnRecoveryAfterDowntrendPeriodStrategy
15 | # STRATEGY: BuyDropSellRecoveryStrategy
16 |
17 | ANCHORS: &defaults
18 | SOFT_LIMIT_HOLDING_TIME: 3600
19 | HARD_LIMIT_HOLDING_TIME: 28000
20 | BUY_AT_PERCENTAGE: -9.0
21 | SELL_AT_PERCENTAGE: +3
22 | STOP_LOSS_AT_PERCENTAGE: -10
23 | TRAIL_TARGET_SELL_PERCENTAGE: -0.5
24 | TRAIL_RECOVERY_PERCENTAGE: +0.0 # unused
25 | NAUGHTY_TIMEOUT: 28800
26 | KLINES_TREND_PERIOD: 2d
27 | KLINES_SLICE_PERCENTAGE_CHANGE: +1
28 |
29 |
30 | TICKERS:
31 | BTCUSDT:
32 | <<: *defaults
33 |
34 | ETHUSDT:
35 | <<: *defaults
36 |
37 | BNBUSDT:
38 | <<: *defaults
39 |
40 | DOTUSDT:
41 | <<: *defaults
42 |
43 | ADAUSDT:
44 | <<: *defaults
45 | SELL_AT_PERCENTAGE: +5
46 | STOP_LOSS_AT_PERCENTAGE: -9
47 | TRAIL_TARGET_SELL_PERCENTAGE: -1.0
48 | KLINES_TREND_PERIOD: 4h
49 | KLINES_SLICE_PERCENTAGE_CHANGE: +0.2
50 |
51 |
52 | PRICE_LOGS: []
53 |
--------------------------------------------------------------------------------
/examples/automated-backtesting.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | DEFAULTS: &DEFAULTS
3 | PAUSE_FOR: 1
4 | INITIAL_INVESTMENT: 100
5 | MAX_COINS: 1
6 | PAIRING: "USDT"
7 | CLEAR_COIN_STATS_AT_BOOT: True
8 | CLEAR_COIN_STATS_AT_SALE: True
9 | DEBUG: False
10 | TRADING_FEE: 0.1
11 | SELL_AS_SOON_IT_DROPS: True
12 | KLINES_TREND_PERIOD: "0h"
13 | KLINES_SLICE_PERCENTAGE_CHANGE: +0.0
14 | NAUGHTY_TIMEOUT: 28800
15 | SOFT_LIMIT_HOLDING_TIME: 1
16 | HARD_LIMIT_HOLDING_TIME: 99999
17 | STOP_LOSS_AT_PERCENTAGE: -25
18 |
19 | STRATEGIES:
20 | BuyDropSellRecoveryStrategy:
21 | run1:
22 | BUY_AT_PERCENTAGE: -9
23 | SELL_AT_PERCENTAGE: +1
24 | TRAIL_TARGET_SELL_PERCENTAGE: -0.1
25 | TRAIL_RECOVERY_PERCENTAGE: 1.0
26 |
27 | BuyOnGrowthTrendAfterDropStrategy:
28 | run1:
29 | CLEAR_COIN_STATS_AT_BOOT: True
30 | CLEAR_COIN_STATS_AT_SALE: False
31 | BUY_AT_PERCENTAGE: -5
32 | SELL_AT_PERCENTAGE: +5
33 | TRAIL_TARGET_SELL_PERCENTAGE: -0.5
34 | TRAIL_RECOVERY_PERCENTAGE: 0.0
35 | KLINES_TREND_PERIOD: "2h"
36 | KLINES_SLICE_PERCENTAGE_CHANGE: +1.0
37 | STOP_LOSS_AT_PERCENTAGE: -999999
38 |
39 | BuyOnRecoveryAfterDropDuringGrowthTrendStrategy:
40 | run1:
41 | BUY_AT_PERCENTAGE: -1
42 | SELL_AT_PERCENTAGE: +3
43 | STOP_LOSS_AT_PERCENTAGE: -9
44 | TRAIL_TARGET_SELL_PERCENTAGE: -0.5
45 | TRAIL_RECOVERY_PERCENTAGE: 0.5
46 | SOFT_LIMIT_HOLDING_TIME: 1
47 | HARD_LIMIT_HOLDING_TIME: 172800
48 | NAUGHTY_TIMEOUT: 28800
49 | KLINES_TREND_PERIOD: "1h"
50 | KLINES_SLICE_PERCENTAGE_CHANGE: +0.001
51 |
52 | BuyOnRecoveryAfterDropFromAverageStrategy:
53 | run1:
54 | BUY_AT_PERCENTAGE: -1
55 | SELL_AT_PERCENTAGE: +1
56 | STOP_LOSS_AT_PERCENTAGE: -9
57 | TRAIL_TARGET_SELL_PERCENTAGE: -0.1
58 | TRAIL_RECOVERY_PERCENTAGE: 0.1
59 | SOFT_LIMIT_HOLDING_TIME: 1
60 | HARD_LIMIT_HOLDING_TIME: 17280
61 | NAUGHTY_TIMEOUT: 28800
62 | KLINES_TREND_PERIOD: "2d"
63 | KLINES_SLICE_PERCENTAGE_CHANGE: -0.0
64 |
--------------------------------------------------------------------------------
/examples/backtesting.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | PAUSE_FOR: 1
3 | INITIAL_INVESTMENT: 100
4 | MAX_COINS: 2
5 | PAIRING: USDT
6 | CLEAR_COIN_STATS_AT_BOOT: True
7 | CLEAR_COIN_STATS_AT_SALE: True
8 | DEBUG: False
9 | TRADING_FEE: 0.1
10 | SELL_AS_SOON_IT_DROPS: False
11 |
12 | # STRATEGY: BuyMoonSellRecoveryStrategy
13 | # STRATEGY: BuyOnGrowthTrendAfterDropStrategy
14 | # STRATEGY: BuyOnRecoveryAfterDowntrendPeriodStrategy
15 | STRATEGY: BuyDropSellRecoveryStrategy
16 |
17 | ANCHORS: &defaults
18 | SOFT_LIMIT_HOLDING_TIME: 3600
19 | HARD_LIMIT_HOLDING_TIME: 7200
20 | BUY_AT_PERCENTAGE: -5.0
21 | SELL_AT_PERCENTAGE: +3
22 | STOP_LOSS_AT_PERCENTAGE: -10
23 | TRAIL_TARGET_SELL_PERCENTAGE: -0.5
24 | TRAIL_RECOVERY_PERCENTAGE: +1.0
25 | NAUGHTY_TIMEOUT: 28800
26 | KLINES_TREND_PERIOD: 0d
27 | KLINES_SLICE_PERCENTAGE_CHANGE: +0
28 |
29 |
30 | TICKERS:
31 | BTCUSDT:
32 | <<: *defaults
33 |
34 | ETHUSDT:
35 | <<: *defaults
36 |
37 | BNBUSDT:
38 | <<: *defaults
39 |
40 | DOTUSDT:
41 | <<: *defaults
42 |
43 | ADAUSDT:
44 | <<: *defaults
45 | BUY_AT_PERCENTAGE: -9.0
46 | SELL_AT_PERCENTAGE: +5
47 | STOP_LOSS_AT_PERCENTAGE: -9
48 | TRAIL_TARGET_SELL_PERCENTAGE: -1.0
49 | TRAIL_RECOVERY_PERCENTAGE: +2.5
50 |
51 |
52 | PRICE_LOGS:
53 | - "log/20210922.log.gz"
54 | - "log/20210923.log.gz"
55 |
--------------------------------------------------------------------------------
/examples/secrets.yaml:
--------------------------------------------------------------------------------
1 | ACCESS_KEY: "TESTNET ACCESS_KEY"
2 | SECRET_KEY: "TESTNET SECRET_KEY"
3 |
--------------------------------------------------------------------------------
/examples/template.yaml:
--------------------------------------------------------------------------------
1 |
2 | ---
3 | PAUSE_FOR: 1
4 | INITIAL_INVESTMENT: 100
5 | MAX_COINS: 2
6 | PAIRING: USDT
7 | CLEAR_COIN_STATS_AT_BOOT: False
8 | CLEAR_COIN_STATS_AT_SALE: True
9 | DEBUG: True
10 | TRADING_FEE: 0.1
11 | SELL_AS_SOON_IT_DROPS: False
12 |
13 | STRATEGY: BuyDropSellRecoveryStrategy
14 |
15 | ANCHORS: &defaults
16 | BUY_AT_PERCENTAGE: -3.0
17 | SELL_AT_PERCENTAGE: +1.0
18 | STOP_LOSS_AT_PERCENTAGE: -3.0
19 | TRAIL_TARGET_SELL_PERCENTAGE: -1.0
20 | TRAIL_RECOVERY_PERCENTAGE: +1.0
21 |
22 | SOFT_LIMIT_HOLDING_TIME: 99999
23 | HARD_LIMIT_HOLDING_TIME: 999999
24 | NAUGHTY_TIMEOUT: 99999
25 | KLINES_TREND_PERIOD: 0m
26 | KLINES_SLICE_PERCENTAGE_CHANGE: +0.0
27 |
28 |
29 | TICKERS:
30 | BTCUSDT:
31 | <<: *defaults
32 |
33 | ETHUSDT:
34 | <<: *defaults
35 |
36 | BNBUSDT:
37 | <<: *defaults
38 |
39 | DOTUSDT:
40 | <<: *defaults
41 |
42 | ADAUSDT:
43 | <<: *defaults
44 |
45 |
46 | PRICE_LOGS: [
47 | "log/coin.COINTEMPLATE.log.gz",
48 | ]
49 |
--------------------------------------------------------------------------------
/examples/testnet.example.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | PAUSE_FOR: 1
3 | INITIAL_INVESTMENT: 1500
4 | MAX_COINS: 2
5 | PAIRING: USDT
6 | CLEAR_COIN_STATS_AT_BOOT: True
7 | CLEAR_COIN_STATS_AT_SALE: True
8 | DEBUG: False
9 | TRADING_FEE: 0.1
10 | SELL_AS_SOON_IT_DROPS: False
11 |
12 | STRATEGY: BuyMoonSellRecoveryStrategy
13 | #STRATEGY: BuyOnGrowthTrendAfterDropStrategy
14 | #STRATEGY: BuyOnRecoveryAfterDowntrendPeriodStrategy
15 | #STRATEGY: BuyDropSellRecoveryStrategy
16 |
17 | ANCHORS: &defaults
18 | SOFT_LIMIT_HOLDING_TIME: 3600
19 | HARD_LIMIT_HOLDING_TIME: 7200
20 | BUY_AT_PERCENTAGE: -0.0001
21 | SELL_AT_PERCENTAGE: +0.0001
22 | STOP_LOSS_AT_PERCENTAGE: -0.3
23 | TRAIL_TARGET_SELL_PERCENTAGE: -0.0001
24 | TRAIL_RECOVERY_PERCENTAGE: +0.00001
25 | NAUGHTY_TIMEOUT: 28800
26 | KLINES_TREND_PERIOD: 0d # unused
27 | KLINES_SLICE_PERCENTAGE_CHANGE: +0 # unused
28 |
29 |
30 | TICKERS:
31 | BTCUSDT:
32 | <<: *defaults
33 |
34 | ETHUSDT:
35 | <<: *defaults
36 |
37 | BNBUSDT:
38 | <<: *defaults
39 |
40 | PRICE_LOGS: []
41 |
--------------------------------------------------------------------------------
/klines_caching_service.py:
--------------------------------------------------------------------------------
1 | """ load_klines_for_coin: manages the cache/ directory """
2 | import json
3 | import logging
4 | import sys
5 | import threading
6 | from datetime import datetime
7 | from functools import lru_cache
8 | from hashlib import md5
9 | from os import getpid, mkdir
10 | from os.path import exists
11 | from time import sleep
12 |
13 | import colorlog # pylint: disable=E0401
14 | import requests
15 | from flask import Flask, request # pylint: disable=E0401
16 | from pyrate_limiter import Duration, Limiter, RequestRate
17 | from tenacity import retry, wait_exponential
18 |
19 | rate: RequestRate = RequestRate(
20 | 600, Duration.MINUTE
21 | ) # 600 requests per minute
22 | limiter: Limiter = Limiter(rate)
23 |
24 | DEBUG = False
25 | PID = getpid()
26 |
27 | LOCK = threading.Lock()
28 |
29 | c_handler = colorlog.StreamHandler(sys.stdout)
30 | c_handler.setFormatter(
31 | colorlog.ColoredFormatter(
32 | "%(log_color)s[%(levelname)s] %(message)s",
33 | log_colors={
34 | "WARNING": "yellow",
35 | "ERROR": "red",
36 | "CRITICAL": "red,bg_white",
37 | },
38 | )
39 | )
40 | c_handler.setLevel(logging.INFO)
41 |
42 | if DEBUG:
43 | f_handler = logging.FileHandler("log/debug.log")
44 | f_handler.setLevel(logging.DEBUG)
45 |
46 | logging.basicConfig(
47 | level=logging.DEBUG,
48 | format=" ".join(
49 | [
50 | "(%(asctime)s)",
51 | f"({PID})",
52 | "(%(lineno)d)",
53 | "(%(funcName)s)",
54 | "[%(levelname)s]",
55 | "%(message)s",
56 | ]
57 | ),
58 | handlers=[f_handler, c_handler],
59 | datefmt="%Y-%m-%d %H:%M:%S",
60 | )
61 | else:
62 | logging.basicConfig(
63 | level=logging.INFO,
64 | handlers=[c_handler],
65 | )
66 |
67 |
68 | app = Flask(__name__)
69 |
70 |
71 | @lru_cache(64)
72 | def c_from_timestamp(date: float) -> datetime:
73 | """returns a cached datetime.fromtimestamp()"""
74 | return datetime.fromtimestamp(date)
75 |
76 |
77 | @retry(wait=wait_exponential(multiplier=1, max=3))
78 | @limiter.ratelimit("binance", delay=True)
79 | def requests_with_backoff(query: str):
80 | """retry wrapper for requests calls"""
81 | response = requests.get(query, timeout=30)
82 |
83 |     # 418 is a binance api rate-limit response
84 |     # don't raise an HTTPError exception straight away, but block until we
85 |     # are free from the ban.
86 | status = response.status_code
87 | if status in [418, 429]:
88 | backoff = int(response.headers["Retry-After"])
89 | logging.warning(f"HTTP {status} from binance, sleeping for {backoff}s")
90 | sleep(backoff)
91 | response.raise_for_status()
92 | return response
93 |
94 |
95 | def process_klines_line(kline):
96 | """returns date, low, avg, high from a kline"""
97 | (_, _, high, low, _, _, closetime, _, _, _, _, _) = kline
98 |
99 | date = float(c_from_timestamp(closetime / 1000).timestamp())
100 | low = float(low)
101 | high = float(high)
102 | avg = (low + high) / 2
103 |
104 | return date, low, avg, high
105 |
106 |
107 | def read_from_local_cache(f_path, symbol):
108 | """reads kline from local cache if it exists"""
109 |
110 |     # wrap the read in a try block, in case our cached files are corrupt,
111 |     # and attempt to pull the required fields from our data.
112 |
113 | if exists(f"cache/{symbol}/{f_path}"):
114 | try:
115 | with open(f"cache/{symbol}/{f_path}", "r") as f:
116 | results = json.load(f)
117 | except Exception as err: # pylint: disable=W0703
118 | logging.critical(err)
119 | return (False, [])
120 |
121 | # new listed coins will return an empty array
122 | # so we bail out early here
123 | if not results:
124 | return (True, [])
125 |
126 | # check for valid values by reading one line
127 | try:
128 | # pylint: disable=W0612
129 | (
130 | _,
131 | _,
132 | high,
133 | low,
134 | _,
135 | _,
136 | closetime,
137 | _,
138 | _,
139 | _,
140 | _,
141 | _,
142 | ) = results[0]
143 | except Exception as err: # pylint: disable=W0703
144 | logging.critical(err)
145 | return (False, [])
146 |
147 | return (True, results)
148 | logging.info(f"no file cache/{symbol}/{f_path}")
149 | return (False, [])
150 |
151 |
152 | def populate_values(klines, unit):
153 | """builds averages[], lowest[], highest[] out of klines"""
154 | _lowest = []
155 | _averages = []
156 | _highest = []
157 |
158 | # retrieve and calculate the lowest, highest, averages
159 | # from the klines data.
160 | # we need to transform the dates into consumable timestamps
161 | # that work for our bot.
162 | for line in klines:
163 | date, low, avg, high = process_klines_line(line)
164 | _lowest.append((date, low))
165 | _averages.append((date, avg))
166 | _highest.append((date, high))
167 |
168 | # finally, populate all the data coin buckets
169 | values = {}
170 | for metric in ["lowest", "averages", "highest"]:
171 | values[metric] = []
172 |
173 | unit_values = {
174 | "m": 60,
175 | "h": 24,
176 | # for 'Days' we retrieve 1000 days, binance API default
177 | "d": 1000,
178 | }
179 |
180 | timeslice = unit_values[unit]
181 |     # we gather all the data we collected and only populate
182 |     # the number of records we require.
183 |     # this could possibly be optimized, but at the same time
184 |     # this only runs once, when we initialise a coin
185 | for d, v in _lowest[-timeslice:]:
186 | values["lowest"].append((d, v))
187 |
188 | for d, v in _averages[-timeslice:]:
189 | values["averages"].append((d, v))
190 |
191 | for d, v in _highest[-timeslice:]:
192 | values["highest"].append((d, v))
193 |
194 | return (True, values)
195 |
196 |
197 | def call_binance_for_klines(query):
198 | """calls upstream binance and retrieves the klines for a coin"""
199 | logging.info(f"calling binance on {query}")
200 | with LOCK:
201 | response = requests_with_backoff(query)
202 | if response.status_code == 400:
203 | # 400 typically means binance has no klines for this coin
204 | logging.warning(f"got a 400 from binance for {query}")
205 | return (True, [])
206 | return (True, response.json())
207 |
208 |
209 | def save_binance_klines(query, f_path, klines, mode, symbol):
210 | """saves binance klines for a coin locally"""
211 | logging.info(f"caching binance {query} on cache/{symbol}/{f_path}")
212 | if mode == "backtesting":
213 | if not exists(f"cache/{symbol}"):
214 | mkdir(f"cache/{symbol}")
215 |
216 | with open(f"cache/{symbol}/{f_path}", "w") as f:
217 | f.write(json.dumps(klines))
218 |
219 |
220 | @app.route("/")
221 | def load_klines_for_coin():
222 |     """fetches klines for a coin from binance or the local cache"""
223 |
224 | symbol = request.args.get("symbol")
225 | date = int(float(request.args.get("date")))
226 | mode = request.args.get("mode")
227 |
228 | # when we initialise a coin, we pull a bunch of klines from binance
229 | # for that coin and save it to disk, so that if we need to fetch the
230 | # exact same data, we can pull it from disk instead.
231 | # we pull klines for the last 60min, the last 24h, and the last 1000days
232 |
233 | api_url = f"https://api.binance.com/api/v3/klines?symbol={symbol}&"
234 |
235 | unit_values = {
236 | "m": (60, 1),
237 | "h": (24, 60),
238 | # for 'Days' we retrieve 1000 days, binance API default
239 | "d": (1000, 60 * 24),
240 | }
241 | unit_url_fpath = []
242 | for unit in ["m", "h", "d"]:
243 |         # let's find out from what date we need to pull klines while in
244 |         # backtesting mode.
245 | timeslice, minutes_before_now = unit_values[unit]
246 |
247 | backtest_end_time = date
248 | end_unix_time = int(
249 | (backtest_end_time - (60 * minutes_before_now)) * 1000
250 | )
251 |
252 | query = f"{api_url}endTime={end_unix_time}&interval=1{unit}"
253 | md5_query = md5(query.encode()).hexdigest() # nosec
254 | f_path = f"{symbol}.{md5_query}"
255 | unit_url_fpath.append((unit, query, f_path))
256 |
257 | values = {}
258 | for metric in ["lowest", "averages", "highest"]:
259 | values[metric] = {}
260 | for unit in ["m", "h", "d", "s"]:
261 | values[metric][unit] = []
262 |
263 | for unit, query, f_path in unit_url_fpath:
264 | klines = []
265 | ok, klines = read_from_local_cache(f_path, symbol)
266 | if not ok:
267 | ok, klines = call_binance_for_klines(query)
268 | if ok:
269 | save_binance_klines(query, f_path, klines, mode, symbol)
270 |
271 | if ok:
272 | ok, low_avg_high = populate_values(klines, unit)
273 |
274 | if ok:
275 | for metric in low_avg_high.keys(): # pylint: disable=C0201,C0206
276 |                 values[metric][unit] = low_avg_high[metric]
277 |                 # make sure we don't keep more values than we should
278 |                 timeslice, _ = unit_values[unit]
279 |                 while len(values[metric][unit]) > timeslice:
280 |                     values[metric][unit].pop()
281 | return values
282 |
283 |
284 | if __name__ == "__main__":
285 | app.run(host="0.0.0.0", port=8999)
286 |
--------------------------------------------------------------------------------
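A rough client-side sketch for the service above: one GET to / with symbol, date and mode returns the values dict built in load_klines_for_coin (lowest/averages/highest, bucketed by minute, hour and day). The host name and timestamp below are placeholders; the ./run klines-caching-service target binds the service to port 8999 under the network alias "klines".

# rough client-side sketch for the klines caching service above.
# "localhost" and the timestamp are placeholders.
import requests

params = {
    "symbol": "BTCUSDT",      # coin to fetch klines for
    "date": 1632268800.0,     # unix timestamp to pull klines up to
    "mode": "backtesting",    # backtesting mode also persists cache/ files
}
resp = requests.get("http://localhost:8999/", params=params, timeout=30)
resp.raise_for_status()
values = resp.json()

# values is keyed by metric, then by unit: "m"(inutes), "h"(ours), "d"(ays), "s"
for date, low in values["lowest"]["h"]:
    print(date, low)

--------------------------------------------------------------------------------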
/lib/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azulinho/cryptobot/ecddc8c6f7144cfcbc7ecb92159f8c08167500a4/lib/__init__.py
--------------------------------------------------------------------------------
/lib/coin.py:
--------------------------------------------------------------------------------
1 | """ Coin class """
2 |
3 | from typing import Dict, List, Optional
4 | from lib.helpers import add_100
5 |
6 |
7 | class Coin: # pylint: disable=too-few-public-methods
8 | """Coin Class"""
9 |
10 | offset: Optional[Dict[str, int]] = {"s": 60, "m": 3600, "h": 86400}
11 |
12 | def __init__(
13 | self,
14 | symbol: str,
15 | date: float,
16 | market_price: float,
17 | buy_at: float,
18 | sell_at: float,
19 | stop_loss: float,
20 | trail_target_sell_percentage: float,
21 | trail_recovery_percentage: float,
22 | soft_limit_holding_time: int,
23 | hard_limit_holding_time: int,
24 | naughty_timeout: int,
25 | klines_trend_period: str,
26 | klines_slice_percentage_change: float,
27 | ) -> None:
28 | """Coin object"""
29 | self.symbol = symbol
30 | # number of units of a coin held
31 | self.volume: float = float(0)
32 | # what price we bought the coin
33 | self.bought_at: float = float(0)
34 | # minimum coin price recorded since reset
35 | self.min = float(market_price)
36 | # maximum coin price recorded since reset
37 | self.max = float(market_price)
38 | # date of latest price info available for this coin
39 | self.date = date
40 | # current price for the coin
41 | self.price = float(market_price)
42 | # how long in secs we have been holding this coin
43 | self.holding_time = int(0)
44 | # current value, as number of units vs current price
45 | self.value = float(0)
46 |         # total cost for all units at time of buy
47 | self.cost = float(0)
48 | # coin price recorded in the previous iteration
49 | self.last = market_price
50 | # percentage to mark coin as TARGET_DIP
51 | self.buy_at_percentage: float = add_100(buy_at)
52 | # percentage to mark coin as TARGET_SELL
53 | self.sell_at_percentage: float = add_100(sell_at)
54 | # percentage to trigger a stop loss
55 | self.stop_loss_at_percentage: float = add_100(stop_loss)
56 | # current status of coins ['', 'HOLD', 'TARGET_DIP', ...]
57 | self.status = ""
58 | # percentage to recover after a drop that triggers a buy
59 | self.trail_recovery_percentage: float = add_100(
60 | trail_recovery_percentage
61 | )
62 |         # trailing stop loss
63 | self.trail_target_sell_percentage: float = add_100(
64 | trail_target_sell_percentage
65 | )
66 | # lowest price while the coin is in TARGET_DIP
67 | self.dip = market_price
68 | # highest price while the coin in TARGET_SELL
69 | self.tip = market_price
70 | # total profit for this coin
71 | self.profit = float(0)
72 |         # how long to keep a coin before shrinking SELL_AT_PERCENTAGE
73 | self.soft_limit_holding_time: int = int(soft_limit_holding_time)
74 | # How long to hold a coin before forcing a sale
75 | self.hard_limit_holding_time: int = int(hard_limit_holding_time)
76 | # how long to block the bot from buying a coin after a STOP_LOSS
77 | self.naughty_timeout: int = int(naughty_timeout)
78 | # dicts storing price data, on different buckets
79 | self.lowest: dict[str, List[List[float]]] = {
80 | "m": [],
81 | "h": [],
82 | "d": [],
83 | }
84 | self.averages: dict[str, List[List[float]]] = {
85 | "s": [],
86 | "m": [],
87 | "h": [],
88 | "d": [],
89 | }
90 | self.highest: dict[str, List[List[float]]] = {
91 | "m": [],
92 | "h": [],
93 | "d": [],
94 | }
95 | # How long to look for trend changes in a coin price
96 | self.klines_trend_period: str = str(klines_trend_period)
97 | # percentage of coin price change in a trend_period slice
98 | self.klines_slice_percentage_change: float = float(
99 | klines_slice_percentage_change
100 | )
101 | # what date we bought the coin
102 | self.bought_date: float = None # type: ignore
103 | # what date we had the last STOP_LOSS
104 | self.naughty_date: float = None # type: ignore
105 | # if we're currently not buying this coin
106 | self.naughty: bool = False
107 | # used in backtesting, the last read date, as the date in the price.log
108 | self.last_read_date: float = date
109 | self.delisted: bool = False
110 |
--------------------------------------------------------------------------------
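The percentage arguments are stored after add_100() is applied, so a config value of -5.0 becomes 95.0 and later pairs with helpers.percent() to produce an absolute price threshold. An illustrative sketch, assuming it is run from the repo root with the requirements installed, using the defaults from examples/BuyDropSellRecoveryStrategy.yaml:

# illustrative sketch only: the derived thresholds a Coin ends up with,
# using the defaults from examples/BuyDropSellRecoveryStrategy.yaml.
# Assumes it is run from the repo root so that lib/ is importable.
from lib.coin import Coin
from lib.helpers import percent

coin = Coin(
    symbol="BTCUSDT",
    date=1632268800.0,
    market_price=100.0,
    buy_at=-5.0,
    sell_at=3.0,
    stop_loss=-10.0,
    trail_target_sell_percentage=-0.5,
    trail_recovery_percentage=1.0,
    soft_limit_holding_time=3600,
    hard_limit_holding_time=7200,
    naughty_timeout=28800,
    klines_trend_period="0d",
    klines_slice_percentage_change=0.0,
)

assert coin.buy_at_percentage == 95.0        # add_100(-5.0)
assert coin.sell_at_percentage == 103.0      # add_100(+3.0)
# a coin whose max was 100 becomes a TARGET_DIP below this price:
assert percent(coin.buy_at_percentage, coin.max) == 95.0

--------------------------------------------------------------------------------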
/lib/helpers.py:
--------------------------------------------------------------------------------
1 | """ helpers module """
2 | import logging
3 | import math
4 | import pickle # nosec
5 | import re
6 | from datetime import datetime
7 | from functools import lru_cache
8 | from os.path import exists, getctime
9 | from time import sleep, time
10 |
11 | import udatetime
12 | from binance.client import Client
13 | from filelock import SoftFileLock
14 | from tenacity import retry, wait_fixed, stop_after_delay
15 |
16 |
17 | def mean(values: list[float]) -> float:
18 |     """returns the mean value of a list of numbers"""
19 | return sum(values) / len(values)
20 |
21 |
22 | @lru_cache(1024)
23 | def percent(part: float, whole: float) -> float:
24 |     """returns part percent of whole"""
25 | result: float = float(whole) / 100 * float(part)
26 | return result
27 |
28 |
29 | @lru_cache(1024)
30 | def add_100(number: float) -> float:
31 | """adds 100 to a number"""
32 | return 100 + float(number)
33 |
34 |
35 | @lru_cache(64)
36 | def c_date_from(day: str) -> float:
37 | """returns a cached datetime.fromisoformat()"""
38 | return datetime.fromisoformat(day).timestamp()
39 |
40 |
41 | @lru_cache(64)
42 | def c_from_timestamp(date: float) -> datetime:
43 | """returns a cached datetime.fromtimestamp()"""
44 | return datetime.fromtimestamp(date)
45 |
46 |
47 | @retry(wait=wait_fixed(2), stop=stop_after_delay(10))
48 | def cached_binance_client(access_key: str, secret_key: str) -> Client:
49 | """retry wrapper for binance client first call"""
50 |
51 | lock = SoftFileLock("state/binance.client.lockfile", timeout=10)
52 |     # when running automated-testing with multiple threads, we will hit
53 |     # api request limits; this happens during client initialization,
54 |     # which mostly issues a ping. To avoid this when running multiple
55 |     # processes, we cache the client in a pickled state on disk and load
56 |     # it if it already exists.
57 | cachefile = "cache/binance.client"
58 | with lock:
59 | if exists(cachefile) and (
60 | udatetime.now().timestamp() - getctime(cachefile) < (30 * 60)
61 | ):
62 | logging.debug("re-using local cached binance.client file")
63 | with open(cachefile, "rb") as f:
64 | _client = pickle.load(f) # nosec
65 | else:
66 | try:
67 | logging.debug("refreshing cached binance.client")
68 | _client = Client(access_key, secret_key)
69 | except Exception as err:
70 | logging.warning(f"API client exception: {err}")
71 | if "much request weight used" in str(err):
72 | timestamp = (
73 | int(re.findall(r"IP banned until (\d+)", str(err))[0])
74 | / 1000
75 | )
76 | logging.info(
77 | f"Pausing until {datetime.fromtimestamp(timestamp)}"
78 | )
79 | while int(time()) < timestamp:
80 | sleep(1)
81 | raise Exception from err # pylint: disable=broad-exception-raised
82 | with open(cachefile, "wb") as f:
83 | pickle.dump(_client, f)
84 |
85 | return _client
86 |
87 |
88 | def step_size_to_precision(step_size: str) -> int:
89 | """returns step size"""
90 | precision: int = step_size.find("1") - 1
91 | with open("log/binance.step_size_to_precision.log", "at") as f:
92 | f.write(f"{step_size} {precision}\n")
93 | return precision
94 |
95 |
96 | def floor_value(val: float, step_size: str) -> str:
97 | """floors quantity depending on precision"""
98 | value: str = ""
99 | precision: int = step_size_to_precision(step_size)
100 | if precision > 0:
101 | value = "{:0.0{}f}".format( # pylint: disable=consider-using-f-string
102 | val, precision
103 | )
104 | else:
105 | value = str(math.floor(int(val)))
106 | with open("log/binance.floor_value.log", "at") as f:
107 | f.write(f"{val} {step_size} {precision} {value}\n")
108 | return value
109 |
--------------------------------------------------------------------------------
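step_size_to_precision() and floor_value() turn an exchange step-size string (such as Binance's LOT_SIZE stepSize, e.g. "0.00100000") into the number of decimal places an order quantity may carry. A standalone restatement of that arithmetic for clarity, kept separate so it does not append to the log/binance.*.log files the real helpers write to:

# standalone restatement of the precision arithmetic above; the real
# helpers also append their inputs/outputs to log/binance.*.log.
import math


def step_size_to_precision(step_size: str) -> int:
    # "0.00100000".find("1") == 4, so the precision is 3 decimal places
    return step_size.find("1") - 1


def floor_value(val: float, step_size: str) -> str:
    precision = step_size_to_precision(step_size)
    if precision > 0:
        return f"{val:0.{precision}f}"
    return str(math.floor(int(val)))


assert step_size_to_precision("0.00100000") == 3
assert floor_value(0.123456, "0.00100000") == "0.123"
assert floor_value(5.7, "1.00000000") == "5"

--------------------------------------------------------------------------------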
/log/.empty:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azulinho/cryptobot/ecddc8c6f7144cfcbc7ecb92159f8c08167500a4/log/.empty
--------------------------------------------------------------------------------
/price_log_service.py:
--------------------------------------------------------------------------------
1 | """ price_log_service.py """
2 | from flask import send_from_directory, Flask
3 |
4 | app: Flask = Flask(__name__)
5 |
6 |
7 | @app.route("/<path:path>")
8 | def root(path):
9 | """root handler"""
10 | return send_from_directory("log", path)
11 |
12 |
13 | if __name__ == "__main__":
14 | app.run(host="0.0.0.0", port=8998)
15 |
--------------------------------------------------------------------------------
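The service simply exposes the local log/ directory over HTTP on port 8998; the ./run price_log_service target fronts it with gunicorn under the network alias price-log-service. A minimal sketch of pulling one of the daily price logs named in examples/backtesting.yaml; the host name is a placeholder:

# minimal sketch: download one day of price logs from price_log_service.
# "localhost" is a placeholder for wherever the service is bound (port 8998).
import requests

url = "http://localhost:8998/20210922.log.gz"
resp = requests.get(url, timeout=30)
resp.raise_for_status()

with open("log/20210922.log.gz", "wb") as f:
    f.write(resp.content)

--------------------------------------------------------------------------------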
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.black]
2 | line-length = 79
3 | target-version = ['py39']
4 | include = '\.pyi?$'
5 | extend-exclude = '''
6 | # A regex preceded with ^/ will apply only to files and directories
7 | # in the root of the project.
8 | ^/foo.py # exclude a file named foo.py in the root of the project (in addition to the defaults)
9 | '''
10 |
11 | [tool.pylint.'MESSAGES CONTROL']
12 | disable = [
13 | "too-many-public-methods",
14 | "too-many-return-statements",
15 | "too-many-instance-attributes",
16 | "too-many-arguments",
17 | "logging-fstring-interpolation",
18 | "too-many-lines",
19 | "too-many-statements",
20 | "too-many-branches",
21 | "logging-not-lazy",
22 | "unspecified-encoding",
23 | "too-many-locals",
24 | "fixme",
25 | "R0801",
26 | "C0103"
27 | ]
28 | [tool.pylint.basic]
29 | good-names-rgxs = "^[_a-z][_a-z0-9]?$"
30 |
31 |
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | black
2 | coverage
3 | ipython
4 | pandas-stubs
5 | pre-commit
6 | pylint
7 | pytest
8 | pytest-forked
9 | pytest-socket
10 | pytest-timeout
11 | pytest-xdist
12 | rope
13 | ropemode
14 | ropevim
15 | snakeviz
16 | types-PyYAML
17 | typing-extensions
18 | urllib3-mock
19 | virtualenv
20 | types-requests
21 | flaky
22 | openai
23 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | aiohttp==3.8.5
2 | aiosignal==1.3.1
3 | async-timeout==4.0.2
4 | asyncore-wsgi==0.0.10
5 | attrs==23.1.0
6 | blinker==1.6.2
7 | certifi==2023.7.22
8 | charset-normalizer==3.1.0
9 | click==8.1.3
10 | colorama==0.4.6
11 | colorlog==6.7.0
12 | dateparser==1.1.8
13 | epdb==0.15.1
14 | filelock==3.12.0
15 | Flask==2.3.2
16 | frozenlist==1.3.3
17 | gevent==22.10.2
18 | greenlet==2.0.2
19 | gunicorn==20.1.0
20 | idna==3.4
21 | importlib-metadata==6.6.0
22 | intervaltree==3.1.0
23 | isal==1.1.0
24 | itsdangerous==2.1.2
25 | Jinja2==3.1.2
26 | lz4==4.3.2
27 | MarkupSafe==2.1.2
28 | multidict==6.0.4
29 | mypy==1.2.0
30 | mypy-extensions==1.0.0
31 | networkx==3.1
32 | numpy==1.24.3
33 | pandas==2.0.1
34 | pycryptodome==3.17
35 | pyrate-limiter==2.10.0
36 | python-binance==1.0.17
37 | python-dateutil==2.8.2
38 | pytz==2023.3
39 | pytz-deprecation-shim==0.1.0.post0
40 | PyYAML==6.0
41 | pyzmq==25.0.2
42 | regex==2023.3.23
43 | requests==2.31.0
44 | six==1.16.0
45 | sortedcontainers==2.4.0
46 | ta==0.10.2
47 | TA-Lib==0.4.26
48 | tag==0.5
49 | tenacity==8.2.2
50 | toml==0.10.2
51 | typing_extensions==4.5.0
52 | tzdata==2023.3
53 | tzlocal==4.3
54 | udatetime==0.0.17
55 | ujson==5.7.0
56 | urllib3==1.26.15
57 | websockets==11.0.2
58 | Werkzeug==2.3.3
59 | yarl==1.9.2
60 | zipp==3.15.0
61 | zope.event==4.6
62 | zope.interface==6.0
63 |
--------------------------------------------------------------------------------
/results/.empty:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azulinho/cryptobot/ecddc8c6f7144cfcbc7ecb92159f8c08167500a4/results/.empty
--------------------------------------------------------------------------------
/run:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | #
3 |
4 | function usage() {
5 | echo "USAGE:"
6 | echo "./run logmode CONFIG_FILE=< config.yaml >"
7 | echo "./run backtesting CONFIG_FILE=< config.yaml >"
8 | echo "./run testnet CONFIG_FILE=< config.yaml >"
9 | echo "./run live CONFIG_FILE=< config.yaml >"
10 | echo "./run compress-logs"
11 | echo "./run lastfewdays DAYS=3 PAIR=USDT"
12 | echo "./run download-price-logs FROM=20210101 TO=20211231"
13 | echo "./run prove-backtesting CONFIG_FILE=myconfig.yaml"
14 | echo "./run config-endpoint-service BIND=0.0.0.0 CONFIG_FILE=myconfig.yaml"
15 | echo "./run klines-caching-service BIND=0.0.0.0"
16 | echo "./run price_log_service BIND=0.0.0.0"
17 | echo "./run download_price_logs FROM=20220101 TO=20220131 UNIT=1m"
18 | }
19 |
20 | function free_port () { # looks for a free TCP port
21 | LPORT=32768;
22 | UPORT=60999;
23 | while true; do
24 | MPORT=$[$LPORT + ($RANDOM % $UPORT)];
25 | (echo "" >/dev/tcp/127.0.0.1/${MPORT}) >/dev/null 2>&1
26 | if [ $? -ne 0 ]; then
27 | echo $MPORT;
28 | return 0;
29 | fi
30 | done
31 | }
32 |
33 | function set_service_ports () { # locks a port to use by each service
34 | for service in $*
35 | do
36 | if [ ! -f $STATE_DIR/.${service}.port ] ; then
37 | free_port > $STATE_DIR/.${service}.port
38 | fi
39 | done
40 | }
41 |
42 | function checks() { # runs docker version checks
43 | if [ "`docker --version | cut -d " " -f3 | tr -d 'v'| cut -c1`" -lt 2 ]; \
44 | then echo "docker version is too old"; exit 1; fi
45 | }
46 |
47 | function build() { # builds docker images
48 | docker pull ubuntu:focal
49 | docker buildx build --build-arg BUILDKIT_INLINE_CACHE=1 -t tempbuild \
50 | --cache-from=${IMAGE}:latest \
51 | --cache-from=${IMAGE}:next_release \
52 | --cache-from=${IMAGE}:${TAG} .
53 | docker tag tempbuild ${IMAGE}:${TAG}
54 | }
55 |
56 | function down() { # power down all containers
57 | docker ps | grep "${CONTAINER_SUFFIX}" | awk '{print $1}' | xargs -i docker kill {}
58 | }
59 |
60 | function latest() { # pulls :latest image tag
61 | docker pull ${IMAGE}:${TAG} >/dev/null
62 | }
63 |
64 | # this should become its own docker image
65 | function download_price_logs() { # downloads klines logs from binance
66 | if [ -z "$FROM" ]; then
67 | echo "FROM env variable not set"
68 | exit 1
69 | fi
70 | if [ -z "$TO" ]; then
71 | echo "TO env variable not set"
72 | exit 1
73 | fi
74 |
75 | if [ -z "$UNIT" ]; then
76 | export UNIT="1m"
77 | fi
78 |
79 | docker run --rm \
80 | ${USE_TTY} \
81 | ${DOCKER_RUN_AS} \
82 | ${DOCKER_NAME} \
83 | ${DOCKER_MOUNTS} \
84 | ${DOCKER_NETWORK} \
85 | ${RUN_IN_BACKGROUND} \
86 | ${IMAGE}:${TAG} \
87 | /cryptobot/.venv/bin/python -u /cryptobot/utils/pull_klines.py \
88 | -s ${FROM} -e ${TO} -u ${UNIT}
89 | }
90 |
91 | function docker_network() { # creates a docker network
92 | docker network ls |grep ${CONTAINER_SUFFIX} >/dev/null 2>&1 || docker network create ${CONTAINER_SUFFIX}
93 | }
94 |
95 | function logmode() { # runs in logmode
96 | docker run --rm \
97 | ${USE_TTY} \
98 | ${DOCKER_RUN_AS} \
99 | ${DOCKER_NAME} \
100 | ${DOCKER_MOUNTS} \
101 | ${DOCKER_NETWORK} \
102 | ${RUN_IN_BACKGROUND} \
103 | ${IMAGE}:${TAG} \
104 | /cryptobot/.venv/bin/python -u app.py \
105 | -s /cryptobot/secrets/fake.yaml \
106 | -c /cryptobot/configs/${CONFIG_FILE} \
107 | -m ${MODE} > ${LOG_DIR}/${MODE}.${CONFIG_FILE}.txt 2>&1
108 | }
109 |
110 | function testnet() { # runs in testnet mode
111 | if [ -z "$PORT" ]; then
112 | export PORT=$( cat ${STATE_DIR}/.testnet.port)
113 | fi
114 |
115 | docker run --rm \
116 | ${USE_TTY} \
117 | ${DOCKER_RUN_AS} \
118 | ${DOCKER_NAME} \
119 | ${DOCKER_MOUNTS} \
120 | ${DOCKER_NETWORK} \
121 | ${RUN_IN_BACKGROUND} \
122 | -p ${BIND}:${PORT}:5555 \
123 | ${IMAGE}:${TAG} \
124 | /cryptobot/.venv/bin/python -u app.py \
125 | -s /cryptobot/secrets/${SECRETS_FILE:-binance.testnet.yaml} \
126 | -c /cryptobot/configs/${CONFIG_FILE} \
127 | -m ${MODE} > ${LOG_DIR}/${MODE}.${CONFIG_FILE}.txt 2>&1
128 | }
129 |
130 | function live() { # runs in live mode
131 | if [ -z "$PORT" ]; then
132 | export PORT=$( cat ${STATE_DIR}/.${MODE}.port )
133 | fi
134 |
135 | docker run --rm \
136 | ${USE_TTY} \
137 | ${DOCKER_RUN_AS} \
138 | ${DOCKER_NAME} \
139 | ${DOCKER_MOUNTS} \
140 | ${DOCKER_NETWORK} \
141 | ${RUN_IN_BACKGROUND} \
142 | -p ${BIND}:${PORT}:5555 \
143 | ${IMAGE}:${TAG} \
144 | /cryptobot/.venv/bin/python -u app.py \
145 | -s /cryptobot/secrets/${SECRETS_FILE:-binance.prod.yaml} \
146 | -c /cryptobot/configs/${CONFIG_FILE} \
147 | -m ${MODE} >> ${LOG_DIR}/${MODE}.${CONFIG_FILE}.txt 2>&1
148 | }
149 |
150 | function backtesting() { # runs in backtesting mode
151 | docker run --rm \
152 | ${USE_TTY} \
153 | ${DOCKER_RUN_AS} \
154 | ${DOCKER_NAME} \
155 | ${DOCKER_MOUNTS} \
156 | ${DOCKER_NETWORK} \
157 | ${RUN_IN_BACKGROUND} \
158 | ${IMAGE}:${TAG} \
159 | /usr/bin/eatmydata /cryptobot/.venv/bin/python -u app.py \
160 | -s /cryptobot/secrets/${SECRETS_FILE:-fake.yaml} \
161 | -c /cryptobot/configs/${CONFIG_FILE} \
162 | -m backtesting > ${RESULTS_DIR}/backtesting.${CONFIG_FILE}.txt 2>&1
163 | }
164 |
165 | function prove_backtesting() { # runs the prove backtesting
166 | if [ -z "$CONFIG_FILE" ]; then
167 | echo "CONFIG_FILE env variable not set"
168 | exit 1
169 | fi
170 |
171 | RESULTS_LOG="${RESULTS_DIR}/prove-backtesting"
172 | RESULTS_LOG="${RESULTS_LOG}.${CONFIG_FILE}.txt"
173 | docker run --rm \
174 | ${USE_TTY} \
175 | ${DOCKER_RUN_AS} \
176 | ${DOCKER_NAME} \
177 | ${DOCKER_MOUNTS} \
178 | ${DOCKER_NETWORK} \
179 | ${RUN_IN_BACKGROUND} \
180 | -e CONFIG_FILE=${CONFIG_FILE} \
181 | ${IMAGE}:${TAG} \
182 | /usr/bin/eatmydata /cryptobot/utils/prove-backtesting.sh \
183 | > ${RESULTS_LOG}
184 | }
185 |
186 | function config_endpoint_service() { # runs the config endpoint service
187 | if [ -z "$PORT" ]; then
188 | export PORT=$( cat ${STATE_DIR}/.${MODE}.port )
189 | fi
190 |
191 | if [ -z "$CONFIG_FILE" ]; then
192 | echo "CONFIG_FILE env variable not set"
193 | exit 1
194 | fi
195 |
196 | docker run --rm \
197 | ${USE_TTY} \
198 | ${DOCKER_RUN_AS} \
199 | ${DOCKER_NAME} \
200 | ${DOCKER_MOUNTS} \
201 | ${DOCKER_NETWORK} \
202 | ${RUN_IN_BACKGROUND} \
203 | -e CONFIG_FILE=${CONFIG_FILE} \
204 | -p ${BIND}:${PORT}:5883 \
205 | ${IMAGE}:${TAG} \
206 | /cryptobot/utils/config-endpoint-service.sh
207 | }
208 |
209 | function klines_caching_service() { # runs the klines caching service
210 | if [ -z "$PORT" ]; then
211 | export PORT=$( cat ${STATE_DIR}/.${MODE}.port )
212 | fi
213 |
214 | if [ -n "${RUN_IN_BACKGROUND}" ]; then
215 | docker ps | grep "klines_caching_service-${CONTAINER_SUFFIX}" \
216 | |awk '{ print $1 }' | xargs -i docker kill {} >/dev/null 2>&1
217 | fi
218 |
219 | docker run --rm \
220 | ${USE_TTY} \
221 | ${DOCKER_RUN_AS} \
222 | ${DOCKER_NAME} \
223 | ${DOCKER_MOUNTS} \
224 | ${DOCKER_NETWORK} \
225 | --network-alias klines \
226 | ${RUN_IN_BACKGROUND} \
227 | -p ${BIND}:${PORT}:8999 \
228 | ${IMAGE}:${TAG} \
229 | /cryptobot/.venv/bin/gunicorn --preload \
230 | --workers=${N_CPUS} \
231 | --worker-class=gthread \
232 | --threads=8 \
233 | --worker-tmp-dir /dev/shm \
234 | --bind 0.0.0.0:8999 klines_caching_service:app
235 | }
236 |
237 | function price_log_service() { # runs the price log service
238 | if [ -z "$PORT" ]; then
239 | export PORT=$( cat ${STATE_DIR}/.${MODE}.port )
240 | fi
241 |
242 | if [ -n "${RUN_IN_BACKGROUND}" ]; then
243 | docker ps | grep "price-log-service-${CONTAINER_SUFFIX}" \
244 | |awk '{ print $1 }' | xargs -i docker kill {} >/dev/null 2>&1
245 | fi
246 |
247 | docker run --rm \
248 | ${USE_TTY} \
249 | ${DOCKER_RUN_AS} \
250 | ${DOCKER_NAME} \
251 | ${DOCKER_MOUNTS} \
252 | ${DOCKER_NETWORK} \
253 | --network-alias price-log-service \
254 | ${RUN_IN_BACKGROUND} \
255 | -p ${BIND}:${PORT}:8998 \
256 | ${IMAGE}:${TAG} \
257 | /cryptobot/.venv/bin/gunicorn --preload \
258 | --workers=${N_CPUS} \
259 | --worker-class=gthread \
260 | --threads=8 \
261 | --worker-tmp-dir /dev/shm \
262 | --bind 0.0.0.0:8998 price_log_service:app
263 | }
264 |
265 | function setup() { # local setup for development
266 | which pyenv >/dev/null 2>&1 || curl https://pyenv.run | bash
267 | export PATH=~/.pyenv/bin:$PATH
268 |
269 | if [ ! -e .venv ]; then
270 | pyenv install -s
271 | pyenv exec python -m venv .venv
272 | fi
273 |
274 | source .venv/bin/activate
275 | pip --disable-pip-version-check install wheel
276 | pip --disable-pip-version-check install -r requirements.txt
277 | pip --disable-pip-version-check install -r requirements-dev.txt
278 | deactivate
279 | }
280 |
281 | function tests() { # CI and pre-commit tests
282 | set -e
283 | set -o pipefail
284 |
285 | IMAGE=local TAG=tests build
286 | docker build -f Dockerfile.tests .
287 | }
288 |
289 | function github_actions_ci_pr_docker_tests() {
290 | set -ex
291 | ./run down
292 | ./run build TAG=pr
293 | ./run klines-caching-service RUN_IN_BACKGROUND=yes TAG=pr
294 | ./run price-log-service RUN_IN_BACKGROUND=yes TAG=pr
295 |
296 | sleep 5
297 |
298 | # don't worry if we can't push, as when running locally this will fail anyway
299 | docker push ghcr.io/azulinho/cryptobot:pr || true
300 | # TODO: review where these are being consumed in the tests
301 | cp tests/fake.yaml secrets/binance.prod.yaml
302 | cp tests/fake.yaml secrets/fake.yaml
303 | cp tests/price.log.gz log/tests.price.log.gz
304 |
305 | mkdir -p log/ETHUSDT
306 | mkdir -p log/BTCUSDT
307 | for ta in 01 02 03 04 05 06 07 08 09
308 | do
309 | cat tests/price.log.gz | grep 2021-12-${ta} |gzip -1 > log/202112${ta}.log.gz
310 | cat tests/price.log.gz | grep ETHUSDT | grep 2021-12-${ta} |gzip -1 > log/ETHUSDT/202112${ta}.log.gz
311 | cat tests/price.log.gz | grep BTCUSDT | grep 2021-12-${ta} |gzip -1 > log/BTCUSDT/202112${ta}.log.gz
312 | done
313 | cp tests/index.json.gz log/
314 | cp tests/index_v2.json.gz log/
315 |
316 | export PRICE_LOG_PORT=$( cat ${STATE_DIR}/.price_log_service.port)
317 | curl --output /dev/null http://${DOCKER_IP}:${PRICE_LOG_PORT}/index.json.gz
318 | curl --output /dev/null http://${DOCKER_IP}:${PRICE_LOG_PORT}/index_v2.json.gz
319 |
320 | echo BuyMoonSellRecoveryStrategy.yaml
321 | cp tests/BuyMoonSellRecoveryStrategy.yaml configs/
322 | ./run backtesting CONFIG_FILE=BuyMoonSellRecoveryStrategy.yaml TAG=pr
323 | grep 'wins:366 losses:98 stales:104 holds:1' results/backtesting.BuyMoonSellRecoveryStrategy.yaml.txt
324 |
325 | echo BuyOnGrowthTrendAfterDropStrategy.yaml
326 | cp tests/BuyOnGrowthTrendAfterDropStrategy.yaml configs/
327 | ./run backtesting CONFIG_FILE=BuyOnGrowthTrendAfterDropStrategy.yaml TAG=pr
328 | grep 'wins:23 losses:3 stales:87 holds:2' results/backtesting.BuyOnGrowthTrendAfterDropStrategy.yaml.txt
329 |
330 | echo BuyDropSellRecoveryStrategy.yaml
331 | cp tests/BuyDropSellRecoveryStrategy.yaml configs/
332 | ./run backtesting CONFIG_FILE=BuyDropSellRecoveryStrategy.yaml TAG=pr
333 | grep 'wins:4 losses:9 stales:1 holds:0' results/backtesting.BuyDropSellRecoveryStrategy.yaml.txt
334 |
335 | echo BuyDropSellRecoveryStrategyWhenBTCisUp.yaml
336 | cp tests/BuyDropSellRecoveryStrategyWhenBTCisUp.yaml configs/
337 | ./run backtesting CONFIG_FILE=BuyDropSellRecoveryStrategyWhenBTCisUp.yaml TAG=pr
338 | grep 'wins:209 losses:2 stales:674 holds:0' results/backtesting.BuyDropSellRecoveryStrategyWhenBTCisUp.yaml.txt
339 |
340 | echo BuyDropSellRecoveryStrategyWhenBTCisDown.yaml
341 | cp tests/BuyDropSellRecoveryStrategyWhenBTCisDown.yaml configs/
342 | ./run backtesting CONFIG_FILE=BuyDropSellRecoveryStrategyWhenBTCisDown.yaml TAG=pr
343 | grep 'wins:10 losses:0 stales:125 holds:0' results/backtesting.BuyDropSellRecoveryStrategyWhenBTCisDown.yaml.txt
344 |
345 | echo BuyOnRecoveryAfterDropDuringGrowthTrendStrategy.yaml
346 | cp tests/BuyOnRecoveryAfterDropDuringGrowthTrendStrategy.yaml configs/
347 | ./run backtesting CONFIG_FILE=BuyOnRecoveryAfterDropDuringGrowthTrendStrategy.yaml TAG=pr
348 | grep 'wins:131 losses:0 stales:411 holds:0' results/backtesting.BuyOnRecoveryAfterDropDuringGrowthTrendStrategy.yaml.txt
349 |
350 | echo BuyOnRecoveryAfterDropFromAverageStrategy.yaml
351 | cp tests/BuyOnRecoveryAfterDropFromAverageStrategy.yaml configs/
352 | ./run backtesting CONFIG_FILE=BuyOnRecoveryAfterDropFromAverageStrategy.yaml TAG=pr
353 | grep 'wins:195 losses:4 stales:621 holds:0' results/backtesting.BuyOnRecoveryAfterDropFromAverageStrategy.yaml.txt
354 |
355 | echo prove-backtesting
356 | cp tests/prove-backtesting.yaml configs/
357 | ./run prove-backtesting \
358 | TAG=pr CONFIG_FILE=prove-backtesting.yaml
359 |
360 | wc -l results/prove-backtesting.prove-backtesting.yaml.txt \
361 | | grep '49'
362 |
363 | for ta in 01 02 03 04 05 06 07 08 09
364 | do
365 | rm -f log/202112${ta}.log.gz
366 | rm -f log/ETHUSDT/202112${ta}.log.gz
367 | rm -f log/BTCUSDT/202112${ta}.log.gz
368 | done
369 | rm -f log/index.json.gz
370 |
371 | ./run down
372 | }
373 |
374 | function compress_logs() { # compresses the latest price logs
375 | find ${LOG_DIR}/ -name "202*.log" | grep -v "$(date '+%Y%m%d')" | xargs -i gzip -3 {}
376 | }
377 |
378 | function last_few_days() { # generates a lastfewdays.log.gz from last n days
379 | if [ -z "$PAIR" ]; then
380 | echo "PAIR env variable not set"
381 | exit 1
382 | fi
383 | if [ -z "$DAYS" ]; then
384 | echo "DAYS env variable not set"
385 | exit 1
386 | fi
387 |
388 | rm -f lastfewdays.log.gz; for ta in `find log/ -name '202*.gz' |sort -n \
389 | | tail -${DAYS}` ; do zcat $ta | grep -a "${PAIR}" \
390 |         | grep -vEa "DOWN${PAIR}|UP${PAIR}|BULL${PAIR}|BEAR${PAIR}" \
391 | | gzip -3 >> lastfewdays.log.gz; done
392 | }
393 |
394 | function main() { # main innit?
395 | LOCAL_PATH="$(pwd)"
396 | export PATH="${LOCAL_PATH}/.venv/bin:$PATH"
397 |
398 | if [ $# -eq 0 ]; then usage; exit 1; fi
399 |
400 | export USE_TTY=""
401 | test -t 1 && USE_TTY="-it"
402 |
403 | # allow for the same syntax used in makefiles using '-' instead of '_'
404 | export MODE=$( echo $1 | tr -s '-' '_' )
405 | shift 1
406 |
407 | # convert CLI into env vars
408 | for ARG in $*
409 | do
410 | export $ARG
411 | done
412 |
413 | # certain modes require certain env vars set
414 | if [ "`echo " $MODE " | grep -cE ' logmode | live | testnet '`" -eq 1 ]; then
415 | if [ -z "$CONFIG_FILE" ]; then
416 | echo "CONFIG_FILE env variable not set"
417 | exit 1
418 | fi
419 | fi
420 |
421 | if [ -z "$RUN_IN_BACKGROUND" ]; then
422 | export RUN_IN_BACKGROUND=""
423 | else
424 | export RUN_IN_BACKGROUND="-d"
425 | fi
426 |
427 | if [ -z "$IMAGE" ]; then
428 | export IMAGE="ghcr.io/azulinho/cryptobot"
429 | fi
430 |
431 | if [ -z "$TAG" ]; then
432 | export TAG="latest"
433 | fi
434 |
435 | if [ -z "$SMP_MULTIPLIER" ]; then
436 | export SMP_MULTIPLIER=1
437 | fi
438 |
439 | if [ -z "$BIND_ADDRESS" ]; then
440 | export DOCKER_IP=$(ip a show docker0 |grep 'inet '| awk '{ print $2 }' | cut -f1 -d '/')
441 | export BIND_ADDRESS=$DOCKER_IP
442 | fi
443 |
444 | if [ -z "$LOG_DIR" ]; then
445 | export LOG_DIR="$(pwd)/log"
446 | fi
447 |
448 | if [ -z "$CONFIG_DIR" ]; then
449 | export CONFIG_DIR="$(pwd)/configs"
450 | fi
451 |
452 | if [ -z "$SECRETS_DIR" ]; then
453 | export SECRETS_DIR="$(pwd)/secrets"
454 | fi
455 |
456 | if [ -z "$STATE_DIR" ]; then
457 | export STATE_DIR="$(pwd)/state"
458 | fi
459 |
460 | if [ -z "$RESULTS_DIR" ]; then
461 | export RESULTS_DIR="$(pwd)/results"
462 | fi
463 |
464 | if [ -z "$CONTROL_DIR" ]; then
465 | export CONTROL_DIR="$(pwd)/control"
466 | fi
467 |
468 | if [ -z "$CACHE_DIR" ]; then
469 | export CACHE_DIR="$(pwd)/cache"
470 | fi
471 |
472 | # TODO: do I need this?
473 | if [ -z "$TESTS_DIR" ]; then
474 | export TESTS_DIR="$(pwd)/tests"
475 | fi
476 |
477 | if [ -z "$SORTBY" ]; then
478 | export SORTBY="number_of_clean_wins"
479 | fi
480 |
481 | if [ -z "$FILTER" ]; then
482 | export FILTER=''
483 | fi
484 |
485 | if [ -z "$TMP_DIR" ]; then
486 | export TMP_DIR="$(pwd)/tmp"
487 | fi
488 |
489 |
490 | export N_CPUS=`grep processor /proc/cpuinfo|wc -l`
491 |
492 | export DOCKER_MOUNTS=""
493 | export DOCKER_MOUNTS="${DOCKER_MOUNTS} -v /etc/timezone:/etc/timezone:ro"
494 | export DOCKER_MOUNTS="${DOCKER_MOUNTS} -v /etc/localtime:/etc/localtime:ro"
495 | export DOCKER_MOUNTS="${DOCKER_MOUNTS} -v $PWD/strategies:/cryptobot/strategies:rw "
496 | export DOCKER_MOUNTS="${DOCKER_MOUNTS} -v $CONFIG_DIR:/cryptobot/configs:rw "
497 | export DOCKER_MOUNTS="${DOCKER_MOUNTS} -v $SECRETS_DIR:/cryptobot/secrets:ro"
498 | export DOCKER_MOUNTS="${DOCKER_MOUNTS} -v $LOG_DIR:/cryptobot/log:rw "
499 | export DOCKER_MOUNTS="${DOCKER_MOUNTS} -v $STATE_DIR:/cryptobot/state:rw "
500 | export DOCKER_MOUNTS="${DOCKER_MOUNTS} -v $RESULTS_DIR:/cryptobot/results:rw "
501 | export DOCKER_MOUNTS="${DOCKER_MOUNTS} -v $CONTROL_DIR:/cryptobot/control:rw "
502 | export DOCKER_MOUNTS="${DOCKER_MOUNTS} -v $CACHE_DIR:/cryptobot/cache:rw "
503 | export DOCKER_MOUNTS="${DOCKER_MOUNTS} -v $TESTS_DIR:/cryptobot/tests:rw "
504 | export DOCKER_MOUNTS="${DOCKER_MOUNTS} -v $TMP_DIR:/cryptobot/tmp:rw "
505 |
506 | export DOCKER_RUN_AS="--user $(id -u):$(id -g)"
507 | export DOCKER_PREFIX_VARS="U=`id -u` G=`id -g` BIND=${BIND_ADDRESS} "
508 | export CONTAINER_SUFFIX="$(whoami)-$(pwd |md5sum |cut -c1-8)"
509 | export DOCKER_NAME="--name $MODE-${CONTAINER_SUFFIX}"
510 | export DOCKER_NETWORK="--network ${CONTAINER_SUFFIX}"
511 |
512 | checks
513 | docker_network
514 | set_service_ports klines_caching_service config_endpoint_service live testnet price_log_service
515 | ${MODE}
516 | }
517 |
518 | main $*
519 |
--------------------------------------------------------------------------------
/secrets/.empty:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azulinho/cryptobot/ecddc8c6f7144cfcbc7ecb92159f8c08167500a4/secrets/.empty
--------------------------------------------------------------------------------
/state/.empty:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azulinho/cryptobot/ecddc8c6f7144cfcbc7ecb92159f8c08167500a4/state/.empty
--------------------------------------------------------------------------------
/strategies/BuyDropSellRecoveryStrategy.py:
--------------------------------------------------------------------------------
1 | """ bot buy strategy file """
2 | from lib.bot import Bot
3 | from lib.coin import Coin
4 | from lib.helpers import c_from_timestamp, logging, percent
5 |
6 |
7 | class Strategy(Bot):
8 | """BuyDropSellRecoveryStrategy"""
9 |
10 | def buy_strategy(self, coin: Coin) -> bool:
11 | """BuyDropSellRecoveryStrategy buy_strategy
12 |
13 |         this strategy looks for the recovery point in price for a coin after
14 |         a drop in price.
15 |         when a coin drops by BUY_AT_PERCENTAGE the bot marks that coin
16 |         as TARGET_DIP, and then monitors its price, recording the lowest
17 |         price it sees (the dip).
18 |         As soon as the coin goes above the dip by TRAIL_RECOVERY_PERCENTAGE
19 |         the bot buys the coin."""
20 |
21 | if (
22 |             # as soon as the price goes below BUY_AT_PERCENTAGE, mark coin as
23 | # TARGET_DIP
24 | coin.status == ""
25 | and not coin.naughty
26 | and coin.price < percent(coin.buy_at_percentage, coin.max)
27 | ):
28 | coin.dip = coin.price
29 | logging.info(
30 | f"{c_from_timestamp(coin.date)}: {coin.symbol} [{coin.status}] "
31 | + f"-> [TARGET_DIP] ({coin.price})"
32 | )
33 | coin.status = "TARGET_DIP"
34 |
35 | if coin.status != "TARGET_DIP":
36 | return False
37 |
38 | # record the dip, and wait until the price recovers all the way
39 | # to the TRAIL_RECOVERY_PERCENTAGE, then buy.
40 | self.log_debug_coin(coin)
41 | if coin.price > coin.last:
42 | if coin.price > percent(coin.trail_recovery_percentage, coin.dip):
43 | self.buy_coin(coin)
44 | return True
45 | return False
46 |
--------------------------------------------------------------------------------
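A worked numeric example of the thresholds used in buy_strategy above. The prices are made up; only the percent()/add_100() semantics are taken from lib/helpers.py:

# worked example of the dip/recovery arithmetic in buy_strategy above,
# using assumed prices; only the helper semantics come from lib/helpers.
def percent(part: float, whole: float) -> float:
    return float(whole) / 100 * float(part)


coin_max = 100.0                       # highest price recorded for the coin
buy_at_percentage = 100 + (-5.0)       # BUY_AT_PERCENTAGE: -5.0  -> 95.0
trail_recovery_percentage = 100 + 1.0  # TRAIL_RECOVERY_PERCENTAGE: +1.0 -> 101.0

# the coin becomes a TARGET_DIP once it trades below 95% of its max:
assert percent(buy_at_percentage, coin_max) == 95.0

# say the lowest price seen afterwards (the dip) was 90.0; the bot buys
# once the price ticks up and clears dip * 101%:
dip = 90.0
buy_trigger = percent(trail_recovery_percentage, dip)
assert round(buy_trigger, 6) == 90.9

--------------------------------------------------------------------------------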
/strategies/BuyDropSellRecoveryStrategyWhenBTCisDown.py:
--------------------------------------------------------------------------------
1 | """ bot buy strategy file """
2 | from lib.bot import Bot
3 | from lib.coin import Coin
4 | from lib.helpers import c_from_timestamp, logging, percent
5 |
6 |
7 | class Strategy(Bot):
8 | """BuyDropSellRecoveryStrategyWhenBTCisDown"""
9 |
10 | def buy_strategy(self, coin: Coin) -> bool:
11 | """BuyDropSellRecoveryStrategyWhenBTCisDown buy_strategy
12 |
13 | this strategy only buys coins when the price of bitcoin is heading down.
14 | it waits until BTC has gone down by KLINES_SLICE_PERCENTAGE_CHANGE in
15 | the KLINES_TREND_PERIOD before looking at coin prices.
16 |         Then, when the price of a coin has gone down by BUY_AT_PERCENTAGE,
17 |         it marks the coin as TARGET_DIP.
18 |         It then waits for the coin to go up in price by
19 |         TRAIL_RECOVERY_PERCENTAGE before buying the coin.
20 | """
21 |
22 | BTC = f"BTC{self.pairing}"
23 | # with this strategy we never buy BTC
24 | if coin.symbol == BTC:
25 | return False
26 |
27 | if BTC not in self.coins:
28 | return False
29 |
30 | unit = str(self.coins[BTC].klines_trend_period[-1:]).lower()
31 | klines_trend_period = int(self.coins[BTC].klines_trend_period[:-1])
32 |
33 | last_period = list(self.coins[BTC].averages[unit])[
34 | -klines_trend_period:
35 | ]
36 |
37 | if len(last_period) < klines_trend_period:
38 | return False
39 |
40 | last_period_slice = last_period[0][1]
41 | for _, n in last_period[1:]:
42 | if (
43 | percent(
44 | 100
45 | + (
46 | -abs(
47 | float(
48 | self.coins[BTC].klines_slice_percentage_change
49 | )
50 | )
51 | ),
52 | last_period_slice,
53 | )
54 | < n
55 | ):
56 | return False
57 | last_period_slice = n
58 |
59 | # has the price gone down by x% on a coin we don't own?
60 | if (
61 | coin.price < percent(coin.buy_at_percentage, coin.max)
62 | and coin.status == ""
63 | and not coin.naughty
64 | ):
65 | coin.dip = coin.price
66 | logging.info(
67 | f"{c_from_timestamp(coin.date)}: {coin.symbol} [{coin.status}] "
68 | + f"-> [TARGET_DIP] ({coin.price})"
69 | )
70 | coin.status = "TARGET_DIP"
71 |
72 | if coin.status != "TARGET_DIP":
73 | return False
74 |
75 | # do some gimmicks, and don't buy the coin straight away
76 | # but only buy it when the price is now higher than the last
77 | # price recorded. This way we ensure that we got the dip
78 | self.log_debug_coin(coin)
79 | if coin.price > coin.last:
80 | if coin.price > percent(coin.trail_recovery_percentage, coin.dip):
81 | self.buy_coin(coin)
82 | return True
83 | return False
84 |
--------------------------------------------------------------------------------
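A worked example of the BTC downtrend gate above, pulled out into a small standalone function: with KLINES_SLICE_PERCENTAGE_CHANGE set to 1, each successive BTC average must sit at or below the previous one shrunk by 1% before the strategy even looks at other coins. The price series are invented; only the slice comparison mirrors the strategy code.

# worked example of the BTC downtrend gate above; prices are made up.
def percent(part: float, whole: float) -> float:
    return float(whole) / 100 * float(part)


def btc_is_trending_down(averages: list[float], slice_pct_change: float) -> bool:
    prev = averages[0]
    for current in averages[1:]:
        # each slice must be at or below prev shrunk by slice_pct_change
        if percent(100 - abs(slice_pct_change), prev) < current:
            return False
        prev = current
    return True


# with KLINES_SLICE_PERCENTAGE_CHANGE = 1, each hourly average must drop
# by at least 1% versus the previous hour:
assert btc_is_trending_down([50000.0, 49400.0, 48800.0], 1.0) is True
assert btc_is_trending_down([50000.0, 49800.0, 49300.0], 1.0) is False

--------------------------------------------------------------------------------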
/strategies/BuyDropSellRecoveryStrategyWhenBTCisUp.py:
--------------------------------------------------------------------------------
1 | """ bot buy strategy file """
2 | from lib.bot import Bot
3 | from lib.coin import Coin
4 | from lib.helpers import c_from_timestamp, logging, percent
5 |
6 |
7 | class Strategy(Bot):
8 | """BuyDropSellRecoveryStrategyWhenBTCisUp"""
9 |
10 | def buy_strategy(self, coin: Coin) -> bool:
11 | """BuyDropSellRecoveryStrategyWhenBTCisUp buy_strategy
12 |
13 | this strategy only buys coins when the price of bitcoin is heading up.
14 | it waits until BTC has gone up by KLINES_SLICE_PERCENTAGE_CHANGE in
15 | the KLINES_TREND_PERIOD before looking at coin prices.
16 |         Then, when the price of a coin has gone down by BUY_AT_PERCENTAGE,
17 |         it marks the coin as TARGET_DIP.
18 |         It then waits for the coin to go up in price by
19 |         TRAIL_RECOVERY_PERCENTAGE before buying the coin.
20 |
21 | """
22 |
23 | BTC = f"BTC{self.pairing}"
24 | # with this strategy we never buy BTC
25 | if coin.symbol == BTC:
26 | return False
27 |
28 | if BTC not in self.coins:
29 | return False
30 |
31 | unit = str(self.coins[BTC].klines_trend_period[-1:]).lower()
32 | klines_trend_period = int(self.coins[BTC].klines_trend_period[:-1])
33 |
34 | last_period = list(self.coins[BTC].averages[unit])[
35 | -klines_trend_period:
36 | ]
37 |
38 | if len(last_period) < klines_trend_period:
39 | return False
40 |
41 | last_period_slice = last_period[0][1]
42 | for _, n in last_period[1:]:
43 | if (
44 | percent(
45 | 100
46 | + float(self.coins[BTC].klines_slice_percentage_change),
47 | last_period_slice,
48 | )
49 | > n
50 | ):
51 | return False
52 | last_period_slice = n
53 |
54 | # has the price gone down by x% on a coin we don't own?
55 | if (
56 | coin.price < percent(coin.buy_at_percentage, coin.max)
57 | and coin.status == ""
58 | and not coin.naughty
59 | ):
60 | coin.dip = coin.price
61 | logging.info(
62 | f"{c_from_timestamp(coin.date)}: {coin.symbol} [{coin.status}] "
63 | + f"-> [TARGET_DIP] ({coin.price})"
64 | )
65 | coin.status = "TARGET_DIP"
66 |
67 | if coin.status != "TARGET_DIP":
68 | return False
69 |
70 | # do some gimmicks, and don't buy the coin straight away
71 | # but only buy it when the price is now higher than the last
72 | # price recorded. This way we ensure that we got the dip
73 | self.log_debug_coin(coin)
74 |         if coin.price > coin.last:
75 | if coin.price > percent(coin.trail_recovery_percentage, coin.dip):
76 | self.buy_coin(coin)
77 | return True
78 | return False
79 |
--------------------------------------------------------------------------------
/strategies/BuyMoonSellRecoveryStrategy.py:
--------------------------------------------------------------------------------
1 | """ bot buy strategy file """
2 | from lib.bot import Bot
3 | from lib.coin import Coin
4 | from lib.helpers import percent
5 |
6 |
7 | class Strategy(Bot):
8 | """BuyMoonSellRecoveryStrategy"""
9 |
10 | def buy_strategy(self, coin: Coin) -> bool:
11 | """BuyMoonSellRecoveryStrategy buy_strategy
12 |
13 |         This strategy looks at the price change between the last price recorded
14 |         and the current price; if it has gone up by BUY_AT_PERCENTAGE,
15 |         it buys the coin.
16 |
17 | """
18 | if coin.price > percent(coin.buy_at_percentage, coin.last):
19 | self.buy_coin(coin)
20 | self.log_debug_coin(coin)
21 | return True
22 | return False
23 |
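Worked numbers for the single comparison above, as a sketch. It assumes `percent(p, v)` returns p percent of v (consistent with the `100 + ...` usage in the other strategies) and that `BUY_AT_PERCENTAGE` reaches the strategy already shifted by 100, so a configured `+0.1` acts as `100.1`; both are assumptions rather than verified behaviour of `lib/helpers.py`:

```
def percent(p, v):
    return (p / 100.0) * v

last = 100.0       # hypothetical last recorded price
buy_at = 100.1     # a configured BUY_AT_PERCENTAGE of +0.1, shifted by 100
price = 100.2      # hypothetical current price

# buy when the price jumped more than +0.1% since the last reading
print(price > percent(buy_at, last))  # True: 100.2 > 100.1
```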
--------------------------------------------------------------------------------
/strategies/BuyOnGrowthTrendAfterDropStrategy.py:
--------------------------------------------------------------------------------
1 | """ bot buy strategy file """
2 | from lib.bot import Bot
3 | from lib.coin import Coin
4 | from lib.helpers import c_from_timestamp, logging, percent
5 |
6 |
7 | class Strategy(Bot):
8 | """BuyOnGrowthTrendAfterDropStrategy"""
9 |
10 | def buy_strategy(self, coin: Coin) -> bool:
11 | """BuyOnGrowthTrendAfterDropStrategy buy_strategy
12 | Wait for a coin to drop below BUY_AT_PERCENTAGE and then
13 | monitor its growth trend over a certain period, where each slice of
14 | that period must grow by at least n% over the previous slice.
15 |         As soon as that happens, buy the coin.
16 | """
17 |
18 | unit = str(coin.klines_trend_period[-1:]).lower()
19 |         klines_trend_period = int(coin.klines_trend_period[:-1])
20 | last_period = list(coin.averages[unit])[-klines_trend_period:]
21 |
22 | # we need at least a full period of klines before we can
23 | # make a buy decision
24 | if len(last_period) < klines_trend_period:
25 | return False
26 |
27 | # check if the maximum price recorded is now lower than the
28 | # BUY_AT_PERCENTAGE
29 | if (
30 | coin.status == ""
31 | and not coin.naughty
32 | and coin.price < percent(coin.buy_at_percentage, coin.max)
33 | ):
34 | coin.dip = coin.price
35 | coin.status = "TARGET_DIP"
36 | logging.info(
37 | f"{c_from_timestamp(coin.date)}: {coin.symbol} [{coin.status}] "
38 | + f"-> [TARGET_DIP] ({coin.price})"
39 | )
40 |
41 | if coin.status != "TARGET_DIP":
42 | return False
43 |
44 | # if the price keeps going down, skip it
45 | # we want to make sure the price has increased over n slices of the
46 | # klines_trend_period (m, h, d) by klines_slice_percentage_change
47 | # each time.
48 | last_period_slice = last_period[0][1]
49 | for _, n in last_period[1:]:
50 | if (
51 | percent(
52 | 100 + coin.klines_slice_percentage_change,
53 | last_period_slice,
54 | )
55 | > n
56 | ):
57 | return False
58 | last_period_slice = n
59 | self.buy_coin(coin)
60 | return True
61 |
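Worked numbers for the dip check above, under the same assumptions as the earlier sketches (a `percent(p, v)` that returns p percent of v, and a `BUY_AT_PERCENTAGE` already shifted by 100, so a configured `-0.5` acts as `99.5`):

```
def percent(p, v):
    return (p / 100.0) * v

coin_max = 100.0   # highest price recorded for the coin
buy_at = 99.5      # a configured BUY_AT_PERCENTAGE of -0.5, shifted by 100
price = 99.3       # hypothetical current price

# the coin dropped more than 0.5% below its maximum -> flagged as TARGET_DIP;
# the growth-trend loop above then decides when to actually buy
print(price < percent(buy_at, coin_max))  # True: 99.3 < 99.5
```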
--------------------------------------------------------------------------------
/strategies/BuyOnRecoveryAfterDropDuringGrowthTrendStrategy.py:
--------------------------------------------------------------------------------
1 | """ bot buy strategy file """
2 | from lib.bot import Bot
3 | from lib.coin import Coin
4 | from lib.helpers import c_from_timestamp, logging, percent
5 |
6 |
7 | class Strategy(Bot):
8 | """BuyOnRecoveryAfterDropDuringGrowthTrendStrategy"""
9 |
10 | def buy_strategy(self, coin: Coin) -> bool:
11 | """BuyOnRecoveryAfterDropDuringGrowthTrendStrategy buy_strategy
12 |
13 | This strategy looks for coins that have gone up by
14 | KLINES_SLICE_PERCENTAGE_CHANGE in each slice (m,h,d) of the
15 | KLINES_TREND_PERIOD.
16 |         Then it checks that the current price for those is
17 |         lower than the BUY_AT_PERCENTAGE over the maximum price recorded.
18 |         If it is, it marks the coin as TARGET_DIP
19 |         and buys it as soon as we're over the dip by TRAIL_RECOVERY_PERCENTAGE.
20 | """
21 |
22 | unit = str(coin.klines_trend_period[-1:]).lower()
23 | klines_trend_period = int(coin.klines_trend_period[:-1])
24 |
25 | last_period = list(coin.averages[unit])[-klines_trend_period:]
26 |
27 | # we need at least a full period of klines before we can
28 | # make a buy decision
29 | if len(last_period) < klines_trend_period:
30 | return False
31 |
32 | last_period_slice = last_period[0][1]
33 | # if the price keeps going down, skip it
34 | # we want to make sure the price has increased over n slices of the
35 | # klines_trend_period (m, h, d) by klines_slice_percentage_change
36 | # each time.
37 | for _, n in last_period[1:]:
38 | if (
39 | percent(
40 | 100 + coin.klines_slice_percentage_change,
41 | last_period_slice,
42 | )
43 | > n
44 | ):
45 | return False
46 | last_period_slice = n
47 |
48 | # check if the maximum price recorded is now lower than the
49 | # BUY_AT_PERCENTAGE
50 | if (
51 | coin.price < percent(coin.buy_at_percentage, coin.max)
52 | and coin.status == ""
53 | and not coin.naughty
54 | ):
55 | coin.dip = coin.price
56 | logging.info(
57 | f"{c_from_timestamp(coin.date)}: {coin.symbol} [{coin.status}] "
58 | + f"-> [TARGET_DIP] ({coin.price})"
59 | )
60 | coin.status = "TARGET_DIP"
61 | return False
62 |
63 | if coin.status != "TARGET_DIP":
64 | return False
65 |
66 |         # don't buy the coin straight away;
67 |         # only buy it once the price is higher than the last
68 |         # price recorded. This way we make sure we caught the dip
69 | self.log_debug_coin(coin)
70 | if coin.price > coin.last:
71 | if coin.price > percent(coin.trail_recovery_percentage, coin.dip):
72 | self.buy_coin(coin)
73 | return True
74 | return False
75 |
--------------------------------------------------------------------------------
/strategies/BuyOnRecoveryAfterDropFromAverageStrategy.py:
--------------------------------------------------------------------------------
1 | """ bot buy strategy file """
2 | from lib.bot import Bot
3 | from lib.coin import Coin
4 | from lib.helpers import c_from_timestamp, logging, mean, percent
5 |
6 |
7 | class Strategy(Bot):
8 | """BuyOnRecoveryAfterDropFromAverageStrategy"""
9 |
10 | def buy_strategy(self, coin: Coin) -> bool:
11 | """BuyOnRecoveryAfterDropFromAverageStrategy buy_strategy
12 |
13 | This strategy looks for coins that are below the average price over
14 | the last KLINES_TREND_PERIOD by at least the BUY_AT_PERCENTAGE.
15 |         If a coin is, it is marked as TARGET_DIP
16 |         and bought as soon as we're over the dip by TRAIL_RECOVERY_PERCENTAGE.
17 | """
18 |
19 | unit = str(coin.klines_trend_period[-1:]).lower()
20 |         klines_trend_period = int(coin.klines_trend_period[:-1])
21 |
22 | last_period = list(coin.averages[unit])[-klines_trend_period:]
23 |
24 | # we need at least a full period of klines before we can
25 | # make a buy decision
26 | if len(last_period) < klines_trend_period:
27 | return False
28 |
29 | average = mean([v for _, v in last_period])
30 | # check if the average price recorded over the last_period is now
31 | # lower than the BUY_AT_PERCENTAGE
32 | if (
33 | coin.status == ""
34 | and not coin.naughty
35 | and (coin.price < percent(coin.buy_at_percentage, average))
36 | ):
37 | coin.dip = coin.price
38 | logging.info(
39 | f"{c_from_timestamp(coin.date)}: {coin.symbol} [{coin.status}] "
40 | + f"-> [TARGET_DIP] ({coin.price})"
41 | )
42 | coin.status = "TARGET_DIP"
43 |
44 | if coin.status != "TARGET_DIP":
45 | return False
46 |
47 |         # don't buy the coin straight away;
48 |         # only buy it once the price is higher than the last
49 |         # price recorded. This way we make sure we caught the dip
50 | self.log_debug_coin(coin)
51 | if coin.price > coin.last:
52 | if coin.price > percent(coin.trail_recovery_percentage, coin.dip):
53 | self.buy_coin(coin)
54 | return True
55 | return False
56 |
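A sketch of the average-based threshold above, reusing the same assumptions about `percent()`; `statistics.mean` stands in for `lib.helpers.mean`:

```
from statistics import mean   # stand-in for lib.helpers.mean

def percent(p, v):
    return (p / 100.0) * v

# hypothetical (timestamp, price) pairs covering the KLINES_TREND_PERIOD
last_period = [(1, 10.0), (2, 10.2), (3, 9.8)]
average = mean(v for _, v in last_period)   # 10.0

buy_at = 99.9   # a configured BUY_AT_PERCENTAGE of -0.1, shifted by 100
price = 9.96
print(price < percent(buy_at, average))     # True: 9.96 < 9.99
```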
--------------------------------------------------------------------------------
/strategies/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azulinho/cryptobot/ecddc8c6f7144cfcbc7ecb92159f8c08167500a4/strategies/__init__.py
--------------------------------------------------------------------------------
/tests/BuyDropSellRecoveryStrategy.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | PAUSE_FOR: 14400
3 | INITIAL_INVESTMENT: 100
4 | MAX_COINS: 8
5 | PAIRING: USDT
6 | CLEAR_COIN_STATS_AT_BOOT: False
7 | CLEAR_COIN_STATS_AT_SALE: True
8 | DEBUG: True
9 | TRADING_FEE: 0.1
10 | SELL_AS_SOON_IT_DROPS: False # <-- HERE SCALPING MODE
11 | PRICE_LOG_SERVICE_URL: "http://price-log-service:8998/"
12 |
13 | STRATEGY: BuyDropSellRecoveryStrategy
14 |
15 | ANCHORS:
16 | DEFAULTS: &defaults
17 | BUY_AT_PERCENTAGE: -0.01
18 | SELL_AT_PERCENTAGE: +0.03
19 | STOP_LOSS_AT_PERCENTAGE: -0.5
20 | TRAIL_TARGET_SELL_PERCENTAGE: -0.1
21 | TRAIL_RECOVERY_PERCENTAGE: +0.01
22 | SOFT_LIMIT_HOLDING_TIME: 1
23 | HARD_LIMIT_HOLDING_TIME: 100
24 | NAUGHTY_TIMEOUT: 120
25 | KLINES_TREND_PERIOD: 3m
26 | KLINES_SLICE_PERCENTAGE_CHANGE: +0.001
27 |
28 | TICKERS:
29 | BTCUSDT:
30 | <<: *defaults
31 |
32 | ETHUSDT:
33 | <<: *defaults
34 |
35 | PRICE_LOGS: [
36 | "tests.price.log.gz",
37 | ]
38 |
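The `ANCHORS` / `<<: *defaults` pattern used by these test configs is standard YAML merge-key syntax. A small sketch of how it expands when loaded with PyYAML (the values below are made up for illustration):

```
import yaml

doc = """
ANCHORS:
  DEFAULTS: &defaults
    BUY_AT_PERCENTAGE: -0.01
    SELL_AT_PERCENTAGE: +0.03

TICKERS:
  BTCUSDT:
    <<: *defaults
  ETHUSDT:
    <<: *defaults
    SELL_AT_PERCENTAGE: +0.05   # explicit keys override the merged defaults
"""

cfg = yaml.safe_load(doc)
print(cfg["TICKERS"]["BTCUSDT"]["BUY_AT_PERCENTAGE"])   # -0.01
print(cfg["TICKERS"]["ETHUSDT"]["SELL_AT_PERCENTAGE"])  # 0.05
```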
--------------------------------------------------------------------------------
/tests/BuyDropSellRecoveryStrategyWhenBTCisDown.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | PAUSE_FOR: 1
3 | INITIAL_INVESTMENT: 100
4 | MAX_COINS: 4
5 | PAIRING: USDT
6 | CLEAR_COIN_STATS_AT_BOOT: False
7 | CLEAR_COIN_STATS_AT_SALE: True
8 | DEBUG: True
9 | TRADING_FEE: 0.1
10 | SELL_AS_SOON_IT_DROPS: False # <-- HERE SCALPING MODE
11 | PRICE_LOG_SERVICE_URL: "http://price-log-service:8998/"
12 |
13 | STRATEGY: BuyDropSellRecoveryStrategyWhenBTCisDown
14 |
15 |
16 | ANCHORS:
17 | DEFAULTS: &defaults
18 | BUY_AT_PERCENTAGE: -1.0
19 | SELL_AT_PERCENTAGE: +0.6
20 | STOP_LOSS_AT_PERCENTAGE: -3.0
21 | TRAIL_TARGET_SELL_PERCENTAGE: -0.1
22 | TRAIL_RECOVERY_PERCENTAGE: +0.1
23 | SOFT_LIMIT_HOLDING_TIME: 1
24 | HARD_LIMIT_HOLDING_TIME: 100
25 | NAUGHTY_TIMEOUT: 120
26 | KLINES_TREND_PERIOD: 3m
27 | KLINES_SLICE_PERCENTAGE_CHANGE: -0.001
28 |
29 | TICKERS:
30 | BTCUSDT:
31 | <<: *defaults
32 |
33 | ETHUSDT:
34 | <<: *defaults
35 |
36 | PRICE_LOGS: [
37 | "tests.price.log.gz",
38 | ]
39 |
--------------------------------------------------------------------------------
/tests/BuyDropSellRecoveryStrategyWhenBTCisUp.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | PAUSE_FOR: 1
3 | INITIAL_INVESTMENT: 100
4 | MAX_COINS: 4
5 | PAIRING: USDT
6 | CLEAR_COIN_STATS_AT_BOOT: False
7 | CLEAR_COIN_STATS_AT_SALE: True
8 | DEBUG: True
9 | TRADING_FEE: 0.1
10 | SELL_AS_SOON_IT_DROPS: False # <-- HERE SCALPING MODE
11 | PRICE_LOG_SERVICE_URL: "http://price-log-service:8998/"
12 |
13 | STRATEGY: BuyDropSellRecoveryStrategyWhenBTCisUp
14 |
15 |
16 | ANCHORS:
17 | DEFAULTS: &defaults
18 | BUY_AT_PERCENTAGE: -0.1
19 | SELL_AT_PERCENTAGE: +0.1
20 | STOP_LOSS_AT_PERCENTAGE: -1.0
21 | TRAIL_TARGET_SELL_PERCENTAGE: -0.1
22 | TRAIL_RECOVERY_PERCENTAGE: +0.1
23 | SOFT_LIMIT_HOLDING_TIME: 1
24 | HARD_LIMIT_HOLDING_TIME: 100
25 | NAUGHTY_TIMEOUT: 120
26 | KLINES_TREND_PERIOD: 3m
27 | KLINES_SLICE_PERCENTAGE_CHANGE: +0.001
28 |
29 | TICKERS:
30 | BTCUSDT:
31 | <<: *defaults
32 |
33 | ETHUSDT:
34 | <<: *defaults
35 |
36 | PRICE_LOGS: [
37 | "tests.price.log.gz",
38 | ]
39 |
--------------------------------------------------------------------------------
/tests/BuyMoonSellRecoveryStrategy.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | PAUSE_FOR: 1
3 | INITIAL_INVESTMENT: 100
4 | MAX_COINS: 4
5 | PAIRING: USDT
6 | CLEAR_COIN_STATS_AT_BOOT: False
7 | CLEAR_COIN_STATS_AT_SALE: True
8 | DEBUG: True
9 | TRADING_FEE: 0.1
10 | SELL_AS_SOON_IT_DROPS: False # <-- HERE SCALPING MODE
11 | PRICE_LOG_SERVICE_URL: "http://price-log-service:8998/"
12 |
13 | STRATEGY: BuyMoonSellRecoveryStrategy
14 |
15 | ANCHORS:
16 | DEFAULTS: &defaults
17 | BUY_AT_PERCENTAGE: +0.1
18 | SELL_AT_PERCENTAGE: +0.3
19 | STOP_LOSS_AT_PERCENTAGE: -1.0
20 | TRAIL_TARGET_SELL_PERCENTAGE: -0.1
21 | TRAIL_RECOVERY_PERCENTAGE: +1.0
22 | SOFT_LIMIT_HOLDING_TIME: 1
23 | HARD_LIMIT_HOLDING_TIME: 3600
24 | NAUGHTY_TIMEOUT: 120
25 | KLINES_TREND_PERIOD: 3m
26 | KLINES_SLICE_PERCENTAGE_CHANGE: +0.001
27 |
28 | TICKERS:
29 | BTCUSDT:
30 | <<: *defaults
31 |
32 | ETHUSDT:
33 | <<: *defaults
34 |
35 | PRICE_LOGS: [
36 | "tests.price.log.gz",
37 | ]
38 |
--------------------------------------------------------------------------------
/tests/BuyOnGrowthTrendAfterDropStrategy.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | PAUSE_FOR: 1
3 | INITIAL_INVESTMENT: 100
4 | MAX_COINS: 8
5 | PAIRING: USDT
6 | CLEAR_COIN_STATS_AT_BOOT: False
7 | CLEAR_COIN_STATS_AT_SALE: True
8 | DEBUG: True
9 | TRADING_FEE: 0.1
10 | SELL_AS_SOON_IT_DROPS: False # <-- HERE SCALPING MODE
11 | PRICE_LOG_SERVICE_URL: "http://price-log-service:8998/"
12 |
13 | STRATEGY: BuyOnGrowthTrendAfterDropStrategy
14 | #STRATEGY: BuyDropSellRecoveryStrategyWhenBTCisUp
15 | #STRATEGY: BuyDropSellRecoveryStrategy
16 |
17 |
18 | ANCHORS:
19 | DEFAULTS: &defaults
20 | BUY_AT_PERCENTAGE: -0.5
21 | SELL_AT_PERCENTAGE: +0.2
22 | STOP_LOSS_AT_PERCENTAGE: -0.5
23 | TRAIL_TARGET_SELL_PERCENTAGE: -0.1
24 | TRAIL_RECOVERY_PERCENTAGE: +0.1
25 | SOFT_LIMIT_HOLDING_TIME: 1
26 | HARD_LIMIT_HOLDING_TIME: 100
27 | NAUGHTY_TIMEOUT: 120
28 | KLINES_TREND_PERIOD: 3h
29 | KLINES_SLICE_PERCENTAGE_CHANGE: +0.1
30 |
31 |
32 | TICKERS:
33 | BTCUSDT:
34 | <<: *defaults
35 |
36 | ETHUSDT:
37 | <<: *defaults
38 |
39 | PRICE_LOGS: [
40 | "tests.price.log.gz",
41 | ]
42 |
--------------------------------------------------------------------------------
/tests/BuyOnRecoveryAfterDropDuringGrowthTrendStrategy.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | PAUSE_FOR: 1
3 | INITIAL_INVESTMENT: 100
4 | MAX_COINS: 8
5 | PAIRING: USDT
6 | CLEAR_COIN_STATS_AT_BOOT: False
7 | CLEAR_COIN_STATS_AT_SALE: True
8 | DEBUG: True
9 | TRADING_FEE: 0.1
10 | SELL_AS_SOON_IT_DROPS: False # <-- HERE SCALPING MODE
11 | PRICE_LOG_SERVICE_URL: "http://price-log-service:8998/"
12 |
13 | STRATEGY: BuyOnRecoveryAfterDropDuringGrowthTrendStrategy
14 |
15 |
16 | ANCHORS:
17 | DEFAULTS: &defaults
18 | BUY_AT_PERCENTAGE: -0.1
19 | SELL_AT_PERCENTAGE: +0.1
20 | STOP_LOSS_AT_PERCENTAGE: -1.0
21 | TRAIL_TARGET_SELL_PERCENTAGE: -0.1
22 | TRAIL_RECOVERY_PERCENTAGE: +0.1
23 | SOFT_LIMIT_HOLDING_TIME: 1
24 | HARD_LIMIT_HOLDING_TIME: 100
25 | NAUGHTY_TIMEOUT: 120
26 | KLINES_TREND_PERIOD: 3m
27 | KLINES_SLICE_PERCENTAGE_CHANGE: +0.001
28 |
29 |
30 | TICKERS:
31 | BTCUSDT:
32 | <<: *defaults
33 |
34 | ETHUSDT:
35 | <<: *defaults
36 |
37 | PRICE_LOGS: [
38 | "tests.price.log.gz",
39 | ]
40 |
--------------------------------------------------------------------------------
/tests/BuyOnRecoveryAfterDropFromAverageStrategy.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | PAUSE_FOR: 1
3 | INITIAL_INVESTMENT: 100
4 | MAX_COINS: 8
5 | PAIRING: USDT
6 | CLEAR_COIN_STATS_AT_BOOT: False
7 | CLEAR_COIN_STATS_AT_SALE: True
8 | DEBUG: True
9 | TRADING_FEE: 0.1
10 | SELL_AS_SOON_IT_DROPS: False # <-- HERE SCALPING MODE
11 | PRICE_LOG_SERVICE_URL: "http://price-log-service:8998/"
12 |
13 | STRATEGY: BuyOnRecoveryAfterDropFromAverageStrategy
14 |
15 |
16 | ANCHORS:
17 | DEFAULTS: &defaults
18 | BUY_AT_PERCENTAGE: -0.1
19 | SELL_AT_PERCENTAGE: +0.1
20 | STOP_LOSS_AT_PERCENTAGE: -1.0
21 | TRAIL_TARGET_SELL_PERCENTAGE: -0.1
22 | TRAIL_RECOVERY_PERCENTAGE: +0.1
23 | SOFT_LIMIT_HOLDING_TIME: 1
24 | HARD_LIMIT_HOLDING_TIME: 100
25 | NAUGHTY_TIMEOUT: 120
26 | KLINES_TREND_PERIOD: 3m
27 | KLINES_SLICE_PERCENTAGE_CHANGE: +0.001
28 |
29 |
30 | TICKERS:
31 | BTCUSDT:
32 | <<: *defaults
33 |
34 | ETHUSDT:
35 | <<: *defaults
36 |
37 | PRICE_LOGS: [
38 | "tests.price.log.gz",
39 | ]
40 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azulinho/cryptobot/ecddc8c6f7144cfcbc7ecb92159f8c08167500a4/tests/__init__.py
--------------------------------------------------------------------------------
/tests/config.yaml:
--------------------------------------------------------------------------------
1 | PAUSE_FOR: 1
2 | INITIAL_INVESTMENT: 100
3 | MAX_COINS: 2
4 | PAIRING: USDT
5 | CLEAR_COIN_STATS_AT_BOOT: True
6 | CLEAR_COIN_STATS_AT_SALE: True
7 | DEBUG: True
8 | TRADING_FEE: 0.1
9 | SELL_AS_SOON_IT_DROPS: False
10 | PRICE_LOG_SERVICE_URL: "http://price-log-service:8998/"
11 |
12 | STRATEGY: TestStrategyBuyDropSellRecovery
13 |
14 | ANCHORS: &defaults
15 | SOFT_LIMIT_HOLDING_TIME: 3600
16 | HARD_LIMIT_HOLDING_TIME: 7200
17 | BUY_AT_PERCENTAGE: -5.0
18 | SELL_AT_PERCENTAGE: +3
19 | STOP_LOSS_AT_PERCENTAGE: -10
20 | TRAIL_TARGET_SELL_PERCENTAGE: -0.5
21 | TRAIL_RECOVERY_PERCENTAGE: +1.0
22 | NAUGHTY_TIMEOUT: 28800
23 | KLINES_TREND_PERIOD: 7d
24 | KLINES_SLICE_PERCENTAGE_CHANGE: +1
25 |
26 |
27 | TICKERS:
28 | BTCUSDT:
29 | <<: *defaults
30 |
31 | ETHUSDT:
32 | <<: *defaults
33 |
34 | BNBUSDT:
35 | <<: *defaults
36 |
37 | DOTUSDT:
38 | <<: *defaults
39 |
40 | ADAUSDT:
41 | <<: *defaults
42 | BUY_AT_PERCENTAGE: -9.0
43 | SELL_AT_PERCENTAGE: +5
44 | STOP_LOSS_AT_PERCENTAGE: -9
45 | TRAIL_TARGET_SELL_PERCENTAGE: -1.0
46 | TRAIL_RECOVERY_PERCENTAGE: +2.5
47 |
48 |
49 | PRICE_LOGS: []
50 |
51 | EXCLUDED_COINS:
52 | - 'DOWNUSDT'
53 | - 'UPUSDT'
54 |
--------------------------------------------------------------------------------
/tests/fake.yaml:
--------------------------------------------------------------------------------
1 | ACCESS_KEY: "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
2 | SECRET_KEY: "yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy"
3 |
--------------------------------------------------------------------------------
/tests/index.json.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azulinho/cryptobot/ecddc8c6f7144cfcbc7ecb92159f8c08167500a4/tests/index.json.gz
--------------------------------------------------------------------------------
/tests/index_v2.json.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azulinho/cryptobot/ecddc8c6f7144cfcbc7ecb92159f8c08167500a4/tests/index_v2.json.gz
--------------------------------------------------------------------------------
/tests/price.log.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azulinho/cryptobot/ecddc8c6f7144cfcbc7ecb92159f8c08167500a4/tests/price.log.gz
--------------------------------------------------------------------------------
/tests/prove-backtesting.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | KIND: "PROVE_BACKTESTING"
3 | PAUSE_FOR: 1
4 | INITIAL_INVESTMENT: 100
5 | RE_INVEST_PERCENTAGE: 100
6 | MAX_COINS: 1
7 | PAIRING: "USDT"
8 | CLEAR_COIN_STATS_AT_BOOT: True
9 | CLEAR_COIN_STATS_AT_SALE: True
10 | DEBUG: False
11 | TRADING_FEE: 0.000001
12 | SELL_AS_SOON_IT_DROPS: True
13 | STOP_BOT_ON_LOSS: False
14 | ENABLE_NEW_LISTING_CHECKS: True
15 | ENABLE_NEW_LISTING_CHECKS_AGE_IN_DAYS: 31
16 | KLINES_CACHING_SERVICE_URL: "http://klines:8999"
17 | PRICE_LOG_SERVICE_URL: "http://price-log-service:8998"
18 | CONCURRENCY: 1
19 |
20 | FILTER_BY: "ETH"
21 | MIN_PROFIT: 1
22 | MIN_WINS: 0
23 | MAX_STALES: 9999
24 | MAX_HOLDS: 9999
25 | MAX_LOSSES: 9999
26 | FROM_DATE: 20211202
27 | END_DATE: 20211206
28 | ROLL_BACKWARDS: 1
29 | ROLL_FORWARD: 1
30 | STOP_BOT_ON_LOSS: false
31 | STOP_BOT_ON_STALE: false
32 | VALID_TOKENS: ["ETH"]
33 |
34 | STRATEGY: BuyDropSellRecoveryStrategy
35 |
36 | COMMON: &COMMON
37 | STOP_LOSS_AT_PERCENTAGE: -35
38 | NAUGHTY_TIMEOUT: 28800
39 | SOFT_LIMIT_HOLDING_TIME: 1
40 | HARD_LIMIT_HOLDING_TIME: 999999
41 | KLINES_TREND_PERIOD: "0h"
42 | KLINES_SLICE_PERCENTAGE_CHANGE: +0.0
43 |
44 | RUNS: &STRATS
45 | run1:
46 | <<: *COMMON
47 | BUY_AT_PERCENTAGE: -9
48 | SELL_AT_PERCENTAGE: +0.5
49 | TRAIL_TARGET_SELL_PERCENTAGE: -0.1
50 | TRAIL_RECOVERY_PERCENTAGE: 0.1
51 | run2:
52 | <<: *COMMON
53 | BUY_AT_PERCENTAGE: -3
54 | SELL_AT_PERCENTAGE: +0.5
55 | TRAIL_TARGET_SELL_PERCENTAGE: -0.1
56 | TRAIL_RECOVERY_PERCENTAGE: 0.1
57 |
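A rough sketch of the rolling windows this config asks for, mirroring the date arithmetic of `rollback_dates_from` / `rollforward_dates_from` in `utils/prove-backtesting.py`; it is an illustration only, not the tool's own code:

```
from datetime import datetime, timedelta

fmt = "%Y%m%d"
from_date, end_date = "20211202", "20211206"   # FROM_DATE / END_DATE above
roll_backwards, roll_forward = 1, 1            # ROLL_BACKWARDS / ROLL_FORWARD

day = datetime.strptime(from_date, fmt)
last = datetime.strptime(end_date, fmt)
while day <= last:
    backtest = [(day - timedelta(days=i)).strftime(fmt)
                for i in range(roll_backwards - 1, -1, -1)]
    forwardtest = [(day + timedelta(days=i)).strftime(fmt)
                   for i in range(1, roll_forward + 1)]
    print(backtest, "->", forwardtest)
    day += timedelta(days=roll_forward)

# ['20211202'] -> ['20211203']
# ['20211203'] -> ['20211204']
# ... up to ['20211206'] -> ['20211207']
```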
--------------------------------------------------------------------------------
/tests/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | ;addopts = --disable-socket --allow-unix-socket
3 | addopts = -n 4
4 | timeout = 10
5 |
--------------------------------------------------------------------------------
/tests/test_klines_caching_service.py:
--------------------------------------------------------------------------------
1 | """ pytests tests for app.py """
2 | # pylint: disable=missing-module-docstring
3 | # pylint: disable=missing-class-docstring
4 | # pylint: disable=missing-function-docstring
5 | # pylint: disable=redefined-outer-name
6 | # pylint: disable=import-outside-toplevel
7 | # pylint: disable=no-self-use
8 |
9 |
10 | class TestKlinesCaching_service:
11 | def test_placeholder(self):
12 | pass
13 |
--------------------------------------------------------------------------------
/tests/test_prove_backtesting.py:
--------------------------------------------------------------------------------
1 | """ test_prove_backtesting """
2 |
3 | import unittest
4 | from typing import Dict
5 | from unittest import mock
6 | from datetime import datetime
7 |
8 | import json
9 | import importlib
10 |
11 | pb = importlib.import_module("utils.prove-backtesting")
12 |
13 | CONFIG: Dict = {
14 | "FILTER_BY": "",
15 | "FROM_DATE": "20180101",
16 | "END_DATE": "20221231",
17 | "ROLL_BACKWARDS": 4,
18 | "ROLL_FORWARD": 3,
19 | "STRATEGY": "BuyDropSellRecoveryStrategy",
20 | "RUNS": {},
21 | "PAUSE_FOR": "0.1",
22 | "INITIAL_INVESTMENT": 100,
23 | "RE_INVEST_PERCENTAGE": 100,
24 | "MAX_COINS": 1,
25 | "PAIRING": "USDT",
26 | "CLEAR_COIN_STATS_AT_BOOT": True,
27 | "CLEAR_COIN_STATS_AT_SALE": True,
28 | "DEBUG": False,
29 | "TRADING_FEE": 0.1,
30 | "SELL_AS_SOON_IT_DROPS": True,
31 | "STOP_BOT_ON_LOSS": False,
32 | "STOP_BOT_ON_STALE": False,
33 | "ENABLE_NEW_LISTING_CHECKS": True,
34 | "ENABLE_NEW_LISTING_CHECKS_AGE_IN_DAYS": 30,
35 | "KLINES_CACHING_SERVICE_URL": "http://klines",
36 | "PRICE_LOG_SERVICE_URL": "http://price-log",
37 | "CONCURRENCY": 1,
38 | "MIN_WINS": 1,
39 | "MIN_PROFIT": 1,
40 | "MAX_LOSSES": 1,
41 | "MAX_STALES": 1,
42 | "MAX_HOLDS": 1,
43 | "VALID_TOKENS": [],
44 | }
45 |
46 |
47 | def mocked_get_index_json_call(_):
48 | """mocks get_index_json"""
49 |
50 | class Obj: # pylint: disable=too-few-public-methods
51 | """mocks get_index_json"""
52 |
53 | def __init__(self):
54 | """mocks get_index_json"""
55 | self.content = json.dumps({})
56 |
57 | return Obj()
58 |
59 |
60 | class TestProveBacktesting(unittest.TestCase):
61 | """Test ProveBacktesting"""
62 |
63 | def test_generate_start_dates(self):
64 | """Test Generate Start Dates"""
65 |
66 | pb.get_index_json = mocked_get_index_json_call
67 | instance = pb.ProveBacktesting(CONFIG)
68 |
69 | # Define input parameters
70 | start_date = datetime(2022, 1, 1)
71 | end_date = datetime(2022, 1, 31)
72 | jump = 7
73 |
74 | # Call the function
75 | result = instance.generate_start_dates(start_date, end_date, jump)
76 |
77 | # Define the expected output
78 | expected_output = [
79 | "20220101",
80 | "20220108",
81 | "20220115",
82 | "20220122",
83 | "20220129",
84 | ]
85 |
86 | # Perform the assertion
87 | self.assertEqual(result, expected_output)
88 |
89 | def test_rollback_dates_from(self):
90 | """test rollback dates from"""
91 | end_date = "20211231"
92 | expected_dates = ["20211228", "20211229", "20211230", "20211231"]
93 |
94 | pb.get_index_json = mocked_get_index_json_call
95 | instance = pb.ProveBacktesting(CONFIG)
96 |
97 | actual_dates = instance.rollback_dates_from(end_date)
98 | self.assertEqual(actual_dates, expected_dates)
99 |
100 | def test_rollforward_dates_from(self):
101 | """test rollforward dates from"""
102 | pb.get_index_json = mocked_get_index_json_call
103 | instance = pb.ProveBacktesting(CONFIG)
104 |
105 | end_date = "20210101"
106 | instance.roll_forward = 3
107 | expected_result = ["20210102", "20210103", "20210104"]
108 | result = instance.rollforward_dates_from(end_date)
109 | self.assertEqual(result, expected_result)
110 |
111 | end_date = "20211230"
112 | instance.roll_forward = 1
113 | expected_result = ["20211231"]
114 | result = instance.rollforward_dates_from(end_date)
115 | self.assertEqual(result, expected_result)
116 |
117 | end_date = "20210131"
118 | instance.roll_forward = 0
119 | expected_result = []
120 | result = instance.rollforward_dates_from(end_date)
121 | self.assertEqual(result, expected_result)
122 |
123 | def test_generate_price_log_list(self):
124 | """test generate price log"""
125 |
126 | dates = ["20210101", "20210102", "20210103"]
127 | symbol = "AAPL"
128 | expected_urls = [
129 | "AAPL/20210101.log.gz",
130 | "AAPL/20210102.log.gz",
131 | "AAPL/20210103.log.gz",
132 | ]
133 |
134 | pb.get_index_json = mocked_get_index_json_call
135 | instance = pb.ProveBacktesting(CONFIG)
136 |
137 | result = instance.generate_price_log_list(dates, symbol)
138 |
139 | # Assert
140 | self.assertEqual(result, expected_urls)
141 |
142 | def test_filter_on_avail_days_with_log(self):
143 | """test filter on avail days with log"""
144 | pb.get_index_json = mocked_get_index_json_call
145 | instance = pb.ProveBacktesting(CONFIG)
146 |
147 | # Define test input
148 | dates = ["2021-01-01", "2021-01-02"]
149 | data = {
150 | "2021-01-01": ["BTCUSDT", "ETHUSDT"],
151 | "2021-01-02": ["BTCUSDT", "ETHUSDT"],
152 | "2021-01-03": ["BTCUSDT", "ETHUSDT"],
153 | }
154 |
155 | # Call the method under test
156 | result = instance.filter_on_avail_days_with_log(dates, data)
157 |
158 | # Define the expected output
159 | expected = {
160 | "BTCUSDT": [
161 | "BTCUSDT/2021-01-01.log.gz",
162 | "BTCUSDT/2021-01-02.log.gz",
163 | ],
164 | "ETHUSDT": [
165 | "ETHUSDT/2021-01-01.log.gz",
166 | "ETHUSDT/2021-01-02.log.gz",
167 | ],
168 | }
169 |
170 | # Assert the result matches the expected output
171 | self.assertEqual(result, expected)
172 |
173 | def test_filter_on_coins_with_min_age_logs(self):
174 |         """test filter on coins with min age logs"""
175 |
176 | # TODO: review this
177 | pb.get_index_json = mocked_get_index_json_call
178 | instance = pb.ProveBacktesting(CONFIG)
179 |
180 | index = {
181 | "20220101": ["BTCUSDT", "ETHUSDT", "BNBUSDT"],
182 | "20220102": ["BTCUSDT", "ETHUSDT"],
183 | "20220103": ["BNBUSDT"],
184 | }
185 | last_day = "20220104"
186 | next_run_coins = {
187 | "BTCUSDT": ["20220101.log.gz", "20220102.log.gz"],
188 | "ETHUSDT": ["20220101.log.gz", "20220102.log.gz"],
189 | "BNBUSDT": ["20220101.log.gz", "20220103.log.gz"],
190 | }
191 | instance.enable_new_listing_checks_age_in_days = 2
192 |
193 | expected_result = {
194 | "BTCUSDT": ["20220101.log.gz", "20220102.log.gz"],
195 | "ETHUSDT": ["20220101.log.gz", "20220102.log.gz"],
196 | "BNBUSDT": ["20220101.log.gz", "20220103.log.gz"],
197 | }
198 |
199 | result = instance.filter_on_coins_with_min_age_logs(
200 | index, last_day, next_run_coins
201 | )
202 | self.assertEqual(result, expected_result)
203 |
204 | @mock.patch("builtins.open")
205 | def test_gather_best_results_from_run(self, mock_open):
206 | """test gather best results from run"""
207 | # Mock the file contents and search results
208 | mock_open.return_value.__enter__.return_value.read.return_value = (
209 | "INFO wins:10 losses:2 stales:3 holds:4 final balance: 100.0"
210 | )
211 |
212 | pb.get_index_json = mocked_get_index_json_call
213 | obj = pb.ProveBacktesting(CONFIG)
214 |
215 | # Set the desired values for min_wins, min_profit, max_losses, max_stales, max_holds
216 | obj.min_wins = 5
217 | obj.min_profit = 50.0
218 | obj.max_losses = 10
219 | obj.max_stales = 10
220 | obj.max_holds = 10
221 |
222 | # Set the desired coin list and run_id
223 | coin_list = {"coin1"}
224 | run_id = "123"
225 |
226 | # Call the method
227 | result = obj.sum_of_results_from_run(coin_list, run_id)
228 |
229 | # Assert the expected results
230 | self.assertEqual(result["total_wins"], 10)
231 | self.assertEqual(result["total_losses"], 2)
232 | self.assertEqual(result["total_stales"], 3)
233 | self.assertEqual(result["total_holds"], 4)
234 | self.assertEqual(result["total_profit"], 100.0)
235 |
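The tests above stub out `get_index_json` by reassigning the module attribute before constructing `ProveBacktesting`. A self-restoring alternative using `unittest.mock` (a sketch reusing `pb`, `CONFIG` and `mocked_get_index_json_call` from this file) could look like:

```
from unittest import mock

with mock.patch.object(
    pb, "get_index_json", side_effect=mocked_get_index_json_call
):
    instance = pb.ProveBacktesting(CONFIG)
# outside the block, the real get_index_json is restored automatically
```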
--------------------------------------------------------------------------------
/tmp/.empty:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Azulinho/cryptobot/ecddc8c6f7144cfcbc7ecb92159f8c08167500a4/tmp/.empty
--------------------------------------------------------------------------------
/tmp/.gitignore:
--------------------------------------------------------------------------------
1 | *
2 |
--------------------------------------------------------------------------------
/utils/README.md:
--------------------------------------------------------------------------------
1 | dedup-logs.py
2 | ===============
3 | Shrinks price.logs generated by cryptobot versions below 0.0.77 by deduplicating
4 | lines for coins whose price has not changed over consecutive reads.
5 |
6 | ```
7 | ls *.log.gz | xargs -i python utils/dedup-logs.py -l {}
8 | ```
9 |
10 | best_runs.py
11 | ==============
12 | Prints the final results from each prove-backtesting run:
13 |
14 | ```
15 | python utils/best_runs.py | sort -n
16 | ```
17 |
--------------------------------------------------------------------------------
/utils/__init__.py:
--------------------------------------------------------------------------------
1 | """ __init__.py """
2 | import importlib
3 |
4 | ProveBacktesting = importlib.import_module("utils.prove-backtesting")
5 | __all__ = ["ProveBacktesting"]
6 |
--------------------------------------------------------------------------------
/utils/best_runs.py:
--------------------------------------------------------------------------------
1 | """
2 | parses prove-backtesting result files and returns:
3 | profit, Strategy, config, min, wins|profit, start, end, forward, backtrack
4 |
5 | 223 BuyDropSellRecoveryStrategy backtesting.9029.yaml min:7 wins 20211108 20220919 f:7 b:7
6 |
7 | """
8 |
9 | import re
10 | from os import listdir
11 | from os.path import isfile, join
12 | from typing import Dict, List
13 |
14 | mypath: str = "./results/"
15 | results_txt: List = [f for f in listdir(mypath) if isfile(join(mypath, f))]
16 |
17 | filename_regex_str: str = (
18 | r"^prove-backtesting\.(.*\.*\.yaml)\.min(\d+)"
19 | + r"\.([wins|profit]+)\.(\d+)_(\d+)\.f(\d+)d\.b(\d+)d\.txt"
20 | )
21 |
22 | final_balance_regex: str = (
23 | r".* PROVE-BACKTESTING: final balance for (.*): (\d+)"
24 | )
25 |
26 | proves_backtesting_files: Dict[str, Dict] = {}
27 |
28 | for result_txt in results_txt:
29 | matches = re.search(filename_regex_str, result_txt)
30 | if matches:
31 | proves_backtesting_files[result_txt] = {}
32 | proves_backtesting_files[result_txt]["strats"] = {}
33 | proves_backtesting_files[result_txt]["config"] = matches.group(1)
34 | proves_backtesting_files[result_txt]["min"] = matches.group(2)
35 | proves_backtesting_files[result_txt]["wins_profit"] = matches.group(3)
36 | proves_backtesting_files[result_txt]["start_date"] = matches.group(4)
37 | proves_backtesting_files[result_txt]["end_date"] = matches.group(5)
38 | proves_backtesting_files[result_txt]["forward"] = matches.group(6)
39 | proves_backtesting_files[result_txt]["backward"] = matches.group(7)
40 |
41 | with open(f"./results/{result_txt}") as f:
42 | lines: List = f.readlines()
43 |
44 |         # skip runs that never finished
45 |         if not lines:
46 |             continue
47 |         if "PROVE-BACKTESTING: FINISHED" not in lines[-1]:
48 |             continue
49 |
50 | with open(f"./results/{result_txt}") as f:
51 | for line in f:
52 | matches = re.search(final_balance_regex, line)
53 | if matches:
54 | strategy: str = matches.group(1)
55 | balance: str = matches.group(2)
56 | proves_backtesting_files[result_txt]["strats"][
57 | strategy
58 | ] = balance
59 |
60 | top_balance: float = float(0)
61 | best_strat: str = ""
62 | for strat in proves_backtesting_files[result_txt]["strats"].keys():
63 | if (
64 | float(proves_backtesting_files[result_txt]["strats"][strat])
65 | > top_balance
66 | ):
67 | best_strat = strat
68 | top_balance = float(
69 | proves_backtesting_files[result_txt]["strats"][strat]
70 | )
71 |
72 | proves_backtesting_files[result_txt]["best"] = best_strat
73 | if proves_backtesting_files[result_txt]["best"] != "":
74 | run = proves_backtesting_files[result_txt]
75 | print(
76 | f"{run['strats'][best_strat]} {run['best']} {run['config']} "
77 | + f"min:{run['min']} {run['wins_profit']} {run['start_date']} "
78 | + f"{run['end_date']} f:{run['forward']} b:{run['backward']}"
79 | )
80 |
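To make the filename pattern above concrete, here is a hypothetical results filename (the name itself is made up; only the structure matters) matched against `filename_regex_str`:

```
import re

filename_regex_str = (
    r"^prove-backtesting\.(.*\.*\.yaml)\.min(\d+)"
    r"\.([wins|profit]+)\.(\d+)_(\d+)\.f(\d+)d\.b(\d+)d\.txt"
)

name = "prove-backtesting.backtesting.9029.yaml.min7.wins.20211108_20220919.f7d.b7d.txt"
print(re.search(filename_regex_str, name).groups())
# ('backtesting.9029.yaml', '7', 'wins', '20211108', '20220919', '7', '7')
```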
--------------------------------------------------------------------------------
/utils/config-endpoint-service.py:
--------------------------------------------------------------------------------
1 | """ config-endpoint-service """
2 | import argparse
3 | import hashlib
4 | import json
5 | import os
6 | import subprocess
7 | import threading
8 | import time
9 | from datetime import datetime, timedelta
10 | from typing import Any, Dict
11 |
12 | import yaml
13 | from flask import Flask, jsonify, Response
14 |
15 | g: Dict[str, Any] = {}
16 | app: Flask = Flask(__name__)
17 |
18 |
19 | def log_msg(msg: str) -> None:
20 | """logs out message prefixed with timestamp"""
21 | now: str = datetime.now().strftime("%H:%M:%S")
22 | print(f"{now} {msg}")
23 |
24 |
25 | def run_prove_backtesting() -> None:
26 | """calls prove-backtesting"""
27 |
28 | yesterday: datetime = datetime.now() - timedelta(days=1)
29 | end_date: str = yesterday.strftime("%Y%m%d")
30 |
31 | with open("configs/CONFIG_ENDPOINT_SERVICE.yaml", "w") as c:
32 | endpoint_config: dict[str, Any] = g["CONFIG"]
33 | endpoint_config["FROM_DATE"] = end_date
34 | endpoint_config["END_DATE"] = end_date
35 |         # prove-backtesting won't take 0, but it doesn't matter as
36 | # we're giving yesterday's date as the start/end date and the logs
37 | # for today (ROLL_FORWARD=1) don't exist yet.
38 | endpoint_config["ROLL_FORWARD"] = int(1)
39 | c.write(json.dumps(endpoint_config))
40 |
41 | subprocess.run(
42 | "python -u utils/prove-backtesting.py "
43 | + "-c configs/CONFIG_ENDPOINT_SERVICE.yaml",
44 | shell=True,
45 | check=False,
46 | )
47 |
48 |
49 | @app.route("/")
50 | def root() -> Response:
51 | """Flask / handler"""
52 | strategy: str = g["CONFIG"]["STRATEGY"]
53 |
54 | with open(f"configs/optimized.{strategy}.yaml") as c:
55 | cfg: Dict[str, Any] = yaml.safe_load(c.read())
56 | hashstr: str = hashlib.md5(
57 | (json.dumps(cfg["TICKERS"], sort_keys=True)).encode("utf-8")
58 | ).hexdigest()
59 | cfg["md5"] = hashstr
60 | return jsonify(cfg)
61 |
62 |
63 | def api_endpoint() -> None:
64 | """runs Flask"""
65 | app.run(debug=True, use_reloader=False, host="0.0.0.0", port=5883)
66 |
67 |
68 | if __name__ == "__main__":
69 | parser: argparse.ArgumentParser = argparse.ArgumentParser()
70 | parser.add_argument(
71 | "-c", "--config", help="prove-backtesting config.yaml file"
72 | )
73 |
74 | args: argparse.Namespace = parser.parse_args()
75 |
76 | with open(args.config, "rt") as f:
77 | config: Any = yaml.safe_load(f.read())
78 |
79 | g["CONFIG"] = config
80 |
81 | t: threading.Thread = threading.Thread(target=api_endpoint)
82 | t.daemon = True
83 | t.start()
84 |
85 | while True:
86 | time.sleep(1)
87 | if os.path.exists("control/RUN"):
88 | log_msg("control/RUN flag found")
89 | os.unlink("control/RUN")
90 | run_prove_backtesting()
91 |
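A sketch of how a client could drive this service: create `control/RUN` to trigger a fresh prove-backtesting pass, then fetch the optimized config from the Flask endpoint (port 5883, as in `app.run` above). The actual consumer is not shown in this file, so the snippet is illustrative only:

```
import pathlib
import requests

# ask the service to regenerate the optimized config on its next loop iteration
pathlib.Path("control/RUN").touch()

# later, fetch the optimized strategy config; "md5" changes whenever TICKERS change
cfg = requests.get("http://localhost:5883/", timeout=10).json()
print(cfg["md5"])
print(list(cfg["TICKERS"].keys()))
```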
--------------------------------------------------------------------------------
/utils/config-endpoint-service.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | ulimit -n 65535
3 | source /cryptobot/.venv/bin/activate
4 | python -u utils/config-endpoint-service.py -c ${CONFIG_FILE}
5 |
--------------------------------------------------------------------------------
/utils/dedup-logs.py:
--------------------------------------------------------------------------------
1 | """ dedup entries in price.logs where the price of a coin hasn't moved """
2 | import argparse
3 | import gzip
4 | import logging
5 | import sys
6 | import traceback
7 | from pathlib import Path
8 |
9 | if __name__ == "__main__":
10 | try:
11 | parser = argparse.ArgumentParser()
12 | parser.add_argument("-l", "--log", help="log")
13 | args = parser.parse_args()
14 |
15 | p = Path(".")
16 |
17 | coin = {}
18 | oldcoin = {}
19 | with gzip.open(str(args.log), "rt") as logfile:
20 | line = logfile.readline()
21 | date = (line.split(" ")[0]).replace("-", "")
22 | fh = open(f"{date}.log.dedup", "wt") # pylint: disable=R1732
23 |
24 | with gzip.open(str(args.log), "rt") as logfile:
25 | for line in logfile:
26 | parts = line.split(" ")
27 | symbol = parts[2]
28 | date = " ".join(parts[0:1])
29 | price = parts[3]
30 |
31 | if symbol not in coin:
32 | coin[symbol] = price
33 | oldcoin[symbol] = str(0)
34 |
35 | if price != oldcoin[symbol]:
36 | fh.write(line)
37 | oldcoin[symbol] = str(price)
38 |
39 | fh.close()
40 | except Exception: # pylint: disable=broad-except
41 | logging.error(traceback.format_exc())
42 | sys.exit(1)
43 |
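The script keeps a line only when a coin's price differs from the previously seen one. A standalone illustration, using the space-separated `date time symbol price` layout that the indexing above implies:

```
lines = [
    "2021-01-01 00:00:01 BTCUSDT 20000.0\n",
    "2021-01-01 00:00:02 BTCUSDT 20000.0\n",   # unchanged price -> dropped
    "2021-01-01 00:00:03 BTCUSDT 20001.0\n",   # price moved -> kept
]

last_price = {}
kept = []
for line in lines:
    parts = line.split(" ")
    symbol, price = parts[2], parts[3]
    if last_price.get(symbol) != price:
        kept.append(line)
        last_price[symbol] = price

print(len(kept))  # 2
```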
--------------------------------------------------------------------------------
/utils/migrate_cache_files.py:
--------------------------------------------------------------------------------
1 | """ migrates the cache files to the new symbol/file layout """
2 | from os import listdir, mkdir
3 | from os.path import exists
4 | from shutil import move
5 |
6 | cache_files = listdir("cache")
7 | for file in cache_files:
8 | if "precision" in file:
9 | continue
10 | symbol = file.split(".")[0]
11 | if not exists(f"cache/{symbol}"):
12 | print(f"creating cache/{symbol}")
13 | mkdir(f"cache/{symbol}")
14 | if file == symbol:
15 | continue
16 |
17 | print(f"moving cache/{file}")
18 | move(f"cache/{file}", f"cache/{symbol}/{file}")
19 |
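A dry-run sketch of what the migration above does; the file names here are hypothetical, the script itself only relies on `SYMBOL.<suffix>` naming and skips files containing `precision`:

```
for name in ["BTCUSDT.klines", "ETHUSDT.klines", "BTCUSDT.precision"]:
    if "precision" in name:
        print(f"cache/{name} -> left in place")
        continue
    symbol = name.split(".")[0]
    print(f"cache/{name} -> cache/{symbol}/{name}")
```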
--------------------------------------------------------------------------------
/utils/prove-backtesting.py:
--------------------------------------------------------------------------------
1 | """ prove backtesting """
2 | import glob
3 | import json
4 | import os
5 | import re
6 | import subprocess
7 | import sys
8 | from argparse import ArgumentParser, Namespace
9 | from datetime import datetime, timedelta
10 | from itertools import islice
11 | from multiprocessing import Pool
12 | from string import Template
13 | from time import sleep
14 | from typing import Any, Dict, List, Optional, Set
15 | from collections import OrderedDict
16 |
17 | import pandas as pd
18 | import requests
19 | import yaml
20 | from tenacity import retry, wait_fixed, stop_after_attempt
21 |
22 |
23 | @retry(wait=wait_fixed(30), stop=stop_after_attempt(3))
24 | def get_index_json(query: str) -> requests.Response:
25 | """retry wrapper for requests calls"""
26 | response: requests.Response = requests.get(query, timeout=15)
27 | status: int = response.status_code
28 | if status != 200:
29 | with open("log/price_log_service.response.log", "at") as l:
30 | l.write(f"{query} {status} {response}\n")
31 | response.raise_for_status()
32 | return response
33 |
34 |
35 | def log_msg(msg: str) -> None:
36 | """logs out message prefixed with timestamp"""
37 | now: str = datetime.now().strftime("%H:%M:%S")
38 | print(f"{now} PROVE-BACKTESTING: {msg}")
39 |
40 |
41 | def cleanup() -> None:
42 | """clean files"""
43 | for item in glob.glob("configs/coin.*.yaml"):
44 | os.remove(item)
45 | for item in glob.glob("results/backtesting.coin.*.txt"):
46 | os.remove(item)
47 | for item in glob.glob("results/backtesting.coin.*.log.gz"):
48 | os.remove(item)
49 | if os.path.exists("log/backtesting.log"):
50 | os.remove("log/backtesting.log")
51 |
52 |
53 | def flag_checks() -> None:
54 | """checks for flags in control/"""
55 | while os.path.exists("control/PAUSE"):
56 | log_msg("control/PAUSE flag found. Sleeping 1min.")
57 | sleep(60)
58 |
59 |
60 | def wrap_subprocessing(conf: str, timeout: Optional[int] = 0) -> None:
61 | """wraps subprocess call"""
62 | if timeout == 0:
63 | timeout = None
64 | subprocess.run(
65 | "python app.py -m backtesting -s tests/fake.yaml "
66 | + f"-c configs/{conf} >results/backtesting.{conf}.txt 2>&1",
67 | shell=True,
68 | timeout=timeout,
69 | check=False,
70 | )
71 |
72 |
73 | class ProveBacktesting:
74 | """ProveBacktesting"""
75 |
76 | def __init__(self, cfg: Dict[str, Any]) -> None:
77 | """init"""
78 | self.filter_by: str = cfg["FILTER_BY"]
79 | self.from_date: datetime = datetime.strptime(
80 | str(cfg["FROM_DATE"]), "%Y%m%d"
81 | )
82 | self.end_date: datetime = datetime.strptime(
83 | str(cfg["END_DATE"]), "%Y%m%d"
84 | )
85 | self.roll_backwards: int = int(cfg["ROLL_BACKWARDS"])
86 | self.roll_forward: int = int(cfg["ROLL_FORWARD"])
87 | self.strategy: str = cfg["STRATEGY"]
88 | self.runs: Dict[str, Any] = dict(cfg["RUNS"])
89 | self.pause_for: float = float(cfg["PAUSE_FOR"])
90 | self.initial_investment: float = float(cfg["INITIAL_INVESTMENT"])
91 | self.re_invest_percentage: float = float(cfg["RE_INVEST_PERCENTAGE"])
92 | self.max_coins: int = int(cfg["MAX_COINS"])
93 | self.pairing: str = str(cfg["PAIRING"])
94 | self.clear_coin_stats_at_boot: bool = bool(
95 | cfg["CLEAR_COIN_STATS_AT_BOOT"]
96 | )
97 | self.clear_coin_stats_at_sale: bool = bool(
98 | cfg["CLEAR_COIN_STATS_AT_SALE"]
99 | )
100 | self.debug: bool = bool(cfg["DEBUG"])
101 | self.trading_fee: float = float(cfg["TRADING_FEE"])
102 | self.sell_as_soon_it_drops: bool = bool(cfg["SELL_AS_SOON_IT_DROPS"])
103 | self.stop_bot_on_loss: bool = bool(cfg["STOP_BOT_ON_LOSS"])
104 | self.stop_bot_on_stale: bool = bool(cfg["STOP_BOT_ON_STALE"])
105 | self.enable_new_listing_checks: bool = bool(
106 | cfg["ENABLE_NEW_LISTING_CHECKS"]
107 | )
108 | self.enable_new_listing_checks_age_in_days: int = int(
109 | cfg["ENABLE_NEW_LISTING_CHECKS_AGE_IN_DAYS"]
110 | )
111 | self.klines_caching_service_url: str = cfg[
112 | "KLINES_CACHING_SERVICE_URL"
113 | ]
114 | self.price_log_service_url: str = cfg["PRICE_LOG_SERVICE_URL"]
115 | self.concurrency: int = int(cfg["CONCURRENCY"])
116 | self.start_dates: List[str] = self.generate_start_dates(
117 | self.from_date, self.end_date, self.roll_forward
118 | )
119 | self.min_wins: int = int(cfg["MIN_WINS"])
120 | self.min_profit: float = float(cfg["MIN_PROFIT"])
121 | self.max_losses: int = int(cfg["MAX_LOSSES"])
122 | self.max_stales: int = int(cfg["MAX_STALES"])
123 | self.max_holds: int = int(cfg["MAX_HOLDS"])
124 | self.valid_tokens: list[str] = cfg.get("VALID_TOKENS", [])
125 |
126 | self.index_json: Dict[str, Any] = json.loads(
127 | get_index_json(
128 | f"{self.price_log_service_url}/index_v2.json.gz"
129 | ).content
130 | )
131 | self.cfg: Dict[str, Any] = cfg
132 |
133 | def generate_start_dates(
134 | self, start_date: datetime, end_date: datetime, jump: Optional[int] = 7
135 | ) -> List[str]:
136 | """returns a list of dates, with a gap in 'jump' days"""
137 | dates = pd.date_range(start_date, end_date, freq="d").strftime(
138 | "%Y%m%d"
139 | )
140 | start_dates: List[str] = list(islice(dates, 0, None, jump))
141 | return start_dates
142 |
143 | def rollback_dates_from(self, end_date: str) -> List[str]:
144 | """returns a list of dates, up to 'days' before the 'end_date'"""
145 | dates: List[str] = (
146 | pd.date_range(
147 | datetime.strptime(str(end_date), "%Y%m%d")
148 | - timedelta(days=self.roll_backwards - 1),
149 | end_date,
150 | freq="d",
151 | )
152 | .strftime("%Y%m%d")
153 | .tolist()
154 | )
155 | return dates
156 |
157 | def rollforward_dates_from(self, end_date: str) -> List[str]:
158 | """returns a list of dates, up to 'days' past the 'end_date'"""
159 | start_date: datetime = datetime.strptime(
160 | str(end_date), "%Y%m%d"
161 | ) + timedelta(days=1)
162 | _end_date: datetime = datetime.strptime(
163 | str(end_date), "%Y%m%d"
164 | ) + timedelta(days=self.roll_forward)
165 | dates: List[str] = (
166 | pd.date_range(start_date, _end_date, freq="d")
167 | .strftime("%Y%m%d")
168 | .tolist()
169 | )
170 | return dates
171 |
172 | def generate_price_log_list(
173 | self, dates: List[str], symbol: Optional[str] = None
174 | ) -> List[str]:
175 | """makes up the price log url list"""
176 | urls: List[str] = []
177 | for day in dates:
178 | if symbol:
179 | if self.filter_by in symbol:
180 | if self.valid_tokens != []:
181 | if symbol in " ".join(
182 | [f"{v}{self.pairing}" for v in self.valid_tokens]
183 | ):
184 | urls.append(f"{symbol}/{day}.log.gz")
185 | else:
186 | urls.append(f"{symbol}/{day}.log.gz")
187 | else:
188 |                 # TODO: validate that this logfile actually exists
189 | urls.append(f"{day}.log.gz")
190 | return urls
191 |
192 | def write_single_coin_config(
193 | self, symbol: str, _price_logs: List[str], thisrun: Dict[str, Any]
194 | ) -> None:
195 | """generates a config.yaml for a coin"""
196 |
197 | if self.filter_by not in symbol:
198 | return
199 |
200 | tmpl: Template = Template(
201 | """{
202 | "CLEAR_COIN_STATS_AT_BOOT": $CLEAR_COIN_STATS_AT_BOOT,
203 | "CLEAR_COIN_STATS_AT_SALE": $CLEAR_COIN_STATS_AT_SALE,
204 | "DEBUG": $DEBUG,
205 | "ENABLE_NEW_LISTING_CHECKS": $ENABLE_NEW_LISTING_CHECKS,
206 | "ENABLE_NEW_LISTING_CHECKS_AGE_IN_DAYS": $ENABLE_NEW_LISTING_CHECKS_AGE_IN_DAYS,
207 | "INITIAL_INVESTMENT": $INITIAL_INVESTMENT,
208 | "KLINES_CACHING_SERVICE_URL": "$KLINES_CACHING_SERVICE_URL",
209 | "MAX_COINS": 1,
210 | "PAIRING": "$PAIRING",
211 | "PAUSE_FOR": $PAUSE_FOR,
212 | "PRICE_LOGS": $PRICE_LOGS,
213 | "PRICE_LOG_SERVICE_URL": "$PRICE_LOG_SERVICE_URL",
214 | "RE_INVEST_PERCENTAGE": $RE_INVEST_PERCENTAGE,
215 | "SELL_AS_SOON_IT_DROPS": $SELL_AS_SOON_IT_DROPS,
216 | "STOP_BOT_ON_LOSS": $STOP_BOT_ON_LOSS,
217 | "STOP_BOT_ON_STALE": $STOP_BOT_ON_STALE,
218 | "STRATEGY": "$STRATEGY",
219 | "TICKERS": {
220 | "$COIN": {
221 | "BUY_AT_PERCENTAGE": "$BUY_AT_PERCENTAGE",
222 | "SELL_AT_PERCENTAGE": "$SELL_AT_PERCENTAGE",
223 | "STOP_LOSS_AT_PERCENTAGE": "$STOP_LOSS_AT_PERCENTAGE",
224 | "TRAIL_TARGET_SELL_PERCENTAGE": "$TRAIL_TARGET_SELL_PERCENTAGE",
225 | "TRAIL_RECOVERY_PERCENTAGE": "$TRAIL_RECOVERY_PERCENTAGE",
226 | "SOFT_LIMIT_HOLDING_TIME": "$SOFT_LIMIT_HOLDING_TIME",
227 | "HARD_LIMIT_HOLDING_TIME": "$HARD_LIMIT_HOLDING_TIME",
228 | "NAUGHTY_TIMEOUT": "$NAUGHTY_TIMEOUT",
229 | "KLINES_TREND_PERIOD": "$KLINES_TREND_PERIOD",
230 | "KLINES_SLICE_PERCENTAGE_CHANGE": "$KLINES_SLICE_PERCENTAGE_CHANGE"
231 | }
232 | },
233 | "TRADING_FEE": $TRADING_FEE,
234 | }"""
235 | )
236 |
237 | # on our coin backtesting runs, we want to quit early if we are using
238 | # a mode that discards runs with STALES or LOSSES
239 | stop_bot_on_loss = False
240 | stop_bot_on_stale = False
241 |
242 | if self.max_losses == 0:
243 | stop_bot_on_loss = True
244 | if self.max_stales == 0:
245 | stop_bot_on_stale = True
246 |
247 | with open(f"configs/coin.{symbol}.yaml", "wt") as c:
248 | c.write(
249 | tmpl.substitute(
250 | {
251 | "CLEAR_COIN_STATS_AT_BOOT": True,
252 | "CLEAR_COIN_STATS_AT_SALE": self.clear_coin_stats_at_sale,
253 | "COIN": symbol,
254 | "DEBUG": self.debug,
255 | "ENABLE_NEW_LISTING_CHECKS": False,
256 | "ENABLE_NEW_LISTING_CHECKS_AGE_IN_DAYS": 1,
257 | "INITIAL_INVESTMENT": self.initial_investment,
258 | "KLINES_CACHING_SERVICE_URL": self.klines_caching_service_url,
259 | # each coin backtesting run should only use one coin
260 | # MAX_COINS will only be applied to the final optimized run
261 | "MAX_COINS": 1,
262 | "PAIRING": self.pairing,
263 | "PAUSE_FOR": self.pause_for,
264 | "PRICE_LOGS": _price_logs,
265 | "PRICE_LOG_SERVICE_URL": self.price_log_service_url,
266 | "RE_INVEST_PERCENTAGE": 100,
267 | "SELL_AS_SOON_IT_DROPS": self.sell_as_soon_it_drops,
268 | "STOP_BOT_ON_LOSS": stop_bot_on_loss,
269 | "STOP_BOT_ON_STALE": stop_bot_on_stale,
270 | "STRATEGY": self.strategy,
271 | "TRADING_FEE": self.trading_fee,
272 | "BUY_AT_PERCENTAGE": thisrun["BUY_AT_PERCENTAGE"],
273 | "SELL_AT_PERCENTAGE": thisrun["SELL_AT_PERCENTAGE"],
274 | "STOP_LOSS_AT_PERCENTAGE": thisrun[
275 | "STOP_LOSS_AT_PERCENTAGE"
276 | ],
277 | "TRAIL_TARGET_SELL_PERCENTAGE": thisrun[
278 | "TRAIL_TARGET_SELL_PERCENTAGE"
279 | ],
280 | "TRAIL_RECOVERY_PERCENTAGE": thisrun[
281 | "TRAIL_RECOVERY_PERCENTAGE"
282 | ],
283 | "SOFT_LIMIT_HOLDING_TIME": thisrun[
284 | "SOFT_LIMIT_HOLDING_TIME"
285 | ],
286 | "HARD_LIMIT_HOLDING_TIME": thisrun[
287 | "HARD_LIMIT_HOLDING_TIME"
288 | ],
289 | "NAUGHTY_TIMEOUT": thisrun["NAUGHTY_TIMEOUT"],
290 | "KLINES_TREND_PERIOD": thisrun["KLINES_TREND_PERIOD"],
291 | "KLINES_SLICE_PERCENTAGE_CHANGE": thisrun[
292 | "KLINES_SLICE_PERCENTAGE_CHANGE"
293 | ],
294 | }
295 | )
296 | )
297 |
298 | def write_optimized_strategy_config(
299 | self,
300 | _price_logs: List[str],
301 | _tickers: Dict[str, Any],
302 | s_balance: float,
303 | ) -> None:
304 | """generates a config.yaml for forwardtesting optimized run"""
305 |
306 |         # we keep "state" between optimized runs by soaking up an existing
307 |         # optimized config file and an existing wallet.json file.
308 |         # while this could cause the bot, as it starts to run, to pull old
309 |         # optimized config files from previous runs, we only consume those to
310 |         # match ticker info against the contents of our wallet.json, and we
311 |         # clean up the json files at the start and end of prove-backtesting,
312 |         # so we don't expect to ever consume old ticker info from an old
313 |         # config file.
314 | old_tickers: Dict[str, Any] = {}
315 | old_wallet: List[str] = []
316 | if os.path.exists(f"configs/optimized.{self.strategy}.yaml"):
317 | with open(
318 | f"configs/optimized.{self.strategy}.yaml", encoding="utf-8"
319 | ) as c:
320 | old_tickers = yaml.safe_load(c.read())["TICKERS"]
321 |
322 | if os.path.exists(f"tmp/optimized.{self.strategy}.yaml.wallet.json"):
323 | with open(f"tmp/optimized.{self.strategy}.yaml.wallet.json") as w:
324 | old_wallet = json.load(w)
325 |
326 | # now generate tickers from the contents of our wallet and the previous
327 | # config file, we will merge this with a new config file.
328 | x: Dict[str, Any] = {}
329 | for symbol in old_wallet:
330 | x[symbol] = old_tickers[symbol]
331 |
332 | log_msg(f" wallet: {old_wallet}")
333 |
334 | z: Dict[str, Any] = _tickers | x
335 | _tickers = z
336 | log_msg(f" tickers: {_tickers}")
337 |
338 | tmpl: Template = Template(
339 | """{
340 | "CLEAR_COIN_STATS_AT_BOOT": $CLEAR_COIN_STATS_AT_BOOT,
341 | "CLEAR_COIN_STATS_AT_SALE": $CLEAR_COIN_STATS_AT_SALE,
342 | "DEBUG": $DEBUG,
343 | "ENABLE_NEW_LISTING_CHECKS": $ENABLE_NEW_LISTING_CHECKS,
344 | "ENABLE_NEW_LISTING_CHECKS_AGE_IN_DAYS": $ENABLE_NEW_LISTING_CHECKS_AGE_IN_DAYS,
345 | "INITIAL_INVESTMENT": $INITIAL_INVESTMENT,
346 | "KLINES_CACHING_SERVICE_URL": "$KLINES_CACHING_SERVICE_URL",
347 | "MAX_COINS": $MAX_COINS,
348 | "PAIRING": "$PAIRING",
349 | "PAUSE_FOR": $PAUSE_FOR,
350 | "PRICE_LOGS": $PRICE_LOGS,
351 | "PRICE_LOG_SERVICE_URL": "$PRICE_LOG_SERVICE_URL",
352 | "RE_INVEST_PERCENTAGE": $RE_INVEST_PERCENTAGE,
353 | "SELL_AS_SOON_IT_DROPS": $SELL_AS_SOON_IT_DROPS,
354 | "STOP_BOT_ON_LOSS": $STOP_BOT_ON_LOSS,
355 | "STOP_BOT_ON_STALE": $STOP_BOT_ON_STALE,
356 | "STRATEGY": "$STRATEGY",
357 | "TICKERS": $TICKERS,
358 | "TRADING_FEE": $TRADING_FEE
359 | }"""
360 | )
361 |
362 | with open(f"configs/optimized.{self.strategy}.yaml", "wt") as c:
363 | c.write(
364 | tmpl.substitute(
365 | {
366 | "CLEAR_COIN_STATS_AT_BOOT": self.clear_coin_stats_at_boot,
367 | "CLEAR_COIN_STATS_AT_SALE": self.clear_coin_stats_at_sale,
368 | "DEBUG": self.debug,
369 | "ENABLE_NEW_LISTING_CHECKS": self.enable_new_listing_checks,
370 | "ENABLE_NEW_LISTING_CHECKS_AGE_IN_DAYS": self.enable_new_listing_checks_age_in_days, # pylint: disable=line-too-long
371 | "INITIAL_INVESTMENT": s_balance,
372 | "KLINES_CACHING_SERVICE_URL": self.klines_caching_service_url,
373 | "MAX_COINS": self.max_coins,
374 | "PAIRING": self.pairing,
375 | "PAUSE_FOR": self.pause_for,
376 | "PRICE_LOGS": _price_logs,
377 | "PRICE_LOG_SERVICE_URL": self.price_log_service_url,
378 | "RE_INVEST_PERCENTAGE": self.re_invest_percentage,
379 | "SELL_AS_SOON_IT_DROPS": self.sell_as_soon_it_drops,
380 | "STOP_BOT_ON_LOSS": self.stop_bot_on_loss,
381 | "STOP_BOT_ON_STALE": self.stop_bot_on_stale,
382 | "STRATEGY": self.strategy,
383 | "TICKERS": _tickers,
384 | "TRADING_FEE": self.trading_fee,
385 | }
386 | )
387 | )
388 |
389 | def filter_on_avail_days_with_log(
390 | self, dates: List[str], data: Dict[str, Any]
391 | ) -> Dict[str, Any]:
392 | """build a dictionary with all the coins that have price log entries
393 | available for the dates we asked to backtest.
394 | then append the list of available price logs to that { coin: [] }
395 | """
396 |
397 | next_run_coins: Dict[str, Any] = {}
398 |
399 | for day in data.keys():
400 | if day in dates:
401 | for coin in data[day]:
402 | # discard any BULL/BEAR tokens
403 | if any(
404 | f"{w}{self.pairing}" in coin
405 | for w in ["UP", "DOWN", "BULL", "BEAR"]
406 | ) or any(
407 | f"{self.pairing}{w}" in coin
408 | for w in ["UP", "DOWN", "BULL", "BEAR"]
409 | ):
410 | continue
411 | if (
412 | self.filter_by in coin
413 | and self.pairing in coin
414 | and coin.endswith(self.pairing)
415 | ):
416 | if coin not in next_run_coins:
417 | next_run_coins[coin] = []
418 | next_run_coins[coin].append(f"{coin}/{day}.log.gz")
419 |
420 | return next_run_coins
421 |
422 | def filter_on_coins_with_min_age_logs(
423 | self,
424 | index: Dict[str, Any],
425 | last_day: str,
426 | next_run_coins: Dict[str, Any],
427 | ) -> Dict[str, Any]:
428 | """from the dict containing all the coins and the price logs to test,
429 | drop any coin that doesn't have the required number of logs
430 | as per the enable_new_listing_checks_age_in_days setting
431 | """
432 |
433 | all_logs: Dict[str, Any] = {}
434 |
435 | # from the dict containing all the coins and the price logs to test,
436 | # drop any coin that doesn't have the required number of logs
437 | # as per the enable_new_listing_checks_age_in_days setting
438 | for day in index.keys():
439 | # skip any empty dates in index.json.gz
440 | if not index[day]:
441 | continue
442 | # we need to make sure we don't keep dates past the last day
443 | # we're backtesting
444 | if datetime.strptime(day, "%Y%m%d") > datetime.strptime(
445 | last_day, "%Y%m%d"
446 | ):
447 | continue
448 | for coin in list(next_run_coins.keys()):
449 | if coin not in all_logs:
450 | all_logs[coin] = []
451 | all_logs[coin].append(f"{coin}/{day}.log.gz")
452 |
453 | for coin in list(next_run_coins.keys()):
454 | if (
455 | len(all_logs[coin])
456 | <= self.enable_new_listing_checks_age_in_days
457 | ):
458 | del next_run_coins[coin]
459 |
460 | return next_run_coins
461 |
462 | def write_all_coin_configs(
463 | self, dates: List[str], thisrun: Dict[str, Any]
464 | ) -> Set[str]:
465 | """generate all coinfiles"""
466 |
467 | index_dates = self.index_json["DATES"]
468 |
469 | next_run_coins: Dict[str, Any] = self.filter_on_avail_days_with_log(
470 | dates, index_dates
471 | )
472 |
473 | if self.valid_tokens != []:
474 | for coin in list(next_run_coins.keys()):
475 | if self.filter_by not in coin:
476 | del next_run_coins[coin]
477 |                 elif coin not in " ".join(
478 | [f"{v}{self.pairing}" for v in self.valid_tokens]
479 | ):
480 | del next_run_coins[coin]
481 |
482 | if self.enable_new_listing_checks:
483 | next_run_coins = self.filter_on_coins_with_min_age_logs(
484 | index_dates, dates[-1], next_run_coins
485 | )
486 |
487 | for coin, _price_logs in next_run_coins.items():
488 | self.write_single_coin_config(coin, _price_logs, thisrun)
489 |
490 | return set(next_run_coins.keys())
491 |
492 | def parallel_backtest_all_coins(
493 | self, _coin_list: Set[str], n_tasks: int, _run: str
494 | ) -> Dict[str, Any]:
495 | """parallel_backtest_all_coins"""
496 |
497 | tasks: List[Any] = []
498 | with Pool(processes=n_tasks) as pool:
499 | for coin in _coin_list:
500 | if self.filter_by in coin and self.pairing in coin:
501 |                     # then we backtest this strategy run against each coin
502 |                     # occasionally we get stuck runs, so we time out a coin run
503 |                     # at a maximum of 15 minutes
504 | job: Any = pool.apply_async(
505 | wrap_subprocessing,
506 | (f"coin.{coin}.yaml",),
507 | )
508 | tasks.append(job)
509 |
510 | for t in tasks:
511 | try:
512 | t.get()
513 | except subprocess.TimeoutExpired as excp:
514 | log_msg(f"timeout while running: {excp}")
515 |
516 | for coin in _coin_list:
517 | try:
518 | os.remove(f"tmp/coin.{coin}.yaml.coins.json")
519 | os.remove(f"tmp/coin.{coin}.yaml.wallet.json")
520 | os.remove(f"tmp/coin.{coin}.yaml.results.json")
521 | except: # pylint: disable=bare-except
522 | pass
523 |
524 | return self.sum_of_results_from_run(_coin_list, _run)
525 |
526 | def sum_of_results_from_run(
527 | self, _coin_list: Set[str], run_id: str
528 | ) -> Dict[str, Any]:
529 | """finds the best results across all coins from this run"""
530 | wins_re: str = r".*INFO.*\swins:([0-9]+)\slosses:([0-9]+)\sstales:([0-9]+)\sholds:([0-9]+)"
531 | balance_re: str = r".*INFO.*final\sbalance:\s(-?[0-9]+\.[0-9]+)"
532 |
533 | highest_profit: float = float(0)
534 | coin_with_highest_profit: str = ""
535 |
536 | _run: Dict[str, Any] = {}
537 | _run["total_wins"] = 0
538 | _run["total_losses"] = 0
539 | _run["total_stales"] = 0
540 | _run["total_holds"] = 0
541 | _run["total_profit"] = 0
542 |
543 | # TODO: parsing logfiles is not nice, rework this in app.py
544 | for symbol in _coin_list:
545 | results_txt: str = f"results/backtesting.coin.{symbol}.yaml.txt"
546 | with open(results_txt) as r:
547 | run_results: str = r.read()
548 |
549 | try:
550 | wins, losses, stales, holds = re.search(
551 | wins_re, run_results
552 | ).groups() # type: ignore
553 | balance = float(
554 | re.search(balance_re, run_results).groups()[0] # type: ignore
555 | )
556 | except AttributeError as e:
557 | log_msg(
558 | f"Exception while collecting results from {results_txt}"
559 | )
560 | log_msg(str(e))
561 | log_msg(f"Contents of file below: \n{run_results}")
562 | wins, losses, stales, holds = [0, 0, 0, 0]
563 | balance = float(0)
564 |
565 | if (
566 | (int(wins) >= self.min_wins)
567 | and (float(balance) >= self.min_profit)
568 | and (int(losses) <= self.max_losses)
569 | and (int(stales) <= self.max_stales)
570 | and (int(holds) <= self.max_holds)
571 | ):
572 | _run["total_wins"] += int(wins)
573 | _run["total_losses"] += int(losses)
574 | _run["total_stales"] += int(stales)
575 | _run["total_holds"] += int(holds)
576 | _run["total_profit"] += float(balance)
577 |
578 | if balance > highest_profit:
579 | coin_with_highest_profit = symbol
580 | highest_profit = float(balance)
581 |
582 | log_msg(
583 | f" {run_id}: sum of all coins profit:{_run['total_profit']:.3f}|"
584 | + f"w:{_run['total_wins']},l:{_run['total_losses']},"
585 | + f"s:{_run['total_stales']},h:{_run['total_holds']}|"
586 | + "coin with highest profit:"
587 | + f"{coin_with_highest_profit}:{highest_profit:.3f}"
588 | )
589 | return _run
590 |
591 | def find_best_results_from_backtesting_log(
592 | self, kind: str
593 | ) -> Dict[str, Any]:
594 | """parses backtesting.log for the best result for a coin"""
595 |
596 | coins: OrderedDict = OrderedDict()
597 | _results: dict = {}
598 | log: str = "log/backtesting.log"
599 | if os.path.exists(log):
600 | with open(log, encoding="utf-8") as lines:
601 | for line in lines:
602 | _profit, _, _, wls, cfgname, _cfg = line[7:].split("|")
603 |                     if self.filter_by not in cfgname:
604 | continue
605 |                     profit = float(_profit)
606 |
607 | coin = cfgname[9:].split(".")[0]
608 | w, l, s, h = [int(x[1:]) for x in wls.split(",")]
609 |
610 | if (
611 | (int(w) < self.min_wins)
612 | or (float(profit) < self.min_profit)
613 | or (int(l) > self.max_losses)
614 | or (int(s) > self.max_stales)
615 | or (int(h) > self.max_holds)
616 | ):
617 | continue
618 |
619 |                     blob = json.loads(_cfg)
620 |                     if "TICKERS" in blob:
621 |                         coincfg = blob["TICKERS"][coin]
622 |                     else:
623 |                         # entries without a TICKERS section have no
624 |                         # per-coin config to collect, skip them
625 |                         continue
626 |
627 | if coin not in coins:
628 | coins[coin] = {
629 | "profit": profit,
630 | "wls": wls,
631 | "w": w,
632 | "l": l,
633 | "s": s,
634 | "h": h,
635 | "cfgname": cfgname,
636 | "coincfg": coincfg,
637 | }
638 |
639 | if coin in coins:
640 | if profit > coins[coin]["profit"]:
641 | coins[coin] = {
642 | "profit": profit,
643 | "wls": wls,
644 | "w": w,
645 | "l": l,
646 | "s": s,
647 | "h": h,
648 | "cfgname": cfgname,
649 | "coincfg": coincfg,
650 | }
651 |
652 | _coins: dict = coins
653 | coins = OrderedDict(
654 | sorted(_coins.items(), key=lambda x: x[1]["w"])
655 | )
656 | for coin in coins:
657 | if kind == "coincfg":
658 | _results[coin] = coins[coin]["coincfg"]
659 | return _results
660 |
661 | def log_best_run_results(self, this: Dict[str, Any]) -> None:
662 | """finds and logs the best results in the strategy"""
663 | best_run: str = ""
664 |         best_profit_in_runs: float = 0.0
665 | for _run in this.keys():
666 | if this[_run]["total_profit"] >= best_profit_in_runs:
667 | best_run = _run
668 | best_profit_in_runs = this[_run]["total_profit"]
669 | log_msg(
670 | f"{self.strategy} best run {best_run} profit: {best_profit_in_runs:.3f}"
671 | )
672 |
673 | def run_optimized_config(self) -> float:
674 | """runs optimized config"""
675 | with open(f"configs/optimized.{self.strategy}.yaml") as cf:
676 | _tickers: Dict[str, Any] = yaml.safe_load(cf.read())["TICKERS"]
677 |
678 | wrap_subprocessing(f"optimized.{self.strategy}.yaml")
679 | with open(
680 | f"results/backtesting.optimized.{self.strategy}.yaml.txt"
681 | ) as results_txt:
682 | r = results_txt.read()
683 |
684 | end_investment = float(
685 |             re.findall(r"investment: start: .* end: (\d+\.?\d*)", r)[0]
686 | )
687 |
688 | log_msg(
689 | f" final investment for {self.strategy}: {str(end_investment)}"
690 | )
691 |
692 | return end_investment
693 |
694 |
695 | if __name__ == "__main__":
696 | for f in glob.glob("tmp/*"):
697 | os.remove(f)
698 |
699 | parser: ArgumentParser = ArgumentParser()
700 | parser.add_argument("-c", "--cfgs", help="backtesting cfg")
701 | args: Namespace = parser.parse_args()
702 |
703 | with open(args.cfgs, encoding="utf-8") as _c:
704 | config: Any = yaml.safe_load(_c.read())
705 |
706 | if config["KIND"] != "PROVE_BACKTESTING":
707 |         log_msg("Incorrect KIND: expected PROVE_BACKTESTING")
708 | sys.exit(1)
709 |
710 | cleanup()
711 |     if os.path.exists("state/binance.client.lockfile"):
712 |         os.remove("state/binance.client.lockfile")
713 | for f in glob.glob("tmp/*"):
714 | os.remove(f)
715 | for f in glob.glob("configs/coin.*.yaml"):
716 | os.remove(f)
717 |
718 | n_cpus: Optional[int] = os.cpu_count()
719 |
720 | pv: ProveBacktesting = ProveBacktesting(config)
721 |
722 |     # report the full run window derived from the generated start_dates
723 | log_msg(
724 | f"running from {pv.start_dates[0]} to {pv.start_dates[-1]} "
725 | + f"backtesting previous {pv.roll_backwards} days "
726 | + f"every {pv.roll_forward} days"
727 | )
728 | final_investment: float = pv.initial_investment
729 | starting_investment: float = pv.initial_investment
730 | for date in pv.start_dates:
731 | cleanup()
732 |
733 | rollbackward_dates: List[str] = pv.rollback_dates_from(date)
734 | log_msg(
735 | f"now backtesting {rollbackward_dates[0]}...{rollbackward_dates[-1]}"
736 | )
737 |
738 | results: Dict[str, Any] = {}
739 | for run in pv.runs:
740 | flag_checks()
741 | # TODO: do we consume the price_logs ?
742 | coin_list: Set[str] = pv.write_all_coin_configs(
743 | rollbackward_dates, pv.runs[run]
744 | )
745 | results[run] = pv.parallel_backtest_all_coins(
746 | coin_list, pv.concurrency, run
747 | )
748 |
749 | pv.log_best_run_results(results)
750 |
751 | # using the backtesting.log, we now build the list of tickers
752 | # we will be using in forwardtesting
753 | tickers = pv.find_best_results_from_backtesting_log("coincfg")
754 | cleanup()
755 |
756 |     # figure out the next block of dates for our forwardtesting
757 | rollforward_dates: List[str] = pv.rollforward_dates_from(date)
758 |
759 | # and generate the list of price logs to use from those dates
760 | price_logs = pv.generate_price_log_list(rollforward_dates)
761 |
762 | log_msg(
763 | f"now forwardtesting {rollforward_dates[0]}...{rollforward_dates[-1]}"
764 | )
765 | log_msg(
766 | f" starting investment for {pv.strategy}: {starting_investment}"
767 | )
768 |
769 | pv.write_optimized_strategy_config(
770 | price_logs, tickers, starting_investment
771 | )
772 | final_investment = pv.run_optimized_config()
773 | starting_investment = final_investment
774 |
775 | log_msg("COMPLETED WITH RESULTS:")
776 | log_msg(f" {pv.strategy}: {final_investment}")
777 | for f in glob.glob("tmp/*"):
778 | os.remove(f)
779 | for f in glob.glob("configs/coin.*.yaml"):
780 | os.remove(f)
781 | log_msg("PROVE-BACKTESTING: FINISHED")
782 |
--------------------------------------------------------------------------------
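A minimal sketch of how the regexes in sum_of_results_from_run() above pull the per-coin totals out of a results file; the sample text below is hypothetical, built only to match what those two patterns expect:

    import re

    wins_re = r".*INFO.*\swins:([0-9]+)\slosses:([0-9]+)\sstales:([0-9]+)\sholds:([0-9]+)"
    balance_re = r".*INFO.*final\sbalance:\s(-?[0-9]+\.[0-9]+)"

    # hypothetical results lines, shaped like what the regexes above expect
    sample = (
        "2022-01-02 INFO wins:3 losses:1 stales:0 holds:2\n"
        "2022-01-02 INFO final balance: 123.45\n"
    )

    wins, losses, stales, holds = re.search(wins_re, sample).groups()
    balance = float(re.search(balance_re, sample).groups()[0])
    print(wins, losses, stales, holds, balance)  # 3 1 0 2 123.45
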
/utils/prove-backtesting.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | ulimit -n 65535
3 | source /cryptobot/.venv/bin/activate
4 | python -u /cryptobot/utils/prove-backtesting.py -c configs/${CONFIG_FILE}
5 |
--------------------------------------------------------------------------------
/utils/pull_klines.py:
--------------------------------------------------------------------------------
1 | """ retrieves klines from Binance suitable for cryptoBot """
2 |
3 | import argparse
4 | import gzip
5 | import os
6 | import json
7 | import time
8 | from datetime import datetime, timedelta
9 |
10 | from binance.client import Client # pylint: disable=E0401
11 |
12 | client = Client("FAKE", "FAKE")
13 |
14 |
15 | def get_all_tickers():
16 | """returns the current list of tickers from binance"""
17 | _tickers = []
18 | for item in client.get_all_tickers():
19 | _tickers.append(item["symbol"])
20 | return sorted(_tickers)
21 |
22 |
23 | def pull_klines(k_symbol, k_start, k_end, _unit, limit=720):
24 | """returns klines for a particular day and ticker"""
25 | k_results = []
26 | print(f"start: {k_start} end: {k_end}")
27 | while k_start <= k_end:
28 | print(f"fetching chunk {k_start} <-> {k_start + (limit * 60000)}")
29 | klines = client.get_klines(
30 | symbol=k_symbol,
31 | interval=_unit,
32 | limit=limit,
33 | startTime=int(k_start),
34 | endTime=int(k_start + (limit * 60000)),
35 | )
36 | for entry in klines:
37 | k_results.append(tuple(entry))
38 | k_start = k_start + (limit * 60000)
39 |         # get_klines is an expensive API call, so pause briefly between requests
40 | time.sleep(0.3)
41 | return k_results
42 |
43 |
44 | def daterange(date1, date2):
45 | """returns a list of dates between 2 dates"""
46 | dates = []
47 | for item in range(int((date2 - date1).days) + 1):
48 | dates.append(date1 + timedelta(item))
49 | return dates
50 |
51 |
52 | def gather_symbols_and_logs(log_dir="log") -> tuple[set[str], set[str]]:
53 |     """returns sets of symbols and dates"""
54 | date_list = set()
55 | symbols_list = set()
56 |
57 | # gather all date.log.gz logs and
58 | # all symbol dirs
59 | for dir_item in sorted(os.listdir(log_dir)):
60 | if (
61 | os.path.isfile(f"{log_dir}/{dir_item}")
62 | and dir_item.startswith("20")
63 | and dir_item.endswith(".log.gz")
64 | ):
65 | date: str = dir_item.split(".")[0]
66 | date_list.add(date)
67 | if os.path.isdir(f"{log_dir}/{dir_item}"):
68 | symbols_list.add(dir_item)
69 |
70 | return (set(symbols_list), set(date_list))
71 |
72 |
73 | def gather_symbols_per_date(
74 | log_dir, symbols_list, date_list
75 | ) -> dict[str, list[str]]:
76 | """returns map of dates containing symbols available on that date"""
77 | dates_idx: dict[str, list[str]] = {}
78 |
79 | # we'll store all symbol logs in each date
80 | for date in sorted(date_list):
81 | if date not in dates_idx:
82 | dates_idx[date] = []
83 |
84 | for _symbol in sorted(symbols_list):
85 | logs: list[str] = os.listdir(f"{log_dir}/{_symbol}")
86 | for _log in sorted(logs):
87 | if not os.path.isfile(f"{log_dir}/{_symbol}/{_log}"):
88 | continue
89 | _date: str = _log.split(".")[0]
90 | dates_idx[_date].append(_symbol)
91 | return dates_idx
92 |
93 |
94 | def generate_index(log_dir="log") -> None:
95 | """generates index.json with dates <- [coins]"""
96 |
97 | print("generating index...")
98 | symbols_list, date_list = gather_symbols_and_logs(log_dir)
99 |
100 | dates_index: dict[str, list[str]] = gather_symbols_per_date(
101 | log_dir, symbols_list, date_list
102 | )
103 |
104 | # generate index_v1
105 | print("writing index.json.gz...")
106 |
107 | with gzip.open(
108 | f"{log_dir}/index.json.gz", "wt", encoding="utf-8"
109 | ) as index_json:
110 | index_json.write(json.dumps(dates_index, indent=4))
111 |
112 | # generate index_v2
113 | print("generating index_v2.json.gz...")
114 | index: dict[str, dict] = {"DATES": {}, "COINS": {}}
115 | for date in dates_index.keys(): # pylint: disable=C0206,C0201
116 | index["DATES"][date] = list(dates_index[date])
117 |
118 | for _symbol in sorted(os.listdir(log_dir)):
119 | if os.path.isdir(f"{log_dir}/{_symbol}"):
120 | logs: list[str] = os.listdir(f"{log_dir}/{_symbol}")
121 | index["COINS"][_symbol] = sorted(logs)
122 |
123 | print("writing index_v2.json.gz...")
124 | with gzip.open(
125 | f"{log_dir}/index_v2.json.gz", "wt", encoding="utf-8"
126 | ) as index_json:
127 | index_json.write(json.dumps(index, indent=4))
128 |
129 |
130 | if __name__ == "__main__":
131 | parser: argparse.ArgumentParser = argparse.ArgumentParser()
132 | parser.add_argument("-s", "--start", help="start day to fetch klines for")
133 | parser.add_argument(
134 | "-e", "--end", help="end day to fetch klines for", required=False
135 | )
136 | parser.add_argument(
137 | "-u", "--unit", help="Unit to use 1m/5m/1h/1d", default="1m"
138 | )
139 |
140 | args = parser.parse_args()
141 | s = args.start
142 |     # if we don't define an end date, let's assume we only want one day
143 | if args.end:
144 | e = args.end
145 | else:
146 | e = s
147 |
148 | unit = args.unit
149 | start_dt = datetime.strptime(s, "%Y%m%d")
150 | end_dt = datetime.strptime(e, "%Y%m%d")
151 |
152 | print("getting list of all binance tickers")
153 | tickers = get_all_tickers()
154 | ignore_list = []
155 |
156 | # iterate over the date range, so that we generate one price.log.gz file
157 | # per day.
158 |     # we run the dates in reverse, so that we can discard tickers as soon as
159 |     # we reach a date where they have no klines data available.
160 | for dt in reversed(daterange(start_dt, end_dt)):
161 | day = dt.strftime("%Y%m%d")
162 | if os.path.exists(f"log/{day}.log.gz"):
163 | print(f"log/{day}.log.gz already exists, skipping day")
164 | continue
165 |
166 | print(f"processing day {day}")
167 | # pull klines from 00:00:00 to 23:59:59 on each day, every 1 min
168 | start = float(
169 | datetime.strptime(f"{day} 00:00:00", "%Y%m%d %H:%M:%S").timestamp()
170 | * 1000
171 | )
172 | end = float(
173 | datetime.strptime(f"{day} 23:59:59", "%Y%m%d %H:%M:%S").timestamp()
174 | * 1000
175 | )
176 |
177 | log = []
178 |
179 |         # iterate over the current (as of today) list of available
180 | # tickers on binance, and retrieve the klines for each one for this
181 | # particular day.
182 | for ticker in tickers:
183 | if ticker in ignore_list:
184 | continue
185 |
186 | print(f"getting klines for {ticker} on {day}")
187 |
188 | ticker_klines: list = []
189 | for line in pull_klines(ticker, start, end, unit):
190 | ticker_klines.append(line)
191 |
192 | if not ticker_klines:
193 |                 # this ticker has no data on this date or any earlier date,
194 |                 # so let's add it to the ignore list
195 | print(f"no data found for {ticker}, ignoring coin from now on")
196 | ignore_list.append(ticker)
197 | continue
198 |
199 | # build our price.log file based on the klines info
200 | for (
201 | _,
202 | _,
203 | high,
204 | low,
205 | _,
206 | _,
207 | closetime,
208 | _,
209 | _,
210 | _,
211 | _,
212 | _,
213 | ) in ticker_klines:
214 |                 klines_date: str = str(  # pylint: disable=invalid-name
215 |                     datetime.fromtimestamp(float(closetime) / 1000)
216 |                 )
217 | log.append(
218 | f"{klines_date} {ticker} {(float(high) + float(low))/2}\n"
219 | )
220 |
221 | # create a directory for each symbol to keep its logfiles
222 | if not os.path.exists(f"log/{ticker}"):
223 | os.mkdir(f"log/{ticker}")
224 |
225 |             # write out log/<ticker>/<day>.log
226 | with open(f"log/{ticker}/{day}.log", "w", encoding="utf-8") as f:
227 | oldprice = 0 # pylint: disable=invalid-name
228 | for line in log:
229 | parts = line.split(" ")
230 | symbol = parts[2]
231 |
232 |                     # we are consuming lines from log, which contains the klines
233 |                     # for every ticker on this day.
234 | if ticker != symbol:
235 | continue
236 |
237 | price = parts[3]
238 | # dedup identical price log lines
239 | if price != oldprice:
240 | oldprice = price
241 | f.write(line)
242 |
243 |             # and compress it into log/<ticker>/<day>.log.gz
244 | with gzip.open(f"log/{ticker}/{day}.log.gz", "wt") as z:
245 | with open(f"log/{ticker}/{day}.log", encoding="utf-8") as f:
246 | z.write(f.read())
247 | if os.path.exists(f"log/{ticker}/{day}.log"):
248 | os.remove(f"log/{ticker}/{day}.log")
249 |
250 | # now that we have all klines for all tickers for this day,
251 | # we're going to dedup the results and discard any lines that haven't
252 | # moved in price.
253 | print(f"saving and sorting all klines for {day}")
254 | coin = {}
255 | oldcoin = {}
256 | with open(f"log/{day}.log", "w", encoding="utf-8") as f:
257 | for line in sorted(log):
258 | parts = line.split(" ")
259 | symbol = parts[2]
260 | # price_date = " ".join(parts[0:1])
261 | price = parts[3]
262 |
263 | if symbol not in coin:
264 | coin[symbol] = price
265 | oldcoin[symbol] = 0
266 |
267 | if price != oldcoin[symbol]:
268 | f.write(line)
269 | oldcoin[symbol] = price
270 |
271 |         # and finally we compress our price.log for this day and discard
272 |         # any temporary work files.
273 | with gzip.open(f"log/{day}.log.gz", "wt") as z:
274 | with open(f"log/{day}.log", encoding="utf-8") as f:
275 | z.write(f.read())
276 | if os.path.exists(f"log/{day}.log"):
277 | os.remove(f"log/{day}.log")
278 |
279 |         # and regenerate index.json, listing for each date which coin files
280 |         # are available on that date
281 | generate_index("log")
282 |
--------------------------------------------------------------------------------
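A minimal sketch of the chunking arithmetic pull_klines() relies on: the request window advances by limit * 60000 ms, which assumes 1-minute candles, so the default limit of 720 covers one day of 1m klines in two requests. The date below is a hypothetical example in the same %Y%m%d format the script takes:

    from datetime import datetime

    day = "20220101"  # hypothetical date
    limit = 720

    start = datetime.strptime(f"{day} 00:00:00", "%Y%m%d %H:%M:%S").timestamp() * 1000
    end = datetime.strptime(f"{day} 23:59:59", "%Y%m%d %H:%M:%S").timestamp() * 1000

    chunks = []
    cursor = start
    while cursor <= end:
        # each get_klines call would cover this [start, end] window in ms
        chunks.append((int(cursor), int(cursor + limit * 60000)))
        cursor += limit * 60000

    print(len(chunks))  # 2 requests of 12 hours each cover one day of 1m klines
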
/utils/split_klines_into_symbol_logs.py:
--------------------------------------------------------------------------------
1 | """ splits klines logfiles into individual symbol files """
2 |
3 | import argparse
4 | import gzip
5 | import os
6 |
7 | from typing import Dict
8 |
9 | parser = argparse.ArgumentParser()
10 | parser.add_argument("-f")
11 |
12 | args = parser.parse_args()
13 | daylog = args.f
14 |
15 | with gzip.open(daylog, "rt") as f:
16 | lines = f.readlines()
17 |
18 | coins: Dict = {}
19 | coin_filenames = set()
20 | for line in lines:
21 | parts = line.split(" ")
22 | symbol = parts[2]
23 | price = parts[3]
24 |
25 | date = parts[0].replace("-", "")
26 | coin_filename = f"log/{symbol}/{date}.log"
27 |
28 | # if we already have a gzipped file for this coin, it means we've already
29 | # processed it, so skip it
30 | if os.path.exists(f"{coin_filename}.gz"):
31 | continue
32 |
33 | if symbol not in coins:
34 | coins[symbol] = {}
35 | coins[symbol]["lines"] = []
36 | coins[symbol]["oldprice"] = 0
37 |
38 | coins[symbol]["lines"].append(line)
39 | coins[symbol]["price"] = price
40 |
41 | if not os.path.exists(f"log/{symbol}"):
42 | os.mkdir(f"log/{symbol}")
43 | # create empty file
44 | with open(coin_filename, "w", encoding="utf-8") as c:
45 | pass
46 |
47 |     if coins[symbol]["oldprice"] != coins[symbol]["price"]:
48 |         coin_filenames.add(coin_filename)
49 |         with open(coin_filename, "a", encoding="utf-8") as c:
50 |             c.write(line)
51 |         coins[symbol]["oldprice"] = price  # remember the last written price
52 | for coin_filename in coin_filenames:
53 | with gzip.open(f"{coin_filename}.gz", "wt") as z:
54 | with open(coin_filename, encoding="utf-8") as f:
55 | z.write(f.read())
56 | if os.path.exists(coin_filename):
57 | os.remove(coin_filename)
58 |
--------------------------------------------------------------------------------
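Both pull_klines.py and split_klines_into_symbol_logs.py keep a price log line only when a symbol's price differs from the last line written for that symbol. A minimal standalone sketch of that dedup idea, using hypothetical sample lines in the "<date> <time> <symbol> <price>" layout the scripts produce:

    from typing import Dict, List

    def dedup_price_lines(lines: List[str]) -> List[str]:
        """keep a line only when the symbol's price moved since the last kept line"""
        last_price: Dict[str, str] = {}
        kept: List[str] = []
        for line in lines:
            parts = line.split(" ")
            symbol, price = parts[2], parts[3]
            if last_price.get(symbol) != price:
                kept.append(line)
                last_price[symbol] = price
        return kept

    # hypothetical sample lines
    sample = [
        "2022-01-01 00:00:59 BTCUSDT 47000.5\n",
        "2022-01-01 00:01:59 BTCUSDT 47000.5\n",  # unchanged price, dropped
        "2022-01-01 00:02:59 BTCUSDT 47001.0\n",
    ]
    print(len(dedup_price_lines(sample)))  # 2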