├── speed_test
├── develop_requirements.txt
├── setup.cfg
├── test_requirements.txt
├── dataserv_client
│   ├── control
│   │   ├── __init__.py
│   │   └── Thread.py
│   ├── __init__.py
│   ├── logmessages.py
│   ├── bin
│   │   └── dataserv-client
│   ├── version.py
│   ├── common.py
│   ├── exceptions.py
│   ├── deserialize.py
│   ├── messaging.py
│   ├── cli.py
│   ├── builder.py
│   ├── api.py
│   └── bandwidth_test.py
├── requirements.txt
├── tests
│   ├── __init__.py
│   ├── fixtures.json
│   ├── test_bandwidth_test.py
│   ├── test_deserialize.py
│   ├── test_builder.py
│   └── test_client.py
├── __init__.py
├── MANIFEST.in
├── travis_setup.sh
├── .travis.yml
├── run_tests.sh
├── .gitignore
├── LICENSE
├── py2exe_MediaCollector.py
├── CHANGELOG.md
├── Makefile
├── setup.py
└── README.rst
/speed_test:
--------------------------------------------------------------------------------
1 | {"download": 1, "upload": 2}
--------------------------------------------------------------------------------
/develop_requirements.txt:
--------------------------------------------------------------------------------
1 | ipython
2 | pudb
3 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [bdist_wheel]
2 | universal = 0
3 |
--------------------------------------------------------------------------------
/test_requirements.txt:
--------------------------------------------------------------------------------
1 | coverage
2 | coveralls
3 |
--------------------------------------------------------------------------------
/dataserv_client/control/__init__.py:
--------------------------------------------------------------------------------
1 | from . import Thread # NOQA
2 |
--------------------------------------------------------------------------------
/dataserv_client/__init__.py:
--------------------------------------------------------------------------------
1 | from .version import __version__ # NOQA
2 |
--------------------------------------------------------------------------------
/dataserv_client/logmessages.py:
--------------------------------------------------------------------------------
1 | def InvalidAuthenticationHeaders():
2 | return "Invalid authentication headers. Synchronize your clock with an internet-based time server."
3 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | RandomIO >= 0.2.1
2 | partialhash >= 1.1.0
3 | future >= 0.15.0
4 | btctxstore >= 4.6.0
5 | psutil >= 3.2.2
6 | storjcore >= 0.0.6
7 | storjnode == 0.0.21
8 | storjkademlia
9 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | from .test_builder import *  # NOQA
2 | from .test_client import *  # NOQA
3 | from .test_deserialize import *  # NOQA
4 | from .test_bandwidth_test import *  # NOQA
5 |
--------------------------------------------------------------------------------
/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | # monkeypatch logging
3 | from dataserv_client import common
4 | common.logging.basicConfig(format=common.LOG_FORMAT,
5 | level=common.logging.DEBUG)
6 |
--------------------------------------------------------------------------------
/dataserv_client/bin/dataserv-client:
--------------------------------------------------------------------------------
1 | #!python
2 | # coding: utf-8
3 |
4 |
5 | import sys
6 | from dataserv_client import cli
7 |
8 |
9 | if __name__ == "__main__":
10 | cli.main(sys.argv[1:])
11 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include requirements.txt
2 | include test_requirements.txt
3 | include develop_requirements.txt
4 |
--------------------------------------------------------------------------------
/dataserv_client/version.py:
--------------------------------------------------------------------------------
1 | # Store the version here so:
2 | # 1) we don't load dependencies by storing it in __init__.py
3 | # 2) we can import it in setup.py for the same reason
4 | # 3) we can import it into your module
5 | __version__ = '2.1.12'
6 |
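7 | # e.g. setup.py loads it via: exec(open('dataserv_client/version.py').read())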
--------------------------------------------------------------------------------
/travis_setup.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 |
4 | export DATASERV_MAX_PING="10"
5 | export DATASERV_CLIENT_CONNECTION_RETRY_DELAY="1"
6 | export DATASERV_CACHING_TIME="0"
7 | export PYCOIN_NATIVE="openssl"
8 |
9 |
10 | # get paths
11 | BASE_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
12 | TMP_SERVER_DIR=/tmp/dataserv_$(< /dev/urandom tr -dc _A-Z-a-z-0-9 | head -c6)
13 |
14 |
15 | # setup server
16 | git clone https://github.com/Storj/dataserv -b develop $TMP_SERVER_DIR
17 | pip install -r $TMP_SERVER_DIR/requirements.txt
18 | cd $TMP_SERVER_DIR/dataserv
19 | python app.py db upgrade
20 |
21 | # start server
22 | python app.py runserver < /dev/null &>/dev/null &
23 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 | python:
3 | - "2.7"
4 |
5 | install:
6 | - pip install -r requirements.txt
7 | - pip install -r test_requirements.txt
8 |
9 | before_script:
10 | - bash travis_setup.sh
11 |
12 | # run tests, e.g. python setup.py test
13 | script:
14 | - export DATASERV_MAX_PING="10"
15 | - export DATASERV_CLIENT_CONNECTION_RETRY_DELAY="1"
16 | - export DATASERV_CACHING_TIME="0"
17 | - export PYCOIN_NATIVE="openssl"
18 | - coverage run --source="dataserv_client" -m unittest tests
19 |
20 | # run coverage
21 | after_success:
22 | - coveralls
23 |
24 | # post to slack
25 | notifications:
26 | slack: storjcommunity:TZfUO9ycY8R9UYwztWZTo0jk
27 | email: false
28 |
--------------------------------------------------------------------------------
/run_tests.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 |
4 | export DATASERV_MAX_PING="10"
5 | export DATASERV_CLIENT_CONNECTION_RETRY_DELAY="1"
6 | export DATASERV_CACHING_TIME="0"
7 | export PYCOIN_NATIVE="openssl"
8 |
9 |
10 | # get paths
11 | BASE_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
12 | TMP_SERVER_DIR=/tmp/dataserv_$(< /dev/urandom tr -dc _A-Z-a-z-0-9 | head -c6)
13 | PY=$BASE_DIR/env/bin/python
14 | PIP=$BASE_DIR/env/bin/pip
15 |
16 |
17 | # setup server
18 | git clone https://github.com/Storj/dataserv -b develop $TMP_SERVER_DIR
19 | $PIP install -r $TMP_SERVER_DIR/requirements.txt
20 | cd $TMP_SERVER_DIR/dataserv
21 | $PY app.py db upgrade
22 |
23 | # run server
24 | screen -S testserver -d -m $PY app.py runserver
25 |
26 |
27 | # run tests
28 | cd $BASE_DIR
29 | $PY -m unittest --quiet tests
30 |
31 |
32 | # clean up
33 | screen -S testserver -X kill
34 | rm -rf $TMP_SERVER_DIR
35 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 |
5 | # C extensions
6 | *.so
7 |
8 | # Distribution / packaging
9 | .Python
10 | env/
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | *.egg-info/
23 | .installed.cfg
24 | *.egg
25 |
26 | # PyInstaller
27 | # Usually these files are written by a python script from a template
28 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
29 | *.manifest
30 | *.spec
31 |
32 | # Installer logs
33 | pip-log.txt
34 | pip-delete-this-directory.txt
35 |
36 | # Unit test / coverage reports
37 | htmlcov/
38 | .tox/
39 | .coverage
40 | .coverage.*
41 | .cache
42 | nosetests.xml
43 | coverage.xml
44 | *,cover
45 |
46 | # Translations
47 | *.mo
48 | *.pot
49 |
50 | # Django stuff:
51 | *.log
52 |
53 | # Sphinx documentation
54 | docs/_build/
55 |
56 | # PyBuilder
57 | target/
58 |
59 | # Rope
60 | .ropeproject
61 |
62 | # Vim
63 | *.swp
64 |
65 | dataserv/dataserv.db
66 | .idea
67 | tests/data
68 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2015 Storj Labs
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
23 |
--------------------------------------------------------------------------------
/py2exe_MediaCollector.py:
--------------------------------------------------------------------------------
1 | import os
2 | import glob
3 | import lib2to3
4 | from py2exe.build_exe import py2exe as build_exe
5 |
6 | class MediaCollector(build_exe):
7 | """
8 |     Adds the lib2to3 data files
9 |     Grammar.txt and PatternGrammar.txt
10 |     to the list of compiled files
11 |     so they are included in the zipfile.
12 | """
13 |
14 | def copy_extensions(self, extensions):
15 | build_exe.copy_extensions(self, extensions)
16 |
17 | # lib2to3 files Grammar.txt and PatternGrammar.txt
18 |
19 |         # Define the data path where the grammar files reside.
20 |         data_path = os.path.join(lib2to3.__path__[0], '*.txt')
21 |
22 |         # Create the subdir where the grammar files are collected.
23 |         media = 'lib2to3'
24 | full = os.path.join(self.collect_dir, media)
25 | self.mkpath(full)
26 |
27 |         # Copy the grammar files to the collection dir. Also add each copied
28 |         # file to the list of compiled files so it is included in the zipfile.
29 | for f in glob.glob(data_path):
30 | name = os.path.basename(f)
31 | self.copy_file(f, os.path.join(full, name))
32 | self.compiled_files.append(os.path.join(media, name))
33 |
34 |
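35 | # Hooked up in setup.py when building with py2exe:
36 | #   cmdclass = {'py2exe': MediaCollector}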
--------------------------------------------------------------------------------
/dataserv_client/control/Thread.py:
--------------------------------------------------------------------------------
1 | from queue import Queue
2 | from threading import Thread
3 |
4 |
5 | class Worker(Thread):
6 | """Thread executing tasks from a given tasks queue"""
7 | def __init__(self, tasks):
8 | Thread.__init__(self)
9 | self.tasks = tasks
10 | self.daemon = True
11 | self.start()
12 |
13 | def run(self):
14 | while True:
15 |             func, args, kwargs = self.tasks.get()
16 |             try:
17 |                 func(*args, **kwargs)
18 | except Exception as e:
19 | print(e)
20 | self.tasks.task_done()
21 |
22 |
23 | class ThreadPool:
24 | """Pool of threads consuming tasks from a queue"""
25 | def __init__(self, num_threads):
26 | self.tasks = Queue(num_threads)
27 | for _ in range(num_threads):
28 | Worker(self.tasks)
29 |
30 |     def add_task(self, func, *args, **kwargs):
31 |         """Add a task to the queue"""
32 |         self.tasks.put((func, args, kwargs))
33 |
34 | def active_count(self):
35 |         """Return the number of tasks currently in the queue."""
36 | return self.tasks.qsize()
37 |
38 | def wait_completion(self):
39 | """Wait for completion of all the tasks in the queue"""
40 | self.tasks.join()
41 |
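42 | # Example usage (a minimal sketch; `work` is a hypothetical task function):
43 | #
44 | #   pool = ThreadPool(num_threads=4)
45 | #   for item in range(10):
46 | #       pool.add_task(work, item)
47 | #   pool.wait_completion()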
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | v2.1.12
2 |
3 | * Use util and config from storjnode.
4 |
5 | v2.1.11
6 |
7 | * Minor bug fix
8 |
9 | v2.1.10
10 |
11 | * Minor bug fix
12 |
13 | v2.1.9
14 |
15 | * Updated readme
16 | * Show storj dht node id and port on startup.
17 |
18 | v2.1.8
19 |
20 | * Minor bug fix
21 |
22 | v2.1.7
23 |
24 | * Minor bug fix
25 |
26 | v2.1.6
27 |
28 | * Minor bug fix
29 |
30 | v2.1.5
31 |
32 | * Added audit
33 | * Added background p2p storjnode
34 | * Added --repair option
35 | * Better multithreading
36 | * Added --min_free_size option
37 |
38 | v2.1.4
39 |
40 | * Increased default ping interval.
41 | * Added missing arguments to farm command.
42 |
43 | v2.1.3
44 |
45 | * Fixed client crash if file system type not detected.
46 |
47 | v2.1.2
48 |
49 | * Minor bug fixes only
50 |
51 | v2.1.1
52 |
53 | * improved logging
54 | * added --quiet option
55 |
56 | v2.1.0
57 |
58 | * added -use_folder_tree for fat32 drives
59 | * added decimal support for --max_size
60 | * added farm command
61 |
62 | v2.0.3
63 |
64 | * Minor bug fixes only
65 |
66 | v2.0.2
67 |
68 | * Removed auto update
69 |
70 | v2.0.1
71 |
72 | * Removed auto update
73 |
74 | v2.0.0
75 |
76 | * added config
77 | * build performance improvements
78 | * added --set_height_interval option
79 | * added authentication
80 |
--------------------------------------------------------------------------------
/dataserv_client/common.py:
--------------------------------------------------------------------------------
1 | import os
2 | import binascii
3 | from storjnode.log import logging # NOQA
4 | from pycoin.encoding import b2a_hashed_base58
5 | from pycoin.encoding import a2b_hashed_base58
6 |
7 |
8 | DEFAULT_URL = "http://status.driveshare.org"
9 |
10 |
11 | # read default delay from os environ if available
12 | if os.environ.get("DATASERV_MAX_PING"):
13 |     DEFAULT_DELAY = int(os.environ.get("DATASERV_MAX_PING"))
14 | else:
15 | DEFAULT_DELAY = 60 # default seconds
16 |
17 |
18 | DEFAULT_APP_HOME = os.path.join(os.path.expanduser("~"), ".storj")
19 | DEFAULT_CONFIG_PATH = os.path.join(DEFAULT_APP_HOME, "config.json")
20 |
21 |
22 | # build
23 | DEFAULT_SET_HEIGHT_INTERVAL = 25
24 | SHARD_SIZE = 1024 * 1024 * 128 # 128 MB
25 | DEFAULT_MAX_SIZE = 1024 * 1024 * 1024 # 1 GB
26 | DEFAULT_MIN_FREE_SIZE = 1024 * 1024 * 1024 # 1GB
27 | DEFAULT_STORE_PATH = os.path.join(DEFAULT_APP_HOME, "store")
28 |
29 | # audit
30 | DEFAULT_MIN_CONFIRMATIONS = 6
31 | DEFAULT_BLOCK_SIZE = 80
32 | DEFAULT_AUDIT_DELAY = 60
33 | DEFAULT_FULL_AUDIT = 1000
34 |
35 |
36 | # connection retry
37 | DEFAULT_CONNECTION_RETRY_LIMIT = 120 # 120 * 30sec = 1 hour
38 | _retry_delay_label = "DATASERV_CLIENT_CONNECTION_RETRY_DELAY"
39 | if os.environ.get(_retry_delay_label):
40 | DEFAULT_CONNECTION_RETRY_DELAY = int(os.environ.get(_retry_delay_label))
41 | else:
42 | DEFAULT_CONNECTION_RETRY_DELAY = 30
43 |
44 |
45 | def nodeid2address(hexnodeid):
46 | """Convert a node id to a bitcoin address."""
47 | nodeid = binascii.unhexlify(hexnodeid)
48 | return b2a_hashed_base58(b'\0' + nodeid)
49 |
50 |
51 | def address2nodeid(address):
52 | """Convert a bitcoin address to a node id."""
53 | return binascii.hexlify(a2b_hashed_base58(address)[1:]).decode("utf-8")
54 |
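55 | # The two helpers above are inverses, e.g.:
56 | #   address2nodeid(nodeid2address(hexnodeid)) == hexnodeid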
--------------------------------------------------------------------------------
/dataserv_client/exceptions.py:
--------------------------------------------------------------------------------
1 | from btctxstore.exceptions import * # NOQA
2 |
3 |
4 | class DataservClientException(Exception):
5 | pass
6 |
7 |
8 | class InvalidUrl(DataservClientException):
9 |
10 | def __init__(self):
11 | super(InvalidUrl, self).__init__("Invalid Url!")
12 |
13 |
14 | class InvalidConfig(DataservClientException):
15 |
16 | def __init__(self):
17 | super(InvalidConfig, self).__init__("Invalid Config!")
18 |
19 |
20 | class AddressAlreadyRegistered(DataservClientException):
21 |
22 | def __init__(self, address, url):
23 | msg = "409 Address {0} already registered at {1}!".format(address, url)
24 | super(AddressAlreadyRegistered, self).__init__(msg)
25 |
26 |
27 | class ServerNotFound(DataservClientException):
28 |
29 | def __init__(self, url):
30 | msg = "404 Server not found at {0}!".format(url)
31 | super(ServerNotFound, self).__init__(msg)
32 |
33 |
34 | class ServerError(DataservClientException):
35 |
36 | def __init__(self, url):
37 | msg = "500 Server error at {0}!".format(url) # pragma: no cover
38 | super(ServerError, self).__init__(msg) # pragma: no cover
39 |
40 |
41 | class InvalidAddress(DataservClientException):
42 |
43 | def __init__(self, address):
44 | msg = "Address {0} not valid!".format(address)
45 | super(InvalidAddress, self).__init__(msg)
46 |
47 |
48 | class AuthWifRequired(DataservClientException):
49 |
50 | def __init__(self):
51 | msg = "Required authentication wif not given!"
52 | super(AuthWifRequired, self).__init__(msg)
53 |
54 |
55 | class ConnectionError(DataservClientException):
56 |
57 | def __init__(self, url):
58 | msg = "Could not connect to server {0}!".format(url)
59 | super(ConnectionError, self).__init__(msg)
60 |
61 | class BlockExplorerApiFailed(DataservClientException):
62 |
63 | def __init__(self, url):
64 | msg = "Block explorer api result failed for {0}!".format(url)
65 |         super(BlockExplorerApiFailed, self).__init__(msg)
66 |
67 |
--------------------------------------------------------------------------------
/tests/fixtures.json:
--------------------------------------------------------------------------------
1 | {
2 | "addresses" : {
3 | "alpha" : "12guBkWfVjiqBnu5yRdTseBB7wBM5WSWnm",
4 | "beta" : "1BZR9GHs9a1bBfh6cwnDtvq6GEvNwVWxFa",
5 | "gamma" : "1Jd4YBQ7d8nHGe4zWfLL9EWHMkspN9JKGf",
6 | "delta" : "16eEuTp1QERjCC8ZnGf34NvkptMifNSCti",
7 | "epsilon" : "1FwSLAJtpLrSQp94damzWY2nK5cEBugZfC",
8 | "zeta" : "1FHgmJkT4od36Zu3SVSzi71Kcvcs33Y1hn",
9 | "eta" : "1wqyu7Mxz6sgmnHGzQdaSdW8zpGkViL79",
10 | "theta" : "1AFJM5dn1iqHXtnttJJgskKwrhhajaY7iC",
11 | "iota" : "19oWeFAWJh3WUKF9KEXdFUtwD9TQAf4gh9",
12 | "lambda" : "17prdhkPcSJ3TC4SoSVNCAbUdr8xZrokaY",
13 | "mu" : "1DNe4PPhr6raNbADsHABGSpm6XQi7KhSTo",
14 | "nu" : "16Smzr8ESjdFDdfj5pVZifvSRzHhim3gAn",
15 | "pi" : "1EdCc5bxUAsdsvuJN48gK8UteezYNC2ffU",
16 | "omicron" : "19FfabAxmTZRCuxBvesMovz1xSfGgsmoqg",
17 | "kappa" : "1G5UfNg2M1qExpLGDLko8cfusLQ2GvVSqK",
18 | "ksi" : "15xu7JLwqZB9ZakrfZQJF5AJpNDwWabqwA",
19 | "rho" : "1EYtmt5QWgwATbJvnVP9G9cDXrMcX5bHJ",
20 | "sigma" : "12qx5eKHmtwHkrpByYBdosRwUfSfbGsqhT",
21 | "tau" : "1MfQwmCQaLRxAAij1Xii6BxFtkVvjrHPc2",
22 | "upsilon" : "1MwWa91KJGzctsYxE9V5iHFVp9Ub9HBarV",
23 | "phi" : "1LRVczz1Ln1ECom7oVotEmUVLKbxofQfKS",
24 | "chi" : "12zhPViCGssXWiUMeGuEYgqLFr1wF1MJH9",
25 | "psi" : "1BKUVHEWRQNFF8M9TUZhsuGiQxL6rqeSi5",
26 | "omega" : "1NJZ3jDHVM3BdBQZPyNLc8n5GLUSmW72Vn"
27 | },
28 | "test_sha256" : {
29 | "expected" : "c059c8035bbd74aa81f4c787c39390b57b974ec9af25a7248c46a3ebfe0f9dc8"
30 | },
31 | "test_build_seed" : {
32 | "hash0" : "8f4306631f71e40369acc3fb5645e7d13d17e686a3b623b46b4872714d3e3e92",
33 | "hash3" : "4192fe2dd784eb5bc770258e5a494a3025b43025304def4b088c574dc4fa8821"
34 | },
35 | "test_builder_audit" : {
36 | "result0" : "fe1c53216c9caf86d59223d748523e517e14f5aea93e795df1970020e2e85bce",
37 | "result1" : "4d37964e85db33544f6e5e1fb862a6f42baf9166b447e8d93e573f79dcd79812",
38 | "expected" : "2442550929857fd6046f7d4c53ef6a7360aee0f28c7b5ae3bdfdc02f7087c9ea"
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | PY_VERSION := 2
2 | WHEEL_DIR := /tmp/wheelhouse
3 | PIP := env/bin/pip
4 | PY := env/bin/python
5 | USE_WHEELS := 0
6 | ifeq ($(USE_WHEELS), 0)
7 | WHEEL_INSTALL_ARGS := # void
8 | else
9 | WHEEL_INSTALL_ARGS := --use-wheel --no-index --find-links=$(WHEEL_DIR)
10 | endif
11 |
12 |
13 | help:
14 | @echo "COMMANDS:"
15 | @echo " clean Remove all generated files."
16 | @echo " setup Setup development environment."
17 | @echo " shell Open ipython from the development environment."
18 | @echo " test Run tests."
19 | @echo " wheel Build package wheel & save in $(WHEEL_DIR)."
20 | @echo " wheels Build dependency wheels & save in $(WHEEL_DIR)."
21 | @echo " publish Build and upload package to pypi.python.org"
22 | @echo ""
23 | @echo "VARIABLES:"
24 | @echo " PY_VERSION Version of python to use. 2 or 3"
25 | @echo " WHEEL_DIR Where you save your wheels. Default: $(WHEEL_DIR)."
26 | @echo " USE_WHEELS Install packages from wheel dir, off by default."
27 |
28 |
29 | clean:
30 | rm -rf env
31 | rm -rf build
32 | rm -rf dist
33 | rm -rf *.egg
34 | rm -rf *.egg-info
35 | find | grep -i ".*__pycache__.*$$" | xargs -r -L1 rm -rf
36 | find | grep -i ".*\.pyc$$" | xargs -r -L1 rm
37 |
38 |
39 | virtualenv: clean
40 | virtualenv -p /usr/bin/python$(PY_VERSION) env
41 | $(PIP) install wheel
42 |
43 |
44 | wheels: virtualenv
45 | $(PIP) wheel --find-links=$(WHEEL_DIR) --wheel-dir=$(WHEEL_DIR) -r requirements.txt
46 | $(PIP) wheel --find-links=$(WHEEL_DIR) --wheel-dir=$(WHEEL_DIR) -r test_requirements.txt
47 | $(PIP) wheel --find-links=$(WHEEL_DIR) --wheel-dir=$(WHEEL_DIR) -r develop_requirements.txt
48 |
49 |
50 | wheel: test
51 | $(PY) setup.py bdist_wheel
52 | mv dist/*.whl $(WHEEL_DIR)
53 |
54 |
55 | setup: virtualenv
56 | $(PIP) install $(WHEEL_INSTALL_ARGS) -r requirements.txt
57 | $(PIP) install $(WHEEL_INSTALL_ARGS) -r test_requirements.txt
58 | $(PIP) install $(WHEEL_INSTALL_ARGS) -r develop_requirements.txt
59 |
60 |
61 | shell: setup
62 | env/bin/ipython
63 |
64 |
65 | test: setup
66 | bash run_tests.sh
67 |
68 |
69 | publish: test
70 | $(PY) setup.py register bdist_wheel upload
71 |
72 |
73 | # Break in case of bug!
74 | # import pudb; pu.db
75 |
--------------------------------------------------------------------------------
/dataserv_client/deserialize.py:
--------------------------------------------------------------------------------
1 | from btctxstore.deserialize import * # NOQA
2 |
3 | import re
4 | import decimal
5 | from dataserv_client import exceptions
6 |
7 |
8 | def positive_nonzero_integer(i):
9 | i = positive_integer(i)
10 | if i == 0:
11 |         raise exceptions.InvalidInput("Value must be greater than 0!")
12 | return i
13 |
14 |
15 | def url(urlstr):
16 | # source http://stackoverflow.com/a/7160778/90351
17 | regex = re.compile(
18 | r'^(?:http|ftp)s?://' # http:// or https://
19 | r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain...
20 | r'localhost|' # localhost...
21 | r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
22 | r'(?::\d+)?' # optional port
23 | r'(?:/?|[/?]\S+)$', re.IGNORECASE
24 | )
25 | if not bool(regex.match(urlstr)):
26 | raise exceptions.InvalidUrl()
27 | return urlstr
28 |
29 |
30 | def byte_count(byte_count):  # ugly but much faster and safer than regex
31 |
32 | # default value or python api used
33 | if isinstance(byte_count, int):
34 | return positive_nonzero_integer(byte_count)
35 |
36 | byte_count = unicode_str(byte_count)
37 |
38 |     def _get_byte_count(postfix, base, exponent):
39 | char_num = len(postfix)
40 | if byte_count[-char_num:] == postfix:
41 | count = decimal.Decimal(byte_count[:-char_num]) # remove postfix
42 |             return positive_nonzero_integer(count * (base ** exponent))
43 | return None
44 |
45 | # check base 1024
46 | if len(byte_count) > 1:
47 | n = None
48 | n = n if n is not None else _get_byte_count('K', 1024, 1)
49 | n = n if n is not None else _get_byte_count('M', 1024, 2)
50 | n = n if n is not None else _get_byte_count('G', 1024, 3)
51 | n = n if n is not None else _get_byte_count('T', 1024, 4)
52 | n = n if n is not None else _get_byte_count('P', 1024, 5)
53 | if n is not None:
54 | return n
55 |
56 | # check base 1000
57 | if len(byte_count) > 2:
58 | n = None
59 | n = n if n is not None else _get_byte_count('KB', 1000, 1)
60 | n = n if n is not None else _get_byte_count('MB', 1000, 2)
61 | n = n if n is not None else _get_byte_count('GB', 1000, 3)
62 | n = n if n is not None else _get_byte_count('TB', 1000, 4)
63 | n = n if n is not None else _get_byte_count('PB', 1000, 5)
64 | if n is not None:
65 | return n
66 |
67 | return positive_nonzero_integer(byte_count)
68 |
69 |
70 |
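71 | # Examples (behavior confirmed by tests/test_deserialize.py):
72 | #   byte_count("1K")   -> 1024    (base 1024)
73 | #   byte_count("1KB")  -> 1000    (base 1000)
74 | #   byte_count("0.5M") -> 524288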
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding: utf-8
3 |
4 |
5 | import os
6 | import sys
7 | from setuptools import setup, find_packages
8 |
9 |
10 | # Only load py2exe settings when its used, so we can install it first.
11 | options = {}
12 | cmdclass = {}
13 | if os.name == 'nt' and 'py2exe' in sys.argv:
14 | import py2exe # NOQA
15 | from py2exe_MediaCollector import MediaCollector
16 | options = {'py2exe': {
17 | "skip_archive": True,
18 | "dll_excludes": ['IPHLPAPI.DLL', 'WTSAPI32.dll', 'CRYPT32.dll', 'PSAPI.DLL', 'MSVCR100.dll'],
19 | "optimize": 2,
20 | "bundle_files": 3, # This tells py2exe to bundle everything
21 | }}
22 | cmdclass = {'py2exe': MediaCollector}
23 |
24 | # Only load py2app settings when its used, so we can install it first.
25 | if os.name == 'posix' and 'py2app' in sys.argv:
26 | import py2app # NOQA
27 | options = {'py2app': {
28 | "optimize": 2,
29 | }}
30 |
31 |
32 | exec(open('dataserv_client/version.py').read()) # load __version__
33 | SCRIPT = os.path.join('dataserv_client', 'bin', 'dataserv-client')
34 |
35 |
36 | setup(
37 | app=['dataserv_client/bin/dataserv-client'],
38 | name='dataserv-client',
39 | description="Client for storing and auditing data. http://storj.io",
40 | long_description=open("README.rst").read(),
41 | keywords="",
42 | url='http://storj.io',
43 | author='Shawn Wilkinson',
44 | author_email='shawn+dataserv-client@storj.io',
45 | license="MIT",
46 | version=__version__, # NOQA
47 | scripts=[SCRIPT],
48 | console=[SCRIPT],
49 | test_suite="tests",
50 | dependency_links=[],
51 | install_requires=open("requirements.txt").readlines(),
52 | tests_require=open("test_requirements.txt").readlines(),
53 | packages=find_packages(exclude=['dataserv_client.bin']),
54 | classifiers=[
55 | # "Development Status :: 1 - Planning",
56 | "Development Status :: 2 - Pre-Alpha",
57 | # "Development Status :: 3 - Alpha",
58 | # "Development Status :: 4 - Beta",
59 | # "Development Status :: 5 - Production/Stable",
60 | "Intended Audience :: Developers",
61 | "License :: OSI Approved :: MIT License",
62 | "Operating System :: OS Independent",
63 | "Programming Language :: Python",
64 | "Programming Language :: Python :: 2",
65 | "Programming Language :: Python :: 2.7",
66 | "Programming Language :: Python :: 3",
67 | "Programming Language :: Python :: 3.3",
68 | "Programming Language :: Python :: 3.4",
69 | "Topic :: Software Development :: Libraries :: Python Modules",
70 | ],
71 | cmdclass=cmdclass,
72 | options=options
73 | )
74 |
--------------------------------------------------------------------------------
/tests/test_bandwidth_test.py:
--------------------------------------------------------------------------------
1 | import dataserv_client.bandwidth_test as bt
2 | import unittest
3 | import socket
4 | import time
5 | import threading
6 | import timeit
7 | import os
8 |
9 |
10 | class TestBandwidthTest(unittest.TestCase):
11 | def test_bound_socket(self):
12 | sock_family = socket.AF_INET
13 | sock_type = socket.SOCK_STREAM
14 | bt.source = "127.0.0.1"
15 | sock = bt.bound_socket(sock_family, sock_type)
16 | self.assertTrue(isinstance(sock, socket.socket))
17 |
18 | def test_distance(self):
19 | coordinate_pairs = {
20 | 1: [[10, 10], [10, 10]],
21 | 2: [[33.7550, 84.3900], [40.7127, 74.0059]],
22 | 3: [[0, 0], [0, 0]],
23 | 4: [[-33.7550, -84.3900], [40.7127, 74.0059]],
24 | }
25 |
26 | results = {
27 | 1: 0.0,
28 | 2: 1200.0,
29 | 3: 0.0,
30 | 4: 17959
31 | }
32 |
33 | for coordinate_pair in coordinate_pairs:
34 | source, destination = coordinate_pairs[coordinate_pair]
35 | expected = results[coordinate_pair]
36 | got = round(bt.distance(source, destination))
37 | self.assertTrue(expected == got)
38 |
39 | def test_build_user_agent(self):
40 | ua = bt.build_user_agent()
41 | self.assertTrue(ua != "")
42 | self.assertTrue(type(ua) in (type(b""), type(u"")))
43 |
44 | def test_build_request(self):
45 | url = "http://storj.io/"
46 | data = "test"
47 | headers = {"referer": "http://www.google.com/"}
48 | r = bt.build_request(url, data, headers)
49 | self.assertTrue(isinstance(r, bt.Request))
50 |
51 | # Invalid URL.
52 | url = ""
53 | try:
54 | r = bt.build_request(url, data, headers)
55 | except ValueError:
56 | pass
57 |
58 | def test_catch_request(self):
59 | # Pass
60 | r = bt.Request(url="http://www.google.com/")
61 | r, e = bt.catch_request(r)
62 |         self.assertTrue(r is not None)
63 |
64 | # Fail without error.
65 | r = bt.Request(url="http://127.0.0.1:74231/")
66 | r, e = bt.catch_request(r)
67 |         self.assertTrue(r is None)
68 |
69 | def test_file_getter(self):
70 | bt.shutdown_event = threading.Event()
71 | file_getter = bt.FileGetter("http://storj.io/", time.time())
72 | file_getter.run()
73 | time.sleep(1)
74 | self.assertTrue(sum(file_getter.result))
75 |
76 | def test_download_speed(self):
77 | bt.shutdown_event = threading.Event()
78 | files = [
79 | "http://www.storj.io/",
80 | "http://www.google.com/"
81 | ]
82 |
83 | self.assertTrue(bt.downloadSpeed(files))
84 | self.assertFalse(bt.downloadSpeed([]))
85 |
86 | def test_file_putter(self):
87 | bt.shutdown_event = threading.Event()
88 | file_putter = bt.FilePutter("http://atl.speedtest.pavlovmedia.net/speedtest/upload.php", timeit.default_timer(), 250000)
89 | file_putter.start()
90 | file_putter.join()
91 | self.assertTrue(file_putter.result)
92 |
93 | def test_upload_speed(self):
94 | bt.shutdown_event = threading.Event()
95 | files = [
96 | "http://atl.speedtest.pavlovmedia.net/speedtest/upload.php"
97 | ]
98 |
99 | sizes = [250000]
100 | self.assertTrue(bt.uploadSpeed("http://atl.speedtest.pavlovmedia.net/speedtest/upload.php", sizes, 1))
101 | self.assertFalse(bt.uploadSpeed("", sizes, 1))
102 |
103 | def test_get_config(self):
104 | # Invalid URL.
105 | try:
106 | bt.getConfig(url="test")
107 | except ValueError:
108 | pass
109 |
110 | # Valid XML.
111 | configxml = """
112 |
113 |
114 |
115 |
116 |
117 |
118 | """
119 | self.assertTrue(type(bt.getConfig(configxml=configxml)) is dict)
120 |
121 | def test_closest_servers(self):
122 | configxml = bt.getConfig()
123 | servers = bt.closestServers(configxml["client"])
124 | self.assertTrue(len(servers))
125 | self.assertTrue(type(bt.getBestServer(servers)) is dict)
126 |
127 | def test_speed_test_cached(self):
128 | cache_path = os.path.join(os.getcwd(), "speed_test")
129 | if os.path.exists(cache_path):
130 | os.remove(cache_path)
131 |
132 | content = """{"download": 1, "upload": 2}"""
133 | with open(cache_path, "w") as fp:
134 | fp.write(content)
135 |
136 | ret = bt.speed_test_cached()
137 | self.assertTrue(ret["download"] == 1)
138 | self.assertTrue(ret["upload"] == 2)
139 |
140 |
--------------------------------------------------------------------------------
/dataserv_client/messaging.py:
--------------------------------------------------------------------------------
1 | import json
2 | import http.client
3 | import socket
4 | import time
5 | from datetime import datetime
6 | from future.moves.urllib.parse import urlparse, urlencode # NOQA
7 | from future.moves.urllib.request import urlopen, Request
8 | from future.moves.urllib.error import HTTPError, URLError
9 | import btctxstore
10 | import storjcore
11 | from dataserv_client import exceptions
12 | from dataserv_client import logmessages
13 | from dataserv_client import common
14 |
15 |
16 | logger = common.logging.getLogger(__name__)
17 |
18 |
19 | class Messaging(object):
20 |
21 | def __init__(self, server_url, wif, connection_retry_limit,
22 | connection_retry_delay):
23 | self._server_url = server_url
24 | self._server_address = None
25 | self.retry_limit = connection_retry_limit
26 | self.retry_delay = connection_retry_delay
27 |
28 | # TODO pass testnet and dryrun options
29 | self.btctxstore = btctxstore.BtcTxStore()
30 | self.wif = wif
31 |
32 | def auth_address(self):
33 | return self.btctxstore.get_address(self.wif)
34 |
35 | def get_nodeid(self):
36 | return common.address2nodeid(self.auth_address())
37 |
38 | def _url_query(self, api_path, authenticate=True): # NOQA
39 | i = 0
40 | while i <= self.retry_limit:
41 | i += 1
42 | try:
43 | query_url = self._server_url + api_path
44 | req = Request(query_url)
45 | starttime = datetime.utcnow()
46 | if self.wif and authenticate:
47 | headers = storjcore.auth.create_headers(
48 | self.btctxstore, self._get_server_address(), self.wif
49 | )
50 | req.add_header("Date", headers["Date"])
51 | req.add_header("Authorization", headers["Authorization"])
52 | logger.info("Query: {0} generated in {1}".format(
53 | query_url, datetime.utcnow()-starttime))
54 | response = urlopen(req, timeout=30)
55 | if 200 <= response.code <= 299:
56 | return response.read()
57 | except HTTPError as e:
58 | if e.code == 409:
59 | raise exceptions.AddressAlreadyRegistered(
60 | self.get_nodeid(), self._server_url
61 | )
62 | elif e.code == 404:
63 | raise exceptions.ServerNotFound(self._server_url + api_path)
64 | elif e.code == 400:
65 | raise exceptions.InvalidAddress(self.get_nodeid())
66 | elif e.code == 401: # auth error (likely clock off)
67 | # log "HTTP Error 401: UNAUTHORIZED"
68 | logger.warning(logmessages.InvalidAuthenticationHeaders())
69 | elif e.code == 500: # pragma: no cover
70 | raise exceptions.ServerError(self._server_url)
71 | else:
72 | raise e # pragma: no cover
73 | except http.client.HTTPException as e:
74 | logger.warning(repr(e))
75 | except URLError as e:
76 | logger.warning(repr(e))
77 | except socket.error as e:
78 | logger.warning(repr(e))
79 |
80 | # retry
81 | delay = self.retry_delay
82 | logger.info("Query retry in {0} seconds.".format(delay))
83 | time.sleep(delay)
84 |
85 | # retry limit
86 | logger.error("Failed to connect to {0}".format(self._server_url))
87 | raise exceptions.ConnectionError(self._server_url)
88 |
89 | def _get_server_address(self):
90 | if not self._server_address:
91 | data = self._url_query("/api/address", authenticate=False)
92 | self._server_address = json.loads(data.decode("utf-8"))["address"]
93 | if not self.btctxstore.validate_address(self._server_address):
94 | logger.error("Invalid server address '{0}'".format(
95 | self._server_address
96 | ))
97 | raise exceptions.InvalidAddress(self._server_address)
98 | return self._server_address
99 |
100 | def server_url(self):
101 | return self._server_url
102 |
103 | def register(self, payout_addr):
104 | """Attempt to register this client address."""
105 | if payout_addr and not self.btctxstore.validate_address(payout_addr):
106 | logger.error("Invalid payout address '{0}'".format(payout_addr))
107 | raise exceptions.InvalidAddress(payout_addr)
108 | if payout_addr:
109 | return self._url_query("/api/register/{0}/{1}".format(
110 | self.get_nodeid(), payout_addr
111 | ))
112 |
113 | def set_bandwidth(self, upload, download):
114 | """Attempt to set bandwidth values for this client."""
115 | url = "/api/bandwidth/{nodeid}/{upload}/{download}"
116 | return self._url_query(url.format(
117 | nodeid=self.get_nodeid(), upload=int(upload), download=int(download)
118 | ))
119 |
120 | def ping(self):
121 | """Send a heartbeat message for this client address."""
122 | return self._url_query("/api/ping/{0}".format(self.get_nodeid()))
123 |
124 | def audit(self, block_height, response):
125 | """Send audit response for this client address."""
126 | return self._url_query('/api/audit/%s/%s/%s' % (self.get_nodeid(),
127 | block_height,
128 | response))
129 |
130 | def height(self, height):
131 | """Set the height claim for this client address."""
132 | return self._url_query('/api/height/%s/%s' % (self.get_nodeid(),
133 | height))
134 |
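135 | # Example (a sketch; `wif` and `payout_addr` are assumed to be a valid key
136 | # in WIF format and a valid bitcoin address):
137 | #
138 | #   msg = Messaging("http://status.driveshare.org", wif,
139 | #                   connection_retry_limit=120, connection_retry_delay=30)
140 | #   msg.register(payout_addr)
141 | #   msg.ping()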
--------------------------------------------------------------------------------
/tests/test_deserialize.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from dataserv_client import deserialize
3 | from dataserv_client import exceptions
4 |
5 |
6 | class TestUrl(unittest.TestCase):
7 |
8 | def test_url(self):
9 |
10 | # test http
11 | urlstr = "http://test.url.com"
12 | self.assertEqual(deserialize.url(urlstr), urlstr)
13 |
14 | # test https
15 | urlstr = "https://test.url.com"
16 | self.assertEqual(deserialize.url(urlstr), urlstr)
17 |
18 | # test ip
19 | urlstr = "https://127.0.0.1"
20 | self.assertEqual(deserialize.url(urlstr), urlstr)
21 |
22 | # test port
23 | urlstr = "https://127.0.0.1:5000"
24 | self.assertEqual(deserialize.url(urlstr), urlstr)
25 |
26 | # test ignores case
27 | urlstr = "HTTP://TEST.URL.COM"
28 | self.assertEqual(deserialize.url(urlstr), urlstr)
29 |
30 | # test invalid
31 | def callback():
32 | deserialize.url("--?%>=_`~$")
33 | self.assertRaises(exceptions.InvalidUrl, callback)
34 |
35 |
36 | class TestByteCount(unittest.TestCase):
37 |
38 | def test_types(self):
39 |
40 | # accepted types
41 | self.assertEqual(deserialize.byte_count(1), 1)
42 | self.assertEqual(deserialize.byte_count("1"), 1)
43 | self.assertEqual(deserialize.byte_count(b"1"), 1)
44 | self.assertEqual(deserialize.byte_count(u"1"), 1)
45 |
46 | def callback():
47 | deserialize.byte_count(None)
48 | self.assertRaises(exceptions.InvalidInput, callback)
49 |
50 | def callback():
51 | deserialize.byte_count(1.0)
52 | self.assertRaises(exceptions.InvalidInput, callback)
53 |
54 | def test_int(self):
55 | self.assertEqual(deserialize.byte_count(1), 1)
56 |
57 | def test_no_postfix(self):
58 | self.assertEqual(deserialize.byte_count("1"), 1)
59 | self.assertEqual(deserialize.byte_count("2"), 2)
60 |
61 | def test_base_1024(self):
62 | self.assertEqual(deserialize.byte_count("1K"), 1 * (1024 ** 1))
63 | self.assertEqual(deserialize.byte_count("2K"), 2 * (1024 ** 1))
64 |
65 | self.assertEqual(deserialize.byte_count("1M"), 1 * (1024 ** 2))
66 | self.assertEqual(deserialize.byte_count("2M"), 2 * (1024 ** 2))
67 |
68 | self.assertEqual(deserialize.byte_count("1G"), 1 * (1024 ** 3))
69 | self.assertEqual(deserialize.byte_count("2G"), 2 * (1024 ** 3))
70 |
71 | self.assertEqual(deserialize.byte_count("1T"), 1 * (1024 ** 4))
72 | self.assertEqual(deserialize.byte_count("2T"), 2 * (1024 ** 4))
73 |
74 | self.assertEqual(deserialize.byte_count("1P"), 1 * (1024 ** 5))
75 | self.assertEqual(deserialize.byte_count("2P"), 2 * (1024 ** 5))
76 |
77 | def test_base_1000(self):
78 | self.assertEqual(deserialize.byte_count("1KB"), 1 * (1000 ** 1))
79 | self.assertEqual(deserialize.byte_count("2KB"), 2 * (1000 ** 1))
80 |
81 | self.assertEqual(deserialize.byte_count("1MB"), 1 * (1000 ** 2))
82 | self.assertEqual(deserialize.byte_count("2MB"), 2 * (1000 ** 2))
83 |
84 | self.assertEqual(deserialize.byte_count("1GB"), 1 * (1000 ** 3))
85 | self.assertEqual(deserialize.byte_count("2GB"), 2 * (1000 ** 3))
86 |
87 | self.assertEqual(deserialize.byte_count("1TB"), 1 * (1000 ** 4))
88 | self.assertEqual(deserialize.byte_count("2TB"), 2 * (1000 ** 4))
89 |
90 | self.assertEqual(deserialize.byte_count("1PB"), 1 * (1000 ** 5))
91 | self.assertEqual(deserialize.byte_count("2PB"), 2 * (1000 ** 5))
92 |
93 | def test_decimal(self):
94 |         # test unit boundaries base 1024
95 | self.assertEqual(deserialize.byte_count("1.0K"), 1024 ** 1)
96 | self.assertEqual(deserialize.byte_count("1.0M"), 1024 ** 2)
97 | self.assertEqual(deserialize.byte_count("1.0G"), 1024 ** 3)
98 | self.assertEqual(deserialize.byte_count("1.0T"), 1024 ** 4)
99 | self.assertEqual(deserialize.byte_count("1.0P"), 1024 ** 5)
100 |
101 |         # test unit boundaries base 1000
102 | self.assertEqual(deserialize.byte_count("1.0KB"), 1000 ** 1)
103 | self.assertEqual(deserialize.byte_count("1.0MB"), 1000 ** 2)
104 | self.assertEqual(deserialize.byte_count("1.0GB"), 1000 ** 3)
105 | self.assertEqual(deserialize.byte_count("1.0TB"), 1000 ** 4)
106 | self.assertEqual(deserialize.byte_count("1.0PB"), 1000 ** 5)
107 |
108 |         # test between unit boundaries base 1024
109 | self.assertEqual(deserialize.byte_count("0.5K"), (1024 ** 1 / 2))
110 | self.assertEqual(deserialize.byte_count("0.5M"), (1024 ** 2 / 2))
111 | self.assertEqual(deserialize.byte_count("0.5G"), (1024 ** 3 / 2))
112 | self.assertEqual(deserialize.byte_count("0.5T"), (1024 ** 4 / 2))
113 | self.assertEqual(deserialize.byte_count("0.5P"), (1024 ** 5 / 2))
114 |
115 |         # test between unit boundaries base 1000
116 | self.assertEqual(deserialize.byte_count("0.5KB"), (1000 ** 1 / 2))
117 | self.assertEqual(deserialize.byte_count("0.5MB"), (1000 ** 2 / 2))
118 | self.assertEqual(deserialize.byte_count("0.5GB"), (1000 ** 3 / 2))
119 | self.assertEqual(deserialize.byte_count("0.5TB"), (1000 ** 4 / 2))
120 | self.assertEqual(deserialize.byte_count("0.5PB"), (1000 ** 5 / 2))
121 |
122 | # test type
123 | self.assertTrue(isinstance(deserialize.byte_count("0.49K"), int))
124 | self.assertTrue(isinstance(deserialize.byte_count("0.49M"), int))
125 | self.assertTrue(isinstance(deserialize.byte_count("0.49G"), int))
126 | self.assertTrue(isinstance(deserialize.byte_count("0.49T"), int))
127 | self.assertTrue(isinstance(deserialize.byte_count("0.49P"), int))
128 | self.assertTrue(isinstance(deserialize.byte_count("0.49KB"), int))
129 | self.assertTrue(isinstance(deserialize.byte_count("0.49MB"), int))
130 | self.assertTrue(isinstance(deserialize.byte_count("0.49GB"), int))
131 | self.assertTrue(isinstance(deserialize.byte_count("0.49TB"), int))
132 | self.assertTrue(isinstance(deserialize.byte_count("0.49PB"), int))
133 |
134 |
135 | if __name__ == '__main__':
136 | unittest.main()
137 |
--------------------------------------------------------------------------------
/dataserv_client/cli.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import signal
3 | import argparse
4 | from dataserv_client import api
5 | from dataserv_client import common
6 |
7 |
8 | signal.signal(signal.SIGINT, signal.default_int_handler) # rm twisted handler
9 | logger = common.logging.getLogger(__name__)
10 |
11 |
12 | def _add_program_args(parser):
13 | # url
14 | parser.add_argument(
15 | "--url", default=common.DEFAULT_URL,
16 | help="Url of the farmer (default: {0}).".format(common.DEFAULT_URL)
17 | )
18 |
19 | # max_size
20 | default = common.DEFAULT_MAX_SIZE
21 | parser.add_argument(
22 | "--max_size", default=default,
23 | help="Maximum data size in bytes. (default: {0}).".format(default)
24 | )
25 |
26 | # min_free_size
27 | default = common.DEFAULT_MIN_FREE_SIZE
28 | parser.add_argument(
29 | "--min_free_size", default=default,
30 | help="Minimum free size in bytes. (default: {0}).".format(default)
31 | )
32 |
33 | # store_path
34 | default = common.DEFAULT_STORE_PATH
35 | parser.add_argument(
36 | "--store_path", default=default,
37 | help="Storage path. (default: {0}).".format(default)
38 | )
39 |
40 | # config_path
41 | default = common.DEFAULT_CONFIG_PATH
42 | parser.add_argument(
43 | "--config_path", default=default,
44 | help="Config path. (default: {0}).".format(default)
45 | )
46 |
47 | # debug
48 | parser.add_argument('--debug', action='store_true',
49 | help="Show debug information.")
50 |
51 |     # quiet
52 | parser.add_argument('--quiet', action='store_true',
53 | help="Only show warning and error information.")
54 |
55 | # use folder tree
56 | parser.add_argument(
57 | '--use_folder_tree', action='store_true',
58 | help="Use folder tree to store files (always on for fat32 store_path)."
59 | )
60 |
61 |
62 | def _add_version(command_parser):
63 | version_parser = command_parser.add_parser( # NOQA
64 | "version", help="Show version number."
65 | )
66 |
67 |
68 | def _add_register(command_parser):
69 | register_parser = command_parser.add_parser( # NOQA
70 | "register", help="Register your node on the network."
71 | )
72 |
73 |
74 | def _add_ping(command_parser):
75 | ping_parser = command_parser.add_parser( # NOQA
76 | "ping", help="Ping master node."
77 | )
78 |
79 |
80 | def _add_config(command_parser):
81 | config_parser = command_parser.add_parser( # NOQA
82 | "config", help="Edit and display config."
83 | )
84 |
85 | # wallet
86 | msg = "Set node wallet to given hwif."
87 | config_parser.add_argument("--set_wallet", default=None, help=msg)
88 |
89 | # payout_address
90 | msg = "Root address of wallet used by default."
91 | config_parser.add_argument("--set_payout_address", default=None, help=msg)
92 |
93 |
94 | def _add_poll(command_parser):
95 | poll_parser = command_parser.add_parser(
96 | "poll", help="Let the network know your are online."
97 | )
98 | poll_parser.add_argument(
99 | "--delay", default=common.DEFAULT_DELAY,
100 | help="Deley between each ping."
101 | )
102 | poll_parser.add_argument(
103 | "--limit", default=None, help="Limit poll time in seconds."
104 | )
105 |
106 |
107 | def _add_freespace(command_parser):
108 | freespace_parser = command_parser.add_parser( # NOQA
109 | "freespace", help="Return free disk space."
110 | )
111 |
112 |
113 | def _add_build(command_parser):
114 | build_parser = command_parser.add_parser(
115 | "build", help="Fill the farmer with data up to their max."
116 | )
117 |
118 | # Threadpool workers
119 | build_parser.add_argument('--workers', default=1,
120 | help="Number of threadpool workers.")
121 |
122 | # cleanup
123 | build_parser.add_argument('--cleanup', action='store_true',
124 | help="Remove generated files.")
125 |
126 | # rebuild
127 | build_parser.add_argument('--rebuild', action='store_true',
128 |                               help="Replace previously built files.")
129 |
130 | # repair
131 | build_parser.add_argument('--repair', action='store_true',
132 | help="Replace bad and missing files.")
133 |
134 | # set height interval
135 | default = common.DEFAULT_SET_HEIGHT_INTERVAL
136 | build_parser.add_argument(
137 | "--set_height_interval", default=default,
138 | help="Interval at which to set height (default: {0}).".format(default)
139 | )
140 |
141 |
142 | def _add_audit(command_parser):
143 | audit_parser = command_parser.add_parser(
144 | "audit", help="Audit the generated data."
145 | )
146 | audit_parser.add_argument(
147 | "--delay", default=common.DEFAULT_AUDIT_DELAY,
148 | help="Deley between each audit."
149 | )
150 | audit_parser.add_argument(
151 | "--limit", default=None, help="Limit audit time in seconds."
152 | )
153 |
154 |
155 | def _add_farm(command_parser):
156 | farm_parser = command_parser.add_parser(
157 | "farm", help="Start farmer."
158 | )
159 |
160 | # Threadpool workers
161 | farm_parser.add_argument('--workers', default=1,
162 | help="Number of threadpool workers.")
163 |
164 | # cleanup
165 | farm_parser.add_argument('--cleanup', action='store_true',
166 | help="Remove generated files.")
167 |
168 | # rebuild
169 | farm_parser.add_argument('--rebuild', action='store_true',
170 |                              help="Replace previously built files.")
171 |
172 | # repair
173 | farm_parser.add_argument('--repair', action='store_true',
174 | help="Replace bad and missing files.")
175 |
176 | # set height interval
177 | default = common.DEFAULT_SET_HEIGHT_INTERVAL
178 | farm_parser.add_argument(
179 | "--set_height_interval", default=default,
180 | help="Interval at which to set height (default: {0}).".format(default)
181 | )
182 |
183 | farm_parser.add_argument(
184 | "--delay", default=common.DEFAULT_DELAY,
185 | help="Deley between each ping."
186 | )
187 |
188 | farm_parser.add_argument(
189 | "--limit", default=None, help="Limit poll time in seconds."
190 | )
191 |
192 |
193 | def _parse_args(args):
194 | class ArgumentParser(argparse.ArgumentParser):
195 | def error(self, message):
196 | sys.stderr.write('error: %s\n' % message)
197 | self.print_help()
198 | sys.exit(2)
199 |     # TODO let user set store path and max size; shard size is 128 MB
200 |
201 | # setup parser
202 | description = "Dataserve client command-line interface."
203 | parser = ArgumentParser(description=description)
204 |
205 |     _add_program_args(parser)
206 |
207 | command_parser = parser.add_subparsers(
208 | title='commands', dest='command', metavar=""
209 | )
210 |
211 | _add_version(command_parser)
212 | _add_register(command_parser)
213 | _add_ping(command_parser)
214 | _add_poll(command_parser)
215 | _add_freespace(command_parser)
216 | _add_build(command_parser)
217 | _add_audit(command_parser)
218 | _add_config(command_parser)
219 | _add_farm(command_parser)
220 |
221 | # get values
222 | arguments = vars(parser.parse_args(args=args))
223 | command_name = arguments.pop("command")
224 | if not command_name:
225 | parser.error("No command given!")
226 | return command_name, arguments
227 |
228 |
229 | def main(args):
230 | client = None
231 | try:
232 | command_name, arguments = _parse_args(args)
233 | client = api.Client(
234 | url=arguments.pop("url"),
235 | debug=arguments.pop("debug"),
236 | quiet=arguments.pop("quiet"),
237 | use_folder_tree=arguments.pop("use_folder_tree"),
238 | max_size=arguments.pop("max_size"),
239 | min_free_size=arguments.pop("min_free_size"),
240 | store_path=arguments.pop("store_path"),
241 | config_path=arguments.pop("config_path"),
242 | )
243 | return getattr(client, command_name)(**arguments)
244 | except KeyboardInterrupt:
245 | logger.warning("Caught KeyboardInterrupt")
246 | # if client is not None and client.storjnode is not None:
247 | # client.storjnode.stop()
248 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | ===============
2 | dataserv-client
3 | ===============
4 |
5 | |BuildLink|_ |CoverageLink|_ |BuildLink2|_ |CoverageLink2|_ |LicenseLink|_
6 |
7 | .. |BuildLink| image:: https://img.shields.io/travis/Storj/dataserv-client/master.svg?label=Build-Master
8 | .. _BuildLink: https://travis-ci.org/Storj/dataserv-client
9 |
10 | .. |CoverageLink| image:: https://img.shields.io/coveralls/Storj/dataserv-client/master.svg?label=Coverage-Master
11 | .. _CoverageLink: https://coveralls.io/r/Storj/dataserv-client
12 |
13 | .. |BuildLink2| image:: https://img.shields.io/travis/Storj/dataserv-client/develop.svg?label=Build-Develop
14 | .. _BuildLink2: https://travis-ci.org/Storj/dataserv-client
15 |
16 | .. |CoverageLink2| image:: https://img.shields.io/coveralls/Storj/dataserv-client/develop.svg?label=Coverage-Develop
17 | .. _CoverageLink2: https://coveralls.io/r/Storj/dataserv-client
18 |
19 | .. |LicenseLink| image:: https://img.shields.io/badge/license-MIT-blue.svg
20 | .. _LicenseLink: https://raw.githubusercontent.com/Storj/dataserv-client
21 |
22 |
23 | Contributing
24 | ============
25 |
26 | We welcome contributions if you have a little extra time and Python experience. We ask that you make your pull requests on the `develop <https://github.com/Storj/dataserv-client/tree/develop>`_ branch, as we only use `master <https://github.com/Storj/dataserv-client/tree/master>`_ for releases. Please follow `PEP8 <https://www.python.org/dev/peps/pep-0008/>`_, and make sure you document anything new. If you have any questions, you can find the entire team on Slack. Note: If you plan on running the unit tests for the repo, you will also need to have `dataserv <https://github.com/Storj/dataserv>`_ running locally with a new db (``run_tests.sh`` automates this setup; see "Running the tests" below).
27 |
28 |
29 | Setup
30 | =====
31 |
32 | Windows
33 | -------
34 |
35 | Download the `latest windows release from github <https://github.com/Storj/dataserv-client/releases>`_.
36 |
37 | Extract the zip file to the folder where you wish to have it installed.
38 |
39 | ::
40 |
41 | $ dataserv-client.exe version
42 |
43 | Note: automatic updates were removed in v2.0.1; to update, download and extract the latest release.
44 |
45 |
46 | Linux (Ubuntu/Mint/Debian)
47 | --------------------------
48 |
49 | Install client
50 |
51 | ::
52 |
53 | # install apt dependencies
54 | $ sudo apt-get install python python-pip python-dev gcc
55 |
56 | $ sudo pip install dataserv-client
57 | $ dataserv-client version
58 |
59 | Update client
60 |
61 | ::
62 |
63 | $ sudo pip install dataserv-client --upgrade
64 | $ dataserv-client version
65 |
66 |
67 | OSX
68 | ---
69 |
70 | Install client
71 |
72 | ::
73 |
74 | $ brew install python
75 | $ rehash
76 | $ pip install dataserv-client
77 | $ dataserv-client version
78 |
79 | Update client
80 |
81 | ::
82 |
83 | $ pip install dataserv-client --upgrade
84 | $ dataserv-client version
85 |
86 |
87 | Farmer Quickstart Guide
88 | =======================
89 |
90 | **Configure your farmer node**
91 |
92 | Optionally set a cold storage payout address.
93 | You can set a counterparty wallet address here.
94 | All configuration must be done before starting the node.
95 |
96 | ::
97 |
98 |     dataserv-client config --set_payout_address=<payout_address>
99 |
100 |
101 |
102 | **Start your farmer node**
103 |
104 | Optionally specify the path to store data, the available space, and minimum free space.
105 |
106 | ::
107 |
108 |     dataserv-client --store_path=<store_path> --max_size=<max_size> --min_free_size=<min_free_size> farm
109 |
110 | Optional max_size and min_free_size syntax
111 |
112 | ::
113 |
114 | --max_size=1.0K # 1024^1 bytes
115 | --max_size=1.0KB # 1000^1 bytes
116 | --max_size=1.0M # 1024^2 bytes
117 | --max_size=1.0MB # 1000^2 bytes
118 | --max_size=1.0G # 1024^3 bytes
119 | --max_size=1.0GB # 1000^3 bytes
120 | --max_size=1.0T # 1024^4 bytes
121 | --max_size=1.0TB # 1000^4 bytes
122 | --max_size=1.0P # 1024^5 bytes
123 | --max_size=1.0PB # 1000^5 bytes
124 |
125 |
126 | Farmer Multi Disc Guide
127 | =======================
128 |
129 | In order to farm on multiple discs you will have to run several instances,
130 | as multiple paths are not yet supported. To do this you will need one config
131 | for each disc.
132 |
133 | Different instances can share a common payout address; however, it is recommended
134 | to use a different payout address for each instance.
135 |
136 |
137 | Disc 1
138 | ------
139 |
140 | ::
141 |
142 |     dataserv-client --config_path=<config_path_1> config --set_payout_address=<payout_address_1>
143 |     dataserv-client --config_path=<config_path_1> --store_path=<store_path_1> --max_size=<max_size> farm
144 |
145 |
146 | Disc n
147 | ------
148 |
149 | ::
150 |
151 |     dataserv-client --config_path=<config_path_n> config --set_payout_address=<payout_address_n>
152 |     dataserv-client --config_path=<config_path_n> --store_path=<store_path_n> --max_size=<max_size> farm
153 |
154 |
155 | Farmer Setting Custom Height
156 | ============================
157 |
158 |
159 | In order to build a bit faster, you may consider using the --set_height_interval option.
160 | If you set a high height interval, please also consider running another instance of the client with poll.
161 | Poll will report every 60 seconds, while farm or build will report only when the height interval is reached.
162 |
163 |
164 | Running the farm command
165 | ------------------------
166 |
167 | ::
168 |
169 |     dataserv-client --url=http://status.driveshare.org --store_path=<store_path> --max_size=<max_size> farm --set_height_interval=<interval> (default: 25, max recommended: 199999)
170 |
171 |
172 | Running the poll command
173 | ------------------------
174 |
175 | ::
176 |
177 | dataserv-client --url=http://status.driveshare.org poll
178 |
179 | Workers Guide
180 | =============
181 |
182 | You can start multiple workers by executing farm or build with the optional argument --workers. It is recommended to start only as many workers as your CPU and hard drive can handle. With a fast hard drive, a CPU usage of ~80% is possible.
183 |
184 | ::
185 |
186 |     dataserv-client build --workers=<number_of_workers>
187 |
188 | ::
189 |
190 |     dataserv-client farm --workers=<number_of_workers>
191 |
192 |
193 | Command Line Interface Usage
194 | ============================
195 |
196 | Argument ordering
197 | -----------------
198 |
199 | ::
200 |
201 | $ dataserv-client COMMAND
202 |
203 |
204 | Argument ordering example
205 | -------------------------
206 |
207 | ::
208 |
209 | $ dataserv-client --debug build --rebuild
210 |
211 |
212 | Show program help, optional arguments and commands
213 | --------------------------------------------------
214 |
215 | ::
216 |
217 | $ dataserv-client --help
218 | usage: dataserv-client [-h] [--url URL] [--max_size MAX_SIZE]
219 | [--store_path STORE_PATH] [--config_path CONFIG_PATH]
220 | [--debug] [--use_folder_tree]
221 | ...
222 |
223 |     Dataserv client command-line interface.
224 |
225 | optional arguments:
226 | -h, --help show this help message and exit
227 | --url URL Url of the farmer (default:
228 | http://status.driveshare.org).
229 | --max_size MAX_SIZE Maximum data size in bytes. (default: 1073741824).
230 | --min_free_size MIN_FREE_SIZE
231 | Minimum free size in bytes. (default: 1073741824).
232 | --store_path STORE_PATH
233 | Storage path. (default: /home/user/.storj/store).
234 | --config_path CONFIG_PATH
235 | Config path. (default: /home/user/.storj/config.json).
236 | --debug Show debug information.
237 | --quiet Only show warning and error information.
238 | --use_folder_tree Use folder tree to store files (always on for fat32
239 | store_path).
240 |
241 | commands:
242 |
243 | version Show version number.
244 | register Register your node on the network.
245 | ping Ping master node.
246 |     poll                Let the network know you are online.
247 |     build               Fill the farmer with data up to its max.
248 | audit Audit the generated data.
249 | config Edit and display config.
250 | farm Start farmer.
251 |
252 |
253 | Show command help and optional arguments
254 | ----------------------------------------
255 |
256 | ::
257 |
258 | $ dataserv-client config --help
259 | usage: dataserv-client config [-h] [--set_wallet SET_WALLET]
260 | [--set_payout_address SET_PAYOUT_ADDRESS]
261 |
262 | optional arguments:
263 | -h, --help show this help message and exit
264 | --set_wallet SET_WALLET
265 | Set node wallet to given hwif.
266 | --set_payout_address SET_PAYOUT_ADDRESS
267 | Root address of wallet used by default.
268 |
269 |
--------------------------------------------------------------------------------
/dataserv_client/builder.py:
--------------------------------------------------------------------------------
1 | import os
2 | import time
3 | import bisect
4 | import hashlib
5 | import binascii
6 | import RandomIO
7 | import partialhash
8 | import psutil
9 | import json
10 | import storjnode
11 | from future.moves.urllib.request import urlopen
12 | from dataserv_client import control
13 | from dataserv_client import common
14 | from dataserv_client.common import DEFAULT_MIN_CONFIRMATIONS
15 | from dataserv_client.exceptions import BlockExplorerApiFailed
16 |
17 |
18 | logger = common.logging.getLogger(__name__)
19 |
20 |
21 | class Builder:
22 |
23 | def __init__(self, address, shard_size, max_size, min_free_size,
24 | on_generate_shard=None, use_folder_tree=False):
25 | self.target_height = int(max_size / shard_size)
26 | self.address = address
27 | self.shard_size = shard_size
28 | self.max_size = max_size
29 | self.min_free_size = min_free_size
30 | self.use_folder_tree = use_folder_tree
31 | self.on_generate_shard = on_generate_shard
32 |
33 | @staticmethod
34 | def sha256(content):
35 | """Finds the SHA-256 hash of the content."""
36 | content = content.encode('utf-8')
37 | return hashlib.sha256(content).hexdigest()
38 |
39 | def _build_all_seeds(self, height):
40 | """Includes seed for height 0."""
41 | seed = self.sha256(self.address)
42 | seeds = [seed]
43 | for i in range(height):
44 | seed = self.sha256(seed)
45 | seeds.append(seed)
46 | return seeds
47 |
48 | def build_seeds(self, height):
49 | """Deterministically build seeds."""
50 | return self._build_all_seeds(height)[:height]
51 |
52 | def build_seed(self, height):
53 | """Deterministically build a seed."""
54 | return self._build_all_seeds(height).pop()
55 |
56 | def _get_shard_path(self, store_path, seed, create_needed_folders=False):
57 | if self.use_folder_tree:
58 | folders = os.path.join(*storjnode.util.chunks(seed, 3))
59 | store_path = os.path.join(store_path, folders)
60 | if create_needed_folders:
61 | storjnode.util.ensure_path_exists(store_path)
62 | return os.path.join(store_path, seed)
63 |
64 | def generate_shard(self, seed, store_path, cleanup=False):
65 | """
66 | Save a shard, and return its SHA-256 hash.
67 |
68 |         :param seed: Seed passed to RandomIO to generate the file.
69 |         :param store_path: Path where the file is stored.
70 | :param cleanup: Delete the file after generation.
71 | :return: SHA-256 hash of the file.
72 | """
73 |
74 | # save the shard
75 | path = self._get_shard_path(store_path, seed,
76 | create_needed_folders=True)
77 | try:
78 | RandomIO.RandomIO(seed).genfile(self.shard_size, path)
79 | except IOError as e:
80 | msg = "Failed to write shard, will try once more! '{0}'"
81 | logger.error(msg.format(repr(e)))
82 | time.sleep(2)
83 | RandomIO.RandomIO(seed).genfile(self.shard_size, path)
84 |
85 | # get the file hash
86 | with open(path, 'rb') as f:
87 | file_hash = hashlib.sha256(f.read()).hexdigest()
88 |
89 | # remove file if requested
90 | if cleanup:
91 | os.remove(path)
92 |
93 | return file_hash
94 |
95 | def filter_to_resume_point(self, store_path, enum_seeds):
96 | """
97 | Binary search to find the proper place to resume.
98 |
99 |         :param store_path: Path where the files are stored.
100 |         :param enum_seeds: Enumerated list of (height, seed) pairs to check.
101 |         :return: Height at which to resume building.
102 | """
103 | class HackedCompareObject(str):
104 | def __gt__(hco_self, seed):
105 | path = self._get_shard_path(store_path, seed)
106 | return os.path.exists(path)
107 |
108 | seeds = [seed for num, seed in enum_seeds]
109 | index = bisect.bisect_left(seeds, HackedCompareObject())
110 |
111 | logger.info("Resuming from height {0}".format(index))
112 | return index
113 |
114 | def build(self, store_path, workers=1, cleanup=False,
115 | rebuild=False, repair=False):
116 | """
117 |         Fill the farmer with data up to its max size.
118 |
119 |         :param store_path: Path where the shards are stored.
120 |         :param cleanup: Delete each shard after generation.
121 |         :param rebuild: Re-generate all shards.
122 | """
123 |
124 | generated = {}
125 | pool = control.Thread.ThreadPool(workers)
126 |
127 | enum_seeds = list(enumerate(self.build_seeds(self.target_height)))
128 | last_height = 0
129 | if not rebuild:
130 | last_height = self.filter_to_resume_point(store_path, enum_seeds)
131 |
132 | # rebuild bad or missing shards
133 | if repair:
134 | for shard_num, seed in enum_seeds[:last_height]:
135 | path = self._get_shard_path(store_path, seed)
136 | if not (os.path.exists(path) and
137 | os.path.getsize(path) == self.shard_size):
138 |                     msg = "Repair seed {0} height {1}."
139 | logger.info(msg.format(seed, shard_num))
140 | pool.add_task(self.generate_shard, seed,
141 | store_path, cleanup)
142 | pool.wait_completion()
143 |
144 | for shard_num, seed in enum_seeds[last_height:]:
145 | try:
146 | space_free = psutil.disk_usage(store_path).free
147 | space_required = (self.shard_size * (pool.active_count() + 1))
148 | if (space_free - space_required < self.min_free_size):
149 | msg = "Minimum free disk space reached ({0}) for '{1}'."
150 | logger.info(msg.format(self.min_free_size, store_path))
151 | last_height = shard_num
152 | break
153 |
154 | file_hash = pool.add_task(self.generate_shard, seed,
155 | store_path, cleanup)
156 |
157 | generated[seed] = file_hash
158 | logger.info("Saving seed {0} with SHA-256 hash {1}.".format(
159 | seed, file_hash
160 | ))
161 |
162 | last_height = shard_num + 1
163 | if self.on_generate_shard:
164 | self.on_generate_shard(shard_num + 1, False)
165 |
166 | except KeyboardInterrupt:
167 | last_height = shard_num + 1
168 | logger.warning("Caught KeyboardInterrupt, finishing workers")
169 | break
170 |
171 | pool.wait_completion()
172 | if self.on_generate_shard:
173 | self.on_generate_shard(last_height, True)
174 |
175 | return generated
176 |
177 | def clean(self, store_path):
178 | """
179 | Delete shards from path.
180 |
181 | :param store_path: Path the shards are stored at.
182 | """
183 |
184 | seeds = self.build_seeds(self.target_height)
185 | for shard_num, seed in enumerate(seeds):
186 | path = self._get_shard_path(store_path, seed)
187 | if os.path.exists(path):
188 | os.remove(path)
189 |
190 | def checkup(self, store_path):
191 | """
192 | Make sure the shards exist.
193 |
194 | :param store_path: Path the shards are stored at.
195 |         :return: True if all shards exist, False otherwise.
196 | """
197 |
198 | seeds = self.build_seeds(self.target_height)
199 | for shard_num, seed in enumerate(seeds):
200 | path = self._get_shard_path(store_path, seed)
201 | if not os.path.exists(path):
202 | return False
203 | return True
204 |
205 | def btc_height(self):
206 | """Bitcoin height"""
207 | url = 'https://chain.so/api/v2/get_info/BTC'
208 | result = json.loads(urlopen(url).read().decode('utf8'))
209 | if result['status'] == 'success':
210 | return result['data']['blocks']
211 | else:
212 | raise BlockExplorerApiFailed(url)
213 |
214 | def btc_block(self, index):
215 | """Bitcoin block for given index"""
216 | url = 'https://chain.so/api/v2/get_block/BTC/' + str(index)
217 | result = json.loads(urlopen(url).read().decode('utf8'))
218 | if result['status'] == 'success':
219 | result['data']['block_no'] = int(result['data']['block_no'])
220 | result['data']['confirmations'] = int(
221 | result['data']['confirmations']
222 | )
223 | return result['data']
224 | else:
225 | raise BlockExplorerApiFailed(url)
226 |
227 | def btc_last_confirmed_block(self,
228 | min_confirmations=DEFAULT_MIN_CONFIRMATIONS):
229 | """last Bitcoin block with given min confirmation"""
230 | btc_height = self.btc_height()
231 |
232 | while True:
233 | btc_block = self.btc_block(btc_height)
234 | enough_confirms = btc_block['confirmations'] >= min_confirmations
235 |             if enough_confirms and not btc_block['is_orphan']:
236 | return btc_block
237 | btc_height -= 1
238 |
239 | def audit(self, store_path, btc_index, btc_hash,
240 | block_size=common.DEFAULT_BLOCK_SIZE,
241 | full_audit=common.DEFAULT_FULL_AUDIT,
242 | min_confirmations=DEFAULT_MIN_CONFIRMATIONS):
243 | """audit one block"""
244 |
245 | audit_begin = (btc_index % full_audit) * block_size
246 | audit_end = audit_begin + block_size
247 |
248 | logger.info("Audit block {0} - {1}.".format(audit_begin, audit_end))
249 |
250 | seeds = self.build_seeds(audit_end)[audit_begin:]
251 |
252 | # check if the block is complete
253 | for seed in seeds:
254 | path = self._get_shard_path(store_path, seed)
255 | if not (os.path.exists(path) and
256 | os.path.getsize(path) == self.shard_size):
257 | logger.info("Shard missing or corrupt {0}".format(path))
258 | return 0
259 |
260 | # generate audit response
261 | audit_hash = ""
262 | for seed in seeds:
263 | path = self._get_shard_path(store_path, seed)
264 | digest = partialhash.compute(path, seed=btc_hash.encode('utf8'))
265 | audit_hash += str(binascii.hexlify(digest))
266 | return str(hashlib.sha256(audit_hash.encode('utf-8')).hexdigest())
267 |
--------------------------------------------------------------------------------
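To illustrate the deterministic layout implemented by ``Builder`` above: shard
seed 0 is the SHA-256 of the payout address, and every later seed is the
SHA-256 of the previous one; ``Builder.audit`` then checks the shard window
``[(N % full_audit) * block_size, (N % full_audit) * block_size + block_size)``
selected by bitcoin block N. A standalone sketch of the seed chain (not the
module itself; the address is a hypothetical placeholder):

::

    import hashlib

    def sha256(content):
        return hashlib.sha256(content.encode("utf-8")).hexdigest()

    def build_seeds(address, height):
        # mirrors Builder._build_all_seeds: hash the address, then
        # repeatedly hash the previous seed
        seed, seeds = sha256(address), []
        for _ in range(height):
            seeds.append(seed)
            seed = sha256(seed)
        return seeds

    seeds = build_seeds("<hypothetical payout address>", 4)
    assert seeds[1] == sha256(seeds[0])
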
/dataserv_client/api.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | import os
3 | import hashlib
4 | import time
5 | import psutil
6 | import storjnode
7 | from datetime import datetime
8 | from datetime import timedelta
9 | from btctxstore import BtcTxStore
10 | from dataserv_client import common
11 | from dataserv_client import builder
12 | from dataserv_client import exceptions
13 | from dataserv_client import messaging
14 | from dataserv_client import deserialize
15 | from dataserv_client.bandwidth_test import speedtest
16 | from dataserv_client import __version__
17 | from crochet import setup
18 | setup() # start twisted via crochet
19 |
20 |
21 | logger = common.logging.getLogger(__name__)
22 |
23 |
24 | SHOW_CONFIG_TEMPLATE = """Current configuration.
25 |
26 | Authentication address: {0}
27 | Payout address: {1}
28 | """
29 |
30 |
31 | # TODO move all logic to control, api should only deserialize/validate input
32 |
33 |
34 | class Client(object):
35 |
36 | def __init__(self, url=common.DEFAULT_URL, debug=False, quiet=False,
37 | use_folder_tree=False, max_size=common.DEFAULT_MAX_SIZE,
38 | min_free_size=common.DEFAULT_MIN_FREE_SIZE,
39 | store_path=common.DEFAULT_STORE_PATH,
40 | config_path=common.DEFAULT_CONFIG_PATH,
41 | connection_retry_limit=common.DEFAULT_CONNECTION_RETRY_LIMIT,
42 | connection_retry_delay=common.DEFAULT_CONNECTION_RETRY_DELAY):
43 |
44 | debug = deserialize.flag(debug)
45 | quiet = deserialize.flag(quiet)
46 |
47 | self.url = deserialize.url(url)
48 | self.use_folder_tree = deserialize.flag(use_folder_tree)
49 | self.max_size = deserialize.byte_count(max_size)
50 | self.min_free_size = deserialize.byte_count(min_free_size)
51 |
52 | self.messenger = None # lazy
53 | self.btctxstore = BtcTxStore()
54 | self.retry_limit = deserialize.positive_integer(connection_retry_limit)
55 | self.retry_delay = deserialize.positive_integer(connection_retry_delay)
56 |
57 | # paths
58 | self.cfg_path = os.path.realpath(config_path)
59 | storjnode.util.ensure_path_exists(os.path.dirname(self.cfg_path))
60 | self.store_path = os.path.realpath(store_path)
61 | storjnode.util.ensure_path_exists(self.store_path)
62 |
63 |         # check for vfat partitions
64 | try:
65 | fstype = storjnode.util.get_fs_type(self.store_path)
66 |
67 | # FileNotFoundError: [Errno 2] No such file or directory: '/etc/mtab'
68 | # psutil: https://code.google.com/p/psutil/issues/detail?id=434
69 | except EnvironmentError as e:
70 | logger.warning(e)
71 | fstype = None
72 |
73 | if fstype == "vfat":
74 | logger.info("Detected vfat partition, using folder tree.")
75 | self.use_folder_tree = True
76 | if fstype is None:
77 |             msg = "Couldn't detect the partition type for '{0}'"
78 | logger.warning(msg.format(self.store_path))
79 |
80 | self.cfg = storjnode.config.get(self.btctxstore, self.cfg_path)
81 |
82 | @staticmethod
83 | def version():
84 | print(__version__)
85 | return __version__
86 |
87 | def _init_messenger(self):
88 | """Make sure messenger exists."""
89 | if self.messenger is None:
90 | wif = self.btctxstore.get_key(self.cfg["wallet"])
91 | self.messenger = messaging.Messaging(self.url, wif,
92 | self.retry_limit,
93 | self.retry_delay)
94 |
95 | def register(self):
96 | """Attempt to register the config address."""
97 | self._init_messenger()
98 | payout_address = self.cfg["payout_address"]
99 | self.messenger.register(payout_address)
100 | logger.info("Registered on server '{0}'.".format(self.url))
101 | return True
102 |
103 | def config(self, set_wallet=None, set_payout_address=None):
104 | """
105 | Set and then show the config settings.
106 |
107 | :param set_wallet: Set the HWIF for registration/auth address.
108 | :param set_payout_address: Set the payout address.
109 |         :return: Configuration object.
110 | """
111 | if((set_payout_address is not None) and
112 | (not self.btctxstore.validate_address(set_payout_address))):
113 | raise exceptions.InvalidAddress(set_payout_address)
114 | if((set_wallet is not None) and
115 | (not self.btctxstore.validate_wallet(set_wallet))):
116 | raise exceptions.InvalidHWIF(set_wallet)
117 |
118 | self._init_messenger()
119 | config_updated = False
120 |
121 | # update payout address if requested
122 | if set_payout_address:
123 | self.cfg["payout_address"] = set_payout_address
124 | config_updated = True
125 |
126 | # update wallet if requested
127 | if set_wallet:
128 | self.cfg["wallet"] = set_wallet
129 | config_updated = True
130 |
131 | # save config if updated
132 | if config_updated:
133 | storjnode.config.save(self.btctxstore, self.cfg_path, self.cfg)
134 |
135 | # display config
136 | print(SHOW_CONFIG_TEMPLATE.format(
137 | self.messenger.get_nodeid(),
138 | self.cfg["payout_address"]
139 | ))
140 | return self.cfg
141 |
142 | def ping(self):
143 | """Attempt one keep-alive with the server."""
144 | self._init_messenger()
145 |
146 | msg = "Pinging server '{0}' at {1:%Y-%m-%d %H:%M:%S}."
147 | logger.info(msg.format(self.messenger.server_url(), datetime.now()))
148 | self.messenger.ping()
149 |
150 | return True
151 |
152 | def poll(self, delay=common.DEFAULT_DELAY, limit=None):
153 | """Attempt continuous keep-alive with the server.
154 |
155 | :param delay: Delay in seconds per ping of the server.
156 | :param limit: Number of seconds in the future to stop polling.
157 |         :return: True if the limit is reached, None otherwise.
158 | """
159 | delay = deserialize.positive_integer(delay)
160 | stop_time = None
161 | if limit is not None:
162 | stop_time = datetime.now() + timedelta(seconds=int(limit))
163 |
164 | while True: # ping the server every X seconds
165 | self.ping()
166 |
167 | if stop_time and datetime.now() >= stop_time:
168 | return True
169 | time.sleep(int(delay))
170 |
171 | def freespace(self):
172 | freespace = psutil.disk_usage(self.store_path).free
173 | print(freespace)
174 | return freespace
175 |
176 | def build(self, workers=1, cleanup=False, rebuild=False, repair=False,
177 | set_height_interval=common.DEFAULT_SET_HEIGHT_INTERVAL):
178 | """Generate test files deterministically based on address.
179 |
180 |         :param workers: Number of threadpool workers.
181 | :param cleanup: Remove files in shard directory.
182 | :param rebuild: Re-generate any file shards.
183 | :param set_height_interval: Number of shards to generate before
184 | notifying the server.
185 | """
186 |
187 | workers = deserialize.positive_nonzero_integer(workers)
188 |
189 | set_height_interval = deserialize.positive_nonzero_integer(
190 | set_height_interval
191 | )
192 | cleanup = deserialize.flag(cleanup)
193 | rebuild = deserialize.flag(rebuild)
194 | repair = deserialize.flag(repair)
195 |
196 | self._init_messenger()
197 | logger.info("Starting build")
198 |
199 | def _on_generate_shard(cur_height, last):
200 | """
201 | Because URL requests are slow, only update the server when we are
202 | at the first height, at some height_interval, or the last height.
203 |
204 | :param cur_height: Current height in the building process.
205 | """
206 | first = cur_height == 1
207 | set_height = (cur_height % int(set_height_interval)) == 0
208 |
209 | if first or set_height or last:
210 | self.messenger.height(cur_height)
211 | logger.info("Current height at {0}.".format(cur_height))
212 |
213 | # Initialize builder and generate/re-generate shards
214 | bldr = builder.Builder(address=self.cfg["payout_address"],
215 | shard_size=common.SHARD_SIZE,
216 | max_size=self.max_size,
217 | min_free_size=self.min_free_size,
218 | on_generate_shard=_on_generate_shard,
219 | use_folder_tree=self.use_folder_tree)
220 | generated = bldr.build(self.store_path, workers=workers,
221 | cleanup=cleanup, rebuild=rebuild, repair=repair)
222 |
223 | logger.info("Build finished")
224 | return generated
225 |
226 | def audit(self, delay=common.DEFAULT_AUDIT_DELAY, limit=None):
227 |
228 | self._init_messenger()
229 |
230 | # Initialize builder and audit shards
231 | bldr = builder.Builder(address=self.cfg["payout_address"],
232 | shard_size=common.SHARD_SIZE,
233 | max_size=self.max_size,
234 | min_free_size=self.min_free_size,
235 | use_folder_tree=self.use_folder_tree)
236 |
237 | delay = deserialize.positive_integer(delay)
238 | stop_time = None
239 | if limit is not None:
240 | stop_time = datetime.now() + timedelta(seconds=int(limit))
241 |
242 | btc_index = 0
243 | while True:
244 | btc_block = bldr.btc_last_confirmed_block(
245 | min_confirmations=common.DEFAULT_MIN_CONFIRMATIONS
246 | )
247 | if btc_block['block_no'] != btc_index:
248 | btc_hash = btc_block['blockhash']
249 | btc_index = btc_block['block_no']
250 |
251 | logger.debug("Using bitcoin block {0} hash {1}.".format(
252 | btc_index, btc_hash))
253 |
254 | wif = self.btctxstore.get_key(self.cfg["wallet"])
255 | address = self.btctxstore.get_address(wif)
256 | response_data = address + btc_hash + str(bldr.audit(
257 | self.store_path,
258 | btc_block['block_no'],
259 | btc_block['blockhash']))
260 | response = hashlib.sha256(
261 | response_data.encode('utf-8')
262 | ).hexdigest()
263 |
264 | # New Dataserv Server version is needed
265 | self.messenger.audit(btc_block['block_no'], response)
266 | else:
267 | msg = "Bitcoin block {0} already used. Waiting for new block."
268 | logger.debug(msg.format(btc_index))
269 |
270 | if stop_time and datetime.now() >= stop_time:
271 | return True
272 | time.sleep(int(delay))
273 |
274 | def farm(self, workers=1, cleanup=False, rebuild=False, repair=False,
275 | set_height_interval=common.DEFAULT_SET_HEIGHT_INTERVAL,
276 | delay=common.DEFAULT_DELAY, limit=None):
277 |         """ Fully automatic client for users who want a simple turnkey
278 |         solution. It runs all functions automatically with sane defaults
279 |         and as little user interaction as possible.
280 |
281 |         :param workers: Number of threadpool workers.
282 | :param cleanup: Remove files in shard directory.
283 | :param rebuild: Re-generate any file shards.
284 | :param set_height_interval: Number of shards to generate before
285 | notifying the server.
286 | :param delay: Delay in seconds per ping of the server.
287 | :param limit: Number of seconds in the future to stop polling.
288 | """
289 |
290 | workers = deserialize.positive_nonzero_integer(workers)
291 |
292 | set_height_interval = deserialize.positive_nonzero_integer(
293 | set_height_interval
294 | )
295 | cleanup = deserialize.flag(cleanup)
296 | rebuild = deserialize.flag(rebuild)
297 | repair = deserialize.flag(repair)
298 |
299 | # farmer never gives up
300 | self._init_messenger()
301 | self.messenger.retry_limit = 99999999999999999999999999999999999999
302 |
303 | try:
304 | self.register()
305 | except exceptions.AddressAlreadyRegistered:
306 | pass # already registered ...
307 |
308 | self.set_bandwidth()
309 |
310 | self.build(workers=workers, cleanup=cleanup, rebuild=rebuild,
311 | repair=repair, set_height_interval=set_height_interval)
312 | self.poll(delay=delay, limit=limit)
313 | return True
314 |
315 | def set_bandwidth(self):
316 | results = speedtest()
317 | self.messenger.set_bandwidth(results["upload"],
318 | results["download"])
319 |
--------------------------------------------------------------------------------
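The ``Client`` class above can also be used programmatically; a minimal
sketch (the URL and paths are placeholders, and the calls assume a reachable
dataserv server):

::

    from dataserv_client import api

    client = api.Client(url="http://127.0.0.1:5000",
                        store_path="/tmp/storj-store",
                        config_path="/tmp/storj-config.json",
                        max_size="1.0G")
    client.register()                  # register the config address
    client.build(workers=2)            # generate shards up to max_size
    client.poll(delay=60, limit=120)   # keep-alive for two minutes
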
/tests/test_builder.py:
--------------------------------------------------------------------------------
1 | import os
2 | import json
3 | import random
4 | import shutil
5 | import unittest
6 | import tempfile
7 |
8 | from random import randint
9 | from datetime import datetime
10 |
11 | try:
12 | # For Python 3.0 and later
13 | from urllib.request import urlopen
14 | except ImportError:
15 | # Fall back to Python 2's urllib2
16 | from urllib2 import urlopen
17 |
18 | import partialhash
19 | from dataserv_client.builder import Builder
20 | from dataserv_client import common
21 |
22 | my_shard_size = 1024 * 128 # 128K
23 | my_max_size = 1024 * 256 # 256K
24 | my_min_free_size = 1024 * 256 # 256K
25 | height = int(my_max_size / my_shard_size)
26 | fixtures = json.load(open("tests/fixtures.json"))
27 | addresses = fixtures["addresses"]
28 | url = "http://127.0.0.1:5000"
29 |
30 |
31 | def _to_bytes(string):
32 | return string.encode('utf-8')
33 |
34 |
35 | class TestBuilder(unittest.TestCase):
36 | def setUp(self):
37 | self.store_path = tempfile.mkdtemp()
38 | print(self.store_path)
39 |
40 | def tearDown(self):
41 | shutil.rmtree(self.store_path)
42 |
43 | def test_sha256(self):
44 | expected = fixtures["test_sha256"]["expected"]
45 | self.assertEqual(Builder.sha256("storj"), expected)
46 | self.assertNotEqual(Builder.sha256("not storj"), expected)
47 |
48 | def test_build_seed(self):
49 | bucket = Builder(addresses["alpha"], my_shard_size, my_max_size,
50 | my_min_free_size)
51 | hash0 = fixtures["test_build_seed"]["hash0"]
52 | hash3 = fixtures["test_build_seed"]["hash3"]
53 | self.assertEqual(bucket.build_seed(0), hash0)
54 | self.assertEqual(bucket.build_seed(3), hash3)
55 |
56 | def test_builder_build(self):
57 | # generate shards for testing
58 | bucket = Builder(addresses["beta"], my_shard_size, my_max_size,
59 | my_min_free_size)
60 | bucket.build(self.store_path)
61 |
62 | # see if the shards exist
63 | seeds = bucket.build_seeds(height)
64 | for seed in seeds:
65 | path = os.path.join(self.store_path, seed)
66 | print("PATH", path)
67 | self.assertTrue(os.path.exists(path))
68 |
69 | bucket.clean(self.store_path)
70 |
71 | # generate shards for testing
72 | bucket = Builder(addresses["gamma"], my_shard_size, my_max_size,
73 | my_min_free_size)
74 | bucket.build(self.store_path, cleanup=True)
75 |
76 | # see if the shards are deleted
77 | seeds = bucket.build_seeds(height)
78 | for seed in seeds:
79 | path = os.path.join(self.store_path, seed)
80 | self.assertFalse(os.path.exists(path))
81 |
82 | def test_builder_clean(self):
83 | # generate shards for testing
84 | bucket = Builder(addresses["delta"], my_shard_size, my_max_size,
85 | my_min_free_size)
86 | bucket.build(self.store_path)
87 |
88 | # see if the shards exist
89 | seeds = bucket.build_seeds(height)
90 | for seed in seeds:
91 | path = os.path.join(self.store_path, seed)
92 | self.assertTrue(os.path.exists(path))
93 |
94 | # clean command
95 | bucket.clean(self.store_path)
96 |
97 | # see if the shards are deleted
98 | seeds = bucket.build_seeds(height)
99 | for seed in seeds:
100 | path = os.path.join(self.store_path, seed)
101 | self.assertFalse(os.path.exists(path))
102 |
103 |     @unittest.skip("too many blockchain API requests")
104 | def test_builder_audit(self):
105 | bucket = Builder(addresses["epsilon"], my_shard_size, 0,
106 | my_min_free_size)
107 |
108 | # check last confirmed bitcoin hash
109 | btc_block = bucket.btc_last_confirmed_block()
110 |
111 |         self.assertTrue(btc_block['confirmations'] >=
112 |                         common.DEFAULT_MIN_CONFIRMATIONS)
113 |         self.assertFalse(btc_block['is_orphan'])
114 |
115 | index = btc_block['block_no']
116 |
117 | block_pos = index % common.DEFAULT_FULL_AUDIT
118 | block_size = common.DEFAULT_BLOCK_SIZE
119 |
120 | # create empty files to skip to btc_index
121 | seeds = bucket.build_seeds(block_pos * block_size)
122 | for seed in seeds:
123 | path = os.path.join(self.store_path, seed)
124 | open(path,'w').close()
125 |
126 | # generate shards for audit
127 | shard_size = my_shard_size * (block_pos + 1) * block_size
128 | bucket = Builder(addresses["epsilon"], my_shard_size, shard_size,
129 | my_min_free_size)
130 | bucket.build(self.store_path)
131 |
132 | # audit possible
133 | good_hash = bucket.audit(self.store_path,
134 | btc_block['block_no'],
135 | btc_block['blockhash'])
136 | self.assertTrue(good_hash)
137 |
138 | seeds = bucket.build_seeds((block_pos + 1) * block_size)
139 |
140 | # copy a bad file for a bad audit
141 | path1 = os.path.join(self.store_path, seeds[-2])
142 | path2 = os.path.join(self.store_path, seeds[-1])
143 | shutil.copyfile(path1, path2)
144 | bad_hash = bucket.audit(self.store_path,
145 | btc_block['block_no'],
146 | btc_block['blockhash'])
147 |
148 |         self.assertNotEqual(good_hash, bad_hash)
149 |
150 | # write some bad data
151 | with open(path2, "a") as f:
152 | f.write("bad data is bad\n")
153 |
154 | # audit failed because last shard has bad data
155 | self.assertFalse(bucket.audit(self.store_path,
156 | btc_block['block_no'],
157 | btc_block['blockhash']))
158 |
159 | # remove last shard
160 | os.remove(path2)
161 |
162 | # audit failed because last shard missing
163 | self.assertFalse(bucket.audit(self.store_path,
164 | btc_block['block_no'],
165 | btc_block['blockhash']))
166 |
167 | # build last shard again
168 | bucket = Builder(addresses["epsilon"], my_shard_size, shard_size,
169 | my_min_free_size)
170 | bucket.build(self.store_path)
171 |
172 | # audit possible
173 | good_hash = bucket.audit(self.store_path,
174 | btc_block['block_no'],
175 | btc_block['blockhash'])
176 | self.assertTrue(good_hash)
177 |
178 | # remove first shard of that block
179 | path1 = os.path.join(self.store_path, seeds[-80])
180 | os.remove(path1)
181 |
182 | # audit failed because first shard missing
183 | self.assertFalse(bucket.audit(self.store_path,
184 | btc_block['block_no'],
185 | btc_block['blockhash']))
186 |
187 | def test_builder_checkup(self):
188 | # generate shards for testing
189 | bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size,
190 | my_min_free_size)
191 | generated = bucket.build(self.store_path)
192 |
193 | # make sure all files are there
194 | self.assertTrue(bucket.checkup(self.store_path))
195 |
196 | # remove one of the files
197 | remove_file = random.choice(list(generated.keys()))
198 | os.remove(os.path.join(self.store_path, remove_file))
199 |
200 | # check again, should fail
201 | self.assertFalse(bucket.checkup(self.store_path))
202 |
203 | def test_builder_rebuilds(self):
204 | bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size,
205 | my_min_free_size)
206 |
207 | # generate empty files to be rebuilt
208 | seeds = bucket.build_seeds(height)
209 | for seed in seeds:
210 | path = os.path.join(self.store_path, seed)
211 | with open(path, 'a'):
212 | os.utime(path, None)
213 |
214 | # rebuild all files
215 | bucket.build(self.store_path, rebuild=True)
216 |
217 | def test_build_rebuild(self):
218 | # generate shards for testing
219 | bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size,
220 | my_min_free_size)
221 | bucket.build(self.store_path)
222 |
223 | # remove one of the files
224 | r = 'baf428097fa601fac185750483fd532abb0e43f9f049398290fac2c049cc2a60'
225 | os.remove(os.path.join(self.store_path, r))
226 |
227 | # check again, should fail
228 | self.assertFalse(bucket.checkup(self.store_path))
229 |
230 | # rebuild
231 | bucket.build(self.store_path, rebuild=True)
232 |
233 | # check again, should pass
234 | self.assertTrue(bucket.checkup(self.store_path))
235 |
236 | # modify one of the files
237 | o = 'baf428097fa601fac185750483fd532abb0e43f9f049398290fac2c049cc2a60'
238 | path = os.path.join(self.store_path, o)
239 | sha256_org_file = partialhash.compute(path)
240 |
241 | # write some data
242 | with open(path, "a") as f:
243 | f.write("bad data is bad\n")
244 |
245 | # check their hashes
246 | sha256_mod_file = partialhash.compute(path)
247 | self.assertNotEqual(sha256_org_file, sha256_mod_file)
248 |
249 | # build without a rebuild should fail
250 | bucket.build(self.store_path)
251 | sha256_mod_file = partialhash.compute(path)
252 | self.assertNotEqual(sha256_org_file, sha256_mod_file)
253 |
254 | # build with a rebuild should pass
255 | bucket.build(self.store_path, rebuild=True)
256 | sha256_mod_file = partialhash.compute(path)
257 | self.assertEqual(sha256_org_file, sha256_mod_file)
258 |
259 | def test_build_repair(self):
260 | # generate shards for testing
261 | bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size,
262 | my_min_free_size)
263 | bucket.build(self.store_path)
264 |
265 | # remove one of the files
266 | r = 'baf428097fa601fac185750483fd532abb0e43f9f049398290fac2c049cc2a60'
267 | os.remove(os.path.join(self.store_path, r))
268 |
269 | # check again, should fail
270 | self.assertFalse(bucket.checkup(self.store_path))
271 |
272 | # repair
273 | bucket.build(self.store_path, repair=True)
274 |
275 | # check again, should pass
276 | self.assertTrue(bucket.checkup(self.store_path))
277 |
278 | # modify one of the files
279 | o = 'baf428097fa601fac185750483fd532abb0e43f9f049398290fac2c049cc2a60'
280 | path = os.path.join(self.store_path, o)
281 | sha256_org_file = partialhash.compute(path)
282 |
283 | # write some data
284 | with open(path, "a") as f:
285 | f.write("bad data is bad\n")
286 |
287 | # check their hashes
288 | sha256_mod_file = partialhash.compute(path)
289 | self.assertNotEqual(sha256_org_file, sha256_mod_file)
290 |
291 | # build without a repair should fail
292 | bucket.build(self.store_path)
293 | sha256_mod_file = partialhash.compute(path)
294 | self.assertNotEqual(sha256_org_file, sha256_mod_file)
295 |
296 | # build with a repair should pass
297 | bucket.build(self.store_path, repair=True)
298 | sha256_mod_file = partialhash.compute(path)
299 | self.assertEqual(sha256_org_file, sha256_mod_file)
300 |
301 | def test_build_cont(self):
302 | max_size1 = 1024 * 1024 * 384
303 | max_size2 = 1024 * 1024 * 128
304 |
305 | # generate shards for testing
306 | start_time = datetime.utcnow()
307 | bucket = Builder(addresses["epsilon"], my_shard_size, max_size1,
308 | my_min_free_size)
309 | bucket.build(self.store_path)
310 | end_delta = datetime.utcnow() - start_time
311 |
312 | # should skip all shards and be faster
313 | start_time2 = datetime.utcnow()
314 | bucket = Builder(addresses["epsilon"], my_shard_size, max_size2,
315 | my_min_free_size)
316 | bucket.build(self.store_path)
317 | end_delta2 = datetime.utcnow() - start_time2
318 |
319 | self.assertTrue(end_delta2 < end_delta)
320 |
321 | # delete 10% random files
322 |         my_height = int(max_size2 / my_shard_size)
323 |         seeds = bucket.build_seeds(my_height)
324 |         for seed in seeds:
325 |             path = os.path.join(self.store_path, seed)
326 |             if randint(0, 9) == 0:
327 |                 os.remove(path)
328 |
329 |         # should rebuild the missing shards: slower than skipping all,
330 |         # but faster than a fresh build
331 | start_time3 = datetime.utcnow()
332 | bucket = Builder(addresses["epsilon"], my_shard_size, max_size2,
333 | my_min_free_size)
334 | bucket.build(self.store_path, repair=True)
335 | end_delta3 = datetime.utcnow() - start_time3
336 |
337 | self.assertTrue(end_delta3 < end_delta) # faster than new build
338 | self.assertTrue(end_delta3 > end_delta2) # slower than skip all
339 |
340 | def test_on_generate_shard_callback(self):
341 | # save callback args
342 | on_generate_shard_called_with = []
343 |
344 | def on_generate_shard(*args):
345 | on_generate_shard_called_with.append(args)
346 |
347 | # generate shards for testing
348 | bucket = Builder(addresses["omega"], my_shard_size, my_max_size,
349 | my_min_free_size,
350 | on_generate_shard=on_generate_shard)
351 | bucket.build(self.store_path)
352 |
353 | # check correct call count (+1 call for last height)
354 | calls = len(on_generate_shard_called_with)
355 | self.assertEqual(int(my_max_size / my_shard_size) + 1, calls)
356 |
357 | # check height order
358 | for num in range(calls - 1):
359 | height = on_generate_shard_called_with[num][0]
360 | self.assertEqual(num + 1, height)
361 |
362 | def test_use_folder_tree_clean(self):
363 | bucket = Builder(addresses["beta"], my_shard_size, my_max_size,
364 | my_min_free_size,
365 | use_folder_tree=True)
366 | bucket.build(self.store_path)
367 | self.assertTrue(bucket.checkup(self.store_path))
368 | bucket.clean(self.store_path)
369 |
370 |         # os.walk is a generator in Python 3; iterate it so the check runs
371 |         for _, _, files in os.walk(self.store_path):
372 |             self.assertTrue(len(files) == 0)
373 |
374 | def test_use_folder_tree_cleanup(self):
375 | bucket = Builder(addresses["beta"], my_shard_size, my_max_size,
376 | my_min_free_size,
377 | use_folder_tree=True)
378 | bucket.build(self.store_path, cleanup=True)
379 |
380 |         # os.walk is a generator in Python 3; iterate it so the check runs
381 |         for _, _, files in os.walk(self.store_path):
382 |             self.assertTrue(len(files) == 0)
383 |
384 | def test_on_KeyboardInterrupt(self):
385 | def _raise(height, last):
386 | if not last: # only raise 1 of 2 calls
387 | raise KeyboardInterrupt()
388 |
389 |         # generate 1 file with KeyboardInterrupt
390 | bucket = Builder(addresses["epsilon"], my_shard_size, my_max_size,
391 | my_min_free_size, on_generate_shard=_raise)
392 | self.assertTrue(bucket.build(store_path=self.store_path))
393 |
394 | # 1 of 2 files exists and no bad files
395 | for shard_num in range(height):
396 | path = os.path.join(self.store_path, bucket.build_seed(shard_num))
397 | if shard_num <= 0:
398 | self.assertTrue(os.path.exists(path)
399 | and os.path.getsize(path) == my_shard_size)
400 | else:
401 | self.assertFalse(os.path.exists(path))
402 |
403 | if __name__ == '__main__':
404 | # import pudb; pu.db # set break point
405 | unittest.main()
406 |
--------------------------------------------------------------------------------
/dataserv_client/bandwidth_test.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | # Copyright 2012-2015 Matt Martz
4 | # All Rights Reserved.
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
7 | # not use this file except in compliance with the License. You may obtain
8 | # a copy of the License at
9 | #
10 | # http://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
14 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
15 | # License for the specific language governing permissions and limitations
16 | # under the License.
17 |
18 | import os
19 | import sys
20 | import math
21 | import signal
22 | import socket
23 | import timeit
24 | import platform
25 | import threading
26 | import json
27 |
28 | __version__ = '0.3.4'
29 |
30 | # Some global variables we use
31 | user_agent = None
32 | source = None
33 | shutdown_event = None
34 | scheme = 'http'
35 |
36 |
37 | # Used for bound_interface
38 | socket_socket = socket.socket
39 |
40 | try:
41 | import xml.etree.cElementTree as ET
42 | except ImportError:
43 | try:
44 | import xml.etree.ElementTree as ET
45 | except ImportError:
46 | from xml.dom import minidom as DOM
47 | ET = None
48 |
49 | # Begin import game to handle Python 2 and Python 3
50 | try:
51 | from urllib2 import urlopen, Request, HTTPError, URLError
52 | except ImportError:
53 | from urllib.request import urlopen, Request, HTTPError, URLError
54 |
55 | try:
56 | from httplib import HTTPConnection, HTTPSConnection
57 | except ImportError:
58 | e_http_py2 = sys.exc_info()
59 | try:
60 | from http.client import HTTPConnection, HTTPSConnection
61 | except ImportError:
62 | e_http_py3 = sys.exc_info()
63 | raise SystemExit('Your python installation is missing required HTTP '
64 | 'client classes:\n\n'
65 | 'Python 2: %s\n'
66 | 'Python 3: %s' % (e_http_py2[1], e_http_py3[1]))
67 |
68 | try:
69 | from Queue import Queue
70 | except ImportError:
71 | from queue import Queue
72 |
73 | try:
74 | from urlparse import urlparse
75 | except ImportError:
76 | from urllib.parse import urlparse
77 |
78 | try:
79 | import builtins
80 | except ImportError:
81 | def print_(*args, **kwargs):
82 | """The new-style print function taken from
83 | https://pypi.python.org/pypi/six/
84 |
85 | """
86 | fp = kwargs.pop("file", sys.stdout)
87 | if fp is None:
88 | return
89 |
90 | def write(data):
91 | if not isinstance(data, basestring):
92 | data = str(data)
93 | fp.write(data)
94 |
95 | want_unicode = False
96 | sep = kwargs.pop("sep", None)
97 | if sep is not None:
98 | if isinstance(sep, unicode):
99 | want_unicode = True
100 | elif not isinstance(sep, str):
101 | raise TypeError("sep must be None or a string")
102 | end = kwargs.pop("end", None)
103 | if end is not None:
104 | if isinstance(end, unicode):
105 | want_unicode = True
106 | elif not isinstance(end, str):
107 | raise TypeError("end must be None or a string")
108 | if kwargs:
109 | raise TypeError("invalid keyword arguments to print()")
110 | if not want_unicode:
111 | for arg in args:
112 | if isinstance(arg, unicode):
113 | want_unicode = True
114 | break
115 | if want_unicode:
116 | newline = unicode("\n")
117 | space = unicode(" ")
118 | else:
119 | newline = "\n"
120 | space = " "
121 | if sep is None:
122 | sep = space
123 | if end is None:
124 | end = newline
125 | for i, arg in enumerate(args):
126 | if i:
127 | write(sep)
128 | write(arg)
129 | write(end)
130 | else:
131 | print_ = getattr(builtins, 'print')
132 | del builtins
133 |
134 |
135 | class SpeedtestCliServerListError(Exception):
136 | """Internal Exception class used to indicate to move on to the next
137 | URL for retrieving speedtest.net server details
138 |
139 | """
140 |
141 |
142 | def bound_socket(*args, **kwargs):
143 | """Bind socket to a specified source IP address"""
144 |
145 | global source
146 | sock = socket_socket(*args, **kwargs)
147 | sock.bind((source, 0))
148 | return sock
149 |
150 |
151 | def distance(origin, destination):
152 | """Determine distance between 2 sets of [lat,lon] in km"""
153 |
154 | lat1, lon1 = origin
155 | lat2, lon2 = destination
156 | radius = 6371 # km
157 |
158 | dlat = math.radians(lat2 - lat1)
159 | dlon = math.radians(lon2 - lon1)
160 | a = (math.sin(dlat / 2) * math.sin(dlat / 2) +
161 | math.cos(math.radians(lat1)) *
162 | math.cos(math.radians(lat2)) * math.sin(dlon / 2) *
163 | math.sin(dlon / 2))
164 | c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
165 | d = radius * c
166 |
167 | return d
168 |
169 |
170 | def build_user_agent():
171 | """Build a Mozilla/5.0 compatible User-Agent string"""
172 |
173 | global user_agent
174 | if user_agent:
175 | return user_agent
176 |
177 | ua_tuple = (
178 | 'Mozilla/5.0',
179 | '(%s; U; %s; en-us)' % (platform.system(), platform.architecture()[0]),
180 | 'Python/%s' % platform.python_version(),
181 | '(KHTML, like Gecko)',
182 | 'speedtest-cli/%s' % __version__
183 | )
184 | user_agent = ' '.join(ua_tuple)
185 | return user_agent
186 |
187 |
188 | def build_request(url, data=None, headers={}):
189 | """Build a urllib2 request object
190 |
191 | This function automatically adds a User-Agent header to all requests
192 |
193 | """
194 |
195 | schemed_url = ''
196 | if len(url):
197 | if url[0] == ':':
198 | schemed_url = '%s%s' % (scheme, url)
199 | else:
200 | schemed_url = url
201 |
202 | headers['User-Agent'] = user_agent
203 | return Request(schemed_url, data=data, headers=headers)
204 |
205 |
206 | def catch_request(request):
207 | """Helper function to catch common exceptions encountered when
208 | establishing a connection with a HTTP/HTTPS request
209 |
210 | """
211 |
212 | try:
213 | uh = urlopen(request)
214 | return uh, False
215 | except (HTTPError, URLError, socket.error):
216 | e = sys.exc_info()[1]
217 | return None, e
218 |
219 |
220 | class FileGetter(threading.Thread):
221 | """Thread class for retrieving a URL"""
222 |
223 | def __init__(self, url, start):
224 | self.url = url
225 | self.result = 0
226 | self.starttime = start
227 | threading.Thread.__init__(self)
228 |
229 | def run(self):
230 | self.result = [0]
231 | try:
232 | if (timeit.default_timer() - self.starttime) <= 10:
233 | request = build_request(self.url)
234 | f = urlopen(request)
235 |                 while not shutdown_event.isSet():
236 | self.result.append(len(f.read(10240)))
237 | if self.result[-1] == 0:
238 | break
239 | f.close()
240 |         except (IOError, ValueError):
241 | pass
242 |
243 |
244 | def downloadSpeed(files, quiet=False):
245 | """Function to launch FileGetter threads and calculate download speeds"""
246 |
247 | start = timeit.default_timer()
248 |
249 | def producer(q, files):
250 | for file in files:
251 | thread = FileGetter(file, start)
252 | thread.start()
253 | q.put(thread, True)
254 |
255 | finished = []
256 |
257 | def consumer(q, total_files):
258 | while len(finished) < total_files:
259 | thread = q.get(True)
260 | while thread.isAlive():
261 | thread.join(timeout=0.1)
262 | finished.append(sum(thread.result))
263 | del thread
264 |
265 | q = Queue(6)
266 | prod_thread = threading.Thread(target=producer, args=(q, files))
267 | cons_thread = threading.Thread(target=consumer, args=(q, len(files)))
268 | start = timeit.default_timer()
269 | prod_thread.start()
270 | cons_thread.start()
271 | while prod_thread.isAlive():
272 | prod_thread.join(timeout=0.1)
273 | while cons_thread.isAlive():
274 | cons_thread.join(timeout=0.1)
275 | return (sum(finished) / (timeit.default_timer() - start))
276 |
277 |
278 | class FilePutter(threading.Thread):
279 | """Thread class for putting a URL"""
280 |
281 | def __init__(self, url, start, size):
282 | self.url = url
283 | chars = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ'
284 | data = chars * (int(round(int(size) / 36.0)))
285 | self.data = ('content1=%s' % data[0:int(size) - 9]).encode()
286 | del data
287 | self.result = 0
288 | self.starttime = start
289 | threading.Thread.__init__(self)
290 |
291 | def run(self):
292 | try:
293 | if ((timeit.default_timer() - self.starttime) <= 10 and
294 | not shutdown_event.isSet()):
295 | request = build_request(self.url, data=self.data)
296 | f = urlopen(request)
297 | f.read(11)
298 | f.close()
299 | self.result = len(self.data)
300 | else:
301 | self.result = 0
302 | except (IOError, ValueError) as e:
303 | self.result = 0
304 |
305 |
306 | def uploadSpeed(url, sizes, quiet=False):
307 | """Function to launch FilePutter threads and calculate upload speeds"""
308 |
309 | start = timeit.default_timer()
310 |
311 | def producer(q, sizes):
312 | for size in sizes:
313 | thread = FilePutter(url, start, size)
314 | thread.start()
315 | q.put(thread, True)
316 |
317 | finished = []
318 |
319 | def consumer(q, total_sizes):
320 | while len(finished) < total_sizes:
321 | thread = q.get(True)
322 | while thread.isAlive():
323 | thread.join(timeout=0.1)
324 | finished.append(thread.result)
325 | del thread
326 |
327 | q = Queue(6)
328 | prod_thread = threading.Thread(target=producer, args=(q, sizes))
329 | cons_thread = threading.Thread(target=consumer, args=(q, len(sizes)))
330 | start = timeit.default_timer()
331 | prod_thread.start()
332 | cons_thread.start()
333 | while prod_thread.isAlive():
334 | prod_thread.join(timeout=0.1)
335 | while cons_thread.isAlive():
336 | cons_thread.join(timeout=0.1)
337 |
338 | duration = timeit.default_timer() - start
339 | return (sum(finished) / duration)
340 |
341 |
342 | def getAttributesByTagName(dom, tagName):
343 | """Retrieve an attribute from an XML document and return it in a
344 | consistent format
345 |
346 | Only used with xml.dom.minidom, which is likely only to be used
347 | with python versions older than 2.5
348 | """
349 | elem = dom.getElementsByTagName(tagName)[0]
350 | return dict(list(elem.attributes.items()))
351 |
352 |
353 | def getConfig(url='://www.speedtest.net/speedtest-config.php', configxml=None):
354 | """Download the speedtest.net configuration and return only the data
355 | we are interested in
356 | """
357 |
358 | if configxml is None:
359 | request = build_request(url)
360 | uh, e = catch_request(request)
361 | if e:
362 | print_('Could not retrieve speedtest.net configuration: %s' % e)
363 | sys.exit(1)
364 | configxml = []
365 | while 1:
366 | configxml.append(uh.read(10240))
367 | if len(configxml[-1]) == 0:
368 | break
369 | if int(uh.code) != 200:
370 | return None
371 | uh.close()
372 |
373 |
374 | try:
375 | try:
376 | root = ET.fromstring(''.encode().join(configxml))
377 | config = {
378 | 'client': root.find('client').attrib,
379 | 'times': root.find('times').attrib,
380 | 'download': root.find('download').attrib,
381 | 'upload': root.find('upload').attrib}
382 | except AttributeError: # Python3 branch
383 | root = DOM.parseString(''.join(configxml))
384 | config = {
385 | 'client': getAttributesByTagName(root, 'client'),
386 | 'times': getAttributesByTagName(root, 'times'),
387 | 'download': getAttributesByTagName(root, 'download'),
388 | 'upload': getAttributesByTagName(root, 'upload')}
389 | except SyntaxError as e:
390 | print_('Failed to parse speedtest.net configuration')
391 | sys.exit(1)
392 | del root
393 | del configxml
394 | return config
395 |
396 |
397 | def closestServers(client, all=False):
398 | """Determine the 5 closest speedtest.net servers based on geographic
399 | distance
400 | """
401 |
402 | urls = [
403 | '://www.speedtest.net/speedtest-servers-static.php',
404 | '://c.speedtest.net/speedtest-servers-static.php',
405 | '://www.speedtest.net/speedtest-servers.php',
406 | '://c.speedtest.net/speedtest-servers.php',
407 | ]
408 | errors = []
409 | servers = {}
410 | for url in urls:
411 | try:
412 | request = build_request(url)
413 | uh, e = catch_request(request)
414 | if e:
415 | errors.append('%s' % e)
416 | raise SpeedtestCliServerListError
417 | serversxml = []
418 | while 1:
419 | serversxml.append(uh.read(10240))
420 | if len(serversxml[-1]) == 0:
421 | break
422 | if int(uh.code) != 200:
423 | uh.close()
424 | raise SpeedtestCliServerListError
425 | uh.close()
426 | try:
427 | try:
428 | root = ET.fromstring(''.encode().join(serversxml))
429 | elements = root.getiterator('server')
430 | except AttributeError: # Python3 branch
431 | root = DOM.parseString(''.join(serversxml))
432 | elements = root.getElementsByTagName('server')
433 | except SyntaxError:
434 | raise SpeedtestCliServerListError
435 | for server in elements:
436 | try:
437 | attrib = server.attrib
438 | except AttributeError:
439 | attrib = dict(list(server.attributes.items()))
440 | d = distance([float(client['lat']),
441 | float(client['lon'])],
442 | [float(attrib.get('lat')),
443 | float(attrib.get('lon'))])
444 | attrib['d'] = d
445 | if d not in servers:
446 | servers[d] = [attrib]
447 | else:
448 | servers[d].append(attrib)
449 | del root
450 | del serversxml
451 | del elements
452 | except SpeedtestCliServerListError:
453 | continue
454 |
455 | # We were able to fetch and parse the list of speedtest.net servers
456 | if servers:
457 | break
458 |
459 | if not servers:
460 | print_('Failed to retrieve list of speedtest.net servers:\n\n %s' %
461 | '\n'.join(errors))
462 | sys.exit(1)
463 |
464 | closest = []
465 | for d in sorted(servers.keys()):
466 | for s in servers[d]:
467 | closest.append(s)
468 | if len(closest) == 5 and not all:
469 | break
470 | else:
471 | continue
472 | break
473 |
474 | del servers
475 | return closest
476 |
477 |
478 | def getBestServer(servers):
479 | """Perform a speedtest.net latency request to determine which
480 | speedtest.net server has the lowest latency
481 | """
482 |
483 | results = {}
484 | for server in servers:
485 | cum = []
486 | url = '%s/latency.txt' % os.path.dirname(server['url'])
487 | urlparts = urlparse(url)
488 | for i in range(0, 3):
489 | try:
490 | if urlparts[0] == 'https':
491 | h = HTTPSConnection(urlparts[1])
492 | else:
493 | h = HTTPConnection(urlparts[1])
494 | headers = {'User-Agent': user_agent}
495 | start = timeit.default_timer()
496 | h.request("GET", urlparts[2], headers=headers)
497 | r = h.getresponse()
498 | total = (timeit.default_timer() - start)
499 | except (HTTPError, URLError, socket.error):
500 | cum.append(3600)
501 | continue
502 | text = r.read(9)
503 | if int(r.status) == 200 and text == 'test=test'.encode():
504 | cum.append(total)
505 | else:
506 | cum.append(3600)
507 | h.close()
508 | avg = round((sum(cum) / 6) * 1000, 3)
509 | results[avg] = server
510 | fastest = sorted(results.keys())[0]
511 | best = results[fastest]
512 | best['latency'] = fastest
513 |
514 | return best
515 |
516 |
517 | def ctrl_c(signum, frame):
518 | """Catch Ctrl-C key sequence and set a shutdown_event for our threaded
519 | operations
520 | """
521 |
522 | global shutdown_event
523 | shutdown_event.set()
524 | raise SystemExit('\nCancelling...')
525 |
526 |
527 | def version():
528 | """Print the version"""
529 |
530 | raise SystemExit(__version__)
531 |
532 |
533 | def speedtest():
534 | """Run the full speedtest.net test"""
535 |
536 | global shutdown_event, source, scheme
537 | shutdown_event = threading.Event()
538 |
539 | signal.signal(signal.SIGINT, ctrl_c)
540 |
541 | socket.setdefaulttimeout(10)
542 |
543 | # Pre-cache the user agent string
544 | build_user_agent()
545 |
546 | try:
547 | config = getConfig()
548 | except URLError:
549 | print_('Cannot retrieve speedtest configuration')
550 | sys.exit(1)
551 |
552 | servers = closestServers(config['client'])
553 | best = getBestServer(servers)
554 |
555 | test_results = {
556 | "download": 0,
557 | "upload": 0
558 | }
559 | sizes = [350, 500, 750, 1000, 1500, 2000, 2500, 3000, 3500, 4000]
560 | urls = []
561 | for size in sizes:
562 | for i in range(0, 4):
563 | urls.append('%s/random%sx%s.jpg' %
564 | (os.path.dirname(best['url']), size, size))
565 |
566 | dlspeed = downloadSpeed(urls, 1)
567 | test_results["download"] = dlspeed
568 |
569 | sizesizes = [int(.25 * 1000 * 1000), int(.5 * 1000 * 1000)]
570 | sizes = []
571 | for size in sizesizes:
572 | for i in range(0, 25):
573 | sizes.append(size)
574 | ulspeed = uploadSpeed(best['url'], sizes, 1)
575 | test_results["upload"] = ulspeed
576 |
577 | return test_results
578 |
579 |
580 | def speed_test_cached(cache_path=None):
581 |
582 | # Find cache file.
583 | if cache_path is None:
584 | cache_path = os.path.join(os.getcwd(), "speed_test")
585 |
586 | # Load results.
587 | results = {}
588 | if os.path.exists(cache_path):
589 | with open(cache_path, "r") as fp:
590 | try:
591 | content = fp.read()
592 | temp = json.loads(content)
593 | results["download"] = int(temp["download"])
594 | results["upload"] = int(temp["upload"])
595 | except (KeyError, ValueError):
596 | pass
597 |
598 | # Valid results?
599 | if "upload" not in list(results) or "download" not in list(results):
600 | # Get new results.
601 | results = speedtest()
602 |
603 | # Save new results.
604 | as_json = json.dumps(results)
605 | with open(cache_path, "w") as fp:
606 | fp.write(as_json)
607 |
608 | return results
609 |
610 |
611 | if __name__ == "__main__":
612 | print(speed_test_cached())
613 |
--------------------------------------------------------------------------------
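The ``speed_test_cached`` wrapper at the end of the module above adds a
simple JSON cache around ``speedtest()``, so repeated runs skip the network
test. A short usage sketch (the cache path here is arbitrary):

::

    from dataserv_client.bandwidth_test import speed_test_cached

    # first call runs the full test and writes the result as JSON;
    # later calls return the cached download/upload values
    results = speed_test_cached(cache_path="/tmp/speed_test")
    print(int(results["download"]), int(results["upload"]))
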
/tests/test_client.py:
--------------------------------------------------------------------------------
1 | from dataserv_client import common
2 | import os
3 | import tempfile
4 | import unittest
5 | import datetime
6 | import json
7 | import psutil
8 | from future.moves.urllib.request import urlopen
9 | from dataserv_client import cli
10 | from dataserv_client import api
11 | from btctxstore import BtcTxStore
12 | from dataserv_client import exceptions
13 |
14 |
15 | url = "http://127.0.0.1:5000"
16 | common.SHARD_SIZE = 1024 * 128 # monkey patch shard size to 128K
17 |
18 |
19 | class AbstractTestSetup(object):
20 | def setUp(self):
21 | self.btctxstore = BtcTxStore()
22 |
23 | # debug output the server online list
24 | # print(urlopen(url + '/api/online/json').read().decode('utf8'))
25 |
26 |
27 | class TestClientRegister(AbstractTestSetup, unittest.TestCase):
28 |
29 | def test_register_payout(self):
30 | client = api.Client(url=url, config_path=tempfile.mktemp())
31 | config = client.config()
32 | self.assertTrue(client.register())
33 | result = json.loads(
34 | urlopen(url + '/api/online/json').read().decode('utf8')
35 | )
36 | result = [farmer for farmer in result['farmers']
37 | if farmer['payout_addr'] == config['payout_address']]
38 | last_seen = result[0]['last_seen']
39 | reg_time = result[0]['reg_time']
40 | result = json.dumps(result, sort_keys=True)
41 | expected = json.dumps([{
42 | 'height': 0,
43 | 'nodeid': common.address2nodeid(config['payout_address']),
44 | 'last_seen': last_seen,
45 | 'payout_addr': config['payout_address'],
46 | 'reg_time': reg_time,
47 | 'bandwidth_upload': 0,
48 | 'bandwidth_download': 0,
49 | "ip": "",
50 | 'uptime': 100.0
51 | }], sort_keys=True)
52 | self.assertEqual(result, expected)
53 |
54 |     def test_register(self):  # register without creating a config
55 | client = api.Client(url=url)
56 | self.assertTrue(client.register())
57 |
58 | def test_already_registered(self):
59 | def callback():
60 | client = api.Client(url=url, config_path=tempfile.mktemp())
61 | client.register()
62 | client.register()
63 |
64 | self.assertRaises(exceptions.AddressAlreadyRegistered, callback)
65 |
66 | def test_invalid_farmer(self):
67 | def callback():
68 | client = api.Client(url=url + "/xyz",
69 | config_path=tempfile.mktemp())
70 | client.register()
71 |
72 | self.assertRaises(exceptions.ServerNotFound, callback)
73 |
74 |
75 | class TestClientPing(AbstractTestSetup, unittest.TestCase):
76 | def test_ping(self):
77 | client = api.Client(url=url, config_path=tempfile.mktemp())
78 | self.assertTrue(client.register())
79 | self.assertTrue(client.ping())
80 |
81 | def test_invalid_farmer(self):
82 | def callback():
83 | client = api.Client(url=url + "/xyz",
84 | config_path=tempfile.mktemp())
85 | client.ping()
86 |
87 | self.assertRaises(exceptions.ServerNotFound, callback)
88 |
89 |
90 | class TestClientPoll(AbstractTestSetup, unittest.TestCase):
91 | def test_poll(self):
92 | client = api.Client(url=url, config_path=tempfile.mktemp())
93 | client.register()
94 |
95 | before = datetime.datetime.now()
96 | self.assertTrue(client.poll(delay=2, limit=2))
97 | after = datetime.datetime.now()
98 |
99 | # check that poll did 2 pings with 2 sec delay
100 | self.assertTrue(datetime.timedelta(seconds=2) <= (after - before))
101 |
102 |
103 | class TestInvalidArgument(AbstractTestSetup, unittest.TestCase):
104 | def test_invalid_retry_limit(self):
105 | def callback():
106 | api.Client(connection_retry_limit=-1,
107 | config_path=tempfile.mktemp())
108 |
109 | self.assertRaises(exceptions.InvalidInput, callback)
110 |
111 | def test_invalid_retry_delay(self):
112 | def callback():
113 | api.Client(connection_retry_delay=-1,
114 | config_path=tempfile.mktemp())
115 |
116 | self.assertRaises(exceptions.InvalidInput, callback)
117 |
118 |     def test_invalid_negative_max_size(self):
119 | def callback():
120 | api.Client(max_size=-1, config_path=tempfile.mktemp())
121 |
122 | self.assertRaises(exceptions.InvalidInput, callback)
123 |
124 | def test_invalid_zero_max_size(self):
125 | def callback():
126 | api.Client(max_size=0, config_path=tempfile.mktemp())
127 |
128 | self.assertRaises(exceptions.InvalidInput, callback)
129 |
130 | def test_invalid_negative_min_free_size(self):
131 | def callback():
132 | api.Client(min_free_size=-1, config_path=tempfile.mktemp())
133 |
134 | self.assertRaises(exceptions.InvalidInput, callback)
135 |
136 | def test_invalid_zero_min_free_size(self):
137 | def callback():
138 | api.Client(min_free_size=0, config_path=tempfile.mktemp())
139 |
140 | self.assertRaises(exceptions.InvalidInput, callback)
141 |
142 | def test_build_invalid_negative_workers(self):
143 | def callback():
144 | client = api.Client(config_path=tempfile.mktemp())
145 | client.build(workers=-1)
146 |
147 | self.assertRaises(exceptions.InvalidInput, callback)
148 |
149 | def test_farm_invalid_zero_workers(self):
150 | def callback():
151 | client = api.Client(config_path=tempfile.mktemp())
152 | client.farm(workers=0)
153 |
154 | self.assertRaises(exceptions.InvalidInput, callback)
155 |
156 | def test_build_invalid_negative_set_height_interval(self):
157 | def callback():
158 | client = api.Client(config_path=tempfile.mktemp())
159 | client.build(set_height_interval=-1)
160 |
161 | self.assertRaises(exceptions.InvalidInput, callback)
162 |
163 | def test_farm_invalid_zero_set_height_interval(self):
164 | def callback():
165 | client = api.Client(config_path=tempfile.mktemp())
166 | client.farm(set_height_interval=0)
167 |
168 | self.assertRaises(exceptions.InvalidInput, callback)
169 |
170 | def test_farm_invalid_negative_set_height_interval(self):
171 | def callback():
172 | client = api.Client(config_path=tempfile.mktemp())
173 | client.farm(set_height_interval=-1)
174 |
175 | self.assertRaises(exceptions.InvalidInput, callback)
176 |
177 | def test_build_invalid_zero_set_height_interval(self):
178 | def callback():
179 | client = api.Client(config_path=tempfile.mktemp())
180 | client.build(set_height_interval=0)
181 |
182 | self.assertRaises(exceptions.InvalidInput, callback)
183 |
184 | def test_poll_invalid_negative_delay(self):
185 | def callback():
186 | client = api.Client(config_path=tempfile.mktemp())
187 | client.poll(delay=-1, limit=0)
188 |
189 | self.assertRaises(exceptions.InvalidInput, callback)
190 |
191 | def test_audit_invalid_negative_delay(self):
192 | def callback():
193 | client = api.Client(config_path=tempfile.mktemp())
194 | client.audit(delay=-1, limit=0)
195 |
196 | self.assertRaises(exceptions.InvalidInput, callback)
197 |
198 |
199 | class TestConnectionRetry(AbstractTestSetup, unittest.TestCase):
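   | """Connection failures are retried connection_retry_limit times with
   | connection_retry_delay seconds between attempts, so two retries with a
   | 2s delay must take more than 4 seconds in total."""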
200 | def test_no_retry(self):
201 | def callback():
202 | client = api.Client(url="http://invalid.url",
203 | connection_retry_limit=0,
204 | connection_retry_delay=0,
205 | config_path=tempfile.mktemp())
206 | client.register()
207 |
208 | before = datetime.datetime.now()
209 | self.assertRaises(exceptions.ConnectionError, callback)
210 | after = datetime.datetime.now()
211 | self.assertTrue(datetime.timedelta(seconds=15) > (after - before))  # no retries, so it must fail well under 15s
212 |
213 | def test_retry_server_not_found(self):
214 | def callback():
215 | client = api.Client(url="http://ServerNotFound.url",
216 | config_path=tempfile.mktemp(),
217 | connection_retry_limit=2,
218 | connection_retry_delay=2)
219 | client.register()
220 |
221 | before = datetime.datetime.now()
222 | self.assertRaises(exceptions.ConnectionError, callback)
223 | after = datetime.datetime.now()
224 | self.assertTrue(datetime.timedelta(seconds=4) < (after - before))  # 2 retries x 2s delay => more than 4s elapsed
225 |
226 | def test_retry_invalid_url(self):
227 | def callback():
228 | client = api.Client(url="http://127.0.0.257",
229 | config_path=tempfile.mktemp(),
230 | connection_retry_limit=2,
231 | connection_retry_delay=2)
232 | client.register()
233 |
234 | before = datetime.datetime.now()
235 | self.assertRaises(exceptions.ConnectionError, callback)
236 | after = datetime.datetime.now()
237 | self.assertTrue(datetime.timedelta(seconds=4) < (after - before))  # 2 retries x 2s delay => more than 4s elapsed
238 |
239 | def test_retry_high_retry_limit(self):
240 | def callback():
241 | client = api.Client(url="http://127.0.0.257",
242 | config_path=tempfile.mktemp(),
243 | connection_retry_limit=2000,
244 | connection_retry_delay=0,
245 | quiet=True)
246 | client.register()
247 |
248 | self.assertRaises(exceptions.ConnectionError, callback)
249 |
250 |
251 | class TestClientBuild(AbstractTestSetup, unittest.TestCase):
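   | """build() generates shards until max_size or the min_free_size limit is
   | reached; with SHARD_SIZE patched to 128K, a 512K max_size yields 4 shards."""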
252 | def test_build(self):
253 | client = api.Client(url=url,
254 | config_path=tempfile.mktemp(),
255 | max_size=1024 * 256) # 256K
256 | client.register()
257 | generated = client.build(cleanup=True)
258 | self.assertTrue(len(generated) > 0)  # at least one shard built
259 |
260 | client = api.Client(url=url,
261 | config_path=tempfile.mktemp(),
262 | max_size=1024 * 512) # 512K
263 | config = client.config()
264 | client.register()
265 | generated = client.build(cleanup=True)
266 | self.assertEqual(len(generated), 4)  # 512K max_size / 128K shard size = 4 shards
267 |
268 | result = json.loads(
269 | urlopen(url + '/api/online/json').read().decode('utf8')
270 | )
271 | result = [farmer for farmer in result['farmers']
272 | if farmer['payout_addr'] == config['payout_address']]
273 |
274 | last_seen = result[0]['last_seen']
275 | reg_time = result[0]['reg_time']
276 | result = json.dumps(result, sort_keys=True)
277 | expected = json.dumps([{
278 | 'height': 4,
279 | 'nodeid': common.address2nodeid(config['payout_address']),
280 | 'last_seen': last_seen,
281 | 'payout_addr': config['payout_address'],
282 | 'reg_time': reg_time,
283 | 'bandwidth_upload': 0,
284 | 'bandwidth_download': 0,
285 | "ip": "",
286 | 'uptime': 100.0
287 | }], sort_keys=True)
288 | self.assertEqual(result, expected)
289 |
290 | def test_build_min_free_space(self):
291 |
292 | store_path = tempfile.mktemp()
293 | os.mkdir(store_path)
294 | my_free_size = psutil.disk_usage(store_path).free - (1024 * 256)  # leave only 256K usable
295 | client = api.Client(url=url,
296 | config_path=tempfile.mktemp(),
297 | store_path=store_path,
298 | max_size=1024 * 1024 * 2,
299 | min_free_size=my_free_size)  # keep all but 256K free
300 | config = client.config()
301 | client.register()
302 | generated = client.build()
303 | self.assertTrue(len(generated) > 0)  # built at least 1 shard
304 | self.assertTrue(len(generated) < 16)  # stopped early by the min free space limit (16 = 2M max_size / 128K shards)
305 |
306 | result = json.loads(
307 | urlopen(url + '/api/online/json').read().decode('utf8')
308 | )
309 | result = [farmer for farmer in result['farmers']
310 | if farmer['payout_addr'] == config['payout_address']]
311 | last_seen = result[0]['last_seen']
312 | reg_time = result[0]['reg_time']
313 | result = json.dumps(result, sort_keys=True)
314 | expected = json.dumps([{
315 | 'height': len(generated),
316 | 'nodeid': common.address2nodeid(config['payout_address']),
317 | 'last_seen': last_seen,
318 | 'payout_addr': config['payout_address'],
319 | 'reg_time': reg_time,
320 | 'bandwidth_upload': 0,
321 | 'bandwidth_download': 0,
322 | "ip": "",
323 | 'uptime': 100.0
324 | }], sort_keys=True)
325 |
326 | self.assertEqual(result, expected)
327 |
328 |
329 | class TestClientFarm(AbstractTestSetup, unittest.TestCase):
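   | """farm() builds and polls in one call and should report non-zero
   | bandwidth values to the server."""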
330 | def test_farm(self):
331 | client = api.Client(url=url,
332 | config_path=tempfile.mktemp(),
333 | max_size=1024 * 256) # 256K
334 |
335 | before = datetime.datetime.now()
336 | self.assertTrue(client.farm(delay=2, limit=2))  # farm should return True
337 | after = datetime.datetime.now()
338 |
339 | # check that farm did 2 pings with a 2-second delay
340 | self.assertTrue(datetime.timedelta(seconds=2) <= (after - before))
341 |
342 | def test_farm_registered(self):
343 | client = api.Client(url=url,
344 | config_path=tempfile.mktemp(),
345 | max_size=1024 * 256) # 256K
346 | config = client.config()
347 | client.register()
348 |
349 | before = datetime.datetime.now()
350 | self.assertTrue(client.farm(delay=2, limit=2))  # farm should return True
351 | after = datetime.datetime.now()
352 |
353 | # check that farm did 2 pings with a 2-second delay
354 | self.assertTrue(datetime.timedelta(seconds=2) <= (after - before))
355 |
356 | result = json.loads(
357 | urlopen(url + '/api/online/json').read().decode('utf8')
358 | )
359 | result = [farmer for farmer in result['farmers']
360 | if farmer['payout_addr'] == config['payout_address']]
361 | last_seen = result[0]['last_seen']
362 | reg_time = result[0]['reg_time']
363 |
364 | # check the bandwidth values and pop them, as their exact values cannot be known
365 | bandwidth_upload = result[0].pop('bandwidth_upload')
366 | bandwidth_download = result[0].pop('bandwidth_download')
367 | self.assertGreater(bandwidth_upload, 0)
368 | self.assertGreater(bandwidth_download, 0)
369 |
370 | result = json.dumps(result, sort_keys=True)
371 | expected = json.dumps([{
372 | 'height': 2,
373 | 'nodeid': common.address2nodeid(config['payout_address']),
374 | 'last_seen': last_seen,
375 | 'payout_addr': config['payout_address'],
376 | 'reg_time': reg_time,
377 | "ip": "",
378 | 'uptime': 100.0
379 | }], sort_keys=True)
380 |
381 | self.assertEqual(result, expected)
382 |
383 |
384 | class TestClientAudit(AbstractTestSetup, unittest.TestCase):
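   | """audit() proves storage to the server; skipped by default to avoid
   | excessive blockchain API requests."""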
385 | @unittest.skip("to many blockchain api requests")
386 | def test_audit(self):
387 | client = api.Client(url=url,
388 | config_path=tempfile.mktemp(),
389 | max_size=1024 * 256) # 256K
390 | client.register()
391 | self.assertTrue(client.audit(delay=1, limit=1))
392 |
393 |
394 | class TestClientCliArgs(AbstractTestSetup, unittest.TestCase):
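   | """End-to-end checks of the command line interface via cli.main()."""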
395 | def test_version(self):
396 | args = [
397 | "--config_path=" + tempfile.mktemp(),
398 | "version"
399 | ]
400 | self.assertTrue(cli.main(args))
401 |
402 | def test_freespace(self):
403 | args = [
404 | "--config_path=" + tempfile.mktemp(),
405 | "freespace"
406 | ]
407 | self.assertTrue(cli.main(args))
408 |
409 | def test_poll(self):
410 | path = tempfile.mktemp()
411 |
412 | args = [
413 | "--url=" + url,
414 | "--config_path=" + path,
415 | "register",
416 | ]
417 | cli.main(args)
418 |
419 | args = [
420 | "--url=" + url,
421 | "--config_path=" + path,
422 | "poll",
423 | "--delay=0",
424 | "--limit=0"
425 | ]  # no pings needed to check the args
426 | self.assertTrue(cli.main(args))
427 |
428 | def test_register(self):
429 | args = [
430 | "--url=" + url,
431 | "--config_path=" + tempfile.mktemp(),
432 | "register"
433 | ]
434 | self.assertTrue(cli.main(args))
435 |
436 | def test_build(self):
437 | path = tempfile.mktemp()
438 |
439 | args = [
440 | "--url=" + url,
441 | "--config_path=" + path,
442 | "register",
443 | ]
444 | cli.main(args)
445 |
446 | args = [
447 | "--url=" + url,
448 | "--config_path=" + path,
449 | "--max_size=" + str(1024 * 256), # 256K
450 | "--min_free_size=" + str(1024 * 256), # 256K
451 | "build",
452 | "--workers=4",
453 | "--cleanup",
454 | "--rebuild",
455 | "--repair",
456 | "--set_height_interval=3"
457 | ]
458 | self.assertTrue(cli.main(args))
459 |
460 | def test_audit(self):
461 | path = tempfile.mktemp()
462 |
463 | args = [
464 | "--url=" + url,
465 | "--config_path=" + path,
466 | "register",
467 | ]
468 | cli.main(args)
469 |
470 | args = [
471 | "--url=" + url,
472 | "--config_path=" + path,
473 | "audit",
474 | "--delay=0",
475 | "--limit=0"
476 | ]  # no audits needed to check the args
477 | self.assertTrue(cli.main(args))
478 |
479 | def test_farm(self):
480 | args = [
481 | "--url=" + url,
482 | "--config_path=" + tempfile.mktemp(),
483 | "--max_size=" + str(1024 * 256), # 256K
484 | "--min_free_size=" + str(1024 * 256), # 256K
485 | "farm",
486 | "--workers=4",
487 | "--cleanup",
488 | "--rebuild",
489 | "--repair",
490 | "--set_height_interval=3",
491 | "--delay=0",
492 | "--limit=0"
493 | ]  # no pings needed to check the args
494 | self.assertTrue(cli.main(args))
495 |
496 | def test_ping(self):
497 | config_path = tempfile.mktemp()
498 | args = [
499 | "--url=" + url,
500 | "--config_path=" + config_path,
501 | "register"
502 | ]
503 | self.assertTrue(cli.main(args))
504 |
505 | args = [
506 | "--url=" + url,
507 | "--config_path=" + config_path,
508 | "ping"
509 | ]
510 | self.assertTrue(cli.main(args))
511 |
512 | def test_no_command_error(self):
513 | def callback():
514 | cli.main([])
515 |
516 | self.assertRaises(SystemExit, callback)
517 |
518 | def test_input_error(self):
519 | def callback():
520 | path = tempfile.mktemp()
521 | cli.main([
522 | "--url=" + url,
523 | "--config_path=" + path,
524 | "register",
525 | ])
526 | cli.main([
527 | "--url=" + url,
528 | "--config_path=" + path,
529 | "poll",
530 | "--delay=5",
531 | "--limit=xyz"
532 | ])
533 |
534 | self.assertRaises(ValueError, callback)
535 |
536 |
537 | class TestConfig(AbstractTestSetup, unittest.TestCase):
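   | """Config creation, validation, and persistence across client instances."""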
538 | def test_show(self):
539 | payout_wif = self.btctxstore.create_key()
540 | hwif = self.btctxstore.create_wallet()
541 | payout_address = self.btctxstore.get_address(payout_wif)
542 | client = api.Client(config_path=tempfile.mktemp())
543 | config = client.config(set_wallet=hwif,
544 | set_payout_address=payout_address)
545 | self.assertEqual(config["wallet"], hwif)
546 | self.assertEqual(config["payout_address"], payout_address)
547 |
548 | def test_validation(self):
549 | def callback():
550 | client = api.Client(config_path=tempfile.mktemp())
551 | client.config(set_payout_address="invalid")
552 |
553 | self.assertRaises(exceptions.InvalidAddress, callback)
554 |
555 | def test_persistance(self):
556 | config_path = tempfile.mktemp()
557 | a = api.Client(config_path=config_path).config()
558 | b = api.Client(config_path=config_path).config()
559 | c = api.Client(config_path=config_path).config()
560 | self.assertTrue(a == b == c)  # assertEqual(a, b, c) would treat c as the msg argument
561 | self.assertIsNotNone(c["wallet"])
562 |
563 |
564 | if __name__ == '__main__':
565 | unittest.main()
566 |
--------------------------------------------------------------------------------