├── debian ├── compat ├── dirs ├── source │ └── format ├── stratum-mining-proxy.default ├── stratum-mining-proxy.postrm ├── stratum-mining-proxy.logrotate ├── stratum-mining-proxy.preinst ├── stratum-mining-proxy.prerm ├── control ├── stratum-mining-proxy.postinst ├── rules ├── changelog └── stratum-mining-proxy.init ├── midstatec ├── __init__.py ├── Makefile ├── midstatec.py └── midstatemodule.c ├── mining_libs ├── __init__.py ├── version.py ├── worker_registry.py ├── multicast_responder.py ├── utils.py ├── midstate.py ├── client_service.py ├── getwork_listener.py ├── jobs.py └── stratum_listener.py ├── .dockerignore ├── .gitignore ├── Dockerfile ├── setup.py ├── README.md ├── example_multicast.py ├── mining_proxy.py └── distribute_setup.py /debian/compat: -------------------------------------------------------------------------------- 1 | 7 2 | -------------------------------------------------------------------------------- /midstatec/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /mining_libs/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /debian/dirs: -------------------------------------------------------------------------------- 1 | usr/sbin 2 | -------------------------------------------------------------------------------- /debian/source/format: -------------------------------------------------------------------------------- 1 | 1.0 2 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | * 2 | !midstatec 3 | !mining_libs 4 | !*.py 5 | -------------------------------------------------------------------------------- /mining_libs/version.py: -------------------------------------------------------------------------------- 1 | # last stable: 1.5.5 2 | VERSION='1.5.7' 3 | -------------------------------------------------------------------------------- /debian/stratum-mining-proxy.default: -------------------------------------------------------------------------------- 1 | #POOL_HOST=stratum.bitcoin.cz 2 | #POOL_PORT=3333 3 | #SCRYPT_TARGET=1 4 | #CUSTOM_USER=... 
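# Illustrative example (hypothetical values, not project defaults): variables in this
# file are sourced by debian/stratum-mining-proxy.init, which maps POOL_HOST/POOL_PORT
# to the daemon's -o/-p options, SCRYPT_TARGET to -st and CUSTOM_USER to -cu, e.g.:
#   POOL_HOST=stratum.example.com
#   POOL_PORT=3333
#   CUSTOM_USER=worker1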
5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | build 3 | distribute* 4 | *egg-info* 5 | debian/files 6 | debian/*/ 7 | debian/*.debhelper* 8 | debian/*.substvars 9 | -------------------------------------------------------------------------------- /debian/stratum-mining-proxy.postrm: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Automatically added by dh_installinit 3 | if [ "$1" = "purge" ] ; then 4 | update-rc.d stratum-mining-proxy remove >/dev/null 5 | fi 6 | # End automatically added section 7 | -------------------------------------------------------------------------------- /debian/stratum-mining-proxy.logrotate: -------------------------------------------------------------------------------- 1 | /var/log/stratum-mining-proxy.log 2 | { 3 | rotate 7 4 | daily 5 | missingok 6 | notifempty 7 | delaycompress 8 | compress 9 | postrotate 10 | invoke-rc.d stratum-mining-proxy restart > /dev/null 11 | endscript 12 | } 13 | -------------------------------------------------------------------------------- /midstatec/Makefile: -------------------------------------------------------------------------------- 1 | CC = gcc 2 | CFLAGS = -march=native -Wall -funroll-all-loops -O3 -fstrict-aliasing -Wall -std=c99 -I/usr/include/python2.7 3 | LDFLAGS = -Wl,-O1 -Wl,--as-needed -lpython2.7 4 | 5 | all: test midstate.so 6 | 7 | test: midstatemodule.c 8 | $(CC) $(CFLAGS) midstatemodule.c -o test $(LDFLAGS) 9 | 10 | midstate.so: midstatemodule.c 11 | $(CC) $(CFLAGS) -fPIC -shared midstatemodule.c -o midstate.so $(LDFLAGS) 12 | 13 | .PHONY: clean 14 | 15 | clean: 16 | rm -f midstate.so test 17 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM archlinux 2 | 3 | RUN useradd -r -m -g users proxy 4 | 5 | WORKDIR /home/proxy 6 | 7 | RUN pacman -Sy --noconfirm --needed base-devel git python2-pip vim python2-virtualenv 8 | 9 | USER proxy 10 | 11 | RUN git clone https://github.com/braiins/stratum-mining-proxy.git && \ 12 | git clone https://github.com/braiins/stratum.git 13 | 14 | RUN pip2 install --user ./stratum 15 | 16 | EXPOSE 3333 17 | 18 | ENTRYPOINT ["python2", "stratum-mining-proxy/mining_proxy.py", "--enable-stratum-extensions"] 19 | -------------------------------------------------------------------------------- /debian/stratum-mining-proxy.preinst: -------------------------------------------------------------------------------- 1 | #! /bin/sh 2 | 3 | set -e 4 | 5 | # This was added by stdeb to workaround Debian #479852. In a nutshell, 6 | # pycentral does not remove normally remove its symlinks on an 7 | # upgrade. Since we're using python-support, however, those symlinks 8 | # will be broken. This tells python-central to clean up any symlinks. 
9 | if [ -e /var/lib/dpkg/info/stratum-mining-proxy.list ] && which pycentral >/dev/null 2>&1 10 | then 11 | pycentral pkgremove stratum-mining-proxy 12 | fi 13 | 14 | #DEBHELPER# 15 | -------------------------------------------------------------------------------- /debian/stratum-mining-proxy.prerm: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Automatically added by dh_installinit 3 | if [ -x "/etc/init.d/stratum-mining-proxy" ]; then 4 | if [ -x "`which invoke-rc.d 2>/dev/null`" ]; then 5 | invoke-rc.d stratum-mining-proxy stop || exit $? 6 | else 7 | /etc/init.d/stratum-mining-proxy stop || exit $? 8 | fi 9 | fi 10 | # End automatically added section 11 | # Automatically added by dh_pysupport 12 | if which update-python-modules >/dev/null 2>&1; then 13 | update-python-modules -c stratum-mining-proxy.public 14 | fi 15 | # End automatically added section 16 | -------------------------------------------------------------------------------- /debian/control: -------------------------------------------------------------------------------- 1 | Source: stratum-mining-proxy 2 | Maintainer: Corey Ralph 3 | Section: python 4 | Priority: optional 5 | Build-Depends: python-setuptools (>= 0.6b3), debhelper (>= 7), python-support (>= 0.8.4), libssl-dev, python-dev 6 | Standards-Version: 3.8.4 7 | XS-Python-Version: current 8 | 9 | Package: stratum-mining-proxy 10 | Architecture: i386 amd64 11 | Depends: ${misc:Depends}, ${python:Depends}, python-stratum, python-twisted-web, python-ecdsa, python-argparse 12 | XB-Python-Version: ${python:Versions} 13 | Provides: ${python:Provides} 14 | Description: Getwork-compatible proxy for Stratum mining pools 15 | -------------------------------------------------------------------------------- /debian/stratum-mining-proxy.postinst: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Automatically added by dh_pysupport 3 | if which update-python-modules >/dev/null 2>&1; then 4 | update-python-modules stratum-mining-proxy.public 5 | fi 6 | # End automatically added section 7 | # Automatically added by dh_installinit 8 | if [ -x "/etc/init.d/stratum-mining-proxy" ]; then 9 | update-rc.d stratum-mining-proxy defaults >/dev/null 10 | if [ -x "`which invoke-rc.d 2>/dev/null`" ]; then 11 | invoke-rc.d stratum-mining-proxy start || exit $? 12 | else 13 | /etc/init.d/stratum-mining-proxy start || exit $? 14 | fi 15 | fi 16 | # End automatically added section 17 | -------------------------------------------------------------------------------- /debian/rules: -------------------------------------------------------------------------------- 1 | #!/usr/bin/make -f 2 | 3 | # This file was automatically generated by stdeb 0.6.0 at 4 | # Tue, 28 Jan 2014 15:45:25 +1100 5 | 6 | # Unset the environment variables set by dpkg-buildpackage. (This is 7 | # necessary because distutils is brittle with compiler/linker flags 8 | # set. Specifically, packages using f2py will break without this.) 
9 | unexport CPPFLAGS 10 | unexport CFLAGS 11 | unexport CXXFLAGS 12 | unexport FFLAGS 13 | unexport LDFLAGS 14 | 15 | #exports specified using stdeb Setup-Env-Vars: 16 | #export DH_OPTIONS=--buildsystem=python_distutils 17 | 18 | %: 19 | dh $@ 20 | 21 | override_dh_install: 22 | dh_install 23 | # Rename binary to match package name and move to sbin 24 | mv debian/stratum-mining-proxy/usr/bin/mining_proxy.py debian/stratum-mining-proxy/usr/sbin/stratum-mining-proxy 25 | 26 | override_dh_clean: 27 | dh_clean 28 | # distribute module is downloaded during build 29 | rm -f distribute-*.egg distribute-*.tar.gz 30 | rm -rf stratum_mining_proxy.egg-info 31 | -------------------------------------------------------------------------------- /midstatec/midstatec.py: -------------------------------------------------------------------------------- 1 | # Original source: https://gitorious.org/midstate/midstate 2 | 3 | import struct 4 | import binascii 5 | from midstate import SHA256 6 | 7 | test_data = binascii.unhexlify("0000000293d5a732e749dbb3ea84318bd0219240a2e2945046015880000003f5000000008d8e2673e5a071a2c83c86e28033b1a0a4aac90dde7a0670827cd0c3ef8caf7d5076c7b91a057e0800000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000") 8 | test_target_midstate = binascii.unhexlify("4c8226f95a31c9619f5197809270e4fa0a2d34c10215cf4456325e1237cb009d") 9 | 10 | 11 | def midstate(data): 12 | reversed = struct.pack('>IIIIIIIIIIIIIIII', *struct.unpack('>IIIIIIIIIIIIIIII', data[:64])[::-1])[::-1] 13 | return struct.pack(' Fri, 9 May 2014 23:59:59 +0200 6 | 7 | stratum-mining-proxy (1.5.6-1) unstable; urgency=low 8 | 9 | * Add CUSTOM_USER and SCRYPT_TARGET options in default file 10 | 11 | -- Marek Palatinus Sun, 9 Feb 2014 18:36:00 +0000 12 | 13 | stratum-mining-proxy (1.5.2-3) unstable; urgency=low 14 | 15 | * Add CUSTOM_USER and SCRYPT_TARGET options in default file 16 | 17 | -- Corey Ralph Wed, 29 Jan 2014 15:16:18 +1100 18 | 19 | stratum-mining-proxy (1.5.2-2) unstable; urgency=low 20 | 21 | * Add dependency on python-argparse 22 | 23 | -- Corey Ralph Wed, 29 Jan 2014 12:19:12 +1100 24 | 25 | stratum-mining-proxy (1.5.2-1.3) unstable; urgency=low 26 | 27 | * Clean egg-info build artifact 28 | 29 | -- Corey Ralph Wed, 29 Jan 2014 11:36:47 +1100 30 | 31 | stratum-mining-proxy (1.5.2-1.2) unstable; urgency=low 32 | 33 | * Fix comments in init script and change defaults 34 | 35 | -- Corey Ralph Wed, 29 Jan 2014 11:16:24 +1100 36 | 37 | stratum-mining-proxy (1.5.2-1.1) unstable; urgency=low 38 | 39 | * Add logrotate config, init and defaults 40 | 41 | -- Corey Ralph Wed, 29 Jan 2014 10:44:27 +1100 42 | 43 | stratum-mining-proxy (1.5.2-1) unstable; urgency=low 44 | 45 | * source package automatically created by stdeb 0.6.0 46 | 47 | -- Corey Ralph Tue, 28 Jan 2014 15:45:25 +1100 48 | -------------------------------------------------------------------------------- /mining_libs/worker_registry.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | import stratum.logger 4 | log = stratum.logger.get_logger('proxy') 5 | 6 | class WorkerRegistry(object): 7 | def __init__(self, f): 8 | self.f = f # Factory of Stratum client 9 | self.clear_authorizations() 10 | 11 | def clear_authorizations(self): 12 | self.authorized = [] 13 | self.unauthorized = [] 14 | self.last_failure = 0 15 | 16 | def _on_authorized(self, result, worker_name): 17 | if result == True: 18 | self.authorized.append(worker_name) 19 | else: 20 | 
self.unauthorized.append(worker_name) 21 | return result 22 | 23 | def _on_failure(self, failure, worker_name): 24 | log.exception("Cannot authorize worker '%s'" % worker_name) 25 | self.last_failure = time.time() 26 | 27 | def authorize(self, worker_name, password): 28 | if worker_name in self.authorized: 29 | return True 30 | 31 | if worker_name in self.unauthorized and time.time() - self.last_failure < 60: 32 | # Prevent flooding of mining.authorize() requests 33 | log.warning("Authentication of worker '%s' with password '%s' failed, next attempt in few seconds..." % \ 34 | (worker_name, password)) 35 | return False 36 | 37 | d = self.f.rpc('mining.authorize', [worker_name, password]) 38 | d.addCallback(self._on_authorized, worker_name) 39 | d.addErrback(self._on_failure, worker_name) 40 | return d 41 | 42 | def is_authorized(self, worker_name): 43 | return (worker_name in self.authorized) 44 | 45 | def is_unauthorized(self, worker_name): 46 | return (worker_name in self.unauthorized) 47 | -------------------------------------------------------------------------------- /mining_libs/multicast_responder.py: -------------------------------------------------------------------------------- 1 | import json 2 | from twisted.internet.protocol import DatagramProtocol 3 | 4 | import stratum.logger 5 | log = stratum.logger.get_logger('proxy') 6 | 7 | class MulticastResponder(DatagramProtocol): 8 | def __init__(self, pool_host, stratum_port, getwork_port): 9 | # Upstream Stratum host/port 10 | # Used for identifying the pool which we're connected to. 11 | # Some load balancing strategies can change the host/port 12 | # during the mining session (by mining.reconnect()), but this points 13 | # to initial host/port provided by user on cmdline or by X-Stratum 14 | self.pool_host = pool_host 15 | 16 | self.stratum_port = stratum_port 17 | self.getwork_port = getwork_port 18 | 19 | def startProtocol(self): 20 | # 239.0.0.0/8 are for private use within an organization 21 | self.transport.joinGroup("239.3.3.3") 22 | self.transport.setTTL(5) 23 | 24 | def writeResponse(self, address, msg_id, result, error=None): 25 | self.transport.write(json.dumps({"id": msg_id, "result": result, "error": error}), address) 26 | 27 | def datagramReceived(self, datagram, address): 28 | log.info("Received local discovery request from %s:%d" % address) 29 | 30 | try: 31 | data = json.loads(datagram) 32 | except: 33 | # Skip response if datagram is not parsable 34 | log.error("Unparsable datagram") 35 | return 36 | 37 | msg_id = data.get('id') 38 | msg_method = data.get('method') 39 | #msg_params = data.get('params') 40 | 41 | if msg_method == 'mining.get_upstream': 42 | self.writeResponse(address, msg_id, (self.pool_host, self.stratum_port, self.getwork_port)) -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from setuptools import setup, Extension 4 | import sys, os 5 | try: 6 | import py2exe 7 | except ImportError: 8 | py2exe = None 9 | 10 | from mining_libs import version 11 | 12 | args = { 13 | 'name': 'stratum_mining_proxy', 14 | 'version': version.VERSION, 15 | 'description': 'Getwork-compatible proxy for Stratum mining pools', 16 | 'author': 'slush', 17 | 'author_email': 'slush@satoshilabs.com', 18 | 'url': 'http://mining.bitcoin.cz/stratum-mining/', 19 | 'ext_modules': [ 20 | Extension( 21 | 'midstate', 22 | ['midstatec/midstatemodule.c'], 23 | 
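# Editorial note (sketch, not part of the original setup.py): this Extension builds the
# same C module that midstatec/Makefile compiles by hand; once built it is imported as
# `midstate` and exposes midstate.SHA256(), which midstatec/midstatec.py wraps.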
include_dirs=['/usr/include/python2.7'], 24 | extra_compile_args=['-march=native', '-Wall', '-funroll-all-loops', '-O3', '-fstrict-aliasing', '-Wall', '-std=c99', '-fPIC', '-shared'], 25 | libraries=['python2.7'], 26 | extra_link_args=['-Wl,-O1', '-Wl,--as-needed'] 27 | ) 28 | ], 29 | 'py_modules': ['mining_libs.client_service', 'mining_libs.getwork_listener', 30 | 'mining_libs.jobs', 'mining_libs.midstate', 31 | 'mining_libs.multicast_responder', 'mining_libs.stratum_listener', 32 | 'mining_libs.utils', 'mining_libs.version', 'mining_libs.worker_registry', 33 | 'midstatec.midstatec'], 34 | 'install_requires': ['setuptools>=0.6c11', 'twisted>=12.2.0', 'stratum>=0.2.15', 'argparse'], 35 | 'scripts': ['mining_proxy.py'], 36 | } 37 | 38 | if py2exe != None: 39 | args.update({ 40 | # py2exe options 41 | 'options': {'py2exe': 42 | {'optimize': 2, 43 | 'bundle_files': 1, 44 | 'compressed': True, 45 | 'dll_excludes': ['mswsock.dll', 'powrprof.dll'], 46 | }, 47 | }, 48 | 'console': ['mining_proxy.py'], 49 | 'zipfile': None, 50 | }) 51 | 52 | setup(**args) 53 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | stratum-mining-proxy 2 | ==================== 3 | Notes on Stratum extensions 4 | ------------------------------- 5 | 6 | In order to operate with bosminer, stratum extensions need to be enabled (`--enable-stratum-extensions`). 7 | 8 | Installation using Docker 9 | ------------------------------- 10 | This works on any system with Docker (Linux/Mac/Windows) and doesn't need anything other than Git and Docker. 11 | 12 | 1. Download and extract this directory 13 | 1. `cd stratum-mining-proxy` 14 | 1. `docker build -t stratum-mining-proxy .` 15 | 1. Run interactively: `docker run -it --rm -p 3333:3333 stratum-mining-proxy -o us-east.stratum.slushpool.com -p 3333 -sh 0.0.0.0 -sp 3333` 16 | 1. Run interactively: `docker run -it --rm -p 3333:3333 stratum-mining-proxy -o eu.stratum.slushpool.com -p 3333 -sh 0.0.0.0 -sp 3333` 17 | 1. Run as a service: `docker run -d --restart unless-stopped --rm -p 3333:3333 stratum-mining-proxy -o us-east.stratum.slushpool.com -p 3333 -sh 0.0.0.0 -sp 3333` 18 | 19 | Installation on Linux using Git 20 | ------------------------------- 21 | This is an advanced option for experienced users, but it gives you the easiest way to update the proxy. 22 | 23 | 1. Install python2 and python2-virtualenv for your distribution 24 | 1. Download and extract this directory 25 | 1. Download the stratum dependency: `git clone git://github.com/braiins/stratum.git` 26 | 1. `cd stratum-mining-proxy` 27 | 1. `virtualenv .env --python=/usr/bin/python2.7` 28 | 1. `. .env/bin/activate` 29 | 1. `python -m pip install -e ../stratum` 30 | 1. `python mining_proxy.py -o us-east.stratum.slushpool.com -sh 0.0.0.0 -sp 3333 --enable-stratum-extensions` 31 | 32 | Troubleshooting: Installation on Ubuntu/RaspberryPi 33 | ------------------------------- 34 | There seem to be problems building PyInstaller with the default GCC compiler on the Raspberry Pi. 35 | 36 | 1. Install python2 and python2-virtualenv for your distribution 37 | 1. Download and extract this directory 38 | 1. `git clone git://github.com/braiins/stratum.git` 39 | 1. `cd stratum-mining-proxy` 40 | 1. `virtualenv .env --python=/usr/bin/python2.7` 41 | 1. `. .env/bin/activate` 42 | 1. Manually build PyInstaller: 43 | 1. `sudo apt-get install clang` 44 | 1. `git clone https://github.com/pyinstaller/pyinstaller.git` 45 | 1. 
`cd pyinstaller/bootloader/` 46 | 1. `python ./waf all --target-arch=32bit --clang` 47 | 1. `sudo apt-get remove clang # remove the clang, it is not needed anymore` 48 | 1. `cd ..` 49 | 1. `python setup.py install` 50 | 1. `cd ..` 51 | 1. `python -m pip install -e ../stratum` 52 | 1. `mkdir -p .env/local/include/python2.7` 53 | 1. `ln -s /usr/include/python2.7/pyconfig.h .env/local/include/python2.7/pyconfig.h` 54 | 1. `pyinstaller -F mining_proxy.py --name stratum-mining-proxy` 55 | 1. Resulting binary is in `dist/stratum-mining-proxy` 56 | 57 | Installation on Arch Linux 58 | -------------------------- 59 | 1. `git clone https://aur.archlinux.org/stratum-mining-proxy.git` 60 | 1. `cd stratum-mining-proxy` 61 | 1. `makepkg -sri` 62 | 1. run `stratum-mining-proxy -o us-east.stratum.slushpool.com -p 3333 -sh 0.0.0.0 -sp 3333 --enable-stratum-extensions` 63 | -------------------------------------------------------------------------------- /example_multicast.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ''' 3 | This is just an example script for miner developers. 4 | If you're end user, you don't need to use this script. 5 | 6 | Detector of Stratum mining proxies on local network 7 | Copyright (C) 2012 Marek Palatinus 8 | 9 | This program is free software: you can redistribute it and/or modify 10 | it under the terms of the GNU General Public License as published by 11 | the Free Software Foundation, either version 3 of the License, or 12 | (at your option) any later version. 13 | 14 | This program is distributed in the hope that it will be useful, 15 | but WITHOUT ANY WARRANTY; without even the implied warranty of 16 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 17 | GNU General Public License for more details. 18 | 19 | You should have received a copy of the GNU General Public License 20 | along with this program. If not, see . 21 | ''' 22 | 23 | from twisted.internet.protocol import DatagramProtocol 24 | from twisted.internet import reactor, defer 25 | 26 | import json 27 | 28 | class MulticastClient(DatagramProtocol): 29 | 30 | def startProtocol(self): 31 | self.transport.joinGroup("239.3.3.3") 32 | self.transport.write(json.dumps({"id": 0, "method": "mining.get_upstream", "params": []}), ('239.3.3.3', 3333)) 33 | 34 | def datagramReceived(self, datagram, address): 35 | '''Some data from peers received. 
36 | 37 | Example of valid datagram: 38 | {"id": 0, "result": [["api-stratum.bitcoin.cz", 3333], 3333, 8332], "error": null} 39 | 40 | First argument - (host, port) of upstream pool 41 | Second argument - Stratum port where proxy is listening 42 | Third parameter - Getwork port where proxy is listening 43 | ''' 44 | #print "Datagram %s received from %s" % (datagram, address) 45 | 46 | try: 47 | data = json.loads(datagram) 48 | except: 49 | print "Unparsable datagram received" 50 | 51 | 52 | if data.get('id') != 0 or data.get('result') == None: 53 | return 54 | 55 | 56 | (proxy_host, proxy_port) = address 57 | (pool_host, pool_port) = data['result'][0] 58 | stratum_port = data['result'][1] 59 | getwork_port = data['result'][2] 60 | 61 | print "Found stratum proxy on %(proxy_host)s:%(stratum_port)d (stratum), "\ 62 | "%(proxy_host)s:%(getwork_port)d (getwork), "\ 63 | "mining for %(pool_host)s:%(pool_port)d" % \ 64 | {'proxy_host': proxy_host, 65 | 'pool_host': pool_host, 66 | 'pool_port': pool_port, 67 | 'stratum_port': stratum_port, 68 | 'getwork_port': getwork_port} 69 | 70 | def stop(): 71 | print "Local discovery of Stratum proxies is finished." 72 | reactor.stop() 73 | 74 | print "Listening for Stratum proxies on local network..." 75 | reactor.listenMulticast(3333, MulticastClient(), listenMultiple=True) 76 | reactor.callLater(5, stop) 77 | reactor.run() 78 | -------------------------------------------------------------------------------- /mining_libs/utils.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | import struct 3 | 4 | from twisted.internet import defer, reactor 5 | from twisted.web import client 6 | 7 | import stratum.logger 8 | log = stratum.logger.get_logger('proxy') 9 | 10 | def show_message(msg): 11 | '''Repeatedly displays the message received from 12 | the server.''' 13 | log.warning("MESSAGE FROM THE SERVER OPERATOR: %s" % msg) 14 | log.warning("Restart proxy to discard the message") 15 | reactor.callLater(10, show_message, msg) 16 | 17 | def format_hash(h): 18 | # For printing hashes to console 19 | return "%s" % h[:8] 20 | 21 | def uint256_from_str(s): 22 | r = 0L 23 | t = struct.unpack(">= 32 33 | return rs 34 | 35 | def reverse_hash(h): 36 | return struct.pack('>IIIIIIII', *struct.unpack('>IIIIIIII', h)[::-1])[::-1] 37 | 38 | def doublesha(b): 39 | return hashlib.sha256(hashlib.sha256(b).digest()).digest() 40 | 41 | @defer.inlineCallbacks 42 | def detect_stratum(host, port): 43 | '''Perform getwork request to given 44 | host/port. If server respond, it will 45 | try to parse X-Stratum header. 46 | Not the most elegant code, but it works, 47 | because Stratum server should close the connection 48 | when client uses unknown payload.''' 49 | 50 | def get_raw_page(url, *args, **kwargs): 51 | # In Twisted 13.1.0 _parse() function replaced by _URI class. 52 | # In Twisted 15.0.0 _URI class renamed to URI. 
53 | if hasattr(client, "_parse"): 54 | scheme, host, port, path = client._parse(url) 55 | else: 56 | try: 57 | from twisted.web.client import _URI as URI 58 | except ImportError: 59 | from twisted.web.client import URI 60 | 61 | uri = URI.fromBytes(url) 62 | scheme = uri.scheme 63 | host = uri.host 64 | port = uri.port 65 | 66 | factory = client.HTTPClientFactory(url, *args, **kwargs) 67 | reactor.connectTCP(host, port, factory) 68 | return factory 69 | 70 | def _on_callback(_, d):d.callback(True) 71 | def _on_errback(_, d): d.callback(True) 72 | f = get_raw_page('http://%s:%d' % (host, port)) 73 | 74 | d = defer.Deferred() 75 | f.deferred.addCallback(_on_callback, d) 76 | f.deferred.addErrback(_on_errback, d) 77 | (yield d) 78 | 79 | if not f.response_headers: 80 | # Most likely we're already connecting to Stratum 81 | defer.returnValue((host, port)) 82 | 83 | header = f.response_headers.get('x-stratum', None)[0] 84 | if not header: 85 | # Looks like pool doesn't support stratum 86 | defer.returnValue(None) 87 | 88 | if 'stratum+tcp://' not in header: 89 | # Invalid header or unsupported transport 90 | defer.returnValue(None) 91 | 92 | header = header.replace('stratum+tcp://', '').strip() 93 | host = header.split(':') 94 | 95 | if len(host) == 1: 96 | # Port is not specified 97 | defer.returnValue((host[0], 3333)) 98 | elif len(host) == 2: 99 | defer.returnValue((host[0], int(host[1]))) 100 | 101 | defer.returnValue(None) -------------------------------------------------------------------------------- /mining_libs/midstate.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2011 by jedi95 and 2 | # CFSworks 3 | # 4 | # Permission is hereby granted, free of charge, to any person obtaining a copy 5 | # of this software and associated documentation files (the "Software"), to deal 6 | # in the Software without restriction, including without limitation the rights 7 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 8 | # copies of the Software, and to permit persons to whom the Software is 9 | # furnished to do so, subject to the following conditions: 10 | # 11 | # The above copyright notice and this permission notice shall be included in 12 | # all copies or substantial portions of the Software. 13 | # 14 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 15 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 16 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 17 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 18 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 19 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 20 | # THE SOFTWARE. 21 | 22 | import struct 23 | 24 | # Some SHA-256 constants... 
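# Usage sketch (editorial, mirroring the call in mining_libs/jobs.py): the getwork
# path feeds the first 64 bytes of the unhexlified block header into
# calculateMidstate() and hex-encodes the packed result for the 'midstate' field:
#   header_bin = binascii.unhexlify(block_header)[:64]
#   result['midstate'] = binascii.hexlify(calculateMidstate(header_bin))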
25 | K = [ 26 | 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 27 | 0x923f82a4, 0xab1c5ed5, 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 28 | 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, 0xe49b69c1, 0xefbe4786, 29 | 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, 30 | 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 31 | 0x06ca6351, 0x14292967, 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 32 | 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, 0xa2bfe8a1, 0xa81a664b, 33 | 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, 34 | 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 35 | 0x5b9cca4f, 0x682e6ff3, 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 36 | 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2, 37 | ] 38 | 39 | A0 = 0x6a09e667 40 | B0 = 0xbb67ae85 41 | C0 = 0x3c6ef372 42 | D0 = 0xa54ff53a 43 | E0 = 0x510e527f 44 | F0 = 0x9b05688c 45 | G0 = 0x1f83d9ab 46 | H0 = 0x5be0cd19 47 | 48 | def rotateright(i,p): 49 | """i>>>p""" 50 | p &= 0x1F # p mod 32 51 | return i>>p | ((i<<(32-p)) & 0xFFFFFFFF) 52 | 53 | def addu32(*i): 54 | return sum(list(i))&0xFFFFFFFF 55 | 56 | def calculateMidstate(data, state=None, rounds=None): 57 | """Given a 512-bit (64-byte) block of (little-endian byteswapped) data, 58 | calculate a Bitcoin-style midstate. (That is, if SHA-256 were little-endian 59 | and only hashed the first block of input.) 60 | """ 61 | if len(data) != 64: 62 | raise ValueError('data must be 64 bytes long') 63 | 64 | w = list(struct.unpack('> 3) 94 | s1 = rotateright(w[14],17) ^ rotateright(w[14],19) ^ (w[14] >> 10) 95 | w.append(addu32(w[0], s0, w[9], s1)) 96 | w.pop(0) 97 | 98 | if rounds is None: 99 | a = addu32(a, A0) 100 | b = addu32(b, B0) 101 | c = addu32(c, C0) 102 | d = addu32(d, D0) 103 | e = addu32(e, E0) 104 | f = addu32(f, F0) 105 | g = addu32(g, G0) 106 | h = addu32(h, H0) 107 | 108 | return struct.pack(' 13 | 14 | # Do NOT "set -e" 15 | 16 | # PATH should only include /usr/* if it runs after the mountnfs.sh script 17 | PATH=/sbin:/usr/sbin:/bin:/usr/bin 18 | DESC="Stratum mining proxy" 19 | NAME=stratum-mining-proxy 20 | DAEMON=/usr/sbin/$NAME 21 | PIDFILE=/var/run/$NAME.pid 22 | DAEMON_ARGS="--log-file=/var/log/$NAME.log --pid-file=$PIDFILE" 23 | # did have --quiet 24 | SCRIPTNAME=/etc/init.d/$NAME 25 | 26 | # Exit if the package is not installed 27 | [ -x "$DAEMON" ] || exit 0 28 | 29 | # Read configuration variable file if it is present 30 | [ -r /etc/default/$NAME ] && . /etc/default/$NAME 31 | 32 | # Add settings from default file to $DAEMON_ARGS 33 | [ -n "$POOL_HOST" ] && DAEMON_ARGS="$DAEMON_ARGS -o $POOL_HOST" 34 | [ -n "$POOL_PORT" ] && DAEMON_ARGS="$DAEMON_ARGS -p $POOL_PORT" 35 | [ -n "$SCRYPT_TARGET" ] && DAEMON_ARGS="$DAEMON_ARGS -st" 36 | [ -n "$CUSTOM_USER" ] && DAEMON_ARGS="$DAEMON_ARGS -cu $CUSTOM_USER" 37 | 38 | # Load the VERBOSE setting and other rcS variables 39 | . /lib/init/vars.sh 40 | 41 | # Define LSB log_* functions. 42 | # Depend on lsb-base (>= 3.2-14) to ensure that this file is present 43 | # and status_of_proc is working. 44 | . 
/lib/lsb/init-functions 45 | 46 | # 47 | # Function that starts the daemon/service 48 | # 49 | do_start() 50 | { 51 | # Return 52 | # 0 if daemon has been started 53 | # 1 if daemon was already running 54 | # 2 if daemon could not be started 55 | start-stop-daemon --start --quiet --pidfile $PIDFILE --exec $DAEMON --test > /dev/null \ 56 | || return 1 57 | start-stop-daemon --start --quiet --pidfile $PIDFILE --exec $DAEMON --background -- \ 58 | $DAEMON_ARGS \ 59 | || return 2 60 | # Add code here, if necessary, that waits for the process to be ready 61 | # to handle requests from services started subsequently which depend 62 | # on this one. As a last resort, sleep for some time. 63 | } 64 | 65 | # 66 | # Function that stops the daemon/service 67 | # 68 | do_stop() 69 | { 70 | # Return 71 | # 0 if daemon has been stopped 72 | # 1 if daemon was already stopped 73 | # 2 if daemon could not be stopped 74 | # other if a failure occurred 75 | start-stop-daemon --stop --quiet --retry=TERM/30/KILL/5 --pidfile $PIDFILE 76 | RETVAL="$?" 77 | [ "$RETVAL" = 2 ] && return 2 78 | # Wait for children to finish too if this is a daemon that forks 79 | # and if the daemon is only ever run from this initscript. 80 | # If the above conditions are not satisfied then add some other code 81 | # that waits for the process to drop all resources that could be 82 | # needed by services started subsequently. A last resort is to 83 | # sleep for some time. 84 | start-stop-daemon --stop --quiet --oknodo --retry=0/30/KILL/5 --exec $DAEMON 85 | [ "$?" = 2 ] && return 2 86 | # Many daemons don't delete their pidfiles when they exit. 87 | rm -f $PIDFILE 88 | return "$RETVAL" 89 | } 90 | 91 | # 92 | # Function that sends a SIGHUP to the daemon/service 93 | # 94 | do_reload() { 95 | # 96 | # If the daemon can reload its configuration without 97 | # restarting (for example, when it is sent a SIGHUP), 98 | # then implement that here. 99 | # 100 | start-stop-daemon --stop --signal 1 --quiet --pidfile $PIDFILE --name $NAME 101 | return 0 102 | } 103 | 104 | case "$1" in 105 | start) 106 | [ "$VERBOSE" != no ] && log_daemon_msg "Starting $DESC" "$NAME" 107 | do_start 108 | case "$?" in 109 | 0|1) [ "$VERBOSE" != no ] && log_end_msg 0 ;; 110 | 2) [ "$VERBOSE" != no ] && log_end_msg 1 ;; 111 | esac 112 | ;; 113 | stop) 114 | [ "$VERBOSE" != no ] && log_daemon_msg "Stopping $DESC" "$NAME" 115 | do_stop 116 | case "$?" in 117 | 0|1) [ "$VERBOSE" != no ] && log_end_msg 0 ;; 118 | 2) [ "$VERBOSE" != no ] && log_end_msg 1 ;; 119 | esac 120 | ;; 121 | status) 122 | status_of_proc "$DAEMON" "$NAME" && exit 0 || exit $? 123 | ;; 124 | #reload|force-reload) 125 | # 126 | # If do_reload() is not implemented then leave this commented out 127 | # and leave 'force-reload' as an alias for 'restart'. 128 | # 129 | #log_daemon_msg "Reloading $DESC" "$NAME" 130 | #do_reload 131 | #log_end_msg $? 132 | #;; 133 | restart|force-reload) 134 | # 135 | # If the "reload" option is implemented then remove the 136 | # 'force-reload' alias 137 | # 138 | log_daemon_msg "Restarting $DESC" "$NAME" 139 | do_stop 140 | case "$?" in 141 | 0|1) 142 | do_start 143 | case "$?" 
in 144 | 0) log_end_msg 0 ;; 145 | 1) log_end_msg 1 ;; # Old process is still running 146 | *) log_end_msg 1 ;; # Failed to start 147 | esac 148 | ;; 149 | *) 150 | # Failed to stop 151 | log_end_msg 1 152 | ;; 153 | esac 154 | ;; 155 | *) 156 | #echo "Usage: $SCRIPTNAME {start|stop|restart|reload|force-reload}" >&2 157 | echo "Usage: $SCRIPTNAME {start|stop|status|restart|force-reload}" >&2 158 | exit 3 159 | ;; 160 | esac 161 | 162 | : 163 | -------------------------------------------------------------------------------- /midstatec/midstatemodule.c: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2012 Johannes Kimmel 2 | // Distributed under the MIT/X11 software license, see 3 | // http://www.opensource.org/licenses/mit-license.php 4 | 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | 12 | typedef union sha256_state_t sha256_state_t; 13 | union sha256_state_t { 14 | uint32_t h[8]; 15 | unsigned char byte[32]; 16 | }; 17 | 18 | static uint32_t h[] = { 19 | 0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19, 20 | }; 21 | 22 | static uint32_t k[] = { 23 | 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5, 24 | 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, 25 | 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, 26 | 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967, 27 | 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, 28 | 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, 29 | 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3, 30 | 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2, 31 | }; 32 | 33 | static inline uint32_t ror32(const uint32_t v, const uint32_t n) { 34 | return (v >> n) | (v << (32 - n)); 35 | }; 36 | 37 | static inline void update_state(sha256_state_t *state, const uint32_t data[16]) { 38 | uint32_t w[64]; 39 | sha256_state_t t = *state; 40 | 41 | for (size_t i = 0 ; i < 16; i++) { 42 | w[i] = htonl(data[i]); 43 | } 44 | 45 | for (size_t i = 16; i < 64; i++) { 46 | uint32_t s0 = ror32(w[i - 15], 7) ^ ror32(w[i - 15], 18) ^ (w[i - 15] >> 3); 47 | uint32_t s1 = ror32(w[i - 2], 17) ^ ror32(w[i - 2], 19) ^ (w[i - 2] >> 10); 48 | w[i] = w[i - 16] + s0 + w[i - 7] + s1; 49 | } 50 | 51 | for (size_t i = 0; i < 64; i++) { 52 | uint32_t s0 = ror32(t.h[0], 2) ^ ror32(t.h[0], 13) ^ ror32(t.h[0], 22); 53 | uint32_t maj = (t.h[0] & t.h[1]) ^ (t.h[0] & t.h[2]) ^ (t.h[1] & t.h[2]); 54 | uint32_t t2 = s0 + maj; 55 | uint32_t s1 = ror32(t.h[4], 6) ^ ror32(t.h[4], 11) ^ ror32(t.h[4], 25); 56 | uint32_t ch = (t.h[4] & t.h[5]) ^ (~t.h[4] & t.h[6]); 57 | uint32_t t1 = t.h[7] + s1 + ch + k[i] + w[i]; 58 | 59 | t.h[7] = t.h[6]; 60 | t.h[6] = t.h[5]; 61 | t.h[5] = t.h[4]; 62 | t.h[4] = t.h[3] + t1; 63 | t.h[3] = t.h[2]; 64 | t.h[2] = t.h[1]; 65 | t.h[1] = t.h[0]; 66 | t.h[0] = t1 + t2; 67 | } 68 | 69 | for (size_t i = 0; i < 8; i++) { 70 | state->h[i] += t.h[i]; 71 | } 72 | } 73 | 74 | static inline void init_state(sha256_state_t *state) { 75 | for (size_t i = 0; i < 8; i++) { 76 | state->h[i] = h[i]; 77 | } 78 | } 79 | 80 | static sha256_state_t midstate(const unsigned char data[64]) { 81 | sha256_state_t 
state; 82 | 83 | init_state(&state); 84 | update_state(&state, (const uint32_t const *) data); 85 | 86 | return state; 87 | } 88 | 89 | void print_hex(char unsigned *data, size_t s) { 90 | for (size_t i = 0; i < s; i++) { 91 | printf("%02hhx", data[i]); 92 | } 93 | printf("\n"); 94 | } 95 | 96 | PyObject *midstate_helper(PyObject *self, PyObject *arg) { 97 | Py_ssize_t s; 98 | PyObject *ret = NULL; 99 | PyObject *t_int = NULL; 100 | char *t; 101 | unsigned char data[64]; 102 | sha256_state_t mstate; 103 | 104 | if (PyBytes_Check(arg) != true) { 105 | PyErr_SetString(PyExc_ValueError, "Need bytes object as argument."); 106 | goto error; 107 | } 108 | if (PyBytes_AsStringAndSize(arg, &t, &s) == -1) { 109 | // Got exception 110 | goto error; 111 | } 112 | if (s < 64) { 113 | PyErr_SetString(PyExc_ValueError, "Argument length must be at least 64 bytes."); 114 | goto error; 115 | } 116 | 117 | memcpy(data, t, 64); 118 | mstate = midstate(data); 119 | 120 | ret = PyTuple_New(8); 121 | for (size_t i = 0; i < 8; i++) { 122 | t_int = PyLong_FromUnsignedLong(mstate.h[i]); 123 | if (PyTuple_SetItem(ret, i, t_int) != 0) { 124 | t_int = NULL; // ret is owner of the int now 125 | goto error; 126 | } 127 | } 128 | 129 | return ret; 130 | 131 | error: 132 | Py_XDECREF(t_int); 133 | Py_XDECREF(ret); 134 | 135 | return NULL; 136 | } 137 | 138 | static struct PyMethodDef midstate_functions[] = { 139 | {"SHA256", midstate_helper, METH_O, NULL}, 140 | {NULL, NULL, 0, NULL}, 141 | }; 142 | 143 | #if PY_MAJOR_VERSION >= 3 144 | static struct PyModuleDef moduledef = { 145 | PyModuleDef_HEAD_INIT, 146 | "midstate", 147 | NULL, 148 | -1, 149 | midstate_functions, 150 | NULL, 151 | NULL, 152 | NULL, 153 | NULL, 154 | }; 155 | #endif 156 | 157 | PyMODINIT_FUNC 158 | #if PY_MAJOR_VERSION >= 3 159 | PyInit_midstate(void) 160 | { 161 | return PyModule_Create(&midstatemodule); 162 | } 163 | #else 164 | initmidstate(void) { 165 | Py_InitModule3("midstate", midstate_functions, NULL); 166 | } 167 | #endif 168 | 169 | int main(int argc, char *argv[]) { 170 | const unsigned char data[] = "\1\0\0\0\xe4\xe8\x9d\xf8H\x1b\xc5v\xb9\x9f" "fWb\xcb\x82" "f\xf8U\xc6h" "@\x16\xb8\xb4\xd1iv\xf2\0\0\0\0\xe1\xd1O\x08\x98\xe6\x1d\x02O\x0e\1r\xfc" "cFi\xf5\xfc\xd5mN\1\xca\x10\xe9" "7{\x05hc\xd1U\xc8" "f O\xf8\xff\x07\x1d\0\0\0"; 171 | 172 | sha256_state_t state; 173 | 174 | //for (size_t i = 0; i < 1000000; i++) 175 | state = midstate(data); 176 | 177 | printf("b8101f7c4a8e294ecbccb941dde17fd461dc39ff102bc37bb7ac7d5b95290166 <-- want\n"); 178 | print_hex(state.byte, 32); 179 | return 0; 180 | } 181 | -------------------------------------------------------------------------------- /mining_libs/getwork_listener.py: -------------------------------------------------------------------------------- 1 | import json 2 | import time 3 | 4 | from twisted.internet import defer 5 | from twisted.web.resource import Resource 6 | from twisted.web.server import NOT_DONE_YET 7 | 8 | import stratum.logger 9 | log = stratum.logger.get_logger('proxy') 10 | 11 | class Root(Resource): 12 | isLeaf = True 13 | 14 | def __init__(self, job_registry, workers, stratum_host, stratum_port, 15 | custom_stratum=None, custom_lp=None, custom_user=None, custom_password=''): 16 | Resource.__init__(self) 17 | self.job_registry = job_registry 18 | self.workers = workers 19 | self.stratum_host = stratum_host 20 | self.stratum_port = stratum_port 21 | self.custom_stratum = custom_stratum 22 | self.custom_lp = custom_lp 23 | self.custom_user = custom_user 24 | self.custom_password 
= custom_password 25 | 26 | def json_response(self, msg_id, result): 27 | resp = json.dumps({'id': msg_id, 'result': result, 'error': None}) 28 | #print "RESPONSE", resp 29 | return resp 30 | 31 | def json_error(self, msg_id, code, message): 32 | resp = json.dumps({'id': msg_id, 'result': None, 'error': {'code': code, 'message': message}}) 33 | #print "ERROR", resp 34 | return resp 35 | 36 | def _on_submit(self, result, request, msg_id, blockheader, worker_name, start_time): 37 | response_time = (time.time() - start_time) * 1000 38 | if result == True: 39 | log.warning("[%dms] Share from '%s' accepted, diff %d" % (response_time, worker_name, self.job_registry.difficulty)) 40 | else: 41 | log.warning("[%dms] Share from '%s' REJECTED" % (response_time, worker_name)) 42 | 43 | try: 44 | request.write(self.json_response(msg_id, result)) 45 | request.finish() 46 | except RuntimeError: 47 | # RuntimeError is thrown by Request class when 48 | # client is disconnected already 49 | pass 50 | 51 | def _on_submit_failure(self, failure, request, msg_id, blockheader, worker_name, start_time): 52 | response_time = (time.time() - start_time) * 1000 53 | 54 | # Submit for some reason failed 55 | try: 56 | request.write(self.json_response(msg_id, False)) 57 | request.finish() 58 | except RuntimeError: 59 | # RuntimeError is thrown by Request class when 60 | # client is disconnected already 61 | pass 62 | 63 | log.warning("[%dms] Share from '%s' REJECTED: %s" % \ 64 | (response_time, worker_name, failure.getErrorMessage())) 65 | 66 | def _on_authorized(self, is_authorized, request, worker_name): 67 | data = json.loads(request.content.read()) 68 | 69 | if not is_authorized: 70 | request.write(self.json_error(data.get('id', 0), -1, "Bad worker credentials")) 71 | request.finish() 72 | return 73 | 74 | if not self.job_registry.last_job: 75 | log.warning('Getworkmaker is waiting for a job...') 76 | request.write(self.json_error(data.get('id', 0), -1, "Getworkmake is waiting for a job...")) 77 | request.finish() 78 | return 79 | 80 | if data['method'] == 'getwork': 81 | if 'params' not in data or not len(data['params']): 82 | 83 | # getwork request 84 | log.info("Worker '%s' asks for new work" % worker_name) 85 | extensions = request.getHeader('x-mining-extensions') 86 | no_midstate = extensions and 'midstate' in extensions 87 | request.write(self.json_response(data.get('id', 0), self.job_registry.getwork(no_midstate=no_midstate))) 88 | request.finish() 89 | return 90 | 91 | else: 92 | 93 | # submit 94 | d = defer.maybeDeferred(self.job_registry.submit, data['params'][0], worker_name) 95 | 96 | start_time = time.time() 97 | d.addCallback(self._on_submit, request, data.get('id', 0), data['params'][0][:160], worker_name, start_time) 98 | d.addErrback(self._on_submit_failure, request, data.get('id', 0), data['params'][0][:160], worker_name, start_time) 99 | return 100 | 101 | request.write(self.json_error(data.get('id'), -1, "Unsupported method '%s'" % data['method'])) 102 | request.finish() 103 | 104 | def _on_failure(self, failure, request): 105 | request.write(self.json_error(0, -1, "Unexpected error during authorization")) 106 | request.finish() 107 | raise failure 108 | 109 | def _prepare_headers(self, request): 110 | request.setHeader('content-type', 'application/json') 111 | 112 | if self.custom_stratum: 113 | request.setHeader('x-stratum', self.custom_stratum) 114 | elif self.stratum_port: 115 | request.setHeader('x-stratum', 'stratum+tcp://%s:%d' % (request.getRequestHostname(), self.stratum_port)) 116 | 
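        # Illustrative (editorial comment, hostname hypothetical): with stratum_port=3333
        # and a getwork request addressed to "proxy.local", the branch above emits
        #   X-Stratum: stratum+tcp://proxy.local:3333
        # so getwork miners can discover the proxy's native Stratum endpoint.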
117 | if self.custom_lp: 118 | request.setHeader('x-long-polling', self.custom_lp) 119 | else: 120 | request.setHeader('x-long-polling', '/lp') 121 | 122 | request.setHeader('x-roll-ntime', 1) 123 | 124 | def _on_lp_broadcast(self, _, request): 125 | try: 126 | worker_name = request.getUser() 127 | except: 128 | worker_name = '' 129 | 130 | log.info("LP broadcast for worker '%s'" % worker_name) 131 | extensions = request.getHeader('x-mining-extensions') 132 | no_midstate = extensions and 'midstate' in extensions 133 | payload = self.json_response(0, self.job_registry.getwork(no_midstate=no_midstate)) 134 | 135 | try: 136 | request.write(payload) 137 | request.finish() 138 | except RuntimeError: 139 | # RuntimeError is thrown by Request class when 140 | # client is disconnected already 141 | pass 142 | 143 | def render_POST(self, request): 144 | self._prepare_headers(request) 145 | 146 | (worker_name, password) = (request.getUser(), request.getPassword()) 147 | 148 | if self.custom_user: 149 | worker_name = self.custom_user 150 | password = self.custom_password 151 | 152 | if worker_name == '': 153 | log.warning("Authorization required") 154 | request.setResponseCode(401) 155 | request.setHeader('WWW-Authenticate', 'Basic realm="stratum-mining-proxy"') 156 | return "Authorization required" 157 | 158 | self._prepare_headers(request) 159 | 160 | if request.path.startswith('/lp'): 161 | log.info("Worker '%s' subscribed for LP" % worker_name) 162 | self.job_registry.on_block.addCallback(self._on_lp_broadcast, request) 163 | return NOT_DONE_YET 164 | 165 | d = defer.maybeDeferred(self.workers.authorize, worker_name, password) 166 | d.addCallback(self._on_authorized, request, worker_name) 167 | d.addErrback(self._on_failure, request) 168 | return NOT_DONE_YET 169 | 170 | def render_GET(self, request): 171 | self._prepare_headers(request) 172 | 173 | try: 174 | worker_name = request.getUser() 175 | except: 176 | worker_name = '' 177 | 178 | if self.custom_user: 179 | worker_name = self.custom_user 180 | password = self.custom_password 181 | 182 | log.info("Worker '%s' subscribed for LP at %s" % (worker_name, request.path)) 183 | self.job_registry.on_block.addCallback(self._on_lp_broadcast, request) 184 | return NOT_DONE_YET 185 | -------------------------------------------------------------------------------- /mining_libs/jobs.py: -------------------------------------------------------------------------------- 1 | import binascii 2 | import time 3 | import struct 4 | import subprocess 5 | import weakref 6 | 7 | from twisted.internet import defer 8 | 9 | import utils 10 | 11 | import stratum.logger 12 | log = stratum.logger.get_logger('proxy') 13 | 14 | # This fix py2exe issue with packaging the midstate module 15 | from midstate import calculateMidstate as __unusedimport 16 | 17 | try: 18 | from midstatec import test as midstateTest, midstate as calculateMidstate 19 | if not midstateTest(): 20 | log.warning("midstate library didn't passed self test!") 21 | raise ImportError("midstatec not usable") 22 | log.info("Using C extension for midstate speedup. Good!") 23 | except ImportError: 24 | log.info("C extension for midstate not available. Using default implementation instead.") 25 | try: 26 | from midstate import calculateMidstate 27 | except ImportError: 28 | calculateMidstate = None 29 | log.exception("No midstate generator available. 
Some old miners won't work properly.") 30 | 31 | class Job(object): 32 | def __init__(self): 33 | self.job_id = None 34 | self.prevhash = '' 35 | self.coinb1_bin = '' 36 | self.coinb2_bin = '' 37 | self.merkle_branch = [] 38 | self.version = 1 39 | self.nbits = 0 40 | self.ntime_delta = 0 41 | 42 | self.extranonce2 = 0 43 | self.merkle_to_extranonce2 = {} # Relation between merkle_hash and extranonce2 44 | 45 | @classmethod 46 | def build_from_broadcast(cls, job_id, prevhash, coinb1, coinb2, merkle_branch, version, nbits, ntime): 47 | '''Build job object from Stratum server broadcast''' 48 | job = Job() 49 | job.job_id = job_id 50 | job.prevhash = prevhash 51 | job.coinb1_bin = binascii.unhexlify(coinb1) 52 | job.coinb2_bin = binascii.unhexlify(coinb2) 53 | job.merkle_branch = [ binascii.unhexlify(tx) for tx in merkle_branch ] 54 | job.version = version 55 | job.nbits = nbits 56 | job.ntime_delta = int(ntime, 16) - int(time.time()) 57 | return job 58 | 59 | def increase_extranonce2(self): 60 | self.extranonce2 += 1 61 | return self.extranonce2 62 | 63 | def build_coinbase(self, extranonce): 64 | return self.coinb1_bin + extranonce + self.coinb2_bin 65 | 66 | def build_merkle_root(self, coinbase_hash): 67 | merkle_root = coinbase_hash 68 | for h in self.merkle_branch: 69 | merkle_root = utils.doublesha(merkle_root + h) 70 | return merkle_root 71 | 72 | def serialize_header(self, merkle_root, ntime, nonce): 73 | r = self.version 74 | r += self.prevhash 75 | r += merkle_root 76 | r += binascii.hexlify(struct.pack(">I", ntime)) 77 | r += self.nbits 78 | r += binascii.hexlify(struct.pack(">I", nonce)) 79 | r += '000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000' # padding 80 | return r 81 | 82 | class JobRegistry(object): 83 | def __init__(self, f, cmd, no_midstate, real_target, use_old_target=False, scrypt_target=False): 84 | self.f = f 85 | self.cmd = cmd # execute this command on new block 86 | self.scrypt_target = scrypt_target # calculate target for scrypt algorithm instead of sha256 87 | self.no_midstate = no_midstate # Indicates if calculate midstate for getwork 88 | self.real_target = real_target # Indicates if real stratum target will be propagated to miners 89 | self.use_old_target = use_old_target # Use 00000000fffffff...f instead of correct 00000000ffffffff...0 target for really old miners 90 | self.jobs = [] 91 | self.last_job = None 92 | self.extranonce1 = None 93 | self.extranonce1_bin = None 94 | self.extranonce2_size = None 95 | 96 | self.target = 0 97 | self.target_hex = '' 98 | self.difficulty = 1 99 | self.set_difficulty(1) 100 | self.target1_hex = self.target_hex 101 | 102 | # Relation between merkle and job 103 | self.merkle_to_job= weakref.WeakValueDictionary() 104 | 105 | # Hook for LP broadcasts 106 | self.on_block = defer.Deferred() 107 | 108 | def execute_cmd(self, prevhash): 109 | if self.cmd: 110 | return subprocess.Popen(self.cmd.replace('%s', prevhash), shell=True) 111 | 112 | def set_extranonce(self, extranonce1, extranonce2_size): 113 | self.extranonce2_size = extranonce2_size 114 | self.extranonce1_bin = binascii.unhexlify(extranonce1) 115 | 116 | def set_difficulty(self, new_difficulty): 117 | if self.scrypt_target: 118 | dif1 = 0x0000ffff00000000000000000000000000000000000000000000000000000000 119 | else: 120 | dif1 = 0x00000000ffff0000000000000000000000000000000000000000000000000000 121 | self.target = int(dif1 / new_difficulty) 122 | self.target_hex = binascii.hexlify(utils.uint256_to_str(self.target)) 123 | 
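        # Worked example (editorial): with the sha256 dif1 above, new_difficulty=1 leaves
        # the target at dif1 itself (0x00000000ffff0000...0000); doubling the difficulty
        # halves the target, so new_difficulty=2 yields 0x000000007fff8000...0000.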
self.difficulty = new_difficulty 124 | 125 | def build_full_extranonce(self, extranonce2): 126 | '''Join extranonce1 and extranonce2 together while padding 127 | extranonce2 length to extranonce2_size (provided by server).''' 128 | return self.extranonce1_bin + self.extranonce2_padding(extranonce2) 129 | 130 | def extranonce2_padding(self, extranonce2): 131 | '''Return extranonce2 with padding bytes''' 132 | 133 | if not self.extranonce2_size: 134 | raise Exception("Extranonce2_size isn't set yet") 135 | 136 | extranonce2_bin = struct.pack('>I', extranonce2) 137 | missing_len = self.extranonce2_size - len(extranonce2_bin) 138 | 139 | if missing_len < 0: 140 | # extranonce2 is too long, we should print warning on console, 141 | # but try to shorten extranonce2 142 | log.info("Extranonce size mismatch. Please report this error to pool operator!") 143 | return extranonce2_bin[abs(missing_len):] 144 | 145 | # This is probably more common situation, but it is perfectly 146 | # safe to add whitespaces 147 | return '\x00' * missing_len + extranonce2_bin 148 | 149 | def add_template(self, template, clean_jobs): 150 | if clean_jobs: 151 | # Pool asked us to stop submitting shares from previous jobs 152 | self.jobs = [] 153 | 154 | self.jobs.append(template) 155 | self.last_job = template 156 | 157 | if clean_jobs: 158 | # Force miners to reload jobs 159 | on_block = self.on_block 160 | self.on_block = defer.Deferred() 161 | on_block.callback(True) 162 | 163 | # blocknotify-compatible call 164 | self.execute_cmd(template.prevhash) 165 | 166 | def register_merkle(self, job, merkle_hash, extranonce2): 167 | # merkle_to_job is weak-ref, so it is cleaned up automatically 168 | # when job is dropped 169 | self.merkle_to_job[merkle_hash] = job 170 | job.merkle_to_extranonce2[merkle_hash] = extranonce2 171 | 172 | def get_job_from_header(self, header): 173 | '''Lookup for job and extranonce2 used for given blockheader (in hex)''' 174 | merkle_hash = header[72:136].lower() 175 | job = self.merkle_to_job[merkle_hash] 176 | extranonce2 = job.merkle_to_extranonce2[merkle_hash] 177 | return (job, extranonce2) 178 | 179 | def getwork(self, no_midstate=True): 180 | '''Miner requests for new getwork''' 181 | 182 | job = self.last_job # Pick the latest job from pool 183 | 184 | # 1. Increase extranonce2 185 | extranonce2 = job.increase_extranonce2() 186 | 187 | # 2. Build final extranonce 188 | extranonce = self.build_full_extranonce(extranonce2) 189 | 190 | # 3. Put coinbase transaction together 191 | coinbase_bin = job.build_coinbase(extranonce) 192 | 193 | # 4. Calculate coinbase hash 194 | coinbase_hash = utils.doublesha(coinbase_bin) 195 | 196 | # 5. Calculate merkle root 197 | merkle_root = binascii.hexlify(utils.reverse_hash(job.build_merkle_root(coinbase_hash))) 198 | 199 | # 6. Generate current ntime 200 | ntime = int(time.time()) + job.ntime_delta 201 | 202 | # 7. Serialize header 203 | block_header = job.serialize_header(merkle_root, ntime, 0) 204 | 205 | # 8. Register job params 206 | self.register_merkle(job, merkle_root, extranonce2) 207 | 208 | # 9. 
Prepare hash1, calculate midstate and fill the response object 209 | header_bin = binascii.unhexlify(block_header)[:64] 210 | hash1 = "00000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000010000" 211 | 212 | result = {'data': block_header, 213 | 'hash1': hash1} 214 | 215 | if self.use_old_target: 216 | result['target'] = 'ffffffffffffffffffffffffffffffffffffffffffffffffffffffff00000000' 217 | elif self.real_target: 218 | result['target'] = self.target_hex 219 | else: 220 | result['target'] = self.target1_hex 221 | 222 | if calculateMidstate and not (no_midstate or self.no_midstate): 223 | # Midstate module not found or disabled 224 | result['midstate'] = binascii.hexlify(calculateMidstate(header_bin)) 225 | 226 | return result 227 | 228 | def submit(self, header, worker_name): 229 | # Drop unused padding 230 | header = header[:160] 231 | 232 | # 1. Check if blockheader meets requested difficulty 233 | header_bin = binascii.unhexlify(header[:160]) 234 | rev = ''.join([ header_bin[i*4:i*4+4][::-1] for i in range(0, 20) ]) 235 | hash_bin = utils.doublesha(rev) 236 | block_hash = ''.join([ hash_bin[i*4:i*4+4][::-1] for i in range(0, 8) ]) 237 | 238 | #log.info('!!! %s' % header[:160]) 239 | log.info("Submitting %s" % utils.format_hash(binascii.hexlify(block_hash))) 240 | 241 | if utils.uint256_from_str(hash_bin) > self.target: 242 | log.debug("Share is below expected target") 243 | return True 244 | 245 | # 2. Lookup for job and extranonce used for creating given block header 246 | try: 247 | (job, extranonce2) = self.get_job_from_header(header) 248 | except KeyError: 249 | log.info("Job not found") 250 | return False 251 | 252 | # 3. Format extranonce2 to hex string 253 | extranonce2_hex = binascii.hexlify(self.extranonce2_padding(extranonce2)) 254 | 255 | # 4. Parse ntime and nonce from header 256 | ntimepos = 17*8 # 17th integer in datastring 257 | noncepos = 19*8 # 19th integer in datastring 258 | ntime = header[ntimepos:ntimepos+8] 259 | nonce = header[noncepos:noncepos+8] 260 | 261 | # 5. 
Submit share to the pool 262 | return self.f.rpc('mining.submit', [worker_name, job.job_id, extranonce2_hex, ntime, nonce]) 263 | -------------------------------------------------------------------------------- /mining_libs/stratum_listener.py: -------------------------------------------------------------------------------- 1 | import time 2 | import binascii 3 | import struct 4 | 5 | from twisted.internet import defer 6 | 7 | from stratum.services import GenericService 8 | from stratum.pubsub import Pubsub, Subscription 9 | from stratum.custom_exceptions import ServiceException, RemoteServiceException 10 | 11 | from jobs import JobRegistry 12 | 13 | import stratum.logger 14 | log = stratum.logger.get_logger('proxy') 15 | 16 | def var_int(i): 17 | if i <= 0xff: 18 | return struct.pack('>B', i) 19 | elif i <= 0xffff: 20 | return struct.pack('>H', i) 21 | raise Exception("number is too big") 22 | 23 | class UpstreamServiceException(ServiceException): 24 | code = -2 25 | 26 | class SubmitException(ServiceException): 27 | code = -2 28 | 29 | class DifficultySubscription(Subscription): 30 | event = 'mining.set_difficulty' 31 | difficulty = 1 32 | 33 | @classmethod 34 | def on_new_difficulty(cls, new_difficulty): 35 | cls.difficulty = new_difficulty 36 | cls.emit(new_difficulty) 37 | 38 | def after_subscribe(self, *args): 39 | self.emit_single(self.difficulty) 40 | 41 | class MiningSubscription(Subscription): 42 | '''This subscription object implements 43 | logic for broadcasting new jobs to the clients.''' 44 | 45 | event = 'mining.notify' 46 | 47 | last_broadcast = None 48 | 49 | @classmethod 50 | def disconnect_all(cls): 51 | for subs in Pubsub.iterate_subscribers(cls.event): 52 | if subs.connection_ref().transport != None: 53 | subs.connection_ref().transport.loseConnection() 54 | 55 | @classmethod 56 | def on_template(cls, job_id, prevhash, coinb1, coinb2, merkle_branch, version, nbits, ntime, clean_jobs): 57 | '''Push new job to subscribed clients''' 58 | cls.last_broadcast = (job_id, prevhash, coinb1, coinb2, merkle_branch, version, nbits, ntime, clean_jobs) 59 | cls.emit(job_id, prevhash, coinb1, coinb2, merkle_branch, version, nbits, ntime, clean_jobs) 60 | 61 | def _finish_after_subscribe(self, result): 62 | '''Send new job to newly subscribed client''' 63 | try: 64 | (job_id, prevhash, coinb1, coinb2, merkle_branch, version, nbits, ntime, _) = self.last_broadcast 65 | except Exception: 66 | log.error("Template not ready yet") 67 | return result 68 | 69 | self.emit_single(job_id, prevhash, coinb1, coinb2, merkle_branch, version, nbits, ntime, True) 70 | return result 71 | 72 | def after_subscribe(self, *args): 73 | '''This will send new job to the client *after* he receive subscription details. 74 | on_finish callback solve the issue that job is broadcasted *during* 75 | the subscription request and client receive messages in wrong order.''' 76 | self.connection_ref().on_finish.addCallback(self._finish_after_subscribe) 77 | 78 | class VersionMaskSubscription(Subscription): 79 | """Each instance represents per connection version mask subscription 80 | 81 | Note: mask may differ per connection depending on downstream configuration 82 | requests 83 | """ 84 | event = 'mining.set_version_mask' 85 | # Default mask as specified by the BIP 86 | version_mask = 0 87 | # By default, version rolling is not enabled. 
Therefore, no subscription is 88 | # possible 89 | enabled = False 90 | 91 | def __init__(self, subscriber_version_mask, **params): 92 | super(VersionMaskSubscription, self).__init__(**params) 93 | self.subscriber_version_mask = subscriber_version_mask 94 | 95 | @classmethod 96 | def on_new_mask(cls, version_mask): 97 | """ 98 | :param version_mask: new mask from upstream 99 | """ 100 | cls.version_mask = version_mask 101 | cls.emit() 102 | 103 | def get_effective_mask(self): 104 | """Calculates the effective version rolling mask 105 | 106 | :return: combined mask based on the mask requested by the miner 107 | and the mask provided by the upstream connection, i.e. the 108 | allowed mask 109 | """ 110 | return self.version_mask & self.subscriber_version_mask 111 | 112 | def after_subscribe(self, *args): 113 | self.emit_single() 114 | 115 | def process(self, *args, **kwargs): 116 | return ['%x' % self.get_effective_mask()] 117 | 118 | @classmethod 119 | def enable(cls): 120 | cls.enabled = True 121 | 122 | 123 | class StratumExtension(object): 124 | @classmethod 125 | def configure_downstream(cls, connection_ref, values): 126 | """Default implementation that generates the downstream extension response 127 | 128 | :param connection_ref: reference to the connection that demanded the 129 | extension 130 | :param values: values for the extension 131 | :return: dictionary that indicates the extension is not available 132 | """ 133 | return {cls.namespace: False} 134 | 135 | @classmethod 136 | def get_values(cls): 137 | pass 138 | 139 | class VersionRollingExtension(StratumExtension): 140 | """Version rolling extension 141 | This class provides version rolling extension parameter values when 142 | negotiating with the upstream pool. It also provides a combined version 143 | rolling mask for downstream miners based on the previous negotiation with the 144 | pool.
145 | """ 146 | namespace = 'version-rolling' 147 | 148 | @classmethod 149 | def _get_mask_from_values(cls, values): 150 | """Extracts version mask from values dictionary 151 | 152 | :param values: a dictinary with all extensions 153 | :return: mask or throws an exception 154 | """ 155 | mask_str = values[cls.namespace + '.mask'] 156 | mask = int(mask_str, 16) 157 | return mask 158 | 159 | @classmethod 160 | def configure_downstream(cls, connection_ref, values): 161 | """Version rolling configuration for downstream miners 162 | 163 | :param connection_ref: 164 | :param values: 165 | :return: 166 | :todo consider whether the result should indicate version rolling 167 | disabled in case there is no intersection between the upstream mask 168 | and the downstream requested mask 169 | """ 170 | result = super(VersionRollingExtension, cls).configure_downstream( 171 | connection_ref, values) 172 | 173 | try: 174 | downstream_proposed_mask = cls._get_mask_from_values(values) 175 | # Only when subscription is enabled compose a valid mask 176 | if VersionMaskSubscription.enabled: 177 | sub = VersionMaskSubscription(downstream_proposed_mask) 178 | result = {cls.namespace: True, 179 | cls.namespace + '.mask': '%x' % sub.get_effective_mask()} 180 | _ = Pubsub.subscribe(connection_ref(), sub) 181 | except Exception as e: 182 | log.error("Version mask not calculated, error: '%s'" % e) 183 | 184 | return result 185 | 186 | @classmethod 187 | def prepare_upstream(cls, args): 188 | """Prepares version rolling extension for upstream 189 | :param args: object with command line arguments 190 | :return: a tuple - extension name and a dictionary with extension 191 | parameter values 192 | """ 193 | values = { 194 | cls.namespace + '.mask': '%x' % args.version_rolling_mask, 195 | cls.namespace + '.min-bit-count': 196 | args.version_rolling_min_bit_count, 197 | } 198 | return cls.namespace, values 199 | 200 | @classmethod 201 | def configure_upstream(cls, values): 202 | """Configure version rolling extension based on upstream response 203 | A valid mask parsed from the upstream values is provided for 204 | subscription only if it is non-zero 205 | 206 | :param values: dictionary with all extensions 207 | :return: nothing 208 | """ 209 | if values.get(cls.namespace, False): 210 | try: 211 | upstream_proposed_mask = cls._get_mask_from_values(values) 212 | except Exception as e: 213 | log.error("Cannot parse upstream version mask error: '%s'" % e) 214 | else: 215 | if upstream_proposed_mask != 0: 216 | VersionMaskSubscription.enable() 217 | VersionMaskSubscription.on_new_mask(upstream_proposed_mask) 218 | log.info('Version rolling stratum extension enabled (mask: ' 219 | '%x)' % 220 | upstream_proposed_mask) 221 | 222 | 223 | extensions = { 224 | VersionRollingExtension.namespace: VersionRollingExtension 225 | } 226 | 227 | 228 | class StratumProxyService(GenericService): 229 | service_type = 'mining' 230 | service_vendor = 'mining_proxy' 231 | is_default = True 232 | 233 | _f = None # Factory of upstream Stratum connection 234 | custom_user = None 235 | custom_password = None 236 | extranonce1 = None 237 | extranonce2_size = None 238 | tail_iterator = 0 239 | registered_tails= [] 240 | 241 | @classmethod 242 | def _set_upstream_factory(cls, f): 243 | cls._f = f 244 | 245 | @classmethod 246 | def _set_custom_user(cls, custom_user, custom_password): 247 | cls.custom_user = custom_user 248 | cls.custom_password = custom_password 249 | 250 | @classmethod 251 | def _set_extranonce(cls, extranonce1, extranonce2_size): 252 
| cls.extranonce1 = extranonce1 253 | cls.extranonce2_size = extranonce2_size 254 | 255 | @classmethod 256 | def _get_unused_tail(cls): 257 | '''Currently adds up to two bytes to extranonce1, 258 | limiting the proxy to at most 65535 connected clients.''' 259 | 260 | for _ in range(0, 0xffff): # 0-65535 261 | cls.tail_iterator += 1 262 | cls.tail_iterator %= 0xffff 263 | 264 | # Zero extranonce is reserved for getwork connections 265 | if cls.tail_iterator == 0: 266 | cls.tail_iterator += 1 267 | 268 | # var_int throws an exception when the input is > 0xffff 269 | tail = var_int(cls.tail_iterator) 270 | tail_len = len(tail) 271 | 272 | if tail not in cls.registered_tails: 273 | cls.registered_tails.append(tail) 274 | return (binascii.hexlify(tail), cls.extranonce2_size - tail_len) 275 | 276 | raise Exception("Extranonce slots are full, please disconnect some miners!") 277 | 278 | def _drop_tail(self, result, tail): 279 | tail = binascii.unhexlify(tail) 280 | if tail in self.registered_tails: 281 | self.registered_tails.remove(tail) 282 | else: 283 | log.error("Given extranonce is not registered") 284 | return result 285 | 286 | @defer.inlineCallbacks 287 | def authorize(self, worker_name, worker_password, *args): 288 | if self._f.client == None or not self._f.client.connected: 289 | yield self._f.on_connect 290 | 291 | if self.custom_user != None: 292 | # Already authorized by main() 293 | defer.returnValue(True) 294 | 295 | result = (yield self._f.rpc('mining.authorize', [worker_name, worker_password])) 296 | defer.returnValue(result) 297 | 298 | @defer.inlineCallbacks 299 | def configure(self, extensions_names, extensions_values, *args): 300 | """Handles the configure method from downstream connections 301 | A list of extensions is matched against a list of supported extensions. 302 | When a matching extension is found, it is provided with the parameter 303 | values from downstream and a resulting configuration is collected.
304 | :param extensions_names: 305 | :param extensions_values: 306 | :param args: 307 | :return: 308 | """ 309 | if self._f.client == None or not self._f.client.connected: 310 | yield self._f.on_connect 311 | 312 | config_result = {} 313 | for ext_name in extensions_names: 314 | ext = extensions.get(ext_name, None) 315 | if ext is not None: 316 | config_result.update(ext.configure_downstream( 317 | self.connection_ref, extensions_values)) 318 | 319 | defer.returnValue(config_result) 320 | 321 | @defer.inlineCallbacks 322 | def subscribe(self, *args): 323 | if self._f.client == None or not self._f.client.connected: 324 | yield self._f.on_connect 325 | 326 | if self._f.client == None or not self._f.client.connected: 327 | raise UpstreamServiceException("Upstream not connected") 328 | 329 | if self.extranonce1 == None: 330 | # This should never happen, because _f.on_connect is fired *after* 331 | # connection receive mining.subscribe response 332 | raise UpstreamServiceException("Not subscribed on upstream yet") 333 | 334 | (tail, extranonce2_size) = self._get_unused_tail() 335 | 336 | session = self.connection_ref().get_session() 337 | session['tail'] = tail 338 | 339 | # Remove extranonce from registry when client disconnect 340 | self.connection_ref().on_disconnect.addCallback(self._drop_tail, tail) 341 | 342 | subs1 = Pubsub.subscribe(self.connection_ref(), DifficultySubscription())[0] 343 | subs2 = Pubsub.subscribe(self.connection_ref(), MiningSubscription())[0] 344 | defer.returnValue(((subs1, subs2),) + (self.extranonce1+tail, extranonce2_size)) 345 | 346 | @defer.inlineCallbacks 347 | def submit(self, worker_name, job_id, extranonce2, ntime, nonce, *args): 348 | if self._f.client == None or not self._f.client.connected: 349 | raise SubmitException("Upstream not connected") 350 | 351 | session = self.connection_ref().get_session() 352 | tail = session.get('tail') 353 | if tail == None: 354 | raise SubmitException("Connection is not subscribed") 355 | 356 | if self.custom_user: 357 | worker_name = self.custom_user 358 | 359 | start = time.time() 360 | submit_params = [worker_name, job_id, tail+extranonce2, ntime, nonce] 361 | # A simplified way of detecting version rolling is that we have 362 | # received an additional version field and there is version mask 363 | # subscription enabled 364 | if len(args) == 1 and VersionMaskSubscription.enabled: 365 | submit_params.append(args[0]) 366 | try: 367 | result = (yield self._f.rpc('mining.submit', submit_params)) 368 | 369 | except RemoteServiceException as exc: 370 | response_time = (time.time() - start) * 1000 371 | log.info("[%dms] Share from '%s' REJECTED: %s" % (response_time, worker_name, str(exc))) 372 | raise SubmitException(*exc.args) 373 | 374 | response_time = (time.time() - start) * 1000 375 | log.info("[%dms] Share from '%s' accepted, diff %d" % (response_time, worker_name, DifficultySubscription.difficulty)) 376 | defer.returnValue(result) 377 | 378 | def get_transactions(self, *args): 379 | log.warn("mining.get_transactions isn't supported by proxy") 380 | return [] 381 | -------------------------------------------------------------------------------- /mining_proxy.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ''' 3 | Stratum mining proxy 4 | Copyright (C) 2012 Marek Palatinus 5 | 6 | This program is free software: you can redistribute it and/or modify 7 | it under the terms of the GNU General Public License as published by 8 | the Free Software Foundation, 
either version 3 of the License, or 9 | (at your option) any later version. 10 | 11 | This program is distributed in the hope that it will be useful, 12 | but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | GNU General Public License for more details. 15 | 16 | You should have received a copy of the GNU General Public License 17 | along with this program. If not, see <http://www.gnu.org/licenses/>. 18 | ''' 19 | 20 | import argparse 21 | import time 22 | import os 23 | import socket 24 | 25 | from twisted.internet import defer 26 | 27 | 28 | @defer.inlineCallbacks 29 | def negotiate_stratum_extensions(f): 30 | """ 31 | Negotiates stratum extensions 32 | 33 | :param f: factory for rpc calls 34 | """ 35 | log.info("Negotiating stratum protocol extensions") 36 | extensions_names = [] 37 | extensions_values = {} 38 | # Collect all extensions and send configure to upstream 39 | for _, ext in stratum_listener.extensions.iteritems(): 40 | (ext_name, ext_values) = ext.prepare_upstream(args) 41 | extensions_names.append(ext_name) 42 | extensions_values.update(ext_values) 43 | 44 | config_params = [extensions_names, extensions_values] 45 | config_result = (yield f.rpc('mining.configure', config_params)) 46 | # Configure all extensions based on the response from upstream 47 | for _, ext in stratum_listener.extensions.iteritems(): 48 | ext.configure_upstream(config_result) 49 | 50 | 51 | def parse_args(): 52 | parser = argparse.ArgumentParser(description='This proxy allows you to run getwork-based miners against a Stratum mining pool.') 53 | parser.add_argument('-o', '--host', dest='host', type=str, 54 | default='stratum.slushpool.com', 55 | help='Hostname of Stratum mining pool') 56 | parser.add_argument('-p', '--port', dest='port', type=int, default=3333, help='Port of Stratum mining pool') 57 | parser.add_argument('-sh', '--stratum-host', dest='stratum_host', type=str, default='0.0.0.0', help='On which network interface to listen for stratum miners. Use "localhost" for listening on internal IP only.') 58 | parser.add_argument('-sp', '--stratum-port', dest='stratum_port', type=int, default=3333, help='Port on which to listen for stratum miners.') 59 | parser.add_argument('-oh', '--getwork-host', dest='getwork_host', type=str, default='0.0.0.0', help='On which network interface to listen for getwork miners. Use "localhost" for listening on internal IP only.') 60 | parser.add_argument('-gp', '--getwork-port', dest='getwork_port', type=int, default=8332, help='Port on which to listen for getwork miners. Use another port if you have bitcoind RPC running on this machine already.') 61 | parser.add_argument('-nm', '--no-midstate', dest='no_midstate', action='store_true', help="Don't compute midstate for getwork. This gives an outstanding performance boost, but some old miners like Diablo don't work without midstate.") 62 | parser.add_argument('-rt', '--real-target', dest='real_target', action='store_true', help="Propagate >diff1 target to getwork miners.
Some miners work incorrectly with higher difficulty.") 63 | parser.add_argument('-cl', '--custom-lp', dest='custom_lp', type=str, help='Override URL provided in X-Long-Polling header') 64 | parser.add_argument('-cs', '--custom-stratum', dest='custom_stratum', type=str, help='Override URL provided in X-Stratum header') 65 | parser.add_argument('-cu', '--custom-user', dest='custom_user', type=str, help='Use this username for submitting shares') 66 | parser.add_argument('-cp', '--custom-password', dest='custom_password', type=str, help='Use this password for submitting shares') 67 | parser.add_argument('--old-target', dest='old_target', action='store_true', help='Provides backward compatible targets for some deprecated getwork miners.') 68 | parser.add_argument('--blocknotify', dest='blocknotify_cmd', type=str, default='', help='Execute command when the best block changes (%%s in BLOCKNOTIFY_CMD is replaced by block hash)') 69 | parser.add_argument('--socks', dest='proxy', type=str, default='', help='Use socks5 proxy for upstream Stratum connection, specify as host:port') 70 | parser.add_argument('--tor', dest='tor', action='store_true', help='Configure proxy to mine over Tor (requires Tor running on local machine)') 71 | parser.add_argument('-t', '--test', dest='test', action='store_true', help='Run performance test on startup') 72 | parser.add_argument('-v', '--verbose', dest='verbose', action='store_true', help='Enable low-level debugging messages') 73 | parser.add_argument('-q', '--quiet', dest='quiet', action='store_true', help='Make output more quiet') 74 | parser.add_argument('-i', '--pid-file', dest='pid_file', type=str, help='Store process pid to the file') 75 | parser.add_argument('-l', '--log-file', dest='log_file', type=str, help='Log to specified file') 76 | parser.add_argument('-st', '--scrypt-target', dest='scrypt_target', action='store_true', help='Calculate targets for scrypt algorithm') 77 | default_version_rolling_mask = 0x1fffe000 78 | parser.add_argument('-se', '--enable-stratum-extensions', 79 | dest='maybe_enable_stratum_extensions', 80 | action='store_const', 81 | const=negotiate_stratum_extensions, 82 | # no negotiation by default 83 | default=lambda x: None, 84 | help='Enable stratum extensions via ' 85 | 'mining.configure, default: DISABLED') 86 | parser.add_argument('-vm', '--version-rolling-mask', 87 | dest='version_rolling_mask', 88 | type=lambda x: int(x, 16), 89 | default=default_version_rolling_mask, 90 | help='Specify hex mask for version rolling extension - ' 91 | 'default (%x)' % default_version_rolling_mask) 92 | parser.add_argument('-vb', '--version-rolling-min-bit-count', 93 | dest='version_rolling_min_bit_count', type=int, 94 | default=2, 95 | help='Minimum number of bits requested for ' 96 | 'version mask when requesting version ' 97 | 'rolling, default (2)') 98 | 99 | return parser.parse_args() 100 | 101 | from stratum import settings 102 | settings.LOGLEVEL='INFO' 103 | 104 | if __name__ == '__main__': 105 | # We need to parse args & setup Stratum environment 106 | # before any other imports 107 | args = parse_args() 108 | if args.quiet: 109 | settings.DEBUG = False 110 | settings.LOGLEVEL = 'WARNING' 111 | elif args.verbose: 112 | settings.DEBUG = True 113 | settings.LOGLEVEL = 'DEBUG' 114 | if args.log_file: 115 | settings.LOGFILE = args.log_file 116 | 117 | from twisted.internet import reactor 118 | from stratum.socket_transport import SocketTransportFactory, SocketTransportClientFactory 119 | from stratum.services import ServiceEventHandler 120 | 
from twisted.web.server import Site 121 | 122 | from mining_libs import stratum_listener 123 | from mining_libs import getwork_listener 124 | from mining_libs import client_service 125 | from mining_libs import jobs 126 | from mining_libs import worker_registry 127 | from mining_libs import multicast_responder 128 | from mining_libs import version 129 | from mining_libs import utils 130 | 131 | import stratum.logger 132 | log = stratum.logger.get_logger('proxy') 133 | 134 | def on_shutdown(f): 135 | '''Clean up the environment properly''' 136 | log.info("Shutting down proxy...") 137 | f.is_reconnecting = False # Don't let the stratum factory reconnect again 138 | 139 | @defer.inlineCallbacks 140 | def on_connect(f, workers, job_registry): 141 | '''Callback when the proxy gets connected to the pool''' 142 | log.info("Connected to Stratum pool at %s:%d" % f.main_host) 143 | #reactor.callLater(30, f.client.transport.loseConnection) 144 | 145 | # Hook to on_connect again 146 | f.on_connect.addCallback(on_connect, workers, job_registry) 147 | 148 | # Every worker has to re-authorize 149 | workers.clear_authorizations() 150 | 151 | # Configure/enable stratum extensions 152 | yield args.maybe_enable_stratum_extensions(f) 153 | 154 | # Subscribe for receiving jobs 155 | log.info("Subscribing for mining jobs") 156 | (_, extranonce1, extranonce2_size) = (yield f.rpc('mining.subscribe', []))[:3] 157 | job_registry.set_extranonce(extranonce1, extranonce2_size) 158 | stratum_listener.StratumProxyService._set_extranonce(extranonce1, extranonce2_size) 159 | 160 | if args.custom_user: 161 | log.warning("Authorizing custom user %s, password %s" % (args.custom_user, args.custom_password)) 162 | workers.authorize(args.custom_user, args.custom_password) 163 | 164 | defer.returnValue(f) 165 | 166 | def on_disconnect(f, workers, job_registry): 167 | '''Callback when the proxy gets disconnected from the pool''' 168 | log.info("Disconnected from Stratum pool at %s:%d" % f.main_host) 169 | f.on_disconnect.addCallback(on_disconnect, workers, job_registry) 170 | 171 | stratum_listener.MiningSubscription.disconnect_all() 172 | 173 | # Reject miners because we don't give a *job :-) 174 | workers.clear_authorizations() 175 | 176 | return f 177 | 178 | def test_launcher(result, job_registry): 179 | def run_test(): 180 | log.info("Running performance self-test...") 181 | for m in (True, False): 182 | log.info("Generating with midstate: %s" % m) 183 | log.info("Example getwork:") 184 | log.info(job_registry.getwork(no_midstate=not m)) 185 | 186 | start = time.time() 187 | n = 10000 188 | 189 | for x in range(n): 190 | job_registry.getwork(no_midstate=not m) 191 | 192 | log.info("%d getworks generated in %.03f sec, %d gw/s" % \ 193 | (n, time.time() - start, n / (time.time()-start))) 194 | 195 | log.info("Test done") 196 | reactor.callLater(1, run_test) 197 | return result 198 | 199 | def print_deprecation_warning(): 200 | '''Once a new version is detected, this method prints a deprecation warning every 30 seconds.''' 201 | 202 | log.warning("New proxy version available! Please update!") 203 | reactor.callLater(30, print_deprecation_warning) 204 | 205 | def test_update(): 206 | '''Perform a lookup for a newer proxy version, on startup and then once a day.
207 | When a new version is found, it starts printing a warning message and turns off further checks.''' 208 | 209 | GIT_URL='https://raw.github.com/slush0/stratum-mining-proxy/master/mining_libs/version.py' 210 | 211 | import urllib2 212 | log.warning("Checking for updates...") 213 | try: 214 | if version.VERSION not in urllib2.urlopen(GIT_URL).read(): 215 | print_deprecation_warning() 216 | return # New version already detected, stop periodic checks 217 | except: 218 | log.warning("Check failed.") 219 | 220 | reactor.callLater(3600*24, test_update) 221 | 222 | @defer.inlineCallbacks 223 | def main(args): 224 | if args.pid_file: 225 | fp = file(args.pid_file, 'w') 226 | fp.write(str(os.getpid())) 227 | fp.close() 228 | 229 | if args.port != 3333: 230 | '''The user most likely provided host/port 231 | for the getwork interface. Let's try to detect 232 | the Stratum host/port of the given getwork pool.''' 233 | 234 | try: 235 | new_host = (yield utils.detect_stratum(args.host, args.port)) 236 | except: 237 | log.exception("Stratum host/port autodetection failed") 238 | new_host = None 239 | 240 | if new_host != None: 241 | args.host = new_host[0] 242 | args.port = new_host[1] 243 | 244 | log.warning("Stratum proxy version: %s" % version.VERSION) 245 | # Setup periodic checks for a new version 246 | test_update() 247 | 248 | if args.tor: 249 | log.warning("Configuring Tor connection") 250 | args.proxy = '127.0.0.1:9050' 251 | args.host = 'pool57wkuu5yuhzb.onion' 252 | args.port = 3333 253 | 254 | if args.proxy: 255 | proxy = args.proxy.split(':') 256 | if len(proxy) < 2: 257 | proxy = (proxy[0], 9050) 258 | else: 259 | proxy = (proxy[0], int(proxy[1])) 260 | log.warning("Using proxy %s:%d" % proxy) 261 | else: 262 | proxy = None 263 | 264 | log.warning("Trying to connect to Stratum pool at %s:%d" % (args.host, args.port)) 265 | 266 | # Connect to Stratum pool 267 | f = SocketTransportClientFactory(args.host, args.port, 268 | debug=args.verbose, proxy=proxy, 269 | event_handler=client_service.ClientMiningService) 270 | 271 | 272 | job_registry = jobs.JobRegistry(f, cmd=args.blocknotify_cmd, scrypt_target=args.scrypt_target, 273 | no_midstate=args.no_midstate, real_target=args.real_target, use_old_target=args.old_target) 274 | client_service.ClientMiningService.job_registry = job_registry 275 | client_service.ClientMiningService.reset_timeout() 276 | 277 | workers = worker_registry.WorkerRegistry(f) 278 | f.on_connect.addCallback(on_connect, workers, job_registry) 279 | f.on_disconnect.addCallback(on_disconnect, workers, job_registry) 280 | 281 | if args.test: 282 | f.on_connect.addCallback(test_launcher, job_registry) 283 | 284 | # Cleanup properly on shutdown 285 | reactor.addSystemEventTrigger('before', 'shutdown', on_shutdown, f) 286 | 287 | # Block until the proxy connects to the pool 288 | yield f.on_connect 289 | 290 | # Setup getwork listener 291 | if args.getwork_port > 0: 292 | conn = reactor.listenTCP(args.getwork_port, Site(getwork_listener.Root(job_registry, workers, 293 | stratum_host=args.stratum_host, stratum_port=args.stratum_port, 294 | custom_lp=args.custom_lp, custom_stratum=args.custom_stratum, 295 | custom_user=args.custom_user, custom_password=args.custom_password)), 296 | interface=args.getwork_host) 297 | 298 | try: 299 | conn.socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) # Enable keepalive packets 300 | conn.socket.setsockopt(socket.SOL_TCP, socket.TCP_KEEPIDLE, 60) # Seconds before sending keepalive probes 301 | conn.socket.setsockopt(socket.SOL_TCP, socket.TCP_KEEPINTVL, 1) #
Interval in seconds between keepalive probes 302 | conn.socket.setsockopt(socket.SOL_TCP, socket.TCP_KEEPCNT, 5) # Failed keepalive probes before declaring the other end dead 303 | except: 304 | pass # Some socket features are not available on all platforms (you can guess which one) 305 | 306 | # Setup stratum listener 307 | if args.stratum_port > 0: 308 | stratum_listener.StratumProxyService._set_upstream_factory(f) 309 | stratum_listener.StratumProxyService._set_custom_user(args.custom_user, args.custom_password) 310 | reactor.listenTCP(args.stratum_port, SocketTransportFactory(debug=False, event_handler=ServiceEventHandler), interface=args.stratum_host) 311 | 312 | # Setup multicast responder 313 | reactor.listenMulticast(3333, multicast_responder.MulticastResponder((args.host, args.port), args.stratum_port, args.getwork_port), listenMultiple=True) 314 | 315 | log.warning("-----------------------------------------------------------------------") 316 | if args.getwork_host == '0.0.0.0' and args.stratum_host == '0.0.0.0': 317 | log.warning("PROXY IS LISTENING ON ALL IPs ON PORT %d (stratum) AND %d (getwork)" % (args.stratum_port, args.getwork_port)) 318 | else: 319 | log.warning("LISTENING FOR MINERS ON http://%s:%d (getwork) and stratum+tcp://%s:%d (stratum)" % \ 320 | (args.getwork_host, args.getwork_port, args.stratum_host, args.stratum_port)) 321 | log.warning("-----------------------------------------------------------------------") 322 | 323 | if __name__ == '__main__': 324 | main(args) 325 | reactor.run() 326 | -------------------------------------------------------------------------------- /distribute_setup.py: -------------------------------------------------------------------------------- 1 | #!python 2 | """Bootstrap distribute installation 3 | 4 | If you want to use setuptools in your package's setup.py, just include this 5 | file in the same directory with it, and add this to the top of your setup.py:: 6 | 7 | from distribute_setup import use_setuptools 8 | use_setuptools() 9 | 10 | If you want to require a specific version of setuptools, set a download 11 | mirror, or use an alternate download directory, you can do so by supplying 12 | the appropriate options to ``use_setuptools()``. 13 | 14 | This file can also be run as a script to install or upgrade setuptools.
15 | """ 16 | import os 17 | import sys 18 | import time 19 | import fnmatch 20 | import tempfile 21 | import tarfile 22 | from distutils import log 23 | 24 | try: 25 | from site import USER_SITE 26 | except ImportError: 27 | USER_SITE = None 28 | 29 | try: 30 | import subprocess 31 | 32 | def _python_cmd(*args): 33 | args = (sys.executable,) + args 34 | return subprocess.call(args) == 0 35 | 36 | except ImportError: 37 | # will be used for python 2.3 38 | def _python_cmd(*args): 39 | args = (sys.executable,) + args 40 | # quoting arguments if windows 41 | if sys.platform == 'win32': 42 | def quote(arg): 43 | if ' ' in arg: 44 | return '"%s"' % arg 45 | return arg 46 | args = [quote(arg) for arg in args] 47 | return os.spawnl(os.P_WAIT, sys.executable, *args) == 0 48 | 49 | DEFAULT_VERSION = "0.6.28" 50 | DEFAULT_URL = "https://pypi.python.org/packages/source/d/distribute/" 51 | SETUPTOOLS_FAKED_VERSION = "0.6c11" 52 | 53 | SETUPTOOLS_PKG_INFO = """\ 54 | Metadata-Version: 1.0 55 | Name: setuptools 56 | Version: %s 57 | Summary: xxxx 58 | Home-page: xxx 59 | Author: xxx 60 | Author-email: xxx 61 | License: xxx 62 | Description: xxx 63 | """ % SETUPTOOLS_FAKED_VERSION 64 | 65 | 66 | def _install(tarball, install_args=()): 67 | # extracting the tarball 68 | tmpdir = tempfile.mkdtemp() 69 | log.warn('Extracting in %s', tmpdir) 70 | old_wd = os.getcwd() 71 | try: 72 | os.chdir(tmpdir) 73 | tar = tarfile.open(tarball) 74 | _extractall(tar) 75 | tar.close() 76 | 77 | # going in the directory 78 | subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) 79 | os.chdir(subdir) 80 | log.warn('Now working in %s', subdir) 81 | 82 | # installing 83 | log.warn('Installing Distribute') 84 | if not _python_cmd('setup.py', 'install', *install_args): 85 | log.warn('Something went wrong during the installation.') 86 | log.warn('See the error message above.') 87 | finally: 88 | os.chdir(old_wd) 89 | 90 | 91 | def _build_egg(egg, tarball, to_dir): 92 | # extracting the tarball 93 | tmpdir = tempfile.mkdtemp() 94 | log.warn('Extracting in %s', tmpdir) 95 | old_wd = os.getcwd() 96 | try: 97 | os.chdir(tmpdir) 98 | tar = tarfile.open(tarball) 99 | _extractall(tar) 100 | tar.close() 101 | 102 | # going in the directory 103 | subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) 104 | os.chdir(subdir) 105 | log.warn('Now working in %s', subdir) 106 | 107 | # building an egg 108 | log.warn('Building a Distribute egg in %s', to_dir) 109 | _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) 110 | 111 | finally: 112 | os.chdir(old_wd) 113 | # returning the result 114 | log.warn(egg) 115 | if not os.path.exists(egg): 116 | raise IOError('Could not build the egg.') 117 | 118 | 119 | def _do_download(version, download_base, to_dir, download_delay): 120 | egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg' 121 | % (version, sys.version_info[0], sys.version_info[1])) 122 | if not os.path.exists(egg): 123 | tarball = download_setuptools(version, download_base, 124 | to_dir, download_delay) 125 | _build_egg(egg, tarball, to_dir) 126 | sys.path.insert(0, egg) 127 | import setuptools 128 | setuptools.bootstrap_install_from = egg 129 | 130 | 131 | def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, 132 | to_dir=os.curdir, download_delay=15, no_fake=True): 133 | # making sure we use the absolute path 134 | to_dir = os.path.abspath(to_dir) 135 | was_imported = 'pkg_resources' in sys.modules or \ 136 | 'setuptools' in sys.modules 137 | try: 138 | try: 139 | import pkg_resources 140 | if not 
hasattr(pkg_resources, '_distribute'): 141 | if not no_fake: 142 | _fake_setuptools() 143 | raise ImportError 144 | except ImportError: 145 | return _do_download(version, download_base, to_dir, download_delay) 146 | try: 147 | pkg_resources.require("distribute>=" + version) 148 | return 149 | except pkg_resources.VersionConflict: 150 | e = sys.exc_info()[1] 151 | if was_imported: 152 | sys.stderr.write( 153 | "The required version of distribute (>=%s) is not available,\n" 154 | "and can't be installed while this script is running. Please\n" 155 | "install a more recent version first, using\n" 156 | "'easy_install -U distribute'." 157 | "\n\n(Currently using %r)\n" % (version, e.args[0])) 158 | sys.exit(2) 159 | else: 160 | del pkg_resources, sys.modules['pkg_resources'] # reload ok 161 | return _do_download(version, download_base, to_dir, 162 | download_delay) 163 | except pkg_resources.DistributionNotFound: 164 | return _do_download(version, download_base, to_dir, 165 | download_delay) 166 | finally: 167 | if not no_fake: 168 | _create_fake_setuptools_pkg_info(to_dir) 169 | 170 | 171 | def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, 172 | to_dir=os.curdir, delay=15): 173 | """Download distribute from a specified location and return its filename 174 | 175 | `version` should be a valid distribute version number that is available 176 | as an egg for download under the `download_base` URL (which should end 177 | with a '/'). `to_dir` is the directory where the egg will be downloaded. 178 | `delay` is the number of seconds to pause before an actual download 179 | attempt. 180 | """ 181 | # making sure we use the absolute path 182 | to_dir = os.path.abspath(to_dir) 183 | try: 184 | from urllib.request import urlopen 185 | except ImportError: 186 | from urllib2 import urlopen 187 | tgz_name = "distribute-%s.tar.gz" % version 188 | url = download_base + tgz_name 189 | saveto = os.path.join(to_dir, tgz_name) 190 | src = dst = None 191 | if not os.path.exists(saveto): # Avoid repeated downloads 192 | try: 193 | log.warn("Downloading %s", url) 194 | src = urlopen(url) 195 | # Read/write all in one block, so we don't create a corrupt file 196 | # if the download is interrupted. 
197 | data = src.read() 198 | dst = open(saveto, "wb") 199 | dst.write(data) 200 | finally: 201 | if src: 202 | src.close() 203 | if dst: 204 | dst.close() 205 | return os.path.realpath(saveto) 206 | 207 | 208 | def _no_sandbox(function): 209 | def __no_sandbox(*args, **kw): 210 | try: 211 | from setuptools.sandbox import DirectorySandbox 212 | if not hasattr(DirectorySandbox, '_old'): 213 | def violation(*args): 214 | pass 215 | DirectorySandbox._old = DirectorySandbox._violation 216 | DirectorySandbox._violation = violation 217 | patched = True 218 | else: 219 | patched = False 220 | except ImportError: 221 | patched = False 222 | 223 | try: 224 | return function(*args, **kw) 225 | finally: 226 | if patched: 227 | DirectorySandbox._violation = DirectorySandbox._old 228 | del DirectorySandbox._old 229 | 230 | return __no_sandbox 231 | 232 | 233 | def _patch_file(path, content): 234 | """Will backup the file then patch it""" 235 | existing_content = open(path).read() 236 | if existing_content == content: 237 | # already patched 238 | log.warn('Already patched.') 239 | return False 240 | log.warn('Patching...') 241 | _rename_path(path) 242 | f = open(path, 'w') 243 | try: 244 | f.write(content) 245 | finally: 246 | f.close() 247 | return True 248 | 249 | _patch_file = _no_sandbox(_patch_file) 250 | 251 | 252 | def _same_content(path, content): 253 | return open(path).read() == content 254 | 255 | 256 | def _rename_path(path): 257 | new_name = path + '.OLD.%s' % time.time() 258 | log.warn('Renaming %s into %s', path, new_name) 259 | os.rename(path, new_name) 260 | return new_name 261 | 262 | 263 | def _remove_flat_installation(placeholder): 264 | if not os.path.isdir(placeholder): 265 | log.warn('Unkown installation at %s', placeholder) 266 | return False 267 | found = False 268 | for file in os.listdir(placeholder): 269 | if fnmatch.fnmatch(file, 'setuptools*.egg-info'): 270 | found = True 271 | break 272 | if not found: 273 | log.warn('Could not locate setuptools*.egg-info') 274 | return 275 | 276 | log.warn('Removing elements out of the way...') 277 | pkg_info = os.path.join(placeholder, file) 278 | if os.path.isdir(pkg_info): 279 | patched = _patch_egg_dir(pkg_info) 280 | else: 281 | patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO) 282 | 283 | if not patched: 284 | log.warn('%s already patched.', pkg_info) 285 | return False 286 | # now let's move the files out of the way 287 | for element in ('setuptools', 'pkg_resources.py', 'site.py'): 288 | element = os.path.join(placeholder, element) 289 | if os.path.exists(element): 290 | _rename_path(element) 291 | else: 292 | log.warn('Could not find the %s element of the ' 293 | 'Setuptools distribution', element) 294 | return True 295 | 296 | _remove_flat_installation = _no_sandbox(_remove_flat_installation) 297 | 298 | 299 | def _after_install(dist): 300 | log.warn('After install bootstrap.') 301 | placeholder = dist.get_command_obj('install').install_purelib 302 | _create_fake_setuptools_pkg_info(placeholder) 303 | 304 | 305 | def _create_fake_setuptools_pkg_info(placeholder): 306 | if not placeholder or not os.path.exists(placeholder): 307 | log.warn('Could not find the install location') 308 | return 309 | pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1]) 310 | setuptools_file = 'setuptools-%s-py%s.egg-info' % \ 311 | (SETUPTOOLS_FAKED_VERSION, pyver) 312 | pkg_info = os.path.join(placeholder, setuptools_file) 313 | if os.path.exists(pkg_info): 314 | log.warn('%s already exists', pkg_info) 315 | return 316 | 317 | if not 
os.access(pkg_info, os.W_OK): 318 | log.warn("Don't have permissions to write %s, skipping", pkg_info) 319 | 320 | log.warn('Creating %s', pkg_info) 321 | f = open(pkg_info, 'w') 322 | try: 323 | f.write(SETUPTOOLS_PKG_INFO) 324 | finally: 325 | f.close() 326 | 327 | pth_file = os.path.join(placeholder, 'setuptools.pth') 328 | log.warn('Creating %s', pth_file) 329 | f = open(pth_file, 'w') 330 | try: 331 | f.write(os.path.join(os.curdir, setuptools_file)) 332 | finally: 333 | f.close() 334 | 335 | _create_fake_setuptools_pkg_info = _no_sandbox( 336 | _create_fake_setuptools_pkg_info 337 | ) 338 | 339 | 340 | def _patch_egg_dir(path): 341 | # let's check if it's already patched 342 | pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO') 343 | if os.path.exists(pkg_info): 344 | if _same_content(pkg_info, SETUPTOOLS_PKG_INFO): 345 | log.warn('%s already patched.', pkg_info) 346 | return False 347 | _rename_path(path) 348 | os.mkdir(path) 349 | os.mkdir(os.path.join(path, 'EGG-INFO')) 350 | pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO') 351 | f = open(pkg_info, 'w') 352 | try: 353 | f.write(SETUPTOOLS_PKG_INFO) 354 | finally: 355 | f.close() 356 | return True 357 | 358 | _patch_egg_dir = _no_sandbox(_patch_egg_dir) 359 | 360 | 361 | def _before_install(): 362 | log.warn('Before install bootstrap.') 363 | _fake_setuptools() 364 | 365 | 366 | def _under_prefix(location): 367 | if 'install' not in sys.argv: 368 | return True 369 | args = sys.argv[sys.argv.index('install') + 1:] 370 | for index, arg in enumerate(args): 371 | for option in ('--root', '--prefix'): 372 | if arg.startswith('%s=' % option): 373 | top_dir = arg.split('root=')[-1] 374 | return location.startswith(top_dir) 375 | elif arg == option: 376 | if len(args) > index: 377 | top_dir = args[index + 1] 378 | return location.startswith(top_dir) 379 | if arg == '--user' and USER_SITE is not None: 380 | return location.startswith(USER_SITE) 381 | return True 382 | 383 | 384 | def _fake_setuptools(): 385 | log.warn('Scanning installed packages') 386 | try: 387 | import pkg_resources 388 | except ImportError: 389 | # we're cool 390 | log.warn('Setuptools or Distribute does not seem to be installed.') 391 | return 392 | ws = pkg_resources.working_set 393 | try: 394 | setuptools_dist = ws.find( 395 | pkg_resources.Requirement.parse('setuptools', replacement=False) 396 | ) 397 | except TypeError: 398 | # old distribute API 399 | setuptools_dist = ws.find( 400 | pkg_resources.Requirement.parse('setuptools') 401 | ) 402 | 403 | if setuptools_dist is None: 404 | log.warn('No setuptools distribution found') 405 | return 406 | # detecting if it was already faked 407 | setuptools_location = setuptools_dist.location 408 | log.warn('Setuptools installation detected at %s', setuptools_location) 409 | 410 | # if --root or --preix was provided, and if 411 | # setuptools is not located in them, we don't patch it 412 | if not _under_prefix(setuptools_location): 413 | log.warn('Not patching, --root or --prefix is installing Distribute' 414 | ' in another location') 415 | return 416 | 417 | # let's see if its an egg 418 | if not setuptools_location.endswith('.egg'): 419 | log.warn('Non-egg installation') 420 | res = _remove_flat_installation(setuptools_location) 421 | if not res: 422 | return 423 | else: 424 | log.warn('Egg installation') 425 | pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO') 426 | if (os.path.exists(pkg_info) and 427 | _same_content(pkg_info, SETUPTOOLS_PKG_INFO)): 428 | log.warn('Already patched.') 429 | return 
430 | log.warn('Patching...') 431 | # let's create a fake egg replacing setuptools one 432 | res = _patch_egg_dir(setuptools_location) 433 | if not res: 434 | return 435 | log.warn('Patched done.') 436 | _relaunch() 437 | 438 | 439 | def _relaunch(): 440 | log.warn('Relaunching...') 441 | # we have to relaunch the process 442 | # pip marker to avoid a relaunch bug 443 | _cmd = ['-c', 'install', '--single-version-externally-managed'] 444 | if sys.argv[:3] == _cmd: 445 | sys.argv[0] = 'setup.py' 446 | args = [sys.executable] + sys.argv 447 | sys.exit(subprocess.call(args)) 448 | 449 | 450 | def _extractall(self, path=".", members=None): 451 | """Extract all members from the archive to the current working 452 | directory and set owner, modification time and permissions on 453 | directories afterwards. `path' specifies a different directory 454 | to extract to. `members' is optional and must be a subset of the 455 | list returned by getmembers(). 456 | """ 457 | import copy 458 | import operator 459 | from tarfile import ExtractError 460 | directories = [] 461 | 462 | if members is None: 463 | members = self 464 | 465 | for tarinfo in members: 466 | if tarinfo.isdir(): 467 | # Extract directories with a safe mode. 468 | directories.append(tarinfo) 469 | tarinfo = copy.copy(tarinfo) 470 | tarinfo.mode = 448 # decimal for oct 0700 471 | self.extract(tarinfo, path) 472 | 473 | # Reverse sort directories. 474 | if sys.version_info < (2, 4): 475 | def sorter(dir1, dir2): 476 | return cmp(dir1.name, dir2.name) 477 | directories.sort(sorter) 478 | directories.reverse() 479 | else: 480 | directories.sort(key=operator.attrgetter('name'), reverse=True) 481 | 482 | # Set correct owner, mtime and filemode on directories. 483 | for tarinfo in directories: 484 | dirpath = os.path.join(path, tarinfo.name) 485 | try: 486 | self.chown(tarinfo, dirpath) 487 | self.utime(tarinfo, dirpath) 488 | self.chmod(tarinfo, dirpath) 489 | except ExtractError: 490 | e = sys.exc_info()[1] 491 | if self.errorlevel > 1: 492 | raise 493 | else: 494 | self._dbg(1, "tarfile: %s" % e) 495 | 496 | 497 | def _build_install_args(argv): 498 | install_args = [] 499 | user_install = '--user' in argv 500 | if user_install and sys.version_info < (2, 6): 501 | log.warn("--user requires Python 2.6 or later") 502 | raise SystemExit(1) 503 | if user_install: 504 | install_args.append('--user') 505 | return install_args 506 | 507 | 508 | def main(argv, version=DEFAULT_VERSION): 509 | """Install or upgrade setuptools and EasyInstall""" 510 | tarball = download_setuptools() 511 | _install(tarball, _build_install_args(argv)) 512 | 513 | 514 | if __name__ == '__main__': 515 | main(sys.argv[1:]) 516 | --------------------------------------------------------------------------------
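
The share-validation path shown above in mining_libs/jobs.py (word-wise byte reversal of the 80-byte header, double SHA-256, comparison of the hash as a little-endian uint256 against the target) can be illustrated with a small standalone sketch. This is not the proxy's own mining_libs.utils module; the helper names, the all-zero demo header and the difficulty-1 target below are placeholders for illustration only.

#!/usr/bin/env python
# Standalone sketch, not one of the repository files listed above.
import binascii
import hashlib
import struct

def doublesha(data):
    # SHA-256 applied twice, as used for Bitcoin block hashing
    return hashlib.sha256(hashlib.sha256(data).digest()).digest()

def uint256_from_str(s):
    # Interpret 32 bytes as a little-endian 256-bit integer
    result = 0
    for i, word in enumerate(struct.unpack('<IIIIIIII', s[:32])):
        result += word << (32 * i)
    return result

def share_meets_target(header_hex, target):
    # Getwork 'data' is hex-encoded; only the first 80 bytes
    # (160 hex characters) form the real block header.
    header_bin = binascii.unhexlify(header_hex[:160])
    # Reverse the byte order of every 32-bit word before hashing,
    # mirroring what JobRegistry.submit() does.
    rev = b''.join([header_bin[i*4:i*4+4][::-1] for i in range(0, 20)])
    hash_bin = doublesha(rev)
    # A smaller hash means more work: the share is acceptable when the
    # hash, read as a little-endian uint256, does not exceed the target.
    return uint256_from_str(hash_bin) <= target

if __name__ == '__main__':
    # Illustrative placeholder values, not a real share.
    diff1_target = 0xffff << 208  # standard difficulty-1 target
    fake_header = '00' * 128      # 128 zero bytes, like a getwork 'data' field
    print(share_meets_target(fake_header, diff1_target))

The version-rolling extension negotiated by --enable-stratum-extensions reduces to similarly simple arithmetic: VersionMaskSubscription.get_effective_mask() returns the bitwise AND of the mask granted by the upstream pool and the mask requested by the miner, so a connected miner may only roll version bits that both sides allow.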