├── .github
│   └── workflows
│       └── semgrep.yml
├── LICENSE.txt
├── README.rst
├── mmdbencoder
│   └── __init__.py
├── setup.cfg
└── setup.py

/.github/workflows/semgrep.yml:
--------------------------------------------------------------------------------
1 | 
2 | on:
3 |   pull_request: {}
4 |   workflow_dispatch: {}
5 |   push:
6 |     branches:
7 |       - main
8 |       - master
9 |   schedule:
10 |     - cron: '0 0 * * *'
11 | name: Semgrep config
12 | jobs:
13 |   semgrep:
14 |     name: semgrep/ci
15 |     runs-on: ubuntu-20.04
16 |     env:
17 |       SEMGREP_APP_TOKEN: ${{ secrets.SEMGREP_APP_TOKEN }}
18 |       SEMGREP_URL: https://cloudflare.semgrep.dev
19 |       SEMGREP_APP_URL: https://cloudflare.semgrep.dev
20 |       SEMGREP_VERSION_CHECK_URL: https://cloudflare.semgrep.dev/api/check-version
21 |     container:
22 |       image: returntocorp/semgrep
23 |     steps:
24 |       - uses: actions/checkout@v3
25 |       - run: semgrep ci
26 | 
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | Copyright (c) 2018, Cloudflare. All rights reserved.
2 | 
3 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
4 | 
5 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
6 | 
7 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
8 | 
9 | 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
10 | 
11 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | Python MMDB encoder
2 | =========================
3 | 
4 | At Cloudflare, as part of our network automation, we build our own IP prefix tables. This package encodes such tables into the MaxMind DB (MMDB) format so they can be read with the official MMDB reader libraries.
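A file written by ``Encoder.write`` follows the standard MMDB layout: the binary search trie (two ``record_size``-bit records per node), a 16-byte all-zero separator, the serialized data section, and the metadata map introduced by the ``\xab\xcd\xefMaxMind.com`` marker.
::
    [ search trie ][ 16 zero bytes ][ data section ][ metadata ]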
5 | 
6 | An example of how to use it:
7 | ::
8 |     import mmdbencoder
9 |     enc = mmdbencoder.Encoder(
10 |         6, # IP version
11 |         32, # Size of the pointers
12 |         'My-Custom-Table', # Name of the table
13 |         ['en'], # Languages
14 |         {'en': 'Lorem Ipsum'}, # Description
15 |         compat=True) # Map IPv4 in IPv6 (::abcd instead of ::ffff:abcd) to be read by official libraries
16 |     data = enc.insert_data({'info': 'Hello World'})
17 |     enc.insert_network(u'10.0.0.0/24', data)
18 |     enc.write_file('hello.mmdb')
19 | 
20 | Installation
21 | ============
22 | 
23 | From source:
24 | ::
25 |     $ ./setup.py install
26 |     $
27 | 
28 | From pypi:
29 | ::
30 |     $ pip install py-mmdb-encoder
31 |     $
32 | 
--------------------------------------------------------------------------------
/mmdbencoder/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | 
3 | import ipaddress
4 | import struct
5 | import io
6 | import time
7 | import sys
8 | 
9 | if sys.version_info > (3,):
10 |     long = int
11 | 
12 | class Node():
13 |     def __init__(self):
14 |         self.left = None
15 |         self.right = None
16 |         self.data = None
17 |         self.data_schema = {}
18 | 
19 |         self.written_id = None
20 |         self.final = 0
21 | 
22 | class Pointer():
23 |     def __init__(self, addr):
24 |         self.addr = addr
25 | 
26 | class DataCache():
27 |     def __init__(self, addr):
28 |         self.addr = addr
29 | 
30 | class EncoderConstants():
31 |     TYPE_PTR = 1
32 |     TYPE_UTF8STR = 2
33 |     TYPE_DOUBLE = 3
34 |     TYPE_BYTES = 4
35 |     TYPE_UINT16 = 5
36 |     TYPE_UINT32 = 6
37 |     TYPE_MAP = 7
38 |     TYPE_INT32 = 8
39 |     TYPE_UINT64 = 9
40 |     TYPE_UINT128 = 10
41 |     TYPE_ARRAY = 11
42 |     TYPE_DATACACHE = 12
43 |     TYPE_ENDMARKER = 13
44 |     TYPE_BOOLEAN = 14
45 |     TYPE_FLOAT = 15
46 | 
47 |     key_map = {
48 |         'ptr': TYPE_PTR,
49 |         'utf8-string': TYPE_UTF8STR,
50 |         'double': TYPE_DOUBLE,
51 |         'bytes': TYPE_BYTES,
52 |         'uint16': TYPE_UINT16,
53 |         'uint32': TYPE_UINT32,
54 |         'map': TYPE_MAP,
55 |         'int32': TYPE_INT32,
56 |         'uint64': TYPE_UINT64,
57 |         'uint128': TYPE_UINT128,
58 |         'array': TYPE_ARRAY,
59 |         'data_cache': TYPE_DATACACHE,
60 |         'end_marker': TYPE_ENDMARKER,
61 |         'boolean': TYPE_BOOLEAN,
62 |         'float': TYPE_FLOAT
63 |     }
64 | 
65 | class Encoder():
66 | 
67 |     def __init__(self,
68 |                  ip_version,
69 |                  record_size,
70 |                  database_type,
71 |                  languages,
72 |                  description,
73 |                  compat=True):
74 | 
75 |         if ip_version != 4 and ip_version != 6:
76 |             raise Exception('Encoder: __init__: %d is not a correct IP version (4 or 6)' % ip_version)
77 | 
78 |         self.ip_version = ip_version
79 |         self.record_size = record_size
80 |         self.database_type = database_type
81 |         self.languages = languages
82 |         self.description = description
83 |         self.node_count = 1
84 |         self.entries_count = 0
85 |         self.trie = Node()
86 |         self.data = []
87 |         self.data_serialized = []
88 |         self.data_pos = [ 0 ]
89 |         self.compat = compat
90 | 
91 |     @staticmethod
92 |     def ipnet_to_bits(ipnet):
93 |         ipnum = int(ipnet.network_address)
94 | 
95 |         m = ipnet.max_prefixlen
96 |         arr = []
97 | 
98 |         for i in range(0, m//8):  # one byte per iteration; // keeps the bound an int on Python 3
99 |             val = (ipnum&(0xff<<(m-(i+1)*8)))>>(m-(i+1)*8)
100 |             arr.append(int(val))
101 | 
102 |         cutsize = ipnet.prefixlen//8 + int(ipnet.prefixlen%8 != 0)
103 |         return arr[0:cutsize]
104 | 
105 |     def _add_to_trie(self, ipnum, prefixlen, max_prefixlen, keyid, strict = True, final = True, originalprefixlen = 0):
106 |         curnode = self.trie
107 |         parentnode = None
108 |         carrydata = None
109 |         for i in range(0, prefixlen):
110 |             val = int((ipnum&(0x1<<(max_prefixlen-(i+1))))>>(max_prefixlen-(i+1)))
111 | 
112 | 
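            # val now holds bit i of the network address, most-significant bit first.
            # The walk below follows it through the trie: a 0 bit goes to the left
            # child, a 1 bit to the right child, creating intermediate nodes as needed.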
parentnode = curnode 113 | if val == 0: 114 | curnode = curnode.left 115 | elif val == 1: 116 | curnode = curnode.right 117 | 118 | if curnode == None: 119 | curnode = Node() 120 | 121 | if i < prefixlen-1: 122 | self.node_count += 1 123 | if val == 0: 124 | parentnode.left = curnode 125 | elif val == 1: 126 | parentnode.right = curnode 127 | 128 | if curnode.data != None and i < prefixlen-1 and strict: 129 | raise Exception('Encoder: add_to_trie: try setting data on a non-final: %s already has child. Not updating in strict mode.' % ipnum) 130 | elif curnode.data != None and i < prefixlen-1 and carrydata == None: 131 | carrydata = curnode.data 132 | carrylen = curnode.final 133 | curnode.data = None 134 | curnode.final = 0 135 | self.node_count += 1 136 | elif carrydata != None and i <= prefixlen-1: 137 | curnode.data = None 138 | curnode.final = 0 139 | if val == 0: 140 | carrynode = Node() 141 | carrynode.data = carrydata 142 | parentnode.right = carrynode 143 | carrynode.final = carrylen 144 | elif val == 1: 145 | carrynode = Node() 146 | carrynode.data = carrydata 147 | parentnode.left = carrynode 148 | carrynode.final = carrylen 149 | 150 | if i == prefixlen-1: 151 | if curnode.data is not None and strict: 152 | raise Exception('Encoder: add_to_trie: node %s already has data. Not updating in strict mode.' % ipnum) 153 | 154 | if (curnode.left is not None or curnode.right is not None) and strict: 155 | raise Exception('Encoder: add_to_trie: try setting data on a non-final: %s already has child. Not updating in strict mode.' % ipnum) 156 | 157 | if not strict and (curnode.left is not None or curnode.right is not None): 158 | oplen = prefixlen 159 | if originalprefixlen != 0: 160 | oplen = originalprefixlen 161 | if curnode.left is not None: 162 | newipnum = ipnum | 1<<(max_prefixlen-i-2) 163 | self._add_to_trie(newipnum, prefixlen+1, max_prefixlen, keyid, strict=False, final=False, originalprefixlen=oplen) 164 | if curnode.right is not None: 165 | newipnum = ipnum 166 | self._add_to_trie(newipnum, prefixlen+1, max_prefixlen, keyid, strict=False, final=False, originalprefixlen=oplen) 167 | elif curnode.data is None or final or (not final and originalprefixlen > curnode.final): 168 | #elif curnode.data is None or not curnode.final or final: 169 | #elif curnode.data is None or not curnode.final: 170 | curnode.data = keyid 171 | if originalprefixlen != 0: 172 | curnode.final = originalprefixlen 173 | else: 174 | curnode.final = prefixlen 175 | 176 | 177 | def explore(self): 178 | curnode = self.trie 179 | toexplore = [curnode] 180 | while len(toexplore) > 0: 181 | curnode = toexplore.pop(0) 182 | if curnode.left != None: 183 | toexplore.append(curnode.left) 184 | if curnode.right != None: 185 | toexplore.append(curnode.right) 186 | print('Node {node} | Left={left} | Right={right} | Data={data} | Final {final}'.format(node=curnode, 187 | left=curnode.left, right=curnode.right, data=curnode.data, final=curnode.final)) 188 | 189 | def add_to_trie(self, ipnet, keyid, strict = True): 190 | ipnum = int(ipnet.network_address) 191 | m = ipnet.max_prefixlen 192 | ipnet.prefixlen 193 | self._add_to_trie(ipnum, ipnet.prefixlen, ipnet.max_prefixlen, keyid, strict=strict) 194 | 195 | def add_data(self, d): 196 | self.data.append(d) 197 | 198 | buf = io.BytesIO() 199 | size = Encoder.write_data_single(buf, d) 200 | self.data_serialized.append(buf) 201 | self.data_pos.append( self.data_pos[-1] + size ) 202 | return self.data_pos[-2] 203 | 204 | def insert_raw_data(self, data): 205 | data_offset = 
self.add_data(data) 206 | return data_offset 207 | 208 | def insert_data(self, data): 209 | data_struct = Encoder.python_data_to_mmdb_struct(data) 210 | data_offset = self.add_data(data_struct) 211 | return data_offset 212 | 213 | def insert_network(self, prefix, data_offset, strict = True): 214 | self.entries_count += 1 215 | ipnet = ipaddress.ip_network(prefix, strict=False) 216 | 217 | if ipnet.version == 6 and self.ip_version != 6: 218 | raise Exception('Encoder: insert_network: cannot add IPv6 address in IPv4 table') 219 | 220 | if ipnet.version == 4 and self.ip_version == 6: 221 | base4in6 = ipaddress.IPv6Address(u'::ffff:0:0') 222 | v4in6addr = ipaddress.IPv6Address(int(ipnet.network_address)+int(base4in6)) 223 | 224 | # Maxmind DBs skips the first 96 bits (do not include the 0xffff) 225 | if self.compat: 226 | v4in6addr = ipaddress.IPv6Address(int(ipnet.network_address)) 227 | 228 | v4in6addr_plen = ipnet.prefixlen + 96 229 | ipnet = ipaddress.IPv6Network(u'{}/{}'.format(str(v4in6addr), v4in6addr_plen), strict=False) 230 | 231 | #print(ipnet) 232 | self.add_to_trie(ipnet, data_offset, strict=strict) 233 | 234 | @staticmethod 235 | def encode_single_ptrs(record_size, ptr): 236 | isnotmod8 = (record_size%8 != 0) 237 | ptr_list = [] 238 | 239 | m = record_size 240 | 241 | shift = 0 242 | if isnotmod8: 243 | shift = 4 244 | 245 | for i in range(0, int(m/8)): 246 | #print("{} {} {}".format(i, m, 0xff<<(m-(i+1)*8))) 247 | ptr_list.append(int((ptr&(0xff<<(m - (i+1)*8 - shift)))>>(m - (i+1)*8 - shift))) 248 | return ptr_list 249 | 250 | @staticmethod 251 | def encode_ptrs(record_size, ptrleft, ptrright): 252 | ptrs = [] 253 | isnotmod8 = (record_size%8 != 0) 254 | if record_size%4 != 0: 255 | raise Exception('Encoder: encode_ptrs: must have a size which can be modulo 4. Got %d.' % record_size) 256 | 257 | ptrleft_list = Encoder.encode_single_ptrs(record_size, ptrleft) 258 | ptrright_list = Encoder.encode_single_ptrs(record_size, ptrright) 259 | middle = [] 260 | if isnotmod8: 261 | middle.append( 262 | int( 263 | (ptrleft&(0xf<<(record_size-4)))>>(record_size-8) 264 | | 265 | (ptrright&(0xf<<(record_size-4)))>>(record_size-4) 266 | ) 267 | ) 268 | 269 | ptrs = ptrleft_list + middle + ptrright_list 270 | return ptrs 271 | 272 | @staticmethod 273 | def write_node(buf, record_size, ptrleft, ptrright): 274 | chars = Encoder.encode_ptrs(record_size, ptrleft, ptrright) 275 | #print("Writing node {} ({}) -> {} -> {}. 
Data {}".format(node, node.written_id, ptrleft, ptrright, node.data)) 276 | for i in chars: 277 | Encoder._write_v(buf, i) 278 | 279 | @staticmethod 280 | def write_nodes(buf, node_count, record_size, firstnode, datafirst = False): 281 | cur_id = 0 282 | firstnode.written_id = cur_id 283 | 284 | toexplore = [ firstnode ] 285 | itera = 0 286 | while True: 287 | future_id_left = node_count 288 | future_id_right = node_count 289 | 290 | if len(toexplore) > 0: 291 | #print(toexplore) 292 | curnode = toexplore.pop(0) 293 | 294 | if curnode.left != None: 295 | if curnode.left.data != None: 296 | future_id_left = curnode.left.data + 16 + node_count 297 | else: 298 | cur_id += 1 299 | future_id_left = cur_id 300 | curnode.left.written_id = future_id_left 301 | 302 | toexplore.append(curnode.left) 303 | #print("appending left {} -> {}".format(curnode, curnode.left)) 304 | 305 | if curnode.right != None: 306 | if curnode.right.data != None: 307 | future_id_right = curnode.right.data + 16 + node_count 308 | else: 309 | cur_id += 1 310 | future_id_right = cur_id 311 | curnode.right.written_id = future_id_right 312 | 313 | toexplore.append(curnode.right) 314 | #print("appending right {} -> {}".format(curnode, curnode.right)) 315 | #print('{} -> {} ({} {} {}) ({} {})'.format(itera, curnode, curnode.left, curnode.right, curnode.data, future_id_left, future_id_right)) 316 | Encoder.write_node(buf, record_size, future_id_left, future_id_right) 317 | itera += 1 318 | else: 319 | break 320 | 321 | @staticmethod 322 | def write_separator(buf): 323 | for i in range(0, 16): 324 | Encoder._write_v(buf, 0) 325 | 326 | @staticmethod 327 | def _write_v(buf, d): 328 | if type(d) is int: 329 | d = bytearray((d,)) 330 | if type(d) is str and sys.version_info > (3,): 331 | d = d.encode('utf-8') 332 | return buf.write(d) 333 | 334 | @staticmethod 335 | def write_field(buf, fieldid, value): 336 | length = 0 337 | fieldid_write = fieldid 338 | 339 | written = 0 340 | 341 | content = [] 342 | 343 | if fieldid == EncoderConstants.TYPE_MAP or fieldid == EncoderConstants.TYPE_ARRAY or fieldid == EncoderConstants.TYPE_UTF8STR: 344 | length = len(value) 345 | if fieldid == EncoderConstants.TYPE_UTF8STR: 346 | content = value 347 | elif fieldid == EncoderConstants.TYPE_BOOLEAN: 348 | length = int(value) 349 | elif fieldid == EncoderConstants.TYPE_FLOAT: 350 | length = 4 351 | content = struct.pack('>f', value) 352 | elif fieldid == EncoderConstants.TYPE_DOUBLE: 353 | length = 8 354 | content = struct.pack('>d', value) 355 | elif fieldid == EncoderConstants.TYPE_UINT16 or fieldid == EncoderConstants.TYPE_UINT32: 356 | length = 4 357 | content = struct.pack('>I', value) 358 | elif fieldid == EncoderConstants.TYPE_INT32: 359 | length = 4 360 | content = struct.pack('>i', value) 361 | elif fieldid == EncoderConstants.TYPE_UINT64: 362 | length = 8 363 | content = struct.pack('>Q', value) 364 | elif fieldid == EncoderConstants.TYPE_UINT128: 365 | raise Exception('Encoder: write_field: 128 bits unsigned integers encoding not implemented') 366 | elif fieldid == EncoderConstants.TYPE_PTR: 367 | length = 3<<3 368 | content = struct.pack('>I', value.addr) 369 | elif fieldid == EncoderConstants.TYPE_DATACACHE: 370 | raise Exception('Encoder: write_field: data cache container encoding not implemented') 371 | else: 372 | raise Exception('Encoder: write_field: %d encoding not implemented' % fieldid) 373 | 374 | if fieldid == EncoderConstants.TYPE_UINT16: 375 | length = 2 376 | content = content[len(content)-2:] 377 | 378 | if fieldid > 7: 379 
| fieldid_write = 0 380 | 381 | length_mod = length 382 | if length >= 65821: 383 | length_mod = 31 384 | elif length >= 285: 385 | length_mod = 30 386 | elif length >= 29: 387 | length_mod = 29 388 | 389 | tow = length_mod&0x1f | (fieldid_write&0x7)<<5 390 | Encoder._write_v(buf, tow) 391 | written += 1 392 | 393 | if length >= 65821: 394 | Encoder._write_v(buf, (length - 65821)>>16&0xff) 395 | Encoder._write_v(buf, (length - 65821)>>8&0xff) 396 | Encoder._write_v(buf, (length - 65821)&0xff) 397 | written += 3 398 | elif length >= 285: 399 | Encoder._write_v(buf, (length - 285)>>8&0xff) 400 | Encoder._write_v(buf, (length - 285)&0xff) 401 | written += 2 402 | elif length >= 29: 403 | Encoder._write_v(buf, length - 29) 404 | written += 1 # When writing on a file, doesn't return anything 405 | 406 | if fieldid > 7: 407 | tow = fieldid-7 408 | Encoder._write_v(buf, tow) 409 | written += 1 410 | 411 | if fieldid == EncoderConstants.TYPE_MAP: 412 | if type(value) is not dict: 413 | raise Exception('Encoder: write_field: encountered not a map') 414 | 415 | for k,v in value.items(): 416 | written += Encoder.write_field(buf, EncoderConstants.TYPE_UTF8STR, k) 417 | written += Encoder.write_data_single(buf, v) 418 | 419 | elif fieldid == EncoderConstants.TYPE_ARRAY: 420 | if type(value) is not list: 421 | raise Exception('Encoder: write_field: encountered not a map') 422 | 423 | for v in value: 424 | written += Encoder.write_data_single(buf, v) 425 | 426 | else: 427 | if sys.version_info > (3,): 428 | Encoder._write_v(buf, content) 429 | written += len(content) 430 | else: 431 | for i in content: 432 | Encoder._write_v(buf, i) 433 | written += 1 434 | 435 | return written 436 | 437 | @staticmethod 438 | def write_data_single(buf, data): 439 | written = 0 440 | if data != None and 'type' in data: 441 | vtype = data['type'] 442 | if vtype in EncoderConstants.key_map: 443 | fieldid = EncoderConstants.key_map[vtype] 444 | value = None 445 | 446 | if not vtype == EncoderConstants.TYPE_ENDMARKER and 'content' not in data: 447 | raise Exception('Encoder: write_data: data must have a \'content\' key') 448 | 449 | value = data['content'] 450 | written += Encoder.write_field(buf, fieldid, value) 451 | else: 452 | raise Exception('Encoder: write_data: type %s unknown' % vtype) 453 | else: 454 | raise Exception('Encoder: write_data: data must have a \'type\' key') 455 | 456 | return written 457 | 458 | @staticmethod 459 | def write_data_serialized(buf, data_serialized): 460 | for curdata in data_serialized: 461 | Encoder._write_v(buf, curdata.getvalue()) 462 | 463 | @staticmethod 464 | def write_data(buf, data): 465 | for curdata in data: 466 | Encoder.write_data_single(buf, curdata) 467 | 468 | @staticmethod 469 | def write_meta(buf, node_count, record_size, ip_version, database_type, languages, description): 470 | buf.write(b'\xab\xcd\xefMaxMind.com') 471 | Encoder.write_field(buf, EncoderConstants.TYPE_MAP, 472 | {'node_count': 473 | {'type': 'uint32', 474 | 'content': node_count}, 475 | 'record_size': 476 | {'type': 'uint16', 477 | 'content': record_size}, 478 | 'ip_version': 479 | {'type': 'uint16', 480 | 'content': ip_version}, 481 | 'database_type': 482 | {'type': 'utf8-string', 483 | 'content': database_type}, 484 | 'description': Encoder.python_data_to_mmdb_struct(description), 485 | 'languages': Encoder.python_data_to_mmdb_struct(languages), 486 | 'binary_format_major_version': 487 | {'type': 'uint16', 488 | 'content': 2}, 489 | 'binary_format_minor_version': 490 | {'type': 'uint16', 491 | 'content': 
0}, 492 | 'build_epoch': 493 | {'type': 'uint64', 494 | 'content': long(time.time())}, 495 | } 496 | ) 497 | 498 | @staticmethod 499 | def python_data_to_mmdb_struct(data): 500 | newstruct = {'type': None, 'content': None} 501 | if type(data) is dict: 502 | newstruct['type'] = 'map' 503 | newstruct['content'] = {} 504 | for k,v in data.items(): 505 | newstruct['content'][k] = Encoder.python_data_to_mmdb_struct(v) 506 | elif type(data) is list: 507 | newstruct['type'] = 'array' 508 | newstruct['content'] = [] 509 | for v in data: 510 | newstruct['content'].append(Encoder.python_data_to_mmdb_struct(v)) 511 | elif type(data) is int: 512 | newstruct['type'] = 'uint32' 513 | newstruct['content'] = data 514 | elif type(data) is long: 515 | newstruct['type'] = 'uint64' 516 | newstruct['content'] = data 517 | elif type(data) is float: 518 | newstruct['type'] = 'float' 519 | newstruct['content'] = data 520 | elif type(data) is str: 521 | newstruct['type'] = 'utf8-string' 522 | newstruct['content'] = data 523 | elif data.__class__ is Pointer: 524 | newstruct['type'] = 'ptr' 525 | newstruct['content'] = data 526 | elif data.__class__ is DataCache: 527 | newstruct['type'] = 'data_cache' 528 | newstruct['content'] = data 529 | else: 530 | raise Exception('Encoder: python_data_to_mmdb_struct: could not convert type {}'.format(type(data))) 531 | return newstruct 532 | 533 | def write(self, buf): 534 | if hasattr(buf, 'write'): 535 | Encoder.write_nodes(buf, self.node_count, self.record_size, self.trie) 536 | Encoder.write_separator(buf) 537 | Encoder.write_data_serialized(buf, self.data_serialized) 538 | Encoder.write_meta(buf, self.node_count, self.record_size, self.ip_version, self.database_type, self.languages, self.description) 539 | else: 540 | raise Exception('Encoder: write: no write method. 
Is the object a buffer?') 541 | 542 | def write_file(self, filename): 543 | with open(filename, 'wb') as f: 544 | self.write(f) 545 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bdist_wheel] 2 | universal=1 -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from setuptools import setup, find_packages 3 | from codecs import open 4 | from os import path 5 | 6 | here = path.abspath(path.dirname(__file__)) 7 | 8 | with open(path.join(here, 'README.rst'), encoding='utf-8') as f: 9 | long_description = f.read() 10 | 11 | setup( 12 | name='py-mmdb-encoder', 13 | version='1.0.4', 14 | description='Python MMDB encoder', 15 | 16 | url='https://github.com/cloudflare/py-mmdb-encoder', 17 | 18 | author='Louis Poinsignon / Cloudflare', 19 | author_email='louis@cloudflare.com', 20 | 21 | license='BSD 3', 22 | 23 | classifiers=[ 24 | 'Development Status :: 4 - Beta', 25 | 26 | 'Intended Audience :: Developers', 27 | 'Topic :: Software Development :: Build Tools', 28 | 29 | 'License :: OSI Approved :: BSD License', 30 | 31 | 'Programming Language :: Python :: 2', 32 | 'Programming Language :: Python :: 2.7', 33 | 'Programming Language :: Python :: 3', 34 | 'Programming Language :: Python :: 3.4', 35 | 'Programming Language :: Python :: 3.5', 36 | 'Programming Language :: Python :: 3.6', 37 | ], 38 | 39 | keywords='maxmind geoip network geolocation database tree development', 40 | 41 | packages=find_packages(exclude=['contrib', 'docs', 'tests']), 42 | 43 | project_urls={ 44 | 'Bug Reports': 'https://github.com/cloudflare/py-mmdb-encoder/issues', 45 | 'Source': 'https://github.com/cloudflare/py-mmdb-encoder/', 46 | }, 47 | ) 48 | --------------------------------------------------------------------------------
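To sanity-check a database produced by this encoder, the sketch below (not part of the repository) builds a small table and reads it back with the separately installed ``maxminddb`` package. The ``pip install maxminddb`` dependency, the ``test.mmdb`` file name and the example prefixes and values are illustrative assumptions.
::
    import mmdbencoder
    import maxminddb  # assumed extra dependency: pip install maxminddb

    # Build a small IPv6 table (32-bit records) holding one IPv4 and one IPv6 prefix.
    enc = mmdbencoder.Encoder(6, 32, 'Test-Table', ['en'], {'en': 'Round-trip test'}, compat=True)
    office = enc.insert_data({'name': 'office', 'asn': 64512})
    lab = enc.insert_data({'name': 'lab'})
    enc.insert_network(u'10.0.0.0/24', office)
    enc.insert_network(u'2001:db8::/32', lab)
    enc.write_file('test.mmdb')

    # Read it back with the official reader; compat=True stores IPv4 under ::/96,
    # which is where the reader expects it in an ip_version=6 database.
    reader = maxminddb.open_database('test.mmdb')
    print(reader.get('10.0.0.1'))     # should contain {'name': 'office', 'asn': 64512}
    print(reader.get('2001:db8::1'))  # should contain {'name': 'lab'}
    reader.close()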