├── LICENSE ├── Makefile ├── README.md ├── ctest.c ├── descriptor_compiled.h ├── generator.py ├── pb2.4 ├── Makefile ├── connexio.pb.cc ├── connexio.pb.h ├── connexio_pb2.py ├── google │ ├── __init__.py │ ├── __init__.pyc │ └── protobuf │ │ ├── __init__.py │ │ ├── __init__.pyc │ │ ├── compiler │ │ └── plugin_pb2.py │ │ ├── descriptor.py │ │ ├── descriptor.pyc │ │ ├── descriptor_pb2.py │ │ ├── descriptor_pb2.pyc │ │ ├── internal │ │ ├── __init__.py │ │ ├── __init__.pyc │ │ ├── _net_proto2___python.so │ │ ├── api_implementation.py │ │ ├── api_implementation.pyc │ │ ├── containers.py │ │ ├── cpp_message.py │ │ ├── cpp_message.pyc │ │ ├── decoder.py │ │ ├── encoder.py │ │ ├── message_listener.py │ │ ├── python_message.py │ │ ├── type_checkers.py │ │ └── wire_format.py │ │ ├── message.py │ │ ├── message.pyc │ │ ├── reflection.py │ │ ├── reflection.pyc │ │ ├── service.py │ │ ├── service_reflection.py │ │ └── text_format.py └── run_google24.py ├── pb_types.py ├── run_capi.py ├── run_lazy.py ├── test.pb2 └── test.proto /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2011, connex.io gmbh 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 5 | 6 | * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 7 | * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 8 | * Neither the name of the connex.io gmbh nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 9 | 10 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 11 | 12 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | 2 | compile: ctest.o 3 | gcc -shared -undefined dynamic_lookup ctest.o -o ctest.so 4 | 5 | ctest.o: ctest.c 6 | gcc -c ctest.c -I/System/Library/Frameworks/Python.framework/Versions/2.6/include/python2.6 -O3 7 | 8 | clean: 9 | rm ctest.o 10 | rm ctest.so 11 | 12 | test-generate: 13 | protoc test.proto -o test.pb2 14 | python generator.py test.pb2 15 | 16 | 17 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | CyPB 2 | ==== 3 | 4 | What is CyPB? 5 | ------------- 6 | 7 | CyPB is fast and light Protocol Buffers decoder for Python. 8 | 9 | * Lazy Decoding support. 
Messages are decoded on the fly as attributes are accessed for the first time. 10 | It is a big performance gain (~100x in some cases) if only some parts of the message are accessed. 11 | 12 | * Fast. The full decoder is twice as fast as Google's new C++ wrapper module in PB 2.4. 13 | 14 | * No additional dependencies or libraries. 15 | The automatically generated C code compiles to a Python module. 16 | 17 | 18 | Usage and example 19 | ----------------- 20 | 21 | Compile your proto file 22 | 23 | protoc your_proto_file.proto -o your_proto_file.pb2 24 | 25 | Generate the C module 26 | 27 | python generator.py your_proto_file.pb2 28 | 29 | Compile the C module as a Python extension 30 | 31 | make compile 32 | 33 | Decode messages in Python 34 | 35 | from ctest import PBMsg 36 | from pb_types import MSG_Contact 37 | 38 | msg = "CiAKBUF0YXNoGghBdGFtdXJhZCINSGV6cmV0a3VsaXlldioUChJhdGFteXJhdEBnbWFpbC5jb20q\nFAoSYXRhbXVyYWRAY29ubmV4LmlvKhQKEmF0YW11cmFkQGdtYWlsLmNvbTIOCgwrOTkzNjc2NDI2\nNDIyDgoMKzk5MzEyMjcwMjAzYh50aGlzIGlzIG5vdGUgZmllbGQgZm9yIGNvbnRhY3Q=\n".decode("base64") 39 | 40 | contact = PBMsg(MSG_Contact, msg) 41 | 42 | # Display first name and list of phone numbers 43 | print contact.name.first 44 | for e in contact.phone: 45 | print " * ", e.display_number 46 | 47 | Full Decode Benchmark 48 | --------------------- 49 | 50 | For benchmarking, only the full decode method is used instead of the lazy decoder. 51 | 52 | The comparison was done by decoding the same message 5000 times. 53 | See connexio.proto and run_google.py for the test message and structure. 54 | 55 | Environment: 56 | * MacBook Pro 2.66 GHz Intel Core 2 Duo 57 | * Python version: 2.6.1 58 | * GCC 4.2.1 59 | 60 | Average running time over 10 runs: 61 | * Google's Python module (2.3): 0.711271 seconds. 62 | * Google's C++ implementation for Python (2.4): 0.093410 seconds. 63 | * CyPB: 0.041584 seconds. 64 | 65 | 66 | TODO 67 | ---- 68 | 69 | CyPB is in an alpha stage and the API is highly likely to change in the future. 70 | Below is the list of things to do; patches are welcome. 71 | 72 | * Support all data types: doubles, bytes, etc. 73 | * Handle unknown fields properly based on wire type 74 | * Throw an exception if an invalid attribute is accessed 75 | * Merge the lazy and full decoders into one API. 76 | * Encoder 77 | 78 | About / License 79 | --------------- 80 | 81 | CyPB is developed by [connex.io gmbh][connexio] and licensed under the new BSD license. 82 | 83 | [connexio]: http://connex.io/ 84 | 85 | 86 | -------------------------------------------------------------------------------- /ctest.c: -------------------------------------------------------------------------------- 1 | /* 2 | * Protocol Buffers decoder for Python 3 | * 4 | * (C) Copyright 2011, connex.io gmbh 5 | * */ 6 | 7 | #include <Python.h> 8 | #include <string.h> 9 | #include <stdlib.h> 10 | 11 | #define TYPE_DOUBLE 1 12 | #define TYPE_FLOAT 2 13 | #define TYPE_INT64 3 14 | #define TYPE_UINT64 4 15 | #define TYPE_INT32 5 16 | #define TYPE_FIXED64 6 17 | #define TYPE_FIXED32 7 18 | #define TYPE_BOOL 8 19 | #define TYPE_STRING 9 20 | #define TYPE_GROUP 10 21 | #define TYPE_MESSAGE 11 22 | #define TYPE_BYTES 12 23 | #define TYPE_UINT32 13 24 | #define TYPE_ENUM 14 25 | #define TYPE_SFIXED32 15 26 | #define TYPE_SFIXED64 16 27 | #define TYPE_SINT32 17 28 | #define TYPE_SINT64 18 29 | 30 | /* 31 | * Message types / fields descriptor structure. 32 | * It is initialized by the setup_table function, automatically generated code 33 | * based on the proto descriptor file.
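 * (For a concrete example, see the generated setup_table() in descriptor_compiled.h
 *  further below, produced by the Makefile's test-generate target: ft[32][1] describes
 *  field number 1 of message type 32 -- a non-repeated field whose value is a message
 *  of type 33 -- and flist[32] lists the field numbers of type 32, terminated by -1.)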
34 | * 35 | * ft[Message_Type_ID][Field_Number] 36 | * Message Type ID is automatically assigned integer number to each message type. 37 | */ 38 | struct FieldDescriptor 39 | { 40 | PyObject *name_str; // pre-created PyString object for key / field name 41 | int repeated; 42 | int type; 43 | PyObject *default_val; 44 | } ft[256][256]; 45 | 46 | int flist[256][256]; 47 | 48 | /* import setup_table function */ 49 | #include "descriptor_compiled.h" 50 | 51 | 52 | /* Given message type and field number, returns type of field */ 53 | int get_type(int msgtype, int fieldid) 54 | { 55 | if( ft[msgtype][fieldid].name_str ) { 56 | return ft[msgtype][fieldid].type; 57 | } 58 | return -1; 59 | } 60 | 61 | /* create new object of specific type and return pointer to it */ 62 | PyObject *object_new(int type) 63 | { 64 | PyObject *obj = PyDict_New(); 65 | 66 | int i; 67 | for(i=0; flist[type][i] != -1; i++) { 68 | struct FieldDescriptor *f = &ft[type][flist[type][i]]; 69 | if( f->repeated ) 70 | PyDict_SetItem(obj, f->name_str, PyList_New(0)); 71 | // TODO: set default value for field. 72 | } 73 | return obj; 74 | } 75 | 76 | /* set attribute of object or append to list */ 77 | void object_add_field(PyObject* obj, int type, int field_id, PyObject* child) 78 | { 79 | struct FieldDescriptor *f = &ft[type][field_id]; 80 | if(f==NULL) 81 | return; 82 | 83 | if( ! f->repeated ) { 84 | PyDict_SetItem(obj, f->name_str, child); 85 | } else { 86 | PyObject* list; 87 | list = PyDict_GetItem(obj, f->name_str); 88 | PyList_Append(list, child); 89 | } 90 | } 91 | 92 | /* Varint, 32bit and 64bit decoders */ 93 | 94 | char *get_varint(char *c, unsigned long *val) 95 | { 96 | int shift = 0; 97 | *val = 0; 98 | do { 99 | *val |= (*c & 0x7F) << shift; 100 | if( (*c) & 0x80 ) 101 | shift += 7; 102 | else { 103 | c++; 104 | break; 105 | } 106 | c++; 107 | } while(1); 108 | return c; 109 | } 110 | 111 | char *get_32bit(char *c, unsigned int *val) 112 | { 113 | *val = c[0] | (c[1] << 8) | (c[2] << 16) | (c[3] << 24); 114 | c += 4; 115 | return c; 116 | } 117 | 118 | char *get_64bit(char *c, unsigned long *val) 119 | { 120 | int i; 121 | *val = 0; 122 | for (i = 7; i >= 0; i--) 123 | *val = (*val << 8) | c[i]; 124 | c += 8; 125 | return c; 126 | } 127 | 128 | 129 | /* Decoder context / stack. Used when decoding full message. */ 130 | struct context { 131 | PyObject *node; 132 | int type; 133 | int field_id; 134 | char *end_idx; 135 | } stack[128]; 136 | int top = 0; 137 | 138 | PyObject* full_decode(char *msg, int type) 139 | { 140 | char *c = msg; 141 | // create root object 142 | top = 1; 143 | stack[top].node = object_new(type); 144 | stack[top].type = type; 145 | stack[top].end_idx = c + strlen(msg); 146 | 147 | while(*c) { 148 | 149 | unsigned long tag, len, val; 150 | unsigned int wire_type; 151 | int type = -1; 152 | 153 | c = get_varint(c, &tag); 154 | wire_type = tag & 0x07; 155 | tag >>= 3; 156 | 157 | type = get_type(stack[top].type, tag); 158 | 159 | switch(type) { 160 | case -1: 161 | // Unknown field.. Skip. 162 | // TODO: based on wire-type.. 163 | break; 164 | 165 | case TYPE_STRING: 166 | c = get_varint(c, &len); 167 | 168 | // temporarily null-terminate string to slice it and restore back after copy. 
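// (Wire-format strings are length-prefixed rather than NUL-terminated, while
// PyString_FromString expects a NUL-terminated buffer, so the byte just past the
// field is patched and then restored. Note that this briefly writes into the
// input buffer itself.)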
169 | char tmp = c[len]; 170 | c[len] = '\0'; 171 | object_add_field(stack[top].node, stack[top].type, tag, PyString_FromString(c)); 172 | c[len] = tmp; 173 | c = c + len; 174 | break; 175 | 176 | case TYPE_ENUM: 177 | c = get_varint(c, &val); 178 | object_add_field(stack[top].node, stack[top].type, tag, Py_BuildValue("i", val)); 179 | break; 180 | 181 | case TYPE_INT32: 182 | c = get_varint(c, &val); 183 | object_add_field(stack[top].node, stack[top].type, tag, Py_BuildValue("i", val)); 184 | break; 185 | 186 | default: 187 | // Treat it as type_message 188 | c = get_varint(c, &len); 189 | 190 | // create new node and push it to stack 191 | top++; 192 | stack[top].node = object_new(type); 193 | stack[top].type = type; 194 | stack[top].field_id = tag; 195 | stack[top].end_idx = c + len; 196 | } 197 | 198 | // Pop decoded messages from stack 199 | while( top && c >= stack[top].end_idx ) { 200 | if( top-1 ) { 201 | /* Link child node to parent node */ 202 | object_add_field(stack[top-1].node, stack[top-1].type, stack[top].field_id, stack[top].node); 203 | top--; 204 | } else { 205 | return stack[top].node; 206 | } 207 | } 208 | } 209 | return NULL; 210 | } 211 | 212 | /* Python wrapper for full decode function */ 213 | static PyObject* py_decode(PyObject *self, PyObject *args) 214 | { 215 | int type; 216 | char *msg; 217 | 218 | if (PyArg_ParseTuple(args, "is", &type, &msg)) { 219 | return full_decode(msg, type); 220 | } 221 | 222 | return NULL; 223 | } 224 | 225 | static PyMethodDef methods[] = { 226 | {"decode", py_decode, METH_VARARGS, ""}, 227 | {NULL, NULL, 0, NULL} 228 | }; 229 | 230 | 231 | /* Lazy decoder */ 232 | 233 | typedef struct { 234 | PyObject_HEAD 235 | 236 | int decoded; 237 | int msg_type; 238 | char *msg_data; 239 | PyObject *fields; 240 | } PBMsg; 241 | 242 | static PBMsg* PBMsg_new(int msg_type, char *msg_data); 243 | static PyObject* PBMsg_getattr(PyObject *o, PyObject *attr_name); 244 | 245 | static int PBMsg_init(PBMsg *self, PyObject *arg, PyObject *kwds) { return 0; } 246 | 247 | static void PBMsg_dealloc(PBMsg *self) { 248 | free(self->msg_data); 249 | self->ob_type->tp_free((PyObject*)self); 250 | } 251 | 252 | static PyObject * 253 | PBMsg_new_py(PyTypeObject *type, PyObject *args, PyObject *kwds) 254 | { 255 | int msg_type; 256 | char *msg_data; 257 | 258 | if (PyArg_ParseTuple(args, "is", &msg_type, &msg_data)) { 259 | return (PyObject *)PBMsg_new(msg_type, msg_data); 260 | } 261 | } 262 | 263 | 264 | PyTypeObject PBMsgType = { 265 | PyObject_HEAD_INIT(NULL) 266 | 0, /*ob_size*/ 267 | "ctest.PBMsg", /*tp_name*/ 268 | sizeof(PBMsg), /*tp_basicsize*/ 269 | 0, /*tp_itemsize*/ 270 | /* methods */ 271 | (destructor)PBMsg_dealloc, /*tp_dealloc*/ 272 | 0, /*tp_print*/ 273 | 0, /*tp_getattr*/ 274 | 0, /*tp_setattr*/ 275 | 0, /*tp_compare*/ 276 | 0, /*tp_repr*/ 277 | 0, /*tp_as_number*/ 278 | 0, /*tp_as_sequence*/ 279 | 0, /*tp_as_mapping*/ 280 | 0, /*tp_hash*/ 281 | 0, /*tp_call*/ 282 | 0, /*tp_str*/ 283 | PBMsg_getattr, /*tp_getattro*/ 284 | 0, /*tp_setattro*/ 285 | 0, /*tp_as_buffer*/ 286 | Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/ 287 | 0, /*tp_doc*/ 288 | 0, /*tp_traverse*/ 289 | 0, /*tp_clear*/ 290 | 0, /*tp_richcompare*/ 291 | 0, /*tp_weaklistoffset*/ 292 | 0, /*tp_iter*/ 293 | 0, /*tp_iternext*/ 294 | 0, /*tp_methods*/ 295 | 0, /*tp_members*/ 296 | 0, /*tp_getset*/ 297 | 0, /*tp_base*/ 298 | 0, /*tp_dict*/ 299 | 0, /*tp_descr_get*/ 300 | 0, /*tp_descr_set*/ 301 | 0, /*tp_dictoffset*/ 302 | (initproc)PBMsg_init, /*tp_init*/ 303 | 0, /*tp_alloc*/ 304 
| PBMsg_new_py, /*tp_new*/ 305 | 0, /*tp_free*/ 306 | 0, /*tp_is_gc*/ 307 | }; 308 | 309 | /* Initialize new lazy decoder object */ 310 | static PBMsg* PBMsg_new(int msg_type, char *msg_data) 311 | { 312 | PBMsg *self; 313 | self = (PBMsg *)PBMsgType.tp_alloc(&PBMsgType, 0); 314 | if (self == NULL) 315 | return NULL; 316 | 317 | self->decoded = 0; 318 | self->msg_type = msg_type; 319 | 320 | /* take copy of buffer */ 321 | self->msg_data = malloc(strlen(msg_data)+1); 322 | strcpy(self->msg_data, msg_data); 323 | 324 | return self; 325 | } 326 | 327 | /* Decode all primitive fields in current level, don't go deeper */ 328 | void PBMsg_decode(PBMsg *node) 329 | { 330 | char *c = node->msg_data; 331 | node->fields = object_new(node->msg_type); 332 | 333 | while(*c) { 334 | 335 | char tmp; 336 | unsigned long tag, len, val; 337 | unsigned int wire_type; 338 | int type = -1; 339 | 340 | c = get_varint(c, &tag); 341 | wire_type = tag & 0x07; 342 | tag >>= 3; 343 | 344 | type = get_type(node->msg_type, tag); 345 | 346 | switch(type) { 347 | case -1: 348 | // Unknown field.. Skip. 349 | // TODO: based on wire-type.. 350 | break; 351 | 352 | case TYPE_STRING: 353 | c = get_varint(c, &len); 354 | // temporarily null-terminate string to slice it and restore back after copy. 355 | tmp = c[len]; 356 | c[len] = '\0'; 357 | object_add_field(node->fields, node->msg_type, tag, PyString_FromString(c)); 358 | c[len] = tmp; 359 | c = c + len; 360 | break; 361 | 362 | case TYPE_ENUM: 363 | c = get_varint(c, &val); 364 | object_add_field(node->fields, node->msg_type, tag, Py_BuildValue("i", val)); 365 | break; 366 | 367 | case TYPE_INT32: 368 | c = get_varint(c, &val); 369 | object_add_field(node->fields, node->msg_type, tag, Py_BuildValue("i", val)); 370 | break; 371 | 372 | default: 373 | c = get_varint(c, &len); 374 | // Create unparsed child node.. 375 | tmp = c[len]; 376 | c[len] = '\0'; 377 | PBMsg *child = PBMsg_new(type, c); 378 | c[len] = tmp; 379 | 380 | object_add_field(node->fields, node->msg_type, tag, child); 381 | 382 | c += len; 383 | } 384 | } 385 | 386 | node->decoded = 1; 387 | } 388 | 389 | /* decode current node only when child attribute is accessed, then cache the result */ 390 | static PyObject* PBMsg_getattr(PyObject *o, PyObject *attr_name) 391 | { 392 | PBMsg *self = (PBMsg*)o; 393 | 394 | if(!self->decoded) 395 | PBMsg_decode(self); 396 | 397 | return PyDict_GetItem(self->fields, attr_name); 398 | } 399 | 400 | 401 | PyMODINIT_FUNC initctest(void); /*proto*/ 402 | PyMODINIT_FUNC initctest(void) 403 | { 404 | 405 | PBMsgType.tp_base = &PyBaseObject_Type; 406 | if (PyType_Ready(&PBMsgType) < 0) 407 | return; 408 | 409 | PyObject *mod = Py_InitModule("ctest", methods); 410 | 411 | Py_INCREF(&PBMsgType); 412 | PyModule_AddObject(mod, "PBMsg", (PyObject*)&PBMsgType); 413 | 414 | memset(ft, 0, sizeof(ft)); 415 | setup_table(); 416 | 417 | return; 418 | } 419 | 420 | 421 | -------------------------------------------------------------------------------- /descriptor_compiled.h: -------------------------------------------------------------------------------- 1 | /* automatically generated code. DO NOT EDIT. 
*/ 2 | 3 | void setup_table() { 4 | flist[33][0] = 1; 5 | ft[33][1].name_str = PyString_FromString("display_name"); 6 | ft[33][1].repeated = 0; 7 | ft[33][1].type = 9; 8 | flist[33][1] = 2; 9 | ft[33][2].name_str = PyString_FromString("prefix"); 10 | ft[33][2].repeated = 0; 11 | ft[33][2].type = 9; 12 | flist[33][2] = 3; 13 | ft[33][3].name_str = PyString_FromString("first"); 14 | ft[33][3].repeated = 0; 15 | ft[33][3].type = 9; 16 | flist[33][3] = 4; 17 | ft[33][4].name_str = PyString_FromString("last"); 18 | ft[33][4].repeated = 0; 19 | ft[33][4].type = 9; 20 | flist[33][4] = 5; 21 | ft[33][5].name_str = PyString_FromString("middle"); 22 | ft[33][5].repeated = 0; 23 | ft[33][5].type = 9; 24 | flist[33][5] = 6; 25 | ft[33][6].name_str = PyString_FromString("suffix"); 26 | ft[33][6].repeated = 0; 27 | ft[33][6].type = 9; 28 | flist[33][6] = 7; 29 | ft[33][7].name_str = PyString_FromString("nickname"); 30 | ft[33][7].repeated = 0; 31 | ft[33][7].type = 9; 32 | flist[33][7] = -1; 33 | 34 | flist[35][0] = 1; 35 | ft[35][1].name_str = PyString_FromString("display_number"); 36 | ft[35][1].repeated = 0; 37 | ft[35][1].type = 9; 38 | flist[35][1] = 2; 39 | ft[35][2].name_str = PyString_FromString("_type"); 40 | ft[35][2].repeated = 0; 41 | ft[35][2].type = 14; 42 | flist[35][2] = 3; 43 | ft[35][3].name_str = PyString_FromString("country_code"); 44 | ft[35][3].repeated = 0; 45 | ft[35][3].type = 9; 46 | flist[35][3] = 4; 47 | ft[35][4].name_str = PyString_FromString("area_code"); 48 | ft[35][4].repeated = 0; 49 | ft[35][4].type = 9; 50 | flist[35][4] = 5; 51 | ft[35][5].name_str = PyString_FromString("number"); 52 | ft[35][5].repeated = 0; 53 | ft[35][5].type = 9; 54 | flist[35][5] = 6; 55 | ft[35][6].name_str = PyString_FromString("label"); 56 | ft[35][6].repeated = 0; 57 | ft[35][6].type = 9; 58 | flist[35][6] = -1; 59 | 60 | flist[37][0] = 1; 61 | ft[37][1].name_str = PyString_FromString("company"); 62 | ft[37][1].repeated = 0; 63 | ft[37][1].type = 9; 64 | flist[37][1] = 2; 65 | ft[37][2].name_str = PyString_FromString("position"); 66 | ft[37][2].repeated = 0; 67 | ft[37][2].type = 9; 68 | flist[37][2] = 3; 69 | ft[37][3].name_str = PyString_FromString("department"); 70 | ft[37][3].repeated = 0; 71 | ft[37][3].type = 9; 72 | flist[37][3] = 4; 73 | ft[37][4].name_str = PyString_FromString("description"); 74 | ft[37][4].repeated = 0; 75 | ft[37][4].type = 9; 76 | flist[37][4] = 5; 77 | ft[37][5].name_str = PyString_FromString("startdate"); 78 | ft[37][5].repeated = 0; 79 | ft[37][5].type = 34; 80 | flist[37][5] = 6; 81 | ft[37][6].name_str = PyString_FromString("enddate"); 82 | ft[37][6].repeated = 0; 83 | ft[37][6].type = 34; 84 | flist[37][6] = -1; 85 | 86 | flist[32][0] = 1; 87 | ft[32][1].name_str = PyString_FromString("name"); 88 | ft[32][1].repeated = 0; 89 | ft[32][1].type = 33; 90 | flist[32][1] = 3; 91 | ft[32][3].name_str = PyString_FromString("birthday"); 92 | ft[32][3].repeated = 0; 93 | ft[32][3].type = 34; 94 | flist[32][2] = 4; 95 | ft[32][4].name_str = PyString_FromString("anniversary"); 96 | ft[32][4].repeated = 0; 97 | ft[32][4].type = 34; 98 | flist[32][3] = 5; 99 | ft[32][5].name_str = PyString_FromString("email"); 100 | ft[32][5].repeated = 1; 101 | ft[32][5].type = 36; 102 | flist[32][4] = 6; 103 | ft[32][6].name_str = PyString_FromString("phone"); 104 | ft[32][6].repeated = 1; 105 | ft[32][6].type = 35; 106 | flist[32][5] = 10; 107 | ft[32][10].name_str = PyString_FromString("job"); 108 | ft[32][10].repeated = 1; 109 | ft[32][10].type = 37; 110 | flist[32][6] = 12; 111 | 
ft[32][12].name_str = PyString_FromString("note"); 112 | ft[32][12].repeated = 0; 113 | ft[32][12].type = 9; 114 | flist[32][7] = -1; 115 | 116 | flist[34][0] = 1; 117 | ft[34][1].name_str = PyString_FromString("year"); 118 | ft[34][1].repeated = 0; 119 | ft[34][1].type = 5; 120 | flist[34][1] = 2; 121 | ft[34][2].name_str = PyString_FromString("month"); 122 | ft[34][2].repeated = 0; 123 | ft[34][2].type = 5; 124 | flist[34][2] = 3; 125 | ft[34][3].name_str = PyString_FromString("day"); 126 | ft[34][3].repeated = 0; 127 | ft[34][3].type = 5; 128 | flist[34][3] = -1; 129 | 130 | flist[36][0] = 1; 131 | ft[36][1].name_str = PyString_FromString("email"); 132 | ft[36][1].repeated = 0; 133 | ft[36][1].type = 9; 134 | flist[36][1] = 2; 135 | ft[36][2].name_str = PyString_FromString("_type"); 136 | ft[36][2].repeated = 0; 137 | ft[36][2].type = 14; 138 | flist[36][2] = 3; 139 | ft[36][3].name_str = PyString_FromString("label"); 140 | ft[36][3].repeated = 0; 141 | ft[36][3].type = 9; 142 | flist[36][3] = -1; 143 | 144 | } 145 | -------------------------------------------------------------------------------- /generator.py: -------------------------------------------------------------------------------- 1 | 2 | from google.protobuf import descriptor_pb2 3 | import sys 4 | 5 | fds = descriptor_pb2.FileDescriptorSet() 6 | fds.ParseFromString(open(sys.argv[1]).read()) 7 | 8 | message_types = dict() 9 | message_type_ids = dict() 10 | 11 | i = 32 12 | 13 | # Contants - enum values, messagy type id, etc. 14 | consts = "# Automatically generated code. \n\n" 15 | for mt in fds.file[0].message_type: 16 | message_types[mt.name] = mt 17 | message_type_ids[mt.name] = i 18 | consts += "MSG_"+mt.name + " = " + str(i) + "\n" 19 | i += 1 20 | 21 | enum_vals = dict() 22 | for e in fds.file[0].enum_type: 23 | for v in e.value: 24 | consts += v.name + " = " + str(v.number) + "\n" 25 | 26 | f = open("pb_types.py", "w") 27 | f.write(consts) 28 | f.close() 29 | 30 | 31 | def get_type_number(f): 32 | global message_types 33 | 34 | if f.type == descriptor_pb2.FieldDescriptorProto.TYPE_MESSAGE: 35 | return message_type_ids[f.type_name.split(".")[-1]] 36 | 37 | return f.type 38 | 39 | table = "/* automatically generated code. DO NOT EDIT. 
*/\n\n" 40 | table += "void setup_table() {" 41 | 42 | for (mt_name, mt) in message_types.items(): 43 | mt_id = message_type_ids[mt_name] 44 | i = 0 45 | for f in mt.field: 46 | table += "\n\tflist[%d][%d] = %d;" % (mt_id, i, f.number) 47 | table += "\n\tft[%d][%d].name_str = PyString_FromString(\"%s\");" % (mt_id, f.number, f.name) 48 | table += "\n\tft[%d][%d].repeated = %d;" % (mt_id, f.number, 1 if f.label == 3 else 0 ) 49 | table += "\n\tft[%d][%d].type = %d;" % (mt_id, f.number, get_type_number(f) ) 50 | i += 1 51 | table += "\n\tflist[%d][%d] = -1;\n" % (mt_id, i) 52 | 53 | table += "\n}\n" 54 | 55 | f = open("descriptor_compiled.h", "w") 56 | f.write(table) 57 | f.close() 58 | 59 | -------------------------------------------------------------------------------- /pb2.4/Makefile: -------------------------------------------------------------------------------- 1 | 2 | build: 3 | gcc -c connexio.pb.cc -I/System/Library/Frameworks/Python.framework/Versions/2.6/include/python2.6 4 | gcc -shared -undefined dynamic_lookup connexio.pb.o -lprotobuf -o connexiopb.so 5 | 6 | -------------------------------------------------------------------------------- /pb2.4/connexio_pb2.py: -------------------------------------------------------------------------------- 1 | # Generated by the protocol buffer compiler. DO NOT EDIT! 2 | 3 | from google.protobuf import descriptor 4 | from google.protobuf import message 5 | from google.protobuf import reflection 6 | from google.protobuf import descriptor_pb2 7 | # @@protoc_insertion_point(imports) 8 | 9 | 10 | 11 | DESCRIPTOR = descriptor.FileDescriptor( 12 | name='connexio.proto', 13 | package='connexio', 14 | serialized_pb='\n\x0e\x63onnexio.proto\x12\x08\x63onnexio\"u\n\x07\x43ontact\x12\x1c\n\x04name\x18\x01 \x01(\x0b\x32\x0e.connexio.Name\x12\x1e\n\x05\x65mail\x18\x05 \x03(\x0b\x32\x0f.connexio.Email\x12\x1e\n\x05phone\x18\x06 \x03(\x0b\x32\x0f.connexio.Phone\x12\x0c\n\x04note\x18\x0c \x01(\t\"{\n\x04Name\x12\x14\n\x0c\x64isplay_name\x18\x01 \x01(\t\x12\x0e\n\x06prefix\x18\x02 \x01(\t\x12\r\n\x05\x66irst\x18\x03 \x01(\t\x12\x0c\n\x04last\x18\x04 \x01(\t\x12\x0e\n\x06middle\x18\x05 \x01(\t\x12\x0e\n\x06suffix\x18\x06 \x01(\t\x12\x10\n\x08nickname\x18\x07 \x01(\t\"\x1f\n\x05Phone\x12\x16\n\x0e\x64isplay_number\x18\x01 \x02(\t\"\x16\n\x05\x45mail\x12\r\n\x05\x65mail\x18\x01 \x02(\t') 15 | 16 | 17 | 18 | 19 | _CONTACT = descriptor.Descriptor( 20 | name='Contact', 21 | full_name='connexio.Contact', 22 | filename=None, 23 | file=DESCRIPTOR, 24 | containing_type=None, 25 | fields=[ 26 | descriptor.FieldDescriptor( 27 | name='name', full_name='connexio.Contact.name', index=0, 28 | number=1, type=11, cpp_type=10, label=1, 29 | has_default_value=False, default_value=None, 30 | message_type=None, enum_type=None, containing_type=None, 31 | is_extension=False, extension_scope=None, 32 | options=None), 33 | descriptor.FieldDescriptor( 34 | name='email', full_name='connexio.Contact.email', index=1, 35 | number=5, type=11, cpp_type=10, label=3, 36 | has_default_value=False, default_value=[], 37 | message_type=None, enum_type=None, containing_type=None, 38 | is_extension=False, extension_scope=None, 39 | options=None), 40 | descriptor.FieldDescriptor( 41 | name='phone', full_name='connexio.Contact.phone', index=2, 42 | number=6, type=11, cpp_type=10, label=3, 43 | has_default_value=False, default_value=[], 44 | message_type=None, enum_type=None, containing_type=None, 45 | is_extension=False, extension_scope=None, 46 | options=None), 47 | descriptor.FieldDescriptor( 48 | 
name='note', full_name='connexio.Contact.note', index=3, 49 | number=12, type=9, cpp_type=9, label=1, 50 | has_default_value=False, default_value=unicode("", "utf-8"), 51 | message_type=None, enum_type=None, containing_type=None, 52 | is_extension=False, extension_scope=None, 53 | options=None), 54 | ], 55 | extensions=[ 56 | ], 57 | nested_types=[], 58 | enum_types=[ 59 | ], 60 | options=None, 61 | is_extendable=False, 62 | extension_ranges=[], 63 | serialized_start=28, 64 | serialized_end=145, 65 | ) 66 | 67 | 68 | _NAME = descriptor.Descriptor( 69 | name='Name', 70 | full_name='connexio.Name', 71 | filename=None, 72 | file=DESCRIPTOR, 73 | containing_type=None, 74 | fields=[ 75 | descriptor.FieldDescriptor( 76 | name='display_name', full_name='connexio.Name.display_name', index=0, 77 | number=1, type=9, cpp_type=9, label=1, 78 | has_default_value=False, default_value=unicode("", "utf-8"), 79 | message_type=None, enum_type=None, containing_type=None, 80 | is_extension=False, extension_scope=None, 81 | options=None), 82 | descriptor.FieldDescriptor( 83 | name='prefix', full_name='connexio.Name.prefix', index=1, 84 | number=2, type=9, cpp_type=9, label=1, 85 | has_default_value=False, default_value=unicode("", "utf-8"), 86 | message_type=None, enum_type=None, containing_type=None, 87 | is_extension=False, extension_scope=None, 88 | options=None), 89 | descriptor.FieldDescriptor( 90 | name='first', full_name='connexio.Name.first', index=2, 91 | number=3, type=9, cpp_type=9, label=1, 92 | has_default_value=False, default_value=unicode("", "utf-8"), 93 | message_type=None, enum_type=None, containing_type=None, 94 | is_extension=False, extension_scope=None, 95 | options=None), 96 | descriptor.FieldDescriptor( 97 | name='last', full_name='connexio.Name.last', index=3, 98 | number=4, type=9, cpp_type=9, label=1, 99 | has_default_value=False, default_value=unicode("", "utf-8"), 100 | message_type=None, enum_type=None, containing_type=None, 101 | is_extension=False, extension_scope=None, 102 | options=None), 103 | descriptor.FieldDescriptor( 104 | name='middle', full_name='connexio.Name.middle', index=4, 105 | number=5, type=9, cpp_type=9, label=1, 106 | has_default_value=False, default_value=unicode("", "utf-8"), 107 | message_type=None, enum_type=None, containing_type=None, 108 | is_extension=False, extension_scope=None, 109 | options=None), 110 | descriptor.FieldDescriptor( 111 | name='suffix', full_name='connexio.Name.suffix', index=5, 112 | number=6, type=9, cpp_type=9, label=1, 113 | has_default_value=False, default_value=unicode("", "utf-8"), 114 | message_type=None, enum_type=None, containing_type=None, 115 | is_extension=False, extension_scope=None, 116 | options=None), 117 | descriptor.FieldDescriptor( 118 | name='nickname', full_name='connexio.Name.nickname', index=6, 119 | number=7, type=9, cpp_type=9, label=1, 120 | has_default_value=False, default_value=unicode("", "utf-8"), 121 | message_type=None, enum_type=None, containing_type=None, 122 | is_extension=False, extension_scope=None, 123 | options=None), 124 | ], 125 | extensions=[ 126 | ], 127 | nested_types=[], 128 | enum_types=[ 129 | ], 130 | options=None, 131 | is_extendable=False, 132 | extension_ranges=[], 133 | serialized_start=147, 134 | serialized_end=270, 135 | ) 136 | 137 | 138 | _PHONE = descriptor.Descriptor( 139 | name='Phone', 140 | full_name='connexio.Phone', 141 | filename=None, 142 | file=DESCRIPTOR, 143 | containing_type=None, 144 | fields=[ 145 | descriptor.FieldDescriptor( 146 | name='display_number', 
full_name='connexio.Phone.display_number', index=0, 147 | number=1, type=9, cpp_type=9, label=2, 148 | has_default_value=False, default_value=unicode("", "utf-8"), 149 | message_type=None, enum_type=None, containing_type=None, 150 | is_extension=False, extension_scope=None, 151 | options=None), 152 | ], 153 | extensions=[ 154 | ], 155 | nested_types=[], 156 | enum_types=[ 157 | ], 158 | options=None, 159 | is_extendable=False, 160 | extension_ranges=[], 161 | serialized_start=272, 162 | serialized_end=303, 163 | ) 164 | 165 | 166 | _EMAIL = descriptor.Descriptor( 167 | name='Email', 168 | full_name='connexio.Email', 169 | filename=None, 170 | file=DESCRIPTOR, 171 | containing_type=None, 172 | fields=[ 173 | descriptor.FieldDescriptor( 174 | name='email', full_name='connexio.Email.email', index=0, 175 | number=1, type=9, cpp_type=9, label=2, 176 | has_default_value=False, default_value=unicode("", "utf-8"), 177 | message_type=None, enum_type=None, containing_type=None, 178 | is_extension=False, extension_scope=None, 179 | options=None), 180 | ], 181 | extensions=[ 182 | ], 183 | nested_types=[], 184 | enum_types=[ 185 | ], 186 | options=None, 187 | is_extendable=False, 188 | extension_ranges=[], 189 | serialized_start=305, 190 | serialized_end=327, 191 | ) 192 | 193 | _CONTACT.fields_by_name['name'].message_type = _NAME 194 | _CONTACT.fields_by_name['email'].message_type = _EMAIL 195 | _CONTACT.fields_by_name['phone'].message_type = _PHONE 196 | DESCRIPTOR.message_types_by_name['Contact'] = _CONTACT 197 | DESCRIPTOR.message_types_by_name['Name'] = _NAME 198 | DESCRIPTOR.message_types_by_name['Phone'] = _PHONE 199 | DESCRIPTOR.message_types_by_name['Email'] = _EMAIL 200 | 201 | class Contact(message.Message): 202 | __metaclass__ = reflection.GeneratedProtocolMessageType 203 | DESCRIPTOR = _CONTACT 204 | 205 | # @@protoc_insertion_point(class_scope:connexio.Contact) 206 | 207 | class Name(message.Message): 208 | __metaclass__ = reflection.GeneratedProtocolMessageType 209 | DESCRIPTOR = _NAME 210 | 211 | # @@protoc_insertion_point(class_scope:connexio.Name) 212 | 213 | class Phone(message.Message): 214 | __metaclass__ = reflection.GeneratedProtocolMessageType 215 | DESCRIPTOR = _PHONE 216 | 217 | # @@protoc_insertion_point(class_scope:connexio.Phone) 218 | 219 | class Email(message.Message): 220 | __metaclass__ = reflection.GeneratedProtocolMessageType 221 | DESCRIPTOR = _EMAIL 222 | 223 | # @@protoc_insertion_point(class_scope:connexio.Email) 224 | 225 | # @@protoc_insertion_point(module_scope) 226 | -------------------------------------------------------------------------------- /pb2.4/google/__init__.py: -------------------------------------------------------------------------------- 1 | __import__('pkg_resources').declare_namespace(__name__) 2 | -------------------------------------------------------------------------------- /pb2.4/google/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/connexio/cypb/dc3d5ae866f7cfcdc3e313ecced0e8fa912542e1/pb2.4/google/__init__.pyc -------------------------------------------------------------------------------- /pb2.4/google/protobuf/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/connexio/cypb/dc3d5ae866f7cfcdc3e313ecced0e8fa912542e1/pb2.4/google/protobuf/__init__.py -------------------------------------------------------------------------------- /pb2.4/google/protobuf/__init__.pyc: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/connexio/cypb/dc3d5ae866f7cfcdc3e313ecced0e8fa912542e1/pb2.4/google/protobuf/__init__.pyc -------------------------------------------------------------------------------- /pb2.4/google/protobuf/compiler/plugin_pb2.py: -------------------------------------------------------------------------------- 1 | # Generated by the protocol buffer compiler. DO NOT EDIT! 2 | 3 | from google.protobuf import descriptor 4 | from google.protobuf import message 5 | from google.protobuf import reflection 6 | from google.protobuf import descriptor_pb2 7 | # @@protoc_insertion_point(imports) 8 | 9 | 10 | import google.protobuf.descriptor_pb2 11 | 12 | DESCRIPTOR = descriptor.FileDescriptor( 13 | name='google/protobuf/compiler/plugin.proto', 14 | package='google.protobuf.compiler', 15 | serialized_pb='\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"}\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xaa\x01\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a>\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\t') 16 | 17 | 18 | 19 | 20 | _CODEGENERATORREQUEST = descriptor.Descriptor( 21 | name='CodeGeneratorRequest', 22 | full_name='google.protobuf.compiler.CodeGeneratorRequest', 23 | filename=None, 24 | file=DESCRIPTOR, 25 | containing_type=None, 26 | fields=[ 27 | descriptor.FieldDescriptor( 28 | name='file_to_generate', full_name='google.protobuf.compiler.CodeGeneratorRequest.file_to_generate', index=0, 29 | number=1, type=9, cpp_type=9, label=3, 30 | has_default_value=False, default_value=[], 31 | message_type=None, enum_type=None, containing_type=None, 32 | is_extension=False, extension_scope=None, 33 | options=None), 34 | descriptor.FieldDescriptor( 35 | name='parameter', full_name='google.protobuf.compiler.CodeGeneratorRequest.parameter', index=1, 36 | number=2, type=9, cpp_type=9, label=1, 37 | has_default_value=False, default_value=unicode("", "utf-8"), 38 | message_type=None, enum_type=None, containing_type=None, 39 | is_extension=False, extension_scope=None, 40 | options=None), 41 | descriptor.FieldDescriptor( 42 | name='proto_file', full_name='google.protobuf.compiler.CodeGeneratorRequest.proto_file', index=2, 43 | number=15, type=11, cpp_type=10, label=3, 44 | has_default_value=False, default_value=[], 45 | message_type=None, enum_type=None, containing_type=None, 46 | is_extension=False, extension_scope=None, 47 | options=None), 48 | ], 49 | extensions=[ 50 | ], 51 | nested_types=[], 52 | enum_types=[ 53 | ], 54 | options=None, 55 | is_extendable=False, 56 | extension_ranges=[], 57 | serialized_start=101, 58 | serialized_end=226, 59 | ) 60 | 61 | 62 | _CODEGENERATORRESPONSE_FILE = descriptor.Descriptor( 63 | name='File', 64 | full_name='google.protobuf.compiler.CodeGeneratorResponse.File', 65 | filename=None, 66 | file=DESCRIPTOR, 67 | containing_type=None, 68 | fields=[ 69 | descriptor.FieldDescriptor( 70 | name='name', full_name='google.protobuf.compiler.CodeGeneratorResponse.File.name', index=0, 71 | number=1, type=9, cpp_type=9, label=1, 72 | has_default_value=False, 
default_value=unicode("", "utf-8"), 73 | message_type=None, enum_type=None, containing_type=None, 74 | is_extension=False, extension_scope=None, 75 | options=None), 76 | descriptor.FieldDescriptor( 77 | name='insertion_point', full_name='google.protobuf.compiler.CodeGeneratorResponse.File.insertion_point', index=1, 78 | number=2, type=9, cpp_type=9, label=1, 79 | has_default_value=False, default_value=unicode("", "utf-8"), 80 | message_type=None, enum_type=None, containing_type=None, 81 | is_extension=False, extension_scope=None, 82 | options=None), 83 | descriptor.FieldDescriptor( 84 | name='content', full_name='google.protobuf.compiler.CodeGeneratorResponse.File.content', index=2, 85 | number=15, type=9, cpp_type=9, label=1, 86 | has_default_value=False, default_value=unicode("", "utf-8"), 87 | message_type=None, enum_type=None, containing_type=None, 88 | is_extension=False, extension_scope=None, 89 | options=None), 90 | ], 91 | extensions=[ 92 | ], 93 | nested_types=[], 94 | enum_types=[ 95 | ], 96 | options=None, 97 | is_extendable=False, 98 | extension_ranges=[], 99 | serialized_start=337, 100 | serialized_end=399, 101 | ) 102 | 103 | _CODEGENERATORRESPONSE = descriptor.Descriptor( 104 | name='CodeGeneratorResponse', 105 | full_name='google.protobuf.compiler.CodeGeneratorResponse', 106 | filename=None, 107 | file=DESCRIPTOR, 108 | containing_type=None, 109 | fields=[ 110 | descriptor.FieldDescriptor( 111 | name='error', full_name='google.protobuf.compiler.CodeGeneratorResponse.error', index=0, 112 | number=1, type=9, cpp_type=9, label=1, 113 | has_default_value=False, default_value=unicode("", "utf-8"), 114 | message_type=None, enum_type=None, containing_type=None, 115 | is_extension=False, extension_scope=None, 116 | options=None), 117 | descriptor.FieldDescriptor( 118 | name='file', full_name='google.protobuf.compiler.CodeGeneratorResponse.file', index=1, 119 | number=15, type=11, cpp_type=10, label=3, 120 | has_default_value=False, default_value=[], 121 | message_type=None, enum_type=None, containing_type=None, 122 | is_extension=False, extension_scope=None, 123 | options=None), 124 | ], 125 | extensions=[ 126 | ], 127 | nested_types=[_CODEGENERATORRESPONSE_FILE, ], 128 | enum_types=[ 129 | ], 130 | options=None, 131 | is_extendable=False, 132 | extension_ranges=[], 133 | serialized_start=229, 134 | serialized_end=399, 135 | ) 136 | 137 | _CODEGENERATORREQUEST.fields_by_name['proto_file'].message_type = google.protobuf.descriptor_pb2._FILEDESCRIPTORPROTO 138 | _CODEGENERATORRESPONSE_FILE.containing_type = _CODEGENERATORRESPONSE; 139 | _CODEGENERATORRESPONSE.fields_by_name['file'].message_type = _CODEGENERATORRESPONSE_FILE 140 | DESCRIPTOR.message_types_by_name['CodeGeneratorRequest'] = _CODEGENERATORREQUEST 141 | DESCRIPTOR.message_types_by_name['CodeGeneratorResponse'] = _CODEGENERATORRESPONSE 142 | 143 | class CodeGeneratorRequest(message.Message): 144 | __metaclass__ = reflection.GeneratedProtocolMessageType 145 | DESCRIPTOR = _CODEGENERATORREQUEST 146 | 147 | # @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorRequest) 148 | 149 | class CodeGeneratorResponse(message.Message): 150 | __metaclass__ = reflection.GeneratedProtocolMessageType 151 | 152 | class File(message.Message): 153 | __metaclass__ = reflection.GeneratedProtocolMessageType 154 | DESCRIPTOR = _CODEGENERATORRESPONSE_FILE 155 | 156 | # @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorResponse.File) 157 | DESCRIPTOR = _CODEGENERATORRESPONSE 158 | 159 | # 
@@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorResponse) 160 | 161 | # @@protoc_insertion_point(module_scope) 162 | -------------------------------------------------------------------------------- /pb2.4/google/protobuf/descriptor.py: -------------------------------------------------------------------------------- 1 | # Protocol Buffers - Google's data interchange format 2 | # Copyright 2008 Google Inc. All rights reserved. 3 | # http://code.google.com/p/protobuf/ 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are 7 | # met: 8 | # 9 | # * Redistributions of source code must retain the above copyright 10 | # notice, this list of conditions and the following disclaimer. 11 | # * Redistributions in binary form must reproduce the above 12 | # copyright notice, this list of conditions and the following disclaimer 13 | # in the documentation and/or other materials provided with the 14 | # distribution. 15 | # * Neither the name of Google Inc. nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | 31 | """Descriptors essentially contain exactly the information found in a .proto 32 | file, in types that make this information accessible in Python. 33 | """ 34 | 35 | __author__ = 'robinson@google.com (Will Robinson)' 36 | 37 | 38 | from google.protobuf.internal import api_implementation 39 | 40 | 41 | if api_implementation.Type() == 'cpp': 42 | from google.protobuf.internal import cpp_message 43 | 44 | 45 | class Error(Exception): 46 | """Base error for this module.""" 47 | 48 | 49 | class DescriptorBase(object): 50 | 51 | """Descriptors base class. 52 | 53 | This class is the base of all descriptor classes. It provides common options 54 | related functionaility. 55 | 56 | Attributes: 57 | has_options: True if the descriptor has non-default options. Usually it 58 | is not necessary to read this -- just call GetOptions() which will 59 | happily return the default instance. However, it's sometimes useful 60 | for efficiency, and also useful inside the protobuf implementation to 61 | avoid some bootstrapping issues. 62 | """ 63 | 64 | def __init__(self, options, options_class_name): 65 | """Initialize the descriptor given its options message and the name of the 66 | class of the options message. The name of the class is required in case 67 | the options message is None and has to be created. 68 | """ 69 | self._options = options 70 | self._options_class_name = options_class_name 71 | 72 | # Does this descriptor have non-default options? 
73 | self.has_options = options is not None 74 | 75 | def GetOptions(self): 76 | """Retrieves descriptor options. 77 | 78 | This method returns the options set or creates the default options for the 79 | descriptor. 80 | """ 81 | if self._options: 82 | return self._options 83 | from google.protobuf import descriptor_pb2 84 | try: 85 | options_class = getattr(descriptor_pb2, self._options_class_name) 86 | except AttributeError: 87 | raise RuntimeError('Unknown options class name %s!' % 88 | (self._options_class_name)) 89 | self._options = options_class() 90 | return self._options 91 | 92 | 93 | class _NestedDescriptorBase(DescriptorBase): 94 | """Common class for descriptors that can be nested.""" 95 | 96 | def __init__(self, options, options_class_name, name, full_name, 97 | file, containing_type, serialized_start=None, 98 | serialized_end=None): 99 | """Constructor. 100 | 101 | Args: 102 | options: Protocol message options or None 103 | to use default message options. 104 | options_class_name: (str) The class name of the above options. 105 | 106 | name: (str) Name of this protocol message type. 107 | full_name: (str) Fully-qualified name of this protocol message type, 108 | which will include protocol "package" name and the name of any 109 | enclosing types. 110 | file: (FileDescriptor) Reference to file info. 111 | containing_type: if provided, this is a nested descriptor, with this 112 | descriptor as parent, otherwise None. 113 | serialized_start: The start index (inclusive) in block in the 114 | file.serialized_pb that describes this descriptor. 115 | serialized_end: The end index (exclusive) in block in the 116 | file.serialized_pb that describes this descriptor. 117 | """ 118 | super(_NestedDescriptorBase, self).__init__( 119 | options, options_class_name) 120 | 121 | self.name = name 122 | # TODO(falk): Add function to calculate full_name instead of having it in 123 | # memory? 124 | self.full_name = full_name 125 | self.file = file 126 | self.containing_type = containing_type 127 | 128 | self._serialized_start = serialized_start 129 | self._serialized_end = serialized_end 130 | 131 | def GetTopLevelContainingType(self): 132 | """Returns the root if this is a nested type, or itself if its the root.""" 133 | desc = self 134 | while desc.containing_type is not None: 135 | desc = desc.containing_type 136 | return desc 137 | 138 | def CopyToProto(self, proto): 139 | """Copies this to the matching proto in descriptor_pb2. 140 | 141 | Args: 142 | proto: An empty proto instance from descriptor_pb2. 143 | 144 | Raises: 145 | Error: If self couldnt be serialized, due to to few constructor arguments. 146 | """ 147 | if (self.file is not None and 148 | self._serialized_start is not None and 149 | self._serialized_end is not None): 150 | proto.ParseFromString(self.file.serialized_pb[ 151 | self._serialized_start:self._serialized_end]) 152 | else: 153 | raise Error('Descriptor does not contain serialization.') 154 | 155 | 156 | class Descriptor(_NestedDescriptorBase): 157 | 158 | """Descriptor for a protocol message type. 159 | 160 | A Descriptor instance has the following attributes: 161 | 162 | name: (str) Name of this protocol message type. 163 | full_name: (str) Fully-qualified name of this protocol message type, 164 | which will include protocol "package" name and the name of any 165 | enclosing types. 166 | 167 | containing_type: (Descriptor) Reference to the descriptor of the 168 | type containing us, or None if this is top-level. 
169 | 170 | fields: (list of FieldDescriptors) Field descriptors for all 171 | fields in this type. 172 | fields_by_number: (dict int -> FieldDescriptor) Same FieldDescriptor 173 | objects as in |fields|, but indexed by "number" attribute in each 174 | FieldDescriptor. 175 | fields_by_name: (dict str -> FieldDescriptor) Same FieldDescriptor 176 | objects as in |fields|, but indexed by "name" attribute in each 177 | FieldDescriptor. 178 | 179 | nested_types: (list of Descriptors) Descriptor references 180 | for all protocol message types nested within this one. 181 | nested_types_by_name: (dict str -> Descriptor) Same Descriptor 182 | objects as in |nested_types|, but indexed by "name" attribute 183 | in each Descriptor. 184 | 185 | enum_types: (list of EnumDescriptors) EnumDescriptor references 186 | for all enums contained within this type. 187 | enum_types_by_name: (dict str ->EnumDescriptor) Same EnumDescriptor 188 | objects as in |enum_types|, but indexed by "name" attribute 189 | in each EnumDescriptor. 190 | enum_values_by_name: (dict str -> EnumValueDescriptor) Dict mapping 191 | from enum value name to EnumValueDescriptor for that value. 192 | 193 | extensions: (list of FieldDescriptor) All extensions defined directly 194 | within this message type (NOT within a nested type). 195 | extensions_by_name: (dict, string -> FieldDescriptor) Same FieldDescriptor 196 | objects as |extensions|, but indexed by "name" attribute of each 197 | FieldDescriptor. 198 | 199 | is_extendable: Does this type define any extension ranges? 200 | 201 | options: (descriptor_pb2.MessageOptions) Protocol message options or None 202 | to use default message options. 203 | 204 | file: (FileDescriptor) Reference to file descriptor. 205 | """ 206 | 207 | def __init__(self, name, full_name, filename, containing_type, fields, 208 | nested_types, enum_types, extensions, options=None, 209 | is_extendable=True, extension_ranges=None, file=None, 210 | serialized_start=None, serialized_end=None): 211 | """Arguments to __init__() are as described in the description 212 | of Descriptor fields above. 213 | 214 | Note that filename is an obsolete argument, that is not used anymore. 215 | Please use file.name to access this as an attribute. 216 | """ 217 | super(Descriptor, self).__init__( 218 | options, 'MessageOptions', name, full_name, file, 219 | containing_type, serialized_start=serialized_start, 220 | serialized_end=serialized_start) 221 | 222 | # We have fields in addition to fields_by_name and fields_by_number, 223 | # so that: 224 | # 1. Clients can index fields by "order in which they're listed." 225 | # 2. Clients can easily iterate over all fields with the terse 226 | # syntax: for f in descriptor.fields: ... 
227 | self.fields = fields 228 | for field in self.fields: 229 | field.containing_type = self 230 | self.fields_by_number = dict((f.number, f) for f in fields) 231 | self.fields_by_name = dict((f.name, f) for f in fields) 232 | 233 | self.nested_types = nested_types 234 | self.nested_types_by_name = dict((t.name, t) for t in nested_types) 235 | 236 | self.enum_types = enum_types 237 | for enum_type in self.enum_types: 238 | enum_type.containing_type = self 239 | self.enum_types_by_name = dict((t.name, t) for t in enum_types) 240 | self.enum_values_by_name = dict( 241 | (v.name, v) for t in enum_types for v in t.values) 242 | 243 | self.extensions = extensions 244 | for extension in self.extensions: 245 | extension.extension_scope = self 246 | self.extensions_by_name = dict((f.name, f) for f in extensions) 247 | self.is_extendable = is_extendable 248 | self.extension_ranges = extension_ranges 249 | 250 | self._serialized_start = serialized_start 251 | self._serialized_end = serialized_end 252 | 253 | def CopyToProto(self, proto): 254 | """Copies this to a descriptor_pb2.DescriptorProto. 255 | 256 | Args: 257 | proto: An empty descriptor_pb2.DescriptorProto. 258 | """ 259 | # This function is overriden to give a better doc comment. 260 | super(Descriptor, self).CopyToProto(proto) 261 | 262 | 263 | # TODO(robinson): We should have aggressive checking here, 264 | # for example: 265 | # * If you specify a repeated field, you should not be allowed 266 | # to specify a default value. 267 | # * [Other examples here as needed]. 268 | # 269 | # TODO(robinson): for this and other *Descriptor classes, we 270 | # might also want to lock things down aggressively (e.g., 271 | # prevent clients from setting the attributes). Having 272 | # stronger invariants here in general will reduce the number 273 | # of runtime checks we must do in reflection.py... 274 | class FieldDescriptor(DescriptorBase): 275 | 276 | """Descriptor for a single field in a .proto file. 277 | 278 | A FieldDescriptor instance has the following attriubtes: 279 | 280 | name: (str) Name of this field, exactly as it appears in .proto. 281 | full_name: (str) Name of this field, including containing scope. This is 282 | particularly relevant for extensions. 283 | index: (int) Dense, 0-indexed index giving the order that this 284 | field textually appears within its message in the .proto file. 285 | number: (int) Tag number declared for this field in the .proto file. 286 | 287 | type: (One of the TYPE_* constants below) Declared type. 288 | cpp_type: (One of the CPPTYPE_* constants below) C++ type used to 289 | represent this field. 290 | 291 | label: (One of the LABEL_* constants below) Tells whether this 292 | field is optional, required, or repeated. 293 | has_default_value: (bool) True if this field has a default value defined, 294 | otherwise false. 295 | default_value: (Varies) Default value of this field. Only 296 | meaningful for non-repeated scalar fields. Repeated fields 297 | should always set this to [], and non-repeated composite 298 | fields should always set this to None. 299 | 300 | containing_type: (Descriptor) Descriptor of the protocol message 301 | type that contains this field. Set by the Descriptor constructor 302 | if we're passed into one. 303 | Somewhat confusingly, for extension fields, this is the 304 | descriptor of the EXTENDED message, not the descriptor 305 | of the message containing this field. (See is_extension and 306 | extension_scope below). 
307 | message_type: (Descriptor) If a composite field, a descriptor 308 | of the message type contained in this field. Otherwise, this is None. 309 | enum_type: (EnumDescriptor) If this field contains an enum, a 310 | descriptor of that enum. Otherwise, this is None. 311 | 312 | is_extension: True iff this describes an extension field. 313 | extension_scope: (Descriptor) Only meaningful if is_extension is True. 314 | Gives the message that immediately contains this extension field. 315 | Will be None iff we're a top-level (file-level) extension field. 316 | 317 | options: (descriptor_pb2.FieldOptions) Protocol message field options or 318 | None to use default field options. 319 | """ 320 | 321 | # Must be consistent with C++ FieldDescriptor::Type enum in 322 | # descriptor.h. 323 | # 324 | # TODO(robinson): Find a way to eliminate this repetition. 325 | TYPE_DOUBLE = 1 326 | TYPE_FLOAT = 2 327 | TYPE_INT64 = 3 328 | TYPE_UINT64 = 4 329 | TYPE_INT32 = 5 330 | TYPE_FIXED64 = 6 331 | TYPE_FIXED32 = 7 332 | TYPE_BOOL = 8 333 | TYPE_STRING = 9 334 | TYPE_GROUP = 10 335 | TYPE_MESSAGE = 11 336 | TYPE_BYTES = 12 337 | TYPE_UINT32 = 13 338 | TYPE_ENUM = 14 339 | TYPE_SFIXED32 = 15 340 | TYPE_SFIXED64 = 16 341 | TYPE_SINT32 = 17 342 | TYPE_SINT64 = 18 343 | MAX_TYPE = 18 344 | 345 | # Must be consistent with C++ FieldDescriptor::CppType enum in 346 | # descriptor.h. 347 | # 348 | # TODO(robinson): Find a way to eliminate this repetition. 349 | CPPTYPE_INT32 = 1 350 | CPPTYPE_INT64 = 2 351 | CPPTYPE_UINT32 = 3 352 | CPPTYPE_UINT64 = 4 353 | CPPTYPE_DOUBLE = 5 354 | CPPTYPE_FLOAT = 6 355 | CPPTYPE_BOOL = 7 356 | CPPTYPE_ENUM = 8 357 | CPPTYPE_STRING = 9 358 | CPPTYPE_MESSAGE = 10 359 | MAX_CPPTYPE = 10 360 | 361 | # Must be consistent with C++ FieldDescriptor::Label enum in 362 | # descriptor.h. 363 | # 364 | # TODO(robinson): Find a way to eliminate this repetition. 365 | LABEL_OPTIONAL = 1 366 | LABEL_REQUIRED = 2 367 | LABEL_REPEATED = 3 368 | MAX_LABEL = 3 369 | 370 | def __init__(self, name, full_name, index, number, type, cpp_type, label, 371 | default_value, message_type, enum_type, containing_type, 372 | is_extension, extension_scope, options=None, 373 | has_default_value=True): 374 | """The arguments are as described in the description of FieldDescriptor 375 | attributes above. 376 | 377 | Note that containing_type may be None, and may be set later if necessary 378 | (to deal with circular references between message types, for example). 379 | Likewise for extension_scope. 380 | """ 381 | super(FieldDescriptor, self).__init__(options, 'FieldOptions') 382 | self.name = name 383 | self.full_name = full_name 384 | self.index = index 385 | self.number = number 386 | self.type = type 387 | self.cpp_type = cpp_type 388 | self.label = label 389 | self.has_default_value = has_default_value 390 | self.default_value = default_value 391 | self.containing_type = containing_type 392 | self.message_type = message_type 393 | self.enum_type = enum_type 394 | self.is_extension = is_extension 395 | self.extension_scope = extension_scope 396 | if api_implementation.Type() == 'cpp': 397 | if is_extension: 398 | self._cdescriptor = cpp_message.GetExtensionDescriptor(full_name) 399 | else: 400 | self._cdescriptor = cpp_message.GetFieldDescriptor(full_name) 401 | else: 402 | self._cdescriptor = None 403 | 404 | 405 | class EnumDescriptor(_NestedDescriptorBase): 406 | 407 | """Descriptor for an enum defined in a .proto file. 
408 | 409 | An EnumDescriptor instance has the following attributes: 410 | 411 | name: (str) Name of the enum type. 412 | full_name: (str) Full name of the type, including package name 413 | and any enclosing type(s). 414 | 415 | values: (list of EnumValueDescriptors) List of the values 416 | in this enum. 417 | values_by_name: (dict str -> EnumValueDescriptor) Same as |values|, 418 | but indexed by the "name" field of each EnumValueDescriptor. 419 | values_by_number: (dict int -> EnumValueDescriptor) Same as |values|, 420 | but indexed by the "number" field of each EnumValueDescriptor. 421 | containing_type: (Descriptor) Descriptor of the immediate containing 422 | type of this enum, or None if this is an enum defined at the 423 | top level in a .proto file. Set by Descriptor's constructor 424 | if we're passed into one. 425 | file: (FileDescriptor) Reference to file descriptor. 426 | options: (descriptor_pb2.EnumOptions) Enum options message or 427 | None to use default enum options. 428 | """ 429 | 430 | def __init__(self, name, full_name, filename, values, 431 | containing_type=None, options=None, file=None, 432 | serialized_start=None, serialized_end=None): 433 | """Arguments are as described in the attribute description above. 434 | 435 | Note that filename is an obsolete argument that is not used anymore. 436 | Please use file.name to access this as an attribute. 437 | """ 438 | super(EnumDescriptor, self).__init__( 439 | options, 'EnumOptions', name, full_name, file, 440 | containing_type, serialized_start=serialized_start, 441 | serialized_end=serialized_end) 442 | 443 | self.values = values 444 | for value in self.values: 445 | value.type = self 446 | self.values_by_name = dict((v.name, v) for v in values) 447 | self.values_by_number = dict((v.number, v) for v in values) 448 | 449 | self._serialized_start = serialized_start 450 | self._serialized_end = serialized_end 451 | 452 | def CopyToProto(self, proto): 453 | """Copies this to a descriptor_pb2.EnumDescriptorProto. 454 | 455 | Args: 456 | proto: An empty descriptor_pb2.EnumDescriptorProto. 457 | """ 458 | # This function is overridden to give a better doc comment. 459 | super(EnumDescriptor, self).CopyToProto(proto) 460 | 461 | 462 | class EnumValueDescriptor(DescriptorBase): 463 | 464 | """Descriptor for a single value within an enum. 465 | 466 | name: (str) Name of this value. 467 | index: (int) Dense, 0-indexed index giving the order that this 468 | value appears textually within its enum in the .proto file. 469 | number: (int) Actual number assigned to this enum value. 470 | type: (EnumDescriptor) EnumDescriptor to which this value 471 | belongs. Set by EnumDescriptor's constructor if we're 472 | passed into one. 473 | options: (descriptor_pb2.EnumValueOptions) Enum value options message or 474 | None to use default enum value options. 475 | """ 476 | 477 | def __init__(self, name, index, number, type=None, options=None): 478 | """Arguments are as described in the attribute description above.""" 479 | super(EnumValueDescriptor, self).__init__(options, 'EnumValueOptions') 480 | self.name = name 481 | self.index = index 482 | self.number = number 483 | self.type = type 484 | 485 | 486 | class ServiceDescriptor(_NestedDescriptorBase): 487 | 488 | """Descriptor for a service. 489 | 490 | name: (str) Name of the service. 491 | full_name: (str) Full name of the service, including package name. 492 | index: (int) 0-indexed index giving the order that this service's 493 | definition appears within the .proto file.
494 | methods: (list of MethodDescriptor) List of methods provided by this 495 | service. 496 | options: (descriptor_pb2.ServiceOptions) Service options message or 497 | None to use default service options. 498 | file: (FileDescriptor) Reference to file info. 499 | """ 500 | 501 | def __init__(self, name, full_name, index, methods, options=None, file=None, 502 | serialized_start=None, serialized_end=None): 503 | super(ServiceDescriptor, self).__init__( 504 | options, 'ServiceOptions', name, full_name, file, 505 | None, serialized_start=serialized_start, 506 | serialized_end=serialized_end) 507 | self.index = index 508 | self.methods = methods 509 | # Set the containing service for each method in this service. 510 | for method in self.methods: 511 | method.containing_service = self 512 | 513 | def FindMethodByName(self, name): 514 | """Searches for the specified method, and returns its descriptor.""" 515 | for method in self.methods: 516 | if name == method.name: 517 | return method 518 | return None 519 | 520 | def CopyToProto(self, proto): 521 | """Copies this to a descriptor_pb2.ServiceDescriptorProto. 522 | 523 | Args: 524 | proto: An empty descriptor_pb2.ServiceDescriptorProto. 525 | """ 526 | # This function is overriden to give a better doc comment. 527 | super(ServiceDescriptor, self).CopyToProto(proto) 528 | 529 | 530 | class MethodDescriptor(DescriptorBase): 531 | 532 | """Descriptor for a method in a service. 533 | 534 | name: (str) Name of the method within the service. 535 | full_name: (str) Full name of method. 536 | index: (int) 0-indexed index of the method inside the service. 537 | containing_service: (ServiceDescriptor) The service that contains this 538 | method. 539 | input_type: The descriptor of the message that this method accepts. 540 | output_type: The descriptor of the message that this method returns. 541 | options: (descriptor_pb2.MethodOptions) Method options message or 542 | None to use default method options. 543 | """ 544 | 545 | def __init__(self, name, full_name, index, containing_service, 546 | input_type, output_type, options=None): 547 | """The arguments are as described in the description of MethodDescriptor 548 | attributes above. 549 | 550 | Note that containing_service may be None, and may be set later if necessary. 551 | """ 552 | super(MethodDescriptor, self).__init__(options, 'MethodOptions') 553 | self.name = name 554 | self.full_name = full_name 555 | self.index = index 556 | self.containing_service = containing_service 557 | self.input_type = input_type 558 | self.output_type = output_type 559 | 560 | 561 | class FileDescriptor(DescriptorBase): 562 | """Descriptor for a file. Mimics the descriptor_pb2.FileDescriptorProto. 563 | 564 | name: name of file, relative to root of source tree. 565 | package: name of the package 566 | serialized_pb: (str) Byte string of serialized 567 | descriptor_pb2.FileDescriptorProto. 568 | """ 569 | 570 | def __init__(self, name, package, options=None, serialized_pb=None): 571 | """Constructor.""" 572 | super(FileDescriptor, self).__init__(options, 'FileOptions') 573 | 574 | self.message_types_by_name = {} 575 | self.name = name 576 | self.package = package 577 | self.serialized_pb = serialized_pb 578 | if (api_implementation.Type() == 'cpp' and 579 | self.serialized_pb is not None): 580 | cpp_message.BuildFile(self.serialized_pb) 581 | 582 | def CopyToProto(self, proto): 583 | """Copies this to a descriptor_pb2.FileDescriptorProto. 584 | 585 | Args: 586 | proto: An empty descriptor_pb2.FileDescriptorProto. 
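    As an illustrative sketch (the variable name file_descriptor below is
    hypothetical), typical usage is:

      from google.protobuf import descriptor_pb2
      proto = descriptor_pb2.FileDescriptorProto()
      file_descriptor.CopyToProto(proto)

    after which proto mirrors the serialized_pb this descriptor was built from.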
587 | """ 588 | proto.ParseFromString(self.serialized_pb) 589 | 590 | 591 | def _ParseOptions(message, string): 592 | """Parses serialized options. 593 | 594 | This helper function is used to parse serialized options in generated 595 | proto2 files. It must not be used outside proto2. 596 | """ 597 | message.ParseFromString(string) 598 | return message 599 | -------------------------------------------------------------------------------- /pb2.4/google/protobuf/descriptor.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/connexio/cypb/dc3d5ae866f7cfcdc3e313ecced0e8fa912542e1/pb2.4/google/protobuf/descriptor.pyc -------------------------------------------------------------------------------- /pb2.4/google/protobuf/descriptor_pb2.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/connexio/cypb/dc3d5ae866f7cfcdc3e313ecced0e8fa912542e1/pb2.4/google/protobuf/descriptor_pb2.pyc -------------------------------------------------------------------------------- /pb2.4/google/protobuf/internal/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/connexio/cypb/dc3d5ae866f7cfcdc3e313ecced0e8fa912542e1/pb2.4/google/protobuf/internal/__init__.py -------------------------------------------------------------------------------- /pb2.4/google/protobuf/internal/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/connexio/cypb/dc3d5ae866f7cfcdc3e313ecced0e8fa912542e1/pb2.4/google/protobuf/internal/__init__.pyc -------------------------------------------------------------------------------- /pb2.4/google/protobuf/internal/_net_proto2___python.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/connexio/cypb/dc3d5ae866f7cfcdc3e313ecced0e8fa912542e1/pb2.4/google/protobuf/internal/_net_proto2___python.so -------------------------------------------------------------------------------- /pb2.4/google/protobuf/internal/api_implementation.py: -------------------------------------------------------------------------------- 1 | # Protocol Buffers - Google's data interchange format 2 | # Copyright 2008 Google Inc. All rights reserved. 3 | # http://code.google.com/p/protobuf/ 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are 7 | # met: 8 | # 9 | # * Redistributions of source code must retain the above copyright 10 | # notice, this list of conditions and the following disclaimer. 11 | # * Redistributions in binary form must reproduce the above 12 | # copyright notice, this list of conditions and the following disclaimer 13 | # in the documentation and/or other materials provided with the 14 | # distribution. 15 | # * Neither the name of Google Inc. nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | 31 | """ 32 | This module is the central entity that determines which implementation of the 33 | API is used. 34 | """ 35 | 36 | __author__ = 'petar@google.com (Petar Petrov)' 37 | 38 | import os 39 | # This environment variable can be used to switch to a certain implementation 40 | # of the Python API. Right now only 'python' and 'cpp' are valid values. Any 41 | # other value will be ignored. 42 | _implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION', 43 | 'python') 44 | 45 | 46 | if _implementation_type != 'python': 47 | # For now, by default use the pure-Python implementation. 48 | # The code below checks if the C extension is available and 49 | # uses it if it is available. 50 | _implementation_type = 'cpp' 51 | ## Determine automatically which implementation to use. 52 | #try: 53 | # from google.protobuf.internal import cpp_message 54 | # _implementation_type = 'cpp' 55 | #except ImportError, e: 56 | # _implementation_type = 'python' 57 | 58 | 59 | # Usage of this function is discouraged. Clients shouldn't care which 60 | # implementation of the API is in use. Note that there is no guarantee 61 | # that differences between APIs will be maintained. 62 | # Please don't use this function if possible. 63 | def Type(): 64 | return _implementation_type 65 | -------------------------------------------------------------------------------- /pb2.4/google/protobuf/internal/api_implementation.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/connexio/cypb/dc3d5ae866f7cfcdc3e313ecced0e8fa912542e1/pb2.4/google/protobuf/internal/api_implementation.pyc -------------------------------------------------------------------------------- /pb2.4/google/protobuf/internal/containers.py: -------------------------------------------------------------------------------- 1 | # Protocol Buffers - Google's data interchange format 2 | # Copyright 2008 Google Inc. All rights reserved. 3 | # http://code.google.com/p/protobuf/ 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are 7 | # met: 8 | # 9 | # * Redistributions of source code must retain the above copyright 10 | # notice, this list of conditions and the following disclaimer. 11 | # * Redistributions in binary form must reproduce the above 12 | # copyright notice, this list of conditions and the following disclaimer 13 | # in the documentation and/or other materials provided with the 14 | # distribution. 15 | # * Neither the name of Google Inc. nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 
18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | 31 | """Contains container classes to represent different protocol buffer types. 32 | 33 | This file defines container classes which represent categories of protocol 34 | buffer field types which need extra maintenance. Currently these categories 35 | are: 36 | - Repeated scalar fields - These are all repeated fields which aren't 37 | composite (e.g. they are of simple types like int32, string, etc). 38 | - Repeated composite fields - Repeated fields which are composite. This 39 | includes groups and nested messages. 40 | """ 41 | 42 | __author__ = 'petar@google.com (Petar Petrov)' 43 | 44 | 45 | class BaseContainer(object): 46 | 47 | """Base container class.""" 48 | 49 | # Minimizes memory usage and disallows assignment to other attributes. 50 | __slots__ = ['_message_listener', '_values'] 51 | 52 | def __init__(self, message_listener): 53 | """ 54 | Args: 55 | message_listener: A MessageListener implementation. 56 | The RepeatedScalarFieldContainer will call this object's 57 | Modified() method when it is modified. 58 | """ 59 | self._message_listener = message_listener 60 | self._values = [] 61 | 62 | def __getitem__(self, key): 63 | """Retrieves item by the specified key.""" 64 | return self._values[key] 65 | 66 | def __len__(self): 67 | """Returns the number of elements in the container.""" 68 | return len(self._values) 69 | 70 | def __ne__(self, other): 71 | """Checks if another instance isn't equal to this one.""" 72 | # The concrete classes should define __eq__. 73 | return not self == other 74 | 75 | def __hash__(self): 76 | raise TypeError('unhashable object') 77 | 78 | def __repr__(self): 79 | return repr(self._values) 80 | 81 | def sort(self, sort_function=cmp): 82 | self._values.sort(sort_function) 83 | 84 | 85 | class RepeatedScalarFieldContainer(BaseContainer): 86 | 87 | """Simple, type-checked, list-like container for holding repeated scalars.""" 88 | 89 | # Disallows assignment to other attributes. 90 | __slots__ = ['_type_checker'] 91 | 92 | def __init__(self, message_listener, type_checker): 93 | """ 94 | Args: 95 | message_listener: A MessageListener implementation. 96 | The RepeatedScalarFieldContainer will call this object's 97 | Modified() method when it is modified. 98 | type_checker: A type_checkers.ValueChecker instance to run on elements 99 | inserted into this container. 100 | """ 101 | super(RepeatedScalarFieldContainer, self).__init__(message_listener) 102 | self._type_checker = type_checker 103 | 104 | def append(self, value): 105 | """Appends an item to the list. 
Similar to list.append().""" 106 | self._type_checker.CheckValue(value) 107 | self._values.append(value) 108 | if not self._message_listener.dirty: 109 | self._message_listener.Modified() 110 | 111 | def insert(self, key, value): 112 | """Inserts the item at the specified position. Similar to list.insert().""" 113 | self._type_checker.CheckValue(value) 114 | self._values.insert(key, value) 115 | if not self._message_listener.dirty: 116 | self._message_listener.Modified() 117 | 118 | def extend(self, elem_seq): 119 | """Extends by appending the given sequence. Similar to list.extend().""" 120 | if not elem_seq: 121 | return 122 | 123 | new_values = [] 124 | for elem in elem_seq: 125 | self._type_checker.CheckValue(elem) 126 | new_values.append(elem) 127 | self._values.extend(new_values) 128 | self._message_listener.Modified() 129 | 130 | def MergeFrom(self, other): 131 | """Appends the contents of another repeated field of the same type to this 132 | one. We do not check the types of the individual fields. 133 | """ 134 | self._values.extend(other._values) 135 | self._message_listener.Modified() 136 | 137 | def remove(self, elem): 138 | """Removes an item from the list. Similar to list.remove().""" 139 | self._values.remove(elem) 140 | self._message_listener.Modified() 141 | 142 | def __setitem__(self, key, value): 143 | """Sets the item on the specified position.""" 144 | self._type_checker.CheckValue(value) 145 | self._values[key] = value 146 | self._message_listener.Modified() 147 | 148 | def __getslice__(self, start, stop): 149 | """Retrieves the subset of items from between the specified indices.""" 150 | return self._values[start:stop] 151 | 152 | def __setslice__(self, start, stop, values): 153 | """Sets the subset of items from between the specified indices.""" 154 | new_values = [] 155 | for value in values: 156 | self._type_checker.CheckValue(value) 157 | new_values.append(value) 158 | self._values[start:stop] = new_values 159 | self._message_listener.Modified() 160 | 161 | def __delitem__(self, key): 162 | """Deletes the item at the specified position.""" 163 | del self._values[key] 164 | self._message_listener.Modified() 165 | 166 | def __delslice__(self, start, stop): 167 | """Deletes the subset of items from between the specified indices.""" 168 | del self._values[start:stop] 169 | self._message_listener.Modified() 170 | 171 | def __eq__(self, other): 172 | """Compares the current instance with another one.""" 173 | if self is other: 174 | return True 175 | # Special case for the same type which should be common and fast. 176 | if isinstance(other, self.__class__): 177 | return other._values == self._values 178 | # We are presumably comparing against some other sequence type. 179 | return other == self._values 180 | 181 | 182 | class RepeatedCompositeFieldContainer(BaseContainer): 183 | 184 | """Simple, list-like container for holding repeated composite fields.""" 185 | 186 | # Disallows assignment to other attributes. 187 | __slots__ = ['_message_descriptor'] 188 | 189 | def __init__(self, message_listener, message_descriptor): 190 | """ 191 | Note that we pass in a descriptor instead of the generated directly, 192 | since at the time we construct a _RepeatedCompositeFieldContainer we 193 | haven't yet necessarily initialized the type that will be contained in the 194 | container. 195 | 196 | Args: 197 | message_listener: A MessageListener implementation. 198 | The RepeatedCompositeFieldContainer will call this object's 199 | Modified() method when it is modified. 
200 | message_descriptor: A Descriptor instance describing the protocol type 201 | that should be present in this container. We'll use the 202 | _concrete_class field of this descriptor when the client calls add(). 203 | """ 204 | super(RepeatedCompositeFieldContainer, self).__init__(message_listener) 205 | self._message_descriptor = message_descriptor 206 | 207 | def add(self, **kwargs): 208 | """Adds a new element at the end of the list and returns it. Keyword 209 | arguments may be used to initialize the element. 210 | """ 211 | new_element = self._message_descriptor._concrete_class(**kwargs) 212 | new_element._SetListener(self._message_listener) 213 | self._values.append(new_element) 214 | if not self._message_listener.dirty: 215 | self._message_listener.Modified() 216 | return new_element 217 | 218 | def extend(self, elem_seq): 219 | """Extends by appending the given sequence of elements of the same type 220 | as this one, copying each individual message. 221 | """ 222 | message_class = self._message_descriptor._concrete_class 223 | listener = self._message_listener 224 | values = self._values 225 | for message in elem_seq: 226 | new_element = message_class() 227 | new_element._SetListener(listener) 228 | new_element.MergeFrom(message) 229 | values.append(new_element) 230 | listener.Modified() 231 | 232 | def MergeFrom(self, other): 233 | """Appends the contents of another repeated field of the same type to this 234 | one, copying each individual message. 235 | """ 236 | self.extend(other._values) 237 | 238 | def __getslice__(self, start, stop): 239 | """Retrieves the subset of items from between the specified indices.""" 240 | return self._values[start:stop] 241 | 242 | def __delitem__(self, key): 243 | """Deletes the item at the specified position.""" 244 | del self._values[key] 245 | self._message_listener.Modified() 246 | 247 | def __delslice__(self, start, stop): 248 | """Deletes the subset of items from between the specified indices.""" 249 | del self._values[start:stop] 250 | self._message_listener.Modified() 251 | 252 | def __eq__(self, other): 253 | """Compares the current instance with another one.""" 254 | if self is other: 255 | return True 256 | if not isinstance(other, self.__class__): 257 | raise TypeError('Can only compare repeated composite fields against ' 258 | 'other repeated composite fields.') 259 | return self._values == other._values 260 | -------------------------------------------------------------------------------- /pb2.4/google/protobuf/internal/cpp_message.py: -------------------------------------------------------------------------------- 1 | # Protocol Buffers - Google's data interchange format 2 | # Copyright 2008 Google Inc. All rights reserved. 3 | # http://code.google.com/p/protobuf/ 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are 7 | # met: 8 | # 9 | # * Redistributions of source code must retain the above copyright 10 | # notice, this list of conditions and the following disclaimer. 11 | # * Redistributions in binary form must reproduce the above 12 | # copyright notice, this list of conditions and the following disclaimer 13 | # in the documentation and/or other materials provided with the 14 | # distribution. 15 | # * Neither the name of Google Inc. nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 
18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | 31 | """Contains helper functions used to create protocol message classes from 32 | Descriptor objects at runtime backed by the protocol buffer C++ API. 33 | """ 34 | 35 | __author__ = 'petar@google.com (Petar Petrov)' 36 | 37 | import operator 38 | from google.protobuf.internal import _net_proto2___python 39 | from google.protobuf import message 40 | 41 | 42 | _LABEL_REPEATED = _net_proto2___python.LABEL_REPEATED 43 | _LABEL_OPTIONAL = _net_proto2___python.LABEL_OPTIONAL 44 | _CPPTYPE_MESSAGE = _net_proto2___python.CPPTYPE_MESSAGE 45 | _TYPE_MESSAGE = _net_proto2___python.TYPE_MESSAGE 46 | 47 | 48 | def GetDescriptorPool(): 49 | """Creates a new DescriptorPool C++ object.""" 50 | return _net_proto2___python.NewCDescriptorPool() 51 | 52 | 53 | _pool = GetDescriptorPool() 54 | 55 | 56 | def GetFieldDescriptor(full_field_name): 57 | """Searches for a field descriptor given a full field name.""" 58 | return _pool.FindFieldByName(full_field_name) 59 | 60 | 61 | def BuildFile(content): 62 | """Registers a new proto file in the underlying C++ descriptor pool.""" 63 | _net_proto2___python.BuildFile(content) 64 | 65 | 66 | def GetExtensionDescriptor(full_extension_name): 67 | """Searches for extension descriptor given a full field name.""" 68 | return _pool.FindExtensionByName(full_extension_name) 69 | 70 | 71 | def NewCMessage(full_message_name): 72 | """Creates a new C++ protocol message by its name.""" 73 | return _net_proto2___python.NewCMessage(full_message_name) 74 | 75 | 76 | def ScalarProperty(cdescriptor): 77 | """Returns a scalar property for the given descriptor.""" 78 | 79 | def Getter(self): 80 | return self._cmsg.GetScalar(cdescriptor) 81 | 82 | def Setter(self, value): 83 | self._cmsg.SetScalar(cdescriptor, value) 84 | 85 | return property(Getter, Setter) 86 | 87 | 88 | def CompositeProperty(cdescriptor, message_type): 89 | """Returns a Python property the given composite field.""" 90 | 91 | def Getter(self): 92 | sub_message = self._composite_fields.get(cdescriptor.name, None) 93 | if sub_message is None: 94 | cmessage = self._cmsg.NewSubMessage(cdescriptor) 95 | sub_message = message_type._concrete_class(__cmessage=cmessage) 96 | self._composite_fields[cdescriptor.name] = sub_message 97 | return sub_message 98 | 99 | return property(Getter) 100 | 101 | 102 | class RepeatedScalarContainer(object): 103 | """Container for repeated scalar fields.""" 104 | 105 | __slots__ = ['_message', '_cfield_descriptor', '_cmsg'] 106 | 107 | def __init__(self, msg, cfield_descriptor): 108 | self._message = msg 109 | self._cmsg = msg._cmsg 110 | self._cfield_descriptor = cfield_descriptor 111 | 112 | def append(self, value): 113 | self._cmsg.AddRepeatedScalar( 114 | 
self._cfield_descriptor, value) 115 | 116 | def extend(self, sequence): 117 | for element in sequence: 118 | self.append(element) 119 | 120 | def insert(self, key, value): 121 | values = self[slice(None, None, None)] 122 | values.insert(key, value) 123 | self._cmsg.AssignRepeatedScalar(self._cfield_descriptor, values) 124 | 125 | def remove(self, value): 126 | values = self[slice(None, None, None)] 127 | values.remove(value) 128 | self._cmsg.AssignRepeatedScalar(self._cfield_descriptor, values) 129 | 130 | def __setitem__(self, key, value): 131 | values = self[slice(None, None, None)] 132 | values[key] = value 133 | self._cmsg.AssignRepeatedScalar(self._cfield_descriptor, values) 134 | 135 | def __getitem__(self, key): 136 | return self._cmsg.GetRepeatedScalar(self._cfield_descriptor, key) 137 | 138 | def __delitem__(self, key): 139 | self._cmsg.DeleteRepeatedField(self._cfield_descriptor, key) 140 | 141 | def __len__(self): 142 | return len(self[slice(None, None, None)]) 143 | 144 | def __eq__(self, other): 145 | if self is other: 146 | return True 147 | if not operator.isSequenceType(other): 148 | raise TypeError( 149 | 'Can only compare repeated scalar fields against sequences.') 150 | # We are presumably comparing against some other sequence type. 151 | return other == self[slice(None, None, None)] 152 | 153 | def __ne__(self, other): 154 | return not self == other 155 | 156 | def __hash__(self): 157 | raise TypeError('unhashable object') 158 | 159 | def sort(self, sort_function=cmp): 160 | values = self[slice(None, None, None)] 161 | values.sort(sort_function) 162 | self._cmsg.AssignRepeatedScalar(self._cfield_descriptor, values) 163 | 164 | 165 | def RepeatedScalarProperty(cdescriptor): 166 | """Returns a Python property the given repeated scalar field.""" 167 | 168 | def Getter(self): 169 | container = self._composite_fields.get(cdescriptor.name, None) 170 | if container is None: 171 | container = RepeatedScalarContainer(self, cdescriptor) 172 | self._composite_fields[cdescriptor.name] = container 173 | return container 174 | 175 | def Setter(self, new_value): 176 | raise AttributeError('Assignment not allowed to repeated field ' 177 | '"%s" in protocol message object.' % cdescriptor.name) 178 | 179 | doc = 'Magic attribute generated for "%s" proto field.' % cdescriptor.name 180 | return property(Getter, Setter, doc=doc) 181 | 182 | 183 | class RepeatedCompositeContainer(object): 184 | """Container for repeated composite fields.""" 185 | 186 | __slots__ = ['_message', '_subclass', '_cfield_descriptor', '_cmsg'] 187 | 188 | def __init__(self, msg, cfield_descriptor, subclass): 189 | self._message = msg 190 | self._cmsg = msg._cmsg 191 | self._subclass = subclass 192 | self._cfield_descriptor = cfield_descriptor 193 | 194 | def add(self, **kwargs): 195 | cmessage = self._cmsg.AddMessage(self._cfield_descriptor) 196 | return self._subclass(__cmessage=cmessage, __owner=self._message, **kwargs) 197 | 198 | def extend(self, elem_seq): 199 | """Extends by appending the given sequence of elements of the same type 200 | as this one, copying each individual message. 
201 | """ 202 | for message in elem_seq: 203 | self.add().MergeFrom(message) 204 | 205 | def MergeFrom(self, other): 206 | for message in other[:]: 207 | self.add().MergeFrom(message) 208 | 209 | def __getitem__(self, key): 210 | cmessages = self._cmsg.GetRepeatedMessage( 211 | self._cfield_descriptor, key) 212 | subclass = self._subclass 213 | if not isinstance(cmessages, list): 214 | return subclass(__cmessage=cmessages, __owner=self._message) 215 | 216 | return [subclass(__cmessage=m, __owner=self._message) for m in cmessages] 217 | 218 | def __delitem__(self, key): 219 | self._cmsg.DeleteRepeatedField( 220 | self._cfield_descriptor, key) 221 | 222 | def __len__(self): 223 | return self._cmsg.FieldLength(self._cfield_descriptor) 224 | 225 | def __eq__(self, other): 226 | """Compares the current instance with another one.""" 227 | if self is other: 228 | return True 229 | if not isinstance(other, self.__class__): 230 | raise TypeError('Can only compare repeated composite fields against ' 231 | 'other repeated composite fields.') 232 | messages = self[slice(None, None, None)] 233 | other_messages = other[slice(None, None, None)] 234 | return messages == other_messages 235 | 236 | def __hash__(self): 237 | raise TypeError('unhashable object') 238 | 239 | def sort(self, sort_function=cmp): 240 | messages = [] 241 | for index in range(len(self)): 242 | # messages[i][0] is where the i-th element of the new array has to come 243 | # from. 244 | # messages[i][1] is where the i-th element of the old array has to go. 245 | messages.append([index, 0, self[index]]) 246 | messages.sort(lambda x,y: sort_function(x[2], y[2])) 247 | 248 | # Remember which position each elements has to move to. 249 | for i in range(len(messages)): 250 | messages[messages[i][0]][1] = i 251 | 252 | # Apply the transposition. 253 | for i in range(len(messages)): 254 | from_position = messages[i][0] 255 | if i == from_position: 256 | continue 257 | self._cmsg.SwapRepeatedFieldElements( 258 | self._cfield_descriptor, i, from_position) 259 | messages[messages[i][1]][0] = from_position 260 | 261 | 262 | def RepeatedCompositeProperty(cdescriptor, message_type): 263 | """Returns a Python property for the given repeated composite field.""" 264 | 265 | def Getter(self): 266 | container = self._composite_fields.get(cdescriptor.name, None) 267 | if container is None: 268 | container = RepeatedCompositeContainer( 269 | self, cdescriptor, message_type._concrete_class) 270 | self._composite_fields[cdescriptor.name] = container 271 | return container 272 | 273 | def Setter(self, new_value): 274 | raise AttributeError('Assignment not allowed to repeated field ' 275 | '"%s" in protocol message object.' % cdescriptor.name) 276 | 277 | doc = 'Magic attribute generated for "%s" proto field.' % cdescriptor.name 278 | return property(Getter, Setter, doc=doc) 279 | 280 | 281 | class ExtensionDict(object): 282 | """Extension dictionary added to each protocol message.""" 283 | 284 | def __init__(self, msg): 285 | self._message = msg 286 | self._cmsg = msg._cmsg 287 | self._values = {} 288 | 289 | def __setitem__(self, extension, value): 290 | from google.protobuf import descriptor 291 | if not isinstance(extension, descriptor.FieldDescriptor): 292 | raise KeyError('Bad extension %r.' % (extension,)) 293 | cdescriptor = extension._cdescriptor 294 | if (cdescriptor.label != _LABEL_OPTIONAL or 295 | cdescriptor.cpp_type == _CPPTYPE_MESSAGE): 296 | raise TypeError('Extension %r is repeated and/or a composite type.' 
% ( 297 | extension.full_name,)) 298 | self._cmsg.SetScalar(cdescriptor, value) 299 | self._values[extension] = value 300 | 301 | def __getitem__(self, extension): 302 | from google.protobuf import descriptor 303 | if not isinstance(extension, descriptor.FieldDescriptor): 304 | raise KeyError('Bad extension %r.' % (extension,)) 305 | 306 | cdescriptor = extension._cdescriptor 307 | if (cdescriptor.label != _LABEL_REPEATED and 308 | cdescriptor.cpp_type != _CPPTYPE_MESSAGE): 309 | return self._cmsg.GetScalar(cdescriptor) 310 | 311 | ext = self._values.get(extension, None) 312 | if ext is not None: 313 | return ext 314 | 315 | ext = self._CreateNewHandle(extension) 316 | self._values[extension] = ext 317 | return ext 318 | 319 | def ClearExtension(self, extension): 320 | from google.protobuf import descriptor 321 | if not isinstance(extension, descriptor.FieldDescriptor): 322 | raise KeyError('Bad extension %r.' % (extension,)) 323 | self._cmsg.ClearFieldByDescriptor(extension._cdescriptor) 324 | if extension in self._values: 325 | del self._values[extension] 326 | 327 | def HasExtension(self, extension): 328 | from google.protobuf import descriptor 329 | if not isinstance(extension, descriptor.FieldDescriptor): 330 | raise KeyError('Bad extension %r.' % (extension,)) 331 | return self._cmsg.HasFieldByDescriptor(extension._cdescriptor) 332 | 333 | def _FindExtensionByName(self, name): 334 | """Tries to find a known extension with the specified name. 335 | 336 | Args: 337 | name: Extension full name. 338 | 339 | Returns: 340 | Extension field descriptor. 341 | """ 342 | return self._message._extensions_by_name.get(name, None) 343 | 344 | def _CreateNewHandle(self, extension): 345 | cdescriptor = extension._cdescriptor 346 | if (cdescriptor.label != _LABEL_REPEATED and 347 | cdescriptor.cpp_type == _CPPTYPE_MESSAGE): 348 | cmessage = self._cmsg.NewSubMessage(cdescriptor) 349 | return extension.message_type._concrete_class(__cmessage=cmessage) 350 | 351 | if cdescriptor.label == _LABEL_REPEATED: 352 | if cdescriptor.cpp_type == _CPPTYPE_MESSAGE: 353 | return RepeatedCompositeContainer( 354 | self._message, cdescriptor, extension.message_type._concrete_class) 355 | else: 356 | return RepeatedScalarContainer(self._message, cdescriptor) 357 | # This shouldn't happen! 358 | assert False 359 | return None 360 | 361 | 362 | def NewMessage(message_descriptor, dictionary): 363 | """Creates a new protocol message *class*.""" 364 | _AddClassAttributesForNestedExtensions(message_descriptor, dictionary) 365 | _AddEnumValues(message_descriptor, dictionary) 366 | _AddDescriptors(message_descriptor, dictionary) 367 | 368 | 369 | def InitMessage(message_descriptor, cls): 370 | """Constructs a new message instance (called before instance's __init__).""" 371 | cls._extensions_by_name = {} 372 | _AddInitMethod(message_descriptor, cls) 373 | _AddMessageMethods(message_descriptor, cls) 374 | _AddPropertiesForExtensions(message_descriptor, cls) 375 | 376 | 377 | def _AddDescriptors(message_descriptor, dictionary): 378 | """Sets up a new protocol message class dictionary. 379 | 380 | Args: 381 | message_descriptor: A Descriptor instance describing this message type. 382 | dictionary: Class dictionary to which we'll add a '__slots__' entry. 
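    As a rough sketch (the field names here are hypothetical), for a message
    type with fields "name" and "id" this leaves the dictionary with roughly:

      dictionary['__descriptors'] == {'name': <c descriptor>, 'id': <c descriptor>}
      dictionary['__slots__'] == ['name', 'id', '_cmsg', '_owner',
                                  '_composite_fields', 'Extensions']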
383 | """ 384 | dictionary['__descriptors'] = {} 385 | for field in message_descriptor.fields: 386 | dictionary['__descriptors'][field.name] = GetFieldDescriptor( 387 | field.full_name) 388 | 389 | dictionary['__slots__'] = list(dictionary['__descriptors'].iterkeys()) + [ 390 | '_cmsg', '_owner', '_composite_fields', 'Extensions'] 391 | 392 | 393 | def _AddEnumValues(message_descriptor, dictionary): 394 | """Sets class-level attributes for all enum fields defined in this message. 395 | 396 | Args: 397 | message_descriptor: Descriptor object for this message type. 398 | dictionary: Class dictionary that should be populated. 399 | """ 400 | for enum_type in message_descriptor.enum_types: 401 | for enum_value in enum_type.values: 402 | dictionary[enum_value.name] = enum_value.number 403 | 404 | 405 | def _AddClassAttributesForNestedExtensions(message_descriptor, dictionary): 406 | """Adds class attributes for the nested extensions.""" 407 | extension_dict = message_descriptor.extensions_by_name 408 | for extension_name, extension_field in extension_dict.iteritems(): 409 | assert extension_name not in dictionary 410 | dictionary[extension_name] = extension_field 411 | 412 | 413 | def _AddInitMethod(message_descriptor, cls): 414 | """Adds an __init__ method to cls.""" 415 | 416 | # Create and attach message field properties to the message class. 417 | # This can be done just once per message class, since property setters and 418 | # getters are passed the message instance. 419 | # This makes message instantiation extremely fast, and at the same time it 420 | # doesn't require the creation of property objects for each message instance, 421 | # which saves a lot of memory. 422 | for field in message_descriptor.fields: 423 | field_cdescriptor = cls.__descriptors[field.name] 424 | if field.label == _LABEL_REPEATED: 425 | if field.cpp_type == _CPPTYPE_MESSAGE: 426 | value = RepeatedCompositeProperty(field_cdescriptor, field.message_type) 427 | else: 428 | value = RepeatedScalarProperty(field_cdescriptor) 429 | elif field.cpp_type == _CPPTYPE_MESSAGE: 430 | value = CompositeProperty(field_cdescriptor, field.message_type) 431 | else: 432 | value = ScalarProperty(field_cdescriptor) 433 | setattr(cls, field.name, value) 434 | 435 | # Attach a constant with the field number. 436 | constant_name = field.name.upper() + '_FIELD_NUMBER' 437 | setattr(cls, constant_name, field.number) 438 | 439 | def Init(self, **kwargs): 440 | """Message constructor.""" 441 | cmessage = kwargs.pop('__cmessage', None) 442 | if cmessage is None: 443 | self._cmsg = NewCMessage(message_descriptor.full_name) 444 | else: 445 | self._cmsg = cmessage 446 | 447 | # Keep a reference to the owner, as the owner keeps a reference to the 448 | # underlying protocol buffer message. 449 | owner = kwargs.pop('__owner', None) 450 | if owner is not None: 451 | self._owner = owner 452 | 453 | self.Extensions = ExtensionDict(self) 454 | self._composite_fields = {} 455 | 456 | for field_name, field_value in kwargs.iteritems(): 457 | field_cdescriptor = self.__descriptors.get(field_name, None) 458 | if field_cdescriptor is None: 459 | raise ValueError('Protocol message has no "%s" field.' 
% field_name) 460 | if field_cdescriptor.label == _LABEL_REPEATED: 461 | if field_cdescriptor.cpp_type == _CPPTYPE_MESSAGE: 462 | for val in field_value: 463 | getattr(self, field_name).add().MergeFrom(val) 464 | else: 465 | getattr(self, field_name).extend(field_value) 466 | elif field_cdescriptor.cpp_type == _CPPTYPE_MESSAGE: 467 | getattr(self, field_name).MergeFrom(field_value) 468 | else: 469 | setattr(self, field_name, field_value) 470 | 471 | Init.__module__ = None 472 | Init.__doc__ = None 473 | cls.__init__ = Init 474 | 475 | 476 | def _IsMessageSetExtension(field): 477 | """Checks if a field is a message set extension.""" 478 | return (field.is_extension and 479 | field.containing_type.has_options and 480 | field.containing_type.GetOptions().message_set_wire_format and 481 | field.type == _TYPE_MESSAGE and 482 | field.message_type == field.extension_scope and 483 | field.label == _LABEL_OPTIONAL) 484 | 485 | 486 | def _AddMessageMethods(message_descriptor, cls): 487 | """Adds the methods to a protocol message class.""" 488 | if message_descriptor.is_extendable: 489 | 490 | def ClearExtension(self, extension): 491 | self.Extensions.ClearExtension(extension) 492 | 493 | def HasExtension(self, extension): 494 | return self.Extensions.HasExtension(extension) 495 | 496 | def HasField(self, field_name): 497 | return self._cmsg.HasField(field_name) 498 | 499 | def ClearField(self, field_name): 500 | if field_name in self._composite_fields: 501 | del self._composite_fields[field_name] 502 | self._cmsg.ClearField(field_name) 503 | 504 | def Clear(self): 505 | return self._cmsg.Clear() 506 | 507 | def IsInitialized(self, errors=None): 508 | if self._cmsg.IsInitialized(): 509 | return True 510 | if errors is not None: 511 | errors.extend(self.FindInitializationErrors()); 512 | return False 513 | 514 | def SerializeToString(self): 515 | if not self.IsInitialized(): 516 | raise message.EncodeError( 517 | 'Message is missing required fields: ' + 518 | ','.join(self.FindInitializationErrors())) 519 | return self._cmsg.SerializeToString() 520 | 521 | def SerializePartialToString(self): 522 | return self._cmsg.SerializePartialToString() 523 | 524 | def ParseFromString(self, serialized): 525 | self.Clear() 526 | self.MergeFromString(serialized) 527 | 528 | def MergeFromString(self, serialized): 529 | byte_size = self._cmsg.MergeFromString(serialized) 530 | if byte_size < 0: 531 | raise message.DecodeError('Unable to merge from string.') 532 | return byte_size 533 | 534 | def MergeFrom(self, msg): 535 | if not isinstance(msg, cls): 536 | raise TypeError( 537 | "Parameter to MergeFrom() must be instance of same class.") 538 | self._cmsg.MergeFrom(msg._cmsg) 539 | 540 | def CopyFrom(self, msg): 541 | self._cmsg.CopyFrom(msg._cmsg) 542 | 543 | def ByteSize(self): 544 | return self._cmsg.ByteSize() 545 | 546 | def SetInParent(self): 547 | return self._cmsg.SetInParent() 548 | 549 | def ListFields(self): 550 | all_fields = [] 551 | field_list = self._cmsg.ListFields() 552 | fields_by_name = cls.DESCRIPTOR.fields_by_name 553 | for is_extension, field_name in field_list: 554 | if is_extension: 555 | extension = cls._extensions_by_name[field_name] 556 | all_fields.append((extension, self.Extensions[extension])) 557 | else: 558 | field_descriptor = fields_by_name[field_name] 559 | all_fields.append( 560 | (field_descriptor, getattr(self, field_name))) 561 | all_fields.sort(key=lambda item: item[0].number) 562 | return all_fields 563 | 564 | def FindInitializationErrors(self): 565 | return 
self._cmsg.FindInitializationErrors() 566 | 567 | def __str__(self): 568 | return self._cmsg.DebugString() 569 | 570 | def __eq__(self, other): 571 | if self is other: 572 | return True 573 | if not isinstance(other, self.__class__): 574 | return False 575 | return self.ListFields() == other.ListFields() 576 | 577 | def __ne__(self, other): 578 | return not self == other 579 | 580 | def __hash__(self): 581 | raise TypeError('unhashable object') 582 | 583 | def __unicode__(self): 584 | return text_format.MessageToString(self, as_utf8=True).decode('utf-8') 585 | 586 | # Attach the local methods to the message class. 587 | for key, value in locals().copy().iteritems(): 588 | if key not in ('key', 'value', '__builtins__', '__name__', '__doc__'): 589 | setattr(cls, key, value) 590 | 591 | # Static methods: 592 | 593 | def RegisterExtension(extension_handle): 594 | extension_handle.containing_type = cls.DESCRIPTOR 595 | cls._extensions_by_name[extension_handle.full_name] = extension_handle 596 | 597 | if _IsMessageSetExtension(extension_handle): 598 | # MessageSet extension. Also register under type name. 599 | cls._extensions_by_name[ 600 | extension_handle.message_type.full_name] = extension_handle 601 | cls.RegisterExtension = staticmethod(RegisterExtension) 602 | 603 | def FromString(string): 604 | msg = cls() 605 | msg.MergeFromString(string) 606 | return msg 607 | cls.FromString = staticmethod(FromString) 608 | 609 | 610 | 611 | def _AddPropertiesForExtensions(message_descriptor, cls): 612 | """Adds properties for all fields in this protocol message type.""" 613 | extension_dict = message_descriptor.extensions_by_name 614 | for extension_name, extension_field in extension_dict.iteritems(): 615 | constant_name = extension_name.upper() + '_FIELD_NUMBER' 616 | setattr(cls, constant_name, extension_field.number) 617 | -------------------------------------------------------------------------------- /pb2.4/google/protobuf/internal/cpp_message.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/connexio/cypb/dc3d5ae866f7cfcdc3e313ecced0e8fa912542e1/pb2.4/google/protobuf/internal/cpp_message.pyc -------------------------------------------------------------------------------- /pb2.4/google/protobuf/internal/decoder.py: -------------------------------------------------------------------------------- 1 | # Protocol Buffers - Google's data interchange format 2 | # Copyright 2008 Google Inc. All rights reserved. 3 | # http://code.google.com/p/protobuf/ 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are 7 | # met: 8 | # 9 | # * Redistributions of source code must retain the above copyright 10 | # notice, this list of conditions and the following disclaimer. 11 | # * Redistributions in binary form must reproduce the above 12 | # copyright notice, this list of conditions and the following disclaimer 13 | # in the documentation and/or other materials provided with the 14 | # distribution. 15 | # * Neither the name of Google Inc. nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 
18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | 31 | """Code for decoding protocol buffer primitives. 32 | 33 | This code is very similar to encoder.py -- read the docs for that module first. 34 | 35 | A "decoder" is a function with the signature: 36 | Decode(buffer, pos, end, message, field_dict) 37 | The arguments are: 38 | buffer: The string containing the encoded message. 39 | pos: The current position in the string. 40 | end: The position in the string where the current message ends. May be 41 | less than len(buffer) if we're reading a sub-message. 42 | message: The message object into which we're parsing. 43 | field_dict: message._fields (avoids a hashtable lookup). 44 | The decoder reads the field and stores it into field_dict, returning the new 45 | buffer position. A decoder for a repeated field may proactively decode all of 46 | the elements of that field, if they appear consecutively. 47 | 48 | Note that decoders may throw any of the following: 49 | IndexError: Indicates a truncated message. 50 | struct.error: Unpacking of a fixed-width field failed. 51 | message.DecodeError: Other errors. 52 | 53 | Decoders are expected to raise an exception if they are called with pos > end. 54 | This allows callers to be lax about bounds checking: it's fine to read past 55 | "end" as long as you are sure that someone else will notice and throw an 56 | exception later on. 57 | 58 | Something up the call stack is expected to catch IndexError and struct.error 59 | and convert them to message.DecodeError. 60 | 61 | Decoders are constructed using decoder constructors with the signature: 62 | MakeDecoder(field_number, is_repeated, is_packed, key, new_default) 63 | The arguments are: 64 | field_number: The field number of the field we want to decode. 65 | is_repeated: Is the field a repeated field? (bool) 66 | is_packed: Is the field a packed field? (bool) 67 | key: The key to use when looking up the field within field_dict. 68 | (This is actually the FieldDescriptor but nothing in this 69 | file should depend on that.) 70 | new_default: A function which takes a message object as a parameter and 71 | returns a new instance of the default value for this field. 72 | (This is called for repeated fields and sub-messages, when an 73 | instance does not already exist.) 74 | 75 | As with encoders, we define a decoder constructor for every type of field. 76 | Then, for every field of every message class we construct an actual decoder. 77 | That decoder goes into a dict indexed by tag, so when we decode a message 78 | we repeatedly read a tag, look up the corresponding decoder, and invoke it.
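As a rough, illustrative sketch (the variable names below are hypothetical),
a decoder for a non-repeated int32 field with tag number 1 could be built
and invoked as:

  decode = Int32Decoder(1, False, False, field_descriptor, lambda msg: None)
  new_pos = decode(buffer, pos, end, message, message._fields)

where field_descriptor is the key under which the decoded value is stored
in message._fields.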
79 | """ 80 | 81 | __author__ = 'kenton@google.com (Kenton Varda)' 82 | 83 | import struct 84 | from google.protobuf.internal import encoder 85 | from google.protobuf.internal import wire_format 86 | from google.protobuf import message 87 | 88 | 89 | # This will overflow and thus become IEEE-754 "infinity". We would use 90 | # "float('inf')" but it doesn't work on Windows pre-Python-2.6. 91 | _POS_INF = 1e10000 92 | _NEG_INF = -_POS_INF 93 | _NAN = _POS_INF * 0 94 | 95 | 96 | # This is not for optimization, but rather to avoid conflicts with local 97 | # variables named "message". 98 | _DecodeError = message.DecodeError 99 | 100 | 101 | def _VarintDecoder(mask): 102 | """Return an encoder for a basic varint value (does not include tag). 103 | 104 | Decoded values will be bitwise-anded with the given mask before being 105 | returned, e.g. to limit them to 32 bits. The returned decoder does not 106 | take the usual "end" parameter -- the caller is expected to do bounds checking 107 | after the fact (often the caller can defer such checking until later). The 108 | decoder returns a (value, new_pos) pair. 109 | """ 110 | 111 | local_ord = ord 112 | def DecodeVarint(buffer, pos): 113 | result = 0 114 | shift = 0 115 | while 1: 116 | b = local_ord(buffer[pos]) 117 | result |= ((b & 0x7f) << shift) 118 | pos += 1 119 | if not (b & 0x80): 120 | result &= mask 121 | return (result, pos) 122 | shift += 7 123 | if shift >= 64: 124 | raise _DecodeError('Too many bytes when decoding varint.') 125 | return DecodeVarint 126 | 127 | 128 | def _SignedVarintDecoder(mask): 129 | """Like _VarintDecoder() but decodes signed values.""" 130 | 131 | local_ord = ord 132 | def DecodeVarint(buffer, pos): 133 | result = 0 134 | shift = 0 135 | while 1: 136 | b = local_ord(buffer[pos]) 137 | result |= ((b & 0x7f) << shift) 138 | pos += 1 139 | if not (b & 0x80): 140 | if result > 0x7fffffffffffffff: 141 | result -= (1 << 64) 142 | result |= ~mask 143 | else: 144 | result &= mask 145 | return (result, pos) 146 | shift += 7 147 | if shift >= 64: 148 | raise _DecodeError('Too many bytes when decoding varint.') 149 | return DecodeVarint 150 | 151 | 152 | _DecodeVarint = _VarintDecoder((1 << 64) - 1) 153 | _DecodeSignedVarint = _SignedVarintDecoder((1 << 64) - 1) 154 | 155 | # Use these versions for values which must be limited to 32 bits. 156 | _DecodeVarint32 = _VarintDecoder((1 << 32) - 1) 157 | _DecodeSignedVarint32 = _SignedVarintDecoder((1 << 32) - 1) 158 | 159 | 160 | def ReadTag(buffer, pos): 161 | """Read a tag from the buffer, and return a (tag_bytes, new_pos) tuple. 162 | 163 | We return the raw bytes of the tag rather than decoding them. The raw 164 | bytes can then be used to look up the proper decoder. This effectively allows 165 | us to trade some work that would be done in pure-python (decoding a varint) 166 | for work that is done in C (searching for a byte string in a hash table). 167 | In a low-level language it would be much cheaper to decode the varint and 168 | use that, but not in Python. 169 | """ 170 | 171 | start = pos 172 | while ord(buffer[pos]) & 0x80: 173 | pos += 1 174 | pos += 1 175 | return (buffer[start:pos], pos) 176 | 177 | 178 | # -------------------------------------------------------------------- 179 | 180 | 181 | def _SimpleDecoder(wire_type, decode_value): 182 | """Return a constructor for a decoder for fields of a particular type. 183 | 184 | Args: 185 | wire_type: The field's wire type. 186 | decode_value: A function which decodes an individual value, e.g. 
187 | _DecodeVarint() 188 | """ 189 | 190 | def SpecificDecoder(field_number, is_repeated, is_packed, key, new_default): 191 | if is_packed: 192 | local_DecodeVarint = _DecodeVarint 193 | def DecodePackedField(buffer, pos, end, message, field_dict): 194 | value = field_dict.get(key) 195 | if value is None: 196 | value = field_dict.setdefault(key, new_default(message)) 197 | (endpoint, pos) = local_DecodeVarint(buffer, pos) 198 | endpoint += pos 199 | if endpoint > end: 200 | raise _DecodeError('Truncated message.') 201 | while pos < endpoint: 202 | (element, pos) = decode_value(buffer, pos) 203 | value.append(element) 204 | if pos > endpoint: 205 | del value[-1] # Discard corrupt value. 206 | raise _DecodeError('Packed element was truncated.') 207 | return pos 208 | return DecodePackedField 209 | elif is_repeated: 210 | tag_bytes = encoder.TagBytes(field_number, wire_type) 211 | tag_len = len(tag_bytes) 212 | def DecodeRepeatedField(buffer, pos, end, message, field_dict): 213 | value = field_dict.get(key) 214 | if value is None: 215 | value = field_dict.setdefault(key, new_default(message)) 216 | while 1: 217 | (element, new_pos) = decode_value(buffer, pos) 218 | value.append(element) 219 | # Predict that the next tag is another copy of the same repeated 220 | # field. 221 | pos = new_pos + tag_len 222 | if buffer[new_pos:pos] != tag_bytes or new_pos >= end: 223 | # Prediction failed. Return. 224 | if new_pos > end: 225 | raise _DecodeError('Truncated message.') 226 | return new_pos 227 | return DecodeRepeatedField 228 | else: 229 | def DecodeField(buffer, pos, end, message, field_dict): 230 | (field_dict[key], pos) = decode_value(buffer, pos) 231 | if pos > end: 232 | del field_dict[key] # Discard corrupt value. 233 | raise _DecodeError('Truncated message.') 234 | return pos 235 | return DecodeField 236 | 237 | return SpecificDecoder 238 | 239 | 240 | def _ModifiedDecoder(wire_type, decode_value, modify_value): 241 | """Like SimpleDecoder but additionally invokes modify_value on every value 242 | before storing it. Usually modify_value is ZigZagDecode. 243 | """ 244 | 245 | # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but 246 | # not enough to make a significant difference. 247 | 248 | def InnerDecode(buffer, pos): 249 | (result, new_pos) = decode_value(buffer, pos) 250 | return (modify_value(result), new_pos) 251 | return _SimpleDecoder(wire_type, InnerDecode) 252 | 253 | 254 | def _StructPackDecoder(wire_type, format): 255 | """Return a constructor for a decoder for a fixed-width field. 256 | 257 | Args: 258 | wire_type: The field's wire type. 259 | format: The format string to pass to struct.unpack(). 260 | """ 261 | 262 | value_size = struct.calcsize(format) 263 | local_unpack = struct.unpack 264 | 265 | # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but 266 | # not enough to make a significant difference. 267 | 268 | # Note that we expect someone up-stack to catch struct.error and convert 269 | # it to _DecodeError -- this way we don't have to set up exception- 270 | # handling blocks every time we parse one value. 271 | 272 | def InnerDecode(buffer, pos): 273 | new_pos = pos + value_size 274 | result = local_unpack(format, buffer[pos:new_pos])[0] 275 | return (result, new_pos) 276 | return _SimpleDecoder(wire_type, InnerDecode) 277 | 278 | 279 | def _FloatDecoder(): 280 | """Returns a decoder for a float field. 281 | 282 | This code works around a bug in struct.unpack for non-finite 32-bit 283 | floating-point values. 
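  As an illustrative note: a single-precision value is non-finite when all
  eight exponent bits are set, e.g. the little-endian byte string
  '\x00\x00\x80\x7F' is +inf and '\x00\x00\xC0\x7F' is a NaN; InnerDecode
  below special-cases these bit patterns instead of handing them to
  struct.unpack.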
284 | """ 285 | 286 | local_unpack = struct.unpack 287 | 288 | def InnerDecode(buffer, pos): 289 | # We expect a 32-bit value in little-endian byte order. Bit 1 is the sign 290 | # bit, bits 2-9 represent the exponent, and bits 10-32 are the significand. 291 | new_pos = pos + 4 292 | float_bytes = buffer[pos:new_pos] 293 | 294 | # If this value has all its exponent bits set, then it's non-finite. 295 | # In Python 2.4, struct.unpack will convert it to a finite 64-bit value. 296 | # To avoid that, we parse it specially. 297 | if ((float_bytes[3] in '\x7F\xFF') 298 | and (float_bytes[2] >= '\x80')): 299 | # If at least one significand bit is set... 300 | if float_bytes[0:3] != '\x00\x00\x80': 301 | return (_NAN, new_pos) 302 | # If sign bit is set... 303 | if float_bytes[3] == '\xFF': 304 | return (_NEG_INF, new_pos) 305 | return (_POS_INF, new_pos) 306 | 307 | # Note that we expect someone up-stack to catch struct.error and convert 308 | # it to _DecodeError -- this way we don't have to set up exception- 309 | # handling blocks every time we parse one value. 310 | result = local_unpack('= '\xF0') 334 | and (double_bytes[0:7] != '\x00\x00\x00\x00\x00\x00\xF0')): 335 | return (_NAN, new_pos) 336 | 337 | # Note that we expect someone up-stack to catch struct.error and convert 338 | # it to _DecodeError -- this way we don't have to set up exception- 339 | # handling blocks every time we parse one value. 340 | result = local_unpack(' end: 396 | raise _DecodeError('Truncated string.') 397 | value.append(local_unicode(buffer[pos:new_pos], 'utf-8')) 398 | # Predict that the next tag is another copy of the same repeated field. 399 | pos = new_pos + tag_len 400 | if buffer[new_pos:pos] != tag_bytes or new_pos == end: 401 | # Prediction failed. Return. 402 | return new_pos 403 | return DecodeRepeatedField 404 | else: 405 | def DecodeField(buffer, pos, end, message, field_dict): 406 | (size, pos) = local_DecodeVarint(buffer, pos) 407 | new_pos = pos + size 408 | if new_pos > end: 409 | raise _DecodeError('Truncated string.') 410 | field_dict[key] = local_unicode(buffer[pos:new_pos], 'utf-8') 411 | return new_pos 412 | return DecodeField 413 | 414 | 415 | def BytesDecoder(field_number, is_repeated, is_packed, key, new_default): 416 | """Returns a decoder for a bytes field.""" 417 | 418 | local_DecodeVarint = _DecodeVarint 419 | 420 | assert not is_packed 421 | if is_repeated: 422 | tag_bytes = encoder.TagBytes(field_number, 423 | wire_format.WIRETYPE_LENGTH_DELIMITED) 424 | tag_len = len(tag_bytes) 425 | def DecodeRepeatedField(buffer, pos, end, message, field_dict): 426 | value = field_dict.get(key) 427 | if value is None: 428 | value = field_dict.setdefault(key, new_default(message)) 429 | while 1: 430 | (size, pos) = local_DecodeVarint(buffer, pos) 431 | new_pos = pos + size 432 | if new_pos > end: 433 | raise _DecodeError('Truncated string.') 434 | value.append(buffer[pos:new_pos]) 435 | # Predict that the next tag is another copy of the same repeated field. 436 | pos = new_pos + tag_len 437 | if buffer[new_pos:pos] != tag_bytes or new_pos == end: 438 | # Prediction failed. Return. 
439 | return new_pos 440 | return DecodeRepeatedField 441 | else: 442 | def DecodeField(buffer, pos, end, message, field_dict): 443 | (size, pos) = local_DecodeVarint(buffer, pos) 444 | new_pos = pos + size 445 | if new_pos > end: 446 | raise _DecodeError('Truncated string.') 447 | field_dict[key] = buffer[pos:new_pos] 448 | return new_pos 449 | return DecodeField 450 | 451 | 452 | def GroupDecoder(field_number, is_repeated, is_packed, key, new_default): 453 | """Returns a decoder for a group field.""" 454 | 455 | end_tag_bytes = encoder.TagBytes(field_number, 456 | wire_format.WIRETYPE_END_GROUP) 457 | end_tag_len = len(end_tag_bytes) 458 | 459 | assert not is_packed 460 | if is_repeated: 461 | tag_bytes = encoder.TagBytes(field_number, 462 | wire_format.WIRETYPE_START_GROUP) 463 | tag_len = len(tag_bytes) 464 | def DecodeRepeatedField(buffer, pos, end, message, field_dict): 465 | value = field_dict.get(key) 466 | if value is None: 467 | value = field_dict.setdefault(key, new_default(message)) 468 | while 1: 469 | value = field_dict.get(key) 470 | if value is None: 471 | value = field_dict.setdefault(key, new_default(message)) 472 | # Read sub-message. 473 | pos = value.add()._InternalParse(buffer, pos, end) 474 | # Read end tag. 475 | new_pos = pos+end_tag_len 476 | if buffer[pos:new_pos] != end_tag_bytes or new_pos > end: 477 | raise _DecodeError('Missing group end tag.') 478 | # Predict that the next tag is another copy of the same repeated field. 479 | pos = new_pos + tag_len 480 | if buffer[new_pos:pos] != tag_bytes or new_pos == end: 481 | # Prediction failed. Return. 482 | return new_pos 483 | return DecodeRepeatedField 484 | else: 485 | def DecodeField(buffer, pos, end, message, field_dict): 486 | value = field_dict.get(key) 487 | if value is None: 488 | value = field_dict.setdefault(key, new_default(message)) 489 | # Read sub-message. 490 | pos = value._InternalParse(buffer, pos, end) 491 | # Read end tag. 492 | new_pos = pos+end_tag_len 493 | if buffer[pos:new_pos] != end_tag_bytes or new_pos > end: 494 | raise _DecodeError('Missing group end tag.') 495 | return new_pos 496 | return DecodeField 497 | 498 | 499 | def MessageDecoder(field_number, is_repeated, is_packed, key, new_default): 500 | """Returns a decoder for a message field.""" 501 | 502 | local_DecodeVarint = _DecodeVarint 503 | 504 | assert not is_packed 505 | if is_repeated: 506 | tag_bytes = encoder.TagBytes(field_number, 507 | wire_format.WIRETYPE_LENGTH_DELIMITED) 508 | tag_len = len(tag_bytes) 509 | def DecodeRepeatedField(buffer, pos, end, message, field_dict): 510 | value = field_dict.get(key) 511 | if value is None: 512 | value = field_dict.setdefault(key, new_default(message)) 513 | while 1: 514 | value = field_dict.get(key) 515 | if value is None: 516 | value = field_dict.setdefault(key, new_default(message)) 517 | # Read length. 518 | (size, pos) = local_DecodeVarint(buffer, pos) 519 | new_pos = pos + size 520 | if new_pos > end: 521 | raise _DecodeError('Truncated message.') 522 | # Read sub-message. 523 | if value.add()._InternalParse(buffer, pos, new_pos) != new_pos: 524 | # The only reason _InternalParse would return early is if it 525 | # encountered an end-group tag. 526 | raise _DecodeError('Unexpected end-group tag.') 527 | # Predict that the next tag is another copy of the same repeated field. 528 | pos = new_pos + tag_len 529 | if buffer[new_pos:pos] != tag_bytes or new_pos == end: 530 | # Prediction failed. Return. 
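# Byte-level walk-through (illustrative, not part of the original module) of
# the embedded-message framing that MessageDecoder handles: a length-delimited
# field whose payload is itself a tag/value stream. Here field 1 (varint 150)
# is nested inside field 3 of the outer message:
#   outer tag 0x1a = (3 << 3) | 2 (length-delimited), size byte 0x03
#   inner tag 0x08 = (1 << 3) | 0 (varint), value 0x96 0x01 = 150
outer = b'\x1a\x03\x08\x96\x01'
assert outer[1:2] == b'\x03'              # inner message is 3 bytes long
inner = outer[2:2 + 3]
assert inner == b'\x08\x96\x01'
assert ((0x1a >> 3, 0x1a & 0x7), (0x08 >> 3, 0x08 & 0x7)) == ((3, 2), (1, 0))
assert (0x96 & 0x7F) | (0x01 << 7) == 150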
531 | return new_pos 532 | return DecodeRepeatedField 533 | else: 534 | def DecodeField(buffer, pos, end, message, field_dict): 535 | value = field_dict.get(key) 536 | if value is None: 537 | value = field_dict.setdefault(key, new_default(message)) 538 | # Read length. 539 | (size, pos) = local_DecodeVarint(buffer, pos) 540 | new_pos = pos + size 541 | if new_pos > end: 542 | raise _DecodeError('Truncated message.') 543 | # Read sub-message. 544 | if value._InternalParse(buffer, pos, new_pos) != new_pos: 545 | # The only reason _InternalParse would return early is if it encountered 546 | # an end-group tag. 547 | raise _DecodeError('Unexpected end-group tag.') 548 | return new_pos 549 | return DecodeField 550 | 551 | 552 | # -------------------------------------------------------------------- 553 | 554 | MESSAGE_SET_ITEM_TAG = encoder.TagBytes(1, wire_format.WIRETYPE_START_GROUP) 555 | 556 | def MessageSetItemDecoder(extensions_by_number): 557 | """Returns a decoder for a MessageSet item. 558 | 559 | The parameter is the _extensions_by_number map for the message class. 560 | 561 | The message set message looks like this: 562 | message MessageSet { 563 | repeated group Item = 1 { 564 | required int32 type_id = 2; 565 | required string message = 3; 566 | } 567 | } 568 | """ 569 | 570 | type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT) 571 | message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED) 572 | item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP) 573 | 574 | local_ReadTag = ReadTag 575 | local_DecodeVarint = _DecodeVarint 576 | local_SkipField = SkipField 577 | 578 | def DecodeItem(buffer, pos, end, message, field_dict): 579 | type_id = -1 580 | message_start = -1 581 | message_end = -1 582 | 583 | # Technically, type_id and message can appear in any order, so we need 584 | # a little loop here. 585 | while 1: 586 | (tag_bytes, pos) = local_ReadTag(buffer, pos) 587 | if tag_bytes == type_id_tag_bytes: 588 | (type_id, pos) = local_DecodeVarint(buffer, pos) 589 | elif tag_bytes == message_tag_bytes: 590 | (size, message_start) = local_DecodeVarint(buffer, pos) 591 | pos = message_end = message_start + size 592 | elif tag_bytes == item_end_tag_bytes: 593 | break 594 | else: 595 | pos = SkipField(buffer, pos, end, tag_bytes) 596 | if pos == -1: 597 | raise _DecodeError('Missing group end tag.') 598 | 599 | if pos > end: 600 | raise _DecodeError('Truncated message.') 601 | 602 | if type_id == -1: 603 | raise _DecodeError('MessageSet item missing type_id.') 604 | if message_start == -1: 605 | raise _DecodeError('MessageSet item missing message.') 606 | 607 | extension = extensions_by_number.get(type_id) 608 | if extension is not None: 609 | value = field_dict.get(extension) 610 | if value is None: 611 | value = field_dict.setdefault( 612 | extension, extension.message_type._concrete_class()) 613 | if value._InternalParse(buffer, message_start,message_end) != message_end: 614 | # The only reason _InternalParse would return early is if it encountered 615 | # an end-group tag. 616 | raise _DecodeError('Unexpected end-group tag.') 617 | 618 | return pos 619 | 620 | return DecodeItem 621 | 622 | # -------------------------------------------------------------------- 623 | # Optimization is not as heavy here because calls to SkipField() are rare, 624 | # except for handling end-group tags. 625 | 626 | def _SkipVarint(buffer, pos, end): 627 | """Skip a varint value. 
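# The single-byte tags the MessageSet item decoder above compares against,
# spelled out for clarity (tag = (field_number << 3) | wire_type, then
# varint-encoded; all of these fit in one byte):
#   item start : field 1, WIRETYPE_START_GROUP(3)      -> 0x0b
#   item end   : field 1, WIRETYPE_END_GROUP(4)        -> 0x0c
#   type_id    : field 2, WIRETYPE_VARINT(0)           -> 0x10
#   message    : field 3, WIRETYPE_LENGTH_DELIMITED(2) -> 0x1a
# Illustrative check of the arithmetic, not part of the original module:
for field_number, wire_type, expected in (
    (1, 3, 0x0b), (1, 4, 0x0c), (2, 0, 0x10), (3, 2, 0x1a)):
  assert (field_number << 3) | wire_type == expected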
Returns the new position.""" 628 | 629 | while ord(buffer[pos]) & 0x80: 630 | pos += 1 631 | pos += 1 632 | if pos > end: 633 | raise _DecodeError('Truncated message.') 634 | return pos 635 | 636 | def _SkipFixed64(buffer, pos, end): 637 | """Skip a fixed64 value. Returns the new position.""" 638 | 639 | pos += 8 640 | if pos > end: 641 | raise _DecodeError('Truncated message.') 642 | return pos 643 | 644 | def _SkipLengthDelimited(buffer, pos, end): 645 | """Skip a length-delimited value. Returns the new position.""" 646 | 647 | (size, pos) = _DecodeVarint(buffer, pos) 648 | pos += size 649 | if pos > end: 650 | raise _DecodeError('Truncated message.') 651 | return pos 652 | 653 | def _SkipGroup(buffer, pos, end): 654 | """Skip sub-group. Returns the new position.""" 655 | 656 | while 1: 657 | (tag_bytes, pos) = ReadTag(buffer, pos) 658 | new_pos = SkipField(buffer, pos, end, tag_bytes) 659 | if new_pos == -1: 660 | return pos 661 | pos = new_pos 662 | 663 | def _EndGroup(buffer, pos, end): 664 | """Skipping an END_GROUP tag returns -1 to tell the parent loop to break.""" 665 | 666 | return -1 667 | 668 | def _SkipFixed32(buffer, pos, end): 669 | """Skip a fixed32 value. Returns the new position.""" 670 | 671 | pos += 4 672 | if pos > end: 673 | raise _DecodeError('Truncated message.') 674 | return pos 675 | 676 | def _RaiseInvalidWireType(buffer, pos, end): 677 | """Skip function for unknown wire types. Raises an exception.""" 678 | 679 | raise _DecodeError('Tag had invalid wire type.') 680 | 681 | def _FieldSkipper(): 682 | """Constructs the SkipField function.""" 683 | 684 | WIRETYPE_TO_SKIPPER = [ 685 | _SkipVarint, 686 | _SkipFixed64, 687 | _SkipLengthDelimited, 688 | _SkipGroup, 689 | _EndGroup, 690 | _SkipFixed32, 691 | _RaiseInvalidWireType, 692 | _RaiseInvalidWireType, 693 | ] 694 | 695 | wiretype_mask = wire_format.TAG_TYPE_MASK 696 | local_ord = ord 697 | 698 | def SkipField(buffer, pos, end, tag_bytes): 699 | """Skips a field with the specified tag. 700 | 701 | |pos| should point to the byte immediately after the tag. 702 | 703 | Returns: 704 | The new position (after the tag value), or -1 if the tag is an end-group 705 | tag (in which case the calling loop should break). 706 | """ 707 | 708 | # The wire type is always in the first byte since varints are little-endian. 709 | wire_type = local_ord(tag_bytes[0]) & wiretype_mask 710 | return WIRETYPE_TO_SKIPPER[wire_type](buffer, pos, end) 711 | 712 | return SkipField 713 | 714 | SkipField = _FieldSkipper() 715 | -------------------------------------------------------------------------------- /pb2.4/google/protobuf/internal/message_listener.py: -------------------------------------------------------------------------------- 1 | # Protocol Buffers - Google's data interchange format 2 | # Copyright 2008 Google Inc. All rights reserved. 3 | # http://code.google.com/p/protobuf/ 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are 7 | # met: 8 | # 9 | # * Redistributions of source code must retain the above copyright 10 | # notice, this list of conditions and the following disclaimer. 11 | # * Redistributions in binary form must reproduce the above 12 | # copyright notice, this list of conditions and the following disclaimer 13 | # in the documentation and/or other materials provided with the 14 | # distribution. 15 | # * Neither the name of Google Inc. 
nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | 31 | """Defines a listener interface for observing certain 32 | state transitions on Message objects. 33 | 34 | Also defines a null implementation of this interface. 35 | """ 36 | 37 | __author__ = 'robinson@google.com (Will Robinson)' 38 | 39 | 40 | class MessageListener(object): 41 | 42 | """Listens for modifications made to a message. Meant to be registered via 43 | Message._SetListener(). 44 | 45 | Attributes: 46 | dirty: If True, then calling Modified() would be a no-op. This can be 47 | used to avoid these calls entirely in the common case. 48 | """ 49 | 50 | def Modified(self): 51 | """Called every time the message is modified in such a way that the parent 52 | message may need to be updated. This currently means either: 53 | (a) The message was modified for the first time, so the parent message 54 | should henceforth mark the message as present. 55 | (b) The message's cached byte size became dirty -- i.e. the message was 56 | modified for the first time after a previous call to ByteSize(). 57 | Therefore the parent should also mark its byte size as dirty. 58 | Note that (a) implies (b), since new objects start out with a client cached 59 | size (zero). However, we document (a) explicitly because it is important. 60 | 61 | Modified() will *only* be called in response to one of these two events -- 62 | not every time the sub-message is modified. 63 | 64 | Note that if the listener's |dirty| attribute is true, then calling 65 | Modified at the moment would be a no-op, so it can be skipped. Performance- 66 | sensitive callers should check this attribute directly before calling since 67 | it will be true most of the time. 68 | """ 69 | 70 | raise NotImplementedError 71 | 72 | 73 | class NullMessageListener(object): 74 | 75 | """No-op MessageListener implementation.""" 76 | 77 | def Modified(self): 78 | pass 79 | -------------------------------------------------------------------------------- /pb2.4/google/protobuf/internal/type_checkers.py: -------------------------------------------------------------------------------- 1 | # Protocol Buffers - Google's data interchange format 2 | # Copyright 2008 Google Inc. All rights reserved. 3 | # http://code.google.com/p/protobuf/ 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are 7 | # met: 8 | # 9 | # * Redistributions of source code must retain the above copyright 10 | # notice, this list of conditions and the following disclaimer. 
11 | # * Redistributions in binary form must reproduce the above 12 | # copyright notice, this list of conditions and the following disclaimer 13 | # in the documentation and/or other materials provided with the 14 | # distribution. 15 | # * Neither the name of Google Inc. nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | 31 | """Provides type checking routines. 32 | 33 | This module defines type checking utilities in the forms of dictionaries: 34 | 35 | VALUE_CHECKERS: A dictionary of field types and a value validation object. 36 | TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing 37 | function. 38 | TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization 39 | function. 40 | FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field typed and their 41 | coresponding wire types. 42 | TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization 43 | function. 44 | """ 45 | 46 | __author__ = 'robinson@google.com (Will Robinson)' 47 | 48 | from google.protobuf.internal import decoder 49 | from google.protobuf.internal import encoder 50 | from google.protobuf.internal import wire_format 51 | from google.protobuf import descriptor 52 | 53 | _FieldDescriptor = descriptor.FieldDescriptor 54 | 55 | 56 | def GetTypeChecker(cpp_type, field_type): 57 | """Returns a type checker for a message field of the specified types. 58 | 59 | Args: 60 | cpp_type: C++ type of the field (see descriptor.py). 61 | field_type: Protocol message field type (see descriptor.py). 62 | 63 | Returns: 64 | An instance of TypeChecker which can be used to verify the types 65 | of values assigned to a field of the specified type. 66 | """ 67 | if (cpp_type == _FieldDescriptor.CPPTYPE_STRING and 68 | field_type == _FieldDescriptor.TYPE_STRING): 69 | return UnicodeValueChecker() 70 | return _VALUE_CHECKERS[cpp_type] 71 | 72 | 73 | # None of the typecheckers below make any attempt to guard against people 74 | # subclassing builtin types and doing weird things. We're not trying to 75 | # protect against malicious clients here, just people accidentally shooting 76 | # themselves in the foot in obvious ways. 77 | 78 | class TypeChecker(object): 79 | 80 | """Type checker used to catch type errors as early as possible 81 | when the client is setting scalar fields in protocol messages. 
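# Illustrative sketch (not part of the original module) of the combined type
# check and range check that the Int32ValueChecker below performs when a value
# is assigned to an int32 field:
def _example_check_int32(proposed_value):
  if not isinstance(proposed_value, int):
    raise TypeError('%r has type %s, but expected an integer'
                    % (proposed_value, type(proposed_value)))
  if not -2147483648 <= proposed_value <= 2147483647:
    raise ValueError('Value out of range: %d' % proposed_value)

_example_check_int32(2147483647)          # accepted: fits in 32 bits
try:
  _example_check_int32(2147483648)        # rejected: overflows int32
except ValueError:
  pass
else:
  raise AssertionError('expected ValueError for out-of-range int32')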
82 | """ 83 | 84 | def __init__(self, *acceptable_types): 85 | self._acceptable_types = acceptable_types 86 | 87 | def CheckValue(self, proposed_value): 88 | if not isinstance(proposed_value, self._acceptable_types): 89 | message = ('%.1024r has type %s, but expected one of: %s' % 90 | (proposed_value, type(proposed_value), self._acceptable_types)) 91 | raise TypeError(message) 92 | 93 | 94 | # IntValueChecker and its subclasses perform integer type-checks 95 | # and bounds-checks. 96 | class IntValueChecker(object): 97 | 98 | """Checker used for integer fields. Performs type-check and range check.""" 99 | 100 | def CheckValue(self, proposed_value): 101 | if not isinstance(proposed_value, (int, long)): 102 | message = ('%.1024r has type %s, but expected one of: %s' % 103 | (proposed_value, type(proposed_value), (int, long))) 104 | raise TypeError(message) 105 | if not self._MIN <= proposed_value <= self._MAX: 106 | raise ValueError('Value out of range: %d' % proposed_value) 107 | 108 | 109 | class UnicodeValueChecker(object): 110 | 111 | """Checker used for string fields.""" 112 | 113 | def CheckValue(self, proposed_value): 114 | if not isinstance(proposed_value, (str, unicode)): 115 | message = ('%.1024r has type %s, but expected one of: %s' % 116 | (proposed_value, type(proposed_value), (str, unicode))) 117 | raise TypeError(message) 118 | 119 | # If the value is of type 'str' make sure that it is in 7-bit ASCII 120 | # encoding. 121 | if isinstance(proposed_value, str): 122 | try: 123 | unicode(proposed_value, 'ascii') 124 | except UnicodeDecodeError: 125 | raise ValueError('%.1024r has type str, but isn\'t in 7-bit ASCII ' 126 | 'encoding. Non-ASCII strings must be converted to ' 127 | 'unicode objects before being added.' % 128 | (proposed_value)) 129 | 130 | 131 | class Int32ValueChecker(IntValueChecker): 132 | # We're sure to use ints instead of longs here since comparison may be more 133 | # efficient. 134 | _MIN = -2147483648 135 | _MAX = 2147483647 136 | 137 | 138 | class Uint32ValueChecker(IntValueChecker): 139 | _MIN = 0 140 | _MAX = (1 << 32) - 1 141 | 142 | 143 | class Int64ValueChecker(IntValueChecker): 144 | _MIN = -(1 << 63) 145 | _MAX = (1 << 63) - 1 146 | 147 | 148 | class Uint64ValueChecker(IntValueChecker): 149 | _MIN = 0 150 | _MAX = (1 << 64) - 1 151 | 152 | 153 | # Type-checkers for all scalar CPPTYPEs. 154 | _VALUE_CHECKERS = { 155 | _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(), 156 | _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(), 157 | _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(), 158 | _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(), 159 | _FieldDescriptor.CPPTYPE_DOUBLE: TypeChecker( 160 | float, int, long), 161 | _FieldDescriptor.CPPTYPE_FLOAT: TypeChecker( 162 | float, int, long), 163 | _FieldDescriptor.CPPTYPE_BOOL: TypeChecker(bool, int), 164 | _FieldDescriptor.CPPTYPE_ENUM: Int32ValueChecker(), 165 | _FieldDescriptor.CPPTYPE_STRING: TypeChecker(str), 166 | } 167 | 168 | 169 | # Map from field type to a function F, such that F(field_num, value) 170 | # gives the total byte size for a value of the given type. This 171 | # byte size includes tag information and any other additional space 172 | # associated with serializing "value". 
173 | TYPE_TO_BYTE_SIZE_FN = { 174 | _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize, 175 | _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize, 176 | _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize, 177 | _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize, 178 | _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize, 179 | _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize, 180 | _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize, 181 | _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize, 182 | _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize, 183 | _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize, 184 | _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize, 185 | _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize, 186 | _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize, 187 | _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize, 188 | _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize, 189 | _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize, 190 | _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize, 191 | _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize 192 | } 193 | 194 | 195 | # Maps from field types to encoder constructors. 196 | TYPE_TO_ENCODER = { 197 | _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder, 198 | _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder, 199 | _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder, 200 | _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder, 201 | _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder, 202 | _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder, 203 | _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder, 204 | _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder, 205 | _FieldDescriptor.TYPE_STRING: encoder.StringEncoder, 206 | _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder, 207 | _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder, 208 | _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder, 209 | _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder, 210 | _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder, 211 | _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder, 212 | _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder, 213 | _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder, 214 | _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder, 215 | } 216 | 217 | 218 | # Maps from field types to sizer constructors. 219 | TYPE_TO_SIZER = { 220 | _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer, 221 | _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer, 222 | _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer, 223 | _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer, 224 | _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer, 225 | _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer, 226 | _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer, 227 | _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer, 228 | _FieldDescriptor.TYPE_STRING: encoder.StringSizer, 229 | _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer, 230 | _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer, 231 | _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer, 232 | _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer, 233 | _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer, 234 | _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer, 235 | _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer, 236 | _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer, 237 | _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer, 238 | } 239 | 240 | 241 | # Maps from field type to a decoder constructor. 
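# Hedged usage sketch (not part of the original module) of the TYPE_TO_DECODER
# map defined just below, assuming the bundled pb2.4 package is importable and
# Python 2 (the decoders index byte strings with ord()). The constructor and
# call signatures are the ones used throughout decoder.py above.
from google.protobuf import descriptor as _descriptor_example
from google.protobuf.internal import type_checkers as _type_checkers_example

_FD_EXAMPLE = _descriptor_example.FieldDescriptor
_decode = _type_checkers_example.TYPE_TO_DECODER[_FD_EXAMPLE.TYPE_INT32](
    1,            # field_number
    False,        # is_repeated
    False,        # is_packed
    'my_field',   # key used in field_dict (a FieldDescriptor in real use)
    None)         # new_default; unused for a non-repeated scalar

_field_dict = {}
_end = _decode('\xac\x02', 0, 2, None, _field_dict)   # varint 300, tag consumed
assert (_field_dict['my_field'], _end) == (300, 2)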
242 | TYPE_TO_DECODER = { 243 | _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder, 244 | _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder, 245 | _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder, 246 | _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder, 247 | _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder, 248 | _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder, 249 | _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder, 250 | _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder, 251 | _FieldDescriptor.TYPE_STRING: decoder.StringDecoder, 252 | _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder, 253 | _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder, 254 | _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder, 255 | _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder, 256 | _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder, 257 | _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder, 258 | _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder, 259 | _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder, 260 | _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder, 261 | } 262 | 263 | # Maps from field type to expected wiretype. 264 | FIELD_TYPE_TO_WIRE_TYPE = { 265 | _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64, 266 | _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32, 267 | _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT, 268 | _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT, 269 | _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT, 270 | _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64, 271 | _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32, 272 | _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT, 273 | _FieldDescriptor.TYPE_STRING: 274 | wire_format.WIRETYPE_LENGTH_DELIMITED, 275 | _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP, 276 | _FieldDescriptor.TYPE_MESSAGE: 277 | wire_format.WIRETYPE_LENGTH_DELIMITED, 278 | _FieldDescriptor.TYPE_BYTES: 279 | wire_format.WIRETYPE_LENGTH_DELIMITED, 280 | _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT, 281 | _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT, 282 | _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32, 283 | _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64, 284 | _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT, 285 | _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT, 286 | } 287 | -------------------------------------------------------------------------------- /pb2.4/google/protobuf/internal/wire_format.py: -------------------------------------------------------------------------------- 1 | # Protocol Buffers - Google's data interchange format 2 | # Copyright 2008 Google Inc. All rights reserved. 3 | # http://code.google.com/p/protobuf/ 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are 7 | # met: 8 | # 9 | # * Redistributions of source code must retain the above copyright 10 | # notice, this list of conditions and the following disclaimer. 11 | # * Redistributions in binary form must reproduce the above 12 | # copyright notice, this list of conditions and the following disclaimer 13 | # in the documentation and/or other materials provided with the 14 | # distribution. 15 | # * Neither the name of Google Inc. nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 
18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | 31 | """Constants and static functions to support protocol buffer wire format.""" 32 | 33 | __author__ = 'robinson@google.com (Will Robinson)' 34 | 35 | import struct 36 | from google.protobuf import descriptor 37 | from google.protobuf import message 38 | 39 | 40 | TAG_TYPE_BITS = 3 # Number of bits used to hold type info in a proto tag. 41 | TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1 # 0x7 42 | 43 | # These numbers identify the wire type of a protocol buffer value. 44 | # We use the least-significant TAG_TYPE_BITS bits of the varint-encoded 45 | # tag-and-type to store one of these WIRETYPE_* constants. 46 | # These values must match WireType enum in google/protobuf/wire_format.h. 47 | WIRETYPE_VARINT = 0 48 | WIRETYPE_FIXED64 = 1 49 | WIRETYPE_LENGTH_DELIMITED = 2 50 | WIRETYPE_START_GROUP = 3 51 | WIRETYPE_END_GROUP = 4 52 | WIRETYPE_FIXED32 = 5 53 | _WIRETYPE_MAX = 5 54 | 55 | 56 | # Bounds for various integer types. 57 | INT32_MAX = int((1 << 31) - 1) 58 | INT32_MIN = int(-(1 << 31)) 59 | UINT32_MAX = (1 << 32) - 1 60 | 61 | INT64_MAX = (1 << 63) - 1 62 | INT64_MIN = -(1 << 63) 63 | UINT64_MAX = (1 << 64) - 1 64 | 65 | # "struct" format strings that will encode/decode the specified formats. 66 | FORMAT_UINT32_LITTLE_ENDIAN = '> TAG_TYPE_BITS), (tag & TAG_TYPE_MASK) 98 | 99 | 100 | def ZigZagEncode(value): 101 | """ZigZag Transform: Encodes signed integers so that they can be 102 | effectively used with varint encoding. See wire_format.h for 103 | more details. 104 | """ 105 | if value >= 0: 106 | return value << 1 107 | return (value << 1) ^ (~0) 108 | 109 | 110 | def ZigZagDecode(value): 111 | """Inverse of ZigZagEncode().""" 112 | if not value & 0x1: 113 | return value >> 1 114 | return (value >> 1) ^ (~0) 115 | 116 | 117 | 118 | # The *ByteSize() functions below return the number of bytes required to 119 | # serialize "field number + type" information and then serialize the value. 120 | 121 | 122 | def Int32ByteSize(field_number, int32): 123 | return Int64ByteSize(field_number, int32) 124 | 125 | 126 | def Int32ByteSizeNoTag(int32): 127 | return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32) 128 | 129 | 130 | def Int64ByteSize(field_number, int64): 131 | # Have to convert to uint before calling UInt64ByteSize(). 
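# Worked example of the ZigZag transform defined above (illustrative
# restatement, not part of the original module): signed values with small
# magnitude map to small unsigned varints instead of ten-byte encodings.
#   0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, 2 -> 4, ...
def _example_zigzag_encode(value):
  if value >= 0:
    return value << 1
  return (value << 1) ^ (~0)

def _example_zigzag_decode(value):
  if not value & 0x1:
    return value >> 1
  return (value >> 1) ^ (~0)

assert _example_zigzag_encode(-1) == 1 and _example_zigzag_encode(2) == 4
for v in (0, -1, 1, -2, 2, -64, 2147483647, -2147483648):
  assert _example_zigzag_decode(_example_zigzag_encode(v)) == v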
132 | return UInt64ByteSize(field_number, 0xffffffffffffffff & int64) 133 | 134 | 135 | def UInt32ByteSize(field_number, uint32): 136 | return UInt64ByteSize(field_number, uint32) 137 | 138 | 139 | def UInt64ByteSize(field_number, uint64): 140 | return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64) 141 | 142 | 143 | def SInt32ByteSize(field_number, int32): 144 | return UInt32ByteSize(field_number, ZigZagEncode(int32)) 145 | 146 | 147 | def SInt64ByteSize(field_number, int64): 148 | return UInt64ByteSize(field_number, ZigZagEncode(int64)) 149 | 150 | 151 | def Fixed32ByteSize(field_number, fixed32): 152 | return TagByteSize(field_number) + 4 153 | 154 | 155 | def Fixed64ByteSize(field_number, fixed64): 156 | return TagByteSize(field_number) + 8 157 | 158 | 159 | def SFixed32ByteSize(field_number, sfixed32): 160 | return TagByteSize(field_number) + 4 161 | 162 | 163 | def SFixed64ByteSize(field_number, sfixed64): 164 | return TagByteSize(field_number) + 8 165 | 166 | 167 | def FloatByteSize(field_number, flt): 168 | return TagByteSize(field_number) + 4 169 | 170 | 171 | def DoubleByteSize(field_number, double): 172 | return TagByteSize(field_number) + 8 173 | 174 | 175 | def BoolByteSize(field_number, b): 176 | return TagByteSize(field_number) + 1 177 | 178 | 179 | def EnumByteSize(field_number, enum): 180 | return UInt32ByteSize(field_number, enum) 181 | 182 | 183 | def StringByteSize(field_number, string): 184 | return BytesByteSize(field_number, string.encode('utf-8')) 185 | 186 | 187 | def BytesByteSize(field_number, b): 188 | return (TagByteSize(field_number) 189 | + _VarUInt64ByteSizeNoTag(len(b)) 190 | + len(b)) 191 | 192 | 193 | def GroupByteSize(field_number, message): 194 | return (2 * TagByteSize(field_number) # START and END group. 195 | + message.ByteSize()) 196 | 197 | 198 | def MessageByteSize(field_number, message): 199 | return (TagByteSize(field_number) 200 | + _VarUInt64ByteSizeNoTag(message.ByteSize()) 201 | + message.ByteSize()) 202 | 203 | 204 | def MessageSetItemByteSize(field_number, msg): 205 | # First compute the sizes of the tags. 206 | # There are 2 tags for the beginning and ending of the repeated group, that 207 | # is field number 1, one with field number 2 (type_id) and one with field 208 | # number 3 (message). 209 | total_size = (2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3)) 210 | 211 | # Add the number of bytes for type_id. 212 | total_size += _VarUInt64ByteSizeNoTag(field_number) 213 | 214 | message_size = msg.ByteSize() 215 | 216 | # The number of bytes for encoding the length of the message. 217 | total_size += _VarUInt64ByteSizeNoTag(message_size) 218 | 219 | # The size of the message. 220 | total_size += message_size 221 | return total_size 222 | 223 | 224 | def TagByteSize(field_number): 225 | """Returns the bytes required to serialize a tag with this field number.""" 226 | # Just pass in type 0, since the type won't affect the tag+type size. 227 | return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0)) 228 | 229 | 230 | # Private helper function for the *ByteSize() functions above. 231 | 232 | def _VarUInt64ByteSizeNoTag(uint64): 233 | """Returns the number of bytes required to serialize a single varint 234 | using boundary value comparisons. (unrolled loop optimization -WPierce) 235 | uint64 must be unsigned. 
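# Worked example (illustrative, not part of the original module) of the
# boundary comparisons used by _VarUInt64ByteSizeNoTag: a varint needs one
# byte per started 7-bit group, so 300 (9 significant bits) takes 2 bytes and
# an int32 field with a 1-byte tag and value 300 serializes to 3 bytes.
def _example_varint_size(value):
  size = 1
  while value > 0x7F:
    value >>= 7
    size += 1
  return size

assert _example_varint_size(0x7F) == 1
assert _example_varint_size(300) == 2
assert _example_varint_size(1 << 63) == 10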
236 | """ 237 | if uint64 <= 0x7f: return 1 238 | if uint64 <= 0x3fff: return 2 239 | if uint64 <= 0x1fffff: return 3 240 | if uint64 <= 0xfffffff: return 4 241 | if uint64 <= 0x7ffffffff: return 5 242 | if uint64 <= 0x3ffffffffff: return 6 243 | if uint64 <= 0x1ffffffffffff: return 7 244 | if uint64 <= 0xffffffffffffff: return 8 245 | if uint64 <= 0x7fffffffffffffff: return 9 246 | if uint64 > UINT64_MAX: 247 | raise message.EncodeError('Value out of range: %d' % uint64) 248 | return 10 249 | 250 | 251 | NON_PACKABLE_TYPES = ( 252 | descriptor.FieldDescriptor.TYPE_STRING, 253 | descriptor.FieldDescriptor.TYPE_GROUP, 254 | descriptor.FieldDescriptor.TYPE_MESSAGE, 255 | descriptor.FieldDescriptor.TYPE_BYTES 256 | ) 257 | 258 | 259 | def IsTypePackable(field_type): 260 | """Return true iff packable = true is valid for fields of this type. 261 | 262 | Args: 263 | field_type: a FieldDescriptor::Type value. 264 | 265 | Returns: 266 | True iff fields of this type are packable. 267 | """ 268 | return field_type not in NON_PACKABLE_TYPES 269 | -------------------------------------------------------------------------------- /pb2.4/google/protobuf/message.py: -------------------------------------------------------------------------------- 1 | # Protocol Buffers - Google's data interchange format 2 | # Copyright 2008 Google Inc. All rights reserved. 3 | # http://code.google.com/p/protobuf/ 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are 7 | # met: 8 | # 9 | # * Redistributions of source code must retain the above copyright 10 | # notice, this list of conditions and the following disclaimer. 11 | # * Redistributions in binary form must reproduce the above 12 | # copyright notice, this list of conditions and the following disclaimer 13 | # in the documentation and/or other materials provided with the 14 | # distribution. 15 | # * Neither the name of Google Inc. nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | 31 | # TODO(robinson): We should just make these methods all "pure-virtual" and move 32 | # all implementation out, into reflection.py for now. 33 | 34 | 35 | """Contains an abstract base class for protocol messages.""" 36 | 37 | __author__ = 'robinson@google.com (Will Robinson)' 38 | 39 | 40 | class Error(Exception): pass 41 | class DecodeError(Error): pass 42 | class EncodeError(Error): pass 43 | 44 | 45 | class Message(object): 46 | 47 | """Abstract base class for protocol messages. 
48 | 49 | Protocol message classes are almost always generated by the protocol 50 | compiler. These generated types subclass Message and implement the methods 51 | shown below. 52 | 53 | TODO(robinson): Link to an HTML document here. 54 | 55 | TODO(robinson): Document that instances of this class will also 56 | have an Extensions attribute with __getitem__ and __setitem__. 57 | Again, not sure how to best convey this. 58 | 59 | TODO(robinson): Document that the class must also have a static 60 | RegisterExtension(extension_field) method. 61 | Not sure how to best express at this point. 62 | """ 63 | 64 | # TODO(robinson): Document these fields and methods. 65 | 66 | __slots__ = [] 67 | 68 | DESCRIPTOR = None 69 | 70 | def __deepcopy__(self, memo=None): 71 | clone = type(self)() 72 | clone.MergeFrom(self) 73 | return clone 74 | 75 | def __eq__(self, other_msg): 76 | raise NotImplementedError 77 | 78 | def __ne__(self, other_msg): 79 | # Can't just say self != other_msg, since that would infinitely recurse. :) 80 | return not self == other_msg 81 | 82 | def __hash__(self): 83 | raise TypeError('unhashable object') 84 | 85 | def __str__(self): 86 | raise NotImplementedError 87 | 88 | def __unicode__(self): 89 | raise NotImplementedError 90 | 91 | def MergeFrom(self, other_msg): 92 | """Merges the contents of the specified message into current message. 93 | 94 | This method merges the contents of the specified message into the current 95 | message. Singular fields that are set in the specified message overwrite 96 | the corresponding fields in the current message. Repeated fields are 97 | appended. Singular sub-messages and groups are recursively merged. 98 | 99 | Args: 100 | other_msg: Message to merge into the current message. 101 | """ 102 | raise NotImplementedError 103 | 104 | def CopyFrom(self, other_msg): 105 | """Copies the content of the specified message into the current message. 106 | 107 | The method clears the current message and then merges the specified 108 | message using MergeFrom. 109 | 110 | Args: 111 | other_msg: Message to copy into the current one. 112 | """ 113 | if self is other_msg: 114 | return 115 | self.Clear() 116 | self.MergeFrom(other_msg) 117 | 118 | def Clear(self): 119 | """Clears all data that was set in the message.""" 120 | raise NotImplementedError 121 | 122 | def SetInParent(self): 123 | """Mark this as present in the parent. 124 | 125 | This normally happens automatically when you assign a field of a 126 | sub-message, but sometimes you want to make the sub-message 127 | present while keeping it empty. If you find yourself using this, 128 | you may want to reconsider your design.""" 129 | raise NotImplementedError 130 | 131 | def IsInitialized(self): 132 | """Checks if the message is initialized. 133 | 134 | Returns: 135 | The method returns True if the message is initialized (i.e. all of its 136 | required fields are set). 137 | """ 138 | raise NotImplementedError 139 | 140 | # TODO(robinson): MergeFromString() should probably return None and be 141 | # implemented in terms of a helper that returns the # of bytes read. Our 142 | # deserialization routines would use the helper when recursively 143 | # deserializing, but the end user would almost always just want the no-return 144 | # MergeFromString(). 145 | 146 | def MergeFromString(self, serialized): 147 | """Merges serialized protocol buffer data into this message. 
148 | 149 | When we find a field in |serialized| that is already present 150 | in this message: 151 | - If it's a "repeated" field, we append to the end of our list. 152 | - Else, if it's a scalar, we overwrite our field. 153 | - Else, (it's a nonrepeated composite), we recursively merge 154 | into the existing composite. 155 | 156 | TODO(robinson): Document handling of unknown fields. 157 | 158 | Args: 159 | serialized: Any object that allows us to call buffer(serialized) 160 | to access a string of bytes using the buffer interface. 161 | 162 | TODO(robinson): When we switch to a helper, this will return None. 163 | 164 | Returns: 165 | The number of bytes read from |serialized|. 166 | For non-group messages, this will always be len(serialized), 167 | but for messages which are actually groups, this will 168 | generally be less than len(serialized), since we must 169 | stop when we reach an END_GROUP tag. Note that if 170 | we *do* stop because of an END_GROUP tag, the number 171 | of bytes returned does not include the bytes 172 | for the END_GROUP tag information. 173 | """ 174 | raise NotImplementedError 175 | 176 | def ParseFromString(self, serialized): 177 | """Like MergeFromString(), except we clear the object first.""" 178 | self.Clear() 179 | self.MergeFromString(serialized) 180 | 181 | def SerializeToString(self): 182 | """Serializes the protocol message to a binary string. 183 | 184 | Returns: 185 | A binary string representation of the message if all of the required 186 | fields in the message are set (i.e. the message is initialized). 187 | 188 | Raises: 189 | message.EncodeError if the message isn't initialized. 190 | """ 191 | raise NotImplementedError 192 | 193 | def SerializePartialToString(self): 194 | """Serializes the protocol message to a binary string. 195 | 196 | This method is similar to SerializeToString but doesn't check if the 197 | message is initialized. 198 | 199 | Returns: 200 | A string representation of the partial message. 201 | """ 202 | raise NotImplementedError 203 | 204 | # TODO(robinson): Decide whether we like these better 205 | # than auto-generated has_foo() and clear_foo() methods 206 | # on the instances themselves. This way is less consistent 207 | # with C++, but it makes reflection-type access easier and 208 | # reduces the number of magically autogenerated things. 209 | # 210 | # TODO(robinson): Be sure to document (and test) exactly 211 | # which field names are accepted here. Are we case-sensitive? 212 | # What do we do with fields that share names with Python keywords 213 | # like 'lambda' and 'yield'? 214 | # 215 | # nnorwitz says: 216 | # """ 217 | # Typically (in python), an underscore is appended to names that are 218 | # keywords. So they would become lambda_ or yield_. 219 | # """ 220 | def ListFields(self): 221 | """Returns a list of (FieldDescriptor, value) tuples for all 222 | fields in the message which are not empty. A singular field is non-empty 223 | if HasField() would return true, and a repeated field is non-empty if 224 | it contains at least one element. The fields are ordered by field 225 | number""" 226 | raise NotImplementedError 227 | 228 | def HasField(self, field_name): 229 | """Checks if a certain field is set for the message. 
Note if the 230 | field_name is not defined in the message descriptor, ValueError will be 231 | raised.""" 232 | raise NotImplementedError 233 | 234 | def ClearField(self, field_name): 235 | raise NotImplementedError 236 | 237 | def HasExtension(self, extension_handle): 238 | raise NotImplementedError 239 | 240 | def ClearExtension(self, extension_handle): 241 | raise NotImplementedError 242 | 243 | def ByteSize(self): 244 | """Returns the serialized size of this message. 245 | Recursively calls ByteSize() on all contained messages. 246 | """ 247 | raise NotImplementedError 248 | 249 | def _SetListener(self, message_listener): 250 | """Internal method used by the protocol message implementation. 251 | Clients should not call this directly. 252 | 253 | Sets a listener that this message will call on certain state transitions. 254 | 255 | The purpose of this method is to register back-edges from children to 256 | parents at runtime, for the purpose of setting "has" bits and 257 | byte-size-dirty bits in the parent and ancestor objects whenever a child or 258 | descendant object is modified. 259 | 260 | If the client wants to disconnect this Message from the object tree, she 261 | explicitly sets callback to None. 262 | 263 | If message_listener is None, unregisters any existing listener. Otherwise, 264 | message_listener must implement the MessageListener interface in 265 | internal/message_listener.py, and we discard any listener registered 266 | via a previous _SetListener() call. 267 | """ 268 | raise NotImplementedError 269 | -------------------------------------------------------------------------------- /pb2.4/google/protobuf/message.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/connexio/cypb/dc3d5ae866f7cfcdc3e313ecced0e8fa912542e1/pb2.4/google/protobuf/message.pyc -------------------------------------------------------------------------------- /pb2.4/google/protobuf/reflection.py: -------------------------------------------------------------------------------- 1 | # Protocol Buffers - Google's data interchange format 2 | # Copyright 2008 Google Inc. All rights reserved. 3 | # http://code.google.com/p/protobuf/ 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are 7 | # met: 8 | # 9 | # * Redistributions of source code must retain the above copyright 10 | # notice, this list of conditions and the following disclaimer. 11 | # * Redistributions in binary form must reproduce the above 12 | # copyright notice, this list of conditions and the following disclaimer 13 | # in the documentation and/or other materials provided with the 14 | # distribution. 15 | # * Neither the name of Google Inc. nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | 31 | # This code is meant to work on Python 2.4 and above only. 32 | 33 | """Contains a metaclass and helper functions used to create 34 | protocol message classes from Descriptor objects at runtime. 35 | 36 | Recall that a metaclass is the "type" of a class. 37 | (A class is to a metaclass what an instance is to a class.) 38 | 39 | In this case, we use the GeneratedProtocolMessageType metaclass 40 | to inject all the useful functionality into the classes 41 | output by the protocol compiler at compile-time. 42 | 43 | The upshot of all this is that the real implementation 44 | details for ALL pure-Python protocol buffers are *here in 45 | this file*. 46 | """ 47 | 48 | __author__ = 'robinson@google.com (Will Robinson)' 49 | 50 | 51 | from google.protobuf.internal import api_implementation 52 | from google.protobuf import descriptor as descriptor_mod 53 | _FieldDescriptor = descriptor_mod.FieldDescriptor 54 | 55 | 56 | if api_implementation.Type() == 'cpp': 57 | from google.protobuf.internal import cpp_message 58 | _NewMessage = cpp_message.NewMessage 59 | _InitMessage = cpp_message.InitMessage 60 | else: 61 | from google.protobuf.internal import python_message 62 | _NewMessage = python_message.NewMessage 63 | _InitMessage = python_message.InitMessage 64 | 65 | 66 | class GeneratedProtocolMessageType(type): 67 | 68 | """Metaclass for protocol message classes created at runtime from Descriptors. 69 | 70 | We add implementations for all methods described in the Message class. We 71 | also create properties to allow getting/setting all fields in the protocol 72 | message. Finally, we create slots to prevent users from accidentally 73 | "setting" nonexistent fields in the protocol message, which then wouldn't get 74 | serialized / deserialized properly. 75 | 76 | The protocol compiler currently uses this metaclass to create protocol 77 | message classes at runtime. Clients can also manually create their own 78 | classes at runtime, as in this example: 79 | 80 | mydescriptor = Descriptor(.....) 81 | class MyProtoClass(Message): 82 | __metaclass__ = GeneratedProtocolMessageType 83 | DESCRIPTOR = mydescriptor 84 | myproto_instance = MyProtoClass() 85 | myproto.foo_field = 23 86 | ... 87 | """ 88 | 89 | # Must be consistent with the protocol-compiler code in 90 | # proto2/compiler/internal/generator.*. 91 | _DESCRIPTOR_KEY = 'DESCRIPTOR' 92 | 93 | def __new__(cls, name, bases, dictionary): 94 | """Custom allocation for runtime-generated class types. 95 | 96 | We override __new__ because this is apparently the only place 97 | where we can meaningfully set __slots__ on the class we're creating(?). 98 | (The interplay between metaclasses and slots is not very well-documented). 99 | 100 | Args: 101 | name: Name of the class (ignored, but required by the 102 | metaclass protocol). 103 | bases: Base classes of the class we're constructing. 104 | (Should be message.Message). 
We ignore this field, but 105 | it's required by the metaclass protocol 106 | dictionary: The class dictionary of the class we're 107 | constructing. dictionary[_DESCRIPTOR_KEY] must contain 108 | a Descriptor object describing this protocol message 109 | type. 110 | 111 | Returns: 112 | Newly-allocated class. 113 | """ 114 | descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY] 115 | _NewMessage(descriptor, dictionary) 116 | superclass = super(GeneratedProtocolMessageType, cls) 117 | 118 | new_class = superclass.__new__(cls, name, bases, dictionary) 119 | setattr(descriptor, '_concrete_class', new_class) 120 | return new_class 121 | 122 | def __init__(cls, name, bases, dictionary): 123 | """Here we perform the majority of our work on the class. 124 | We add enum getters, an __init__ method, implementations 125 | of all Message methods, and properties for all fields 126 | in the protocol type. 127 | 128 | Args: 129 | name: Name of the class (ignored, but required by the 130 | metaclass protocol). 131 | bases: Base classes of the class we're constructing. 132 | (Should be message.Message). We ignore this field, but 133 | it's required by the metaclass protocol 134 | dictionary: The class dictionary of the class we're 135 | constructing. dictionary[_DESCRIPTOR_KEY] must contain 136 | a Descriptor object describing this protocol message 137 | type. 138 | """ 139 | descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY] 140 | _InitMessage(descriptor, cls) 141 | superclass = super(GeneratedProtocolMessageType, cls) 142 | superclass.__init__(name, bases, dictionary) 143 | -------------------------------------------------------------------------------- /pb2.4/google/protobuf/reflection.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/connexio/cypb/dc3d5ae866f7cfcdc3e313ecced0e8fa912542e1/pb2.4/google/protobuf/reflection.pyc -------------------------------------------------------------------------------- /pb2.4/google/protobuf/service.py: -------------------------------------------------------------------------------- 1 | # Protocol Buffers - Google's data interchange format 2 | # Copyright 2008 Google Inc. All rights reserved. 3 | # http://code.google.com/p/protobuf/ 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are 7 | # met: 8 | # 9 | # * Redistributions of source code must retain the above copyright 10 | # notice, this list of conditions and the following disclaimer. 11 | # * Redistributions in binary form must reproduce the above 12 | # copyright notice, this list of conditions and the following disclaimer 13 | # in the documentation and/or other materials provided with the 14 | # distribution. 15 | # * Neither the name of Google Inc. nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | 31 | """DEPRECATED: Declares the RPC service interfaces. 32 | 33 | This module declares the abstract interfaces underlying proto2 RPC 34 | services. These are intended to be independent of any particular RPC 35 | implementation, so that proto2 services can be used on top of a variety 36 | of implementations. Starting with version 2.3.0, RPC implementations should 37 | not try to build on these, but should instead provide code generator plugins 38 | which generate code specific to the particular RPC implementation. This way 39 | the generated code can be more appropriate for the implementation in use 40 | and can avoid unnecessary layers of indirection. 41 | """ 42 | 43 | __author__ = 'petar@google.com (Petar Petrov)' 44 | 45 | 46 | class RpcException(Exception): 47 | """Exception raised on failed blocking RPC method call.""" 48 | pass 49 | 50 | 51 | class Service(object): 52 | 53 | """Abstract base interface for protocol-buffer-based RPC services. 54 | 55 | Services themselves are abstract classes (implemented either by servers or as 56 | stubs), but they subclass this base interface. The methods of this 57 | interface can be used to call the methods of the service without knowing 58 | its exact type at compile time (analogous to the Message interface). 59 | """ 60 | 61 | def GetDescriptor(): 62 | """Retrieves this service's descriptor.""" 63 | raise NotImplementedError 64 | 65 | def CallMethod(self, method_descriptor, rpc_controller, 66 | request, done): 67 | """Calls a method of the service specified by method_descriptor. 68 | 69 | If "done" is None then the call is blocking and the response 70 | message will be returned directly. Otherwise the call is asynchronous 71 | and "done" will later be called with the response value. 72 | 73 | In the blocking case, RpcException will be raised on error. 74 | 75 | Preconditions: 76 | * method_descriptor.service == GetDescriptor 77 | * request is of the exact same classes as returned by 78 | GetRequestClass(method). 79 | * After the call has started, the request must not be modified. 80 | * "rpc_controller" is of the correct type for the RPC implementation being 81 | used by this Service. For stubs, the "correct type" depends on the 82 | RpcChannel which the stub is using. 83 | 84 | Postconditions: 85 | * "done" will be called when the method is complete. This may be 86 | before CallMethod() returns or it may be at some point in the future. 87 | * If the RPC failed, the response value passed to "done" will be None. 88 | Further details about the failure can be found by querying the 89 | RpcController. 90 | """ 91 | raise NotImplementedError 92 | 93 | def GetRequestClass(self, method_descriptor): 94 | """Returns the class of the request message for the specified method. 95 | 96 | CallMethod() requires that the request is of a particular subclass of 97 | Message. GetRequestClass() gets the default instance of this required 98 | type. 
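Note that the return value is the message class itself, not an instance; callers instantiate it themselves, as in the example below.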
99 | 100 | Example: 101 | method = service.GetDescriptor().FindMethodByName("Foo") 102 | request = stub.GetRequestClass(method)() 103 | request.ParseFromString(input) 104 | service.CallMethod(method, request, callback) 105 | """ 106 | raise NotImplementedError 107 | 108 | def GetResponseClass(self, method_descriptor): 109 | """Returns the class of the response message for the specified method. 110 | 111 | This method isn't really needed, as the RpcChannel's CallMethod constructs 112 | the response protocol message. It's provided anyway in case it is useful 113 | for the caller to know the response type in advance. 114 | """ 115 | raise NotImplementedError 116 | 117 | 118 | class RpcController(object): 119 | 120 | """An RpcController mediates a single method call. 121 | 122 | The primary purpose of the controller is to provide a way to manipulate 123 | settings specific to the RPC implementation and to find out about RPC-level 124 | errors. The methods provided by the RpcController interface are intended 125 | to be a "least common denominator" set of features which we expect all 126 | implementations to support. Specific implementations may provide more 127 | advanced features (e.g. deadline propagation). 128 | """ 129 | 130 | # Client-side methods below 131 | 132 | def Reset(self): 133 | """Resets the RpcController to its initial state. 134 | 135 | After the RpcController has been reset, it may be reused in 136 | a new call. Must not be called while an RPC is in progress. 137 | """ 138 | raise NotImplementedError 139 | 140 | def Failed(self): 141 | """Returns true if the call failed. 142 | 143 | After a call has finished, returns true if the call failed. The possible 144 | reasons for failure depend on the RPC implementation. Failed() must not 145 | be called before a call has finished. If Failed() returns true, the 146 | contents of the response message are undefined. 147 | """ 148 | raise NotImplementedError 149 | 150 | def ErrorText(self): 151 | """If Failed is true, returns a human-readable description of the error.""" 152 | raise NotImplementedError 153 | 154 | def StartCancel(self): 155 | """Initiate cancellation. 156 | 157 | Advises the RPC system that the caller desires that the RPC call be 158 | canceled. The RPC system may cancel it immediately, may wait awhile and 159 | then cancel it, or may not even cancel the call at all. If the call is 160 | canceled, the "done" callback will still be called and the RpcController 161 | will indicate that the call failed at that time. 162 | """ 163 | raise NotImplementedError 164 | 165 | # Server-side methods below 166 | 167 | def SetFailed(self, reason): 168 | """Sets a failure reason. 169 | 170 | Causes Failed() to return true on the client side. "reason" will be 171 | incorporated into the message returned by ErrorText(). If you find 172 | you need to return machine-readable information about failures, you 173 | should incorporate it into your response protocol buffer and should 174 | NOT call SetFailed(). 175 | """ 176 | raise NotImplementedError 177 | 178 | def IsCanceled(self): 179 | """Checks if the client cancelled the RPC. 180 | 181 | If true, indicates that the client canceled the RPC, so the server may 182 | as well give up on replying to it. The server should still call the 183 | final "done" callback. 184 | """ 185 | raise NotImplementedError 186 | 187 | def NotifyOnCancel(self, callback): 188 | """Sets a callback to invoke on cancel. 189 | 190 | Asks that the given callback be called when the RPC is canceled. 
The 191 | callback will always be called exactly once. If the RPC completes without 192 | being canceled, the callback will be called after completion. If the RPC 193 | has already been canceled when NotifyOnCancel() is called, the callback 194 | will be called immediately. 195 | 196 | NotifyOnCancel() must be called no more than once per request. 197 | """ 198 | raise NotImplementedError 199 | 200 | 201 | class RpcChannel(object): 202 | 203 | """Abstract interface for an RPC channel. 204 | 205 | An RpcChannel represents a communication line to a service which can be used 206 | to call that service's methods. The service may be running on another 207 | machine. Normally, you should not use an RpcChannel directly, but instead 208 | construct a stub {@link Service} wrapping it. Example: 209 | 210 | Example: 211 | RpcChannel channel = rpcImpl.Channel("remotehost.example.com:1234") 212 | RpcController controller = rpcImpl.Controller() 213 | MyService service = MyService_Stub(channel) 214 | service.MyMethod(controller, request, callback) 215 | """ 216 | 217 | def CallMethod(self, method_descriptor, rpc_controller, 218 | request, response_class, done): 219 | """Calls the method identified by the descriptor. 220 | 221 | Call the given method of the remote service. The signature of this 222 | procedure looks the same as Service.CallMethod(), but the requirements 223 | are less strict in one important way: the request object doesn't have to 224 | be of any specific class as long as its descriptor is method.input_type. 225 | """ 226 | raise NotImplementedError 227 | -------------------------------------------------------------------------------- /pb2.4/google/protobuf/service_reflection.py: -------------------------------------------------------------------------------- 1 | # Protocol Buffers - Google's data interchange format 2 | # Copyright 2008 Google Inc. All rights reserved. 3 | # http://code.google.com/p/protobuf/ 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are 7 | # met: 8 | # 9 | # * Redistributions of source code must retain the above copyright 10 | # notice, this list of conditions and the following disclaimer. 11 | # * Redistributions in binary form must reproduce the above 12 | # copyright notice, this list of conditions and the following disclaimer 13 | # in the documentation and/or other materials provided with the 14 | # distribution. 15 | # * Neither the name of Google Inc. nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
30 | 31 | """Contains metaclasses used to create protocol service and service stub 32 | classes from ServiceDescriptor objects at runtime. 33 | 34 | The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to 35 | inject all useful functionality into the classes output by the protocol 36 | compiler at compile-time. 37 | """ 38 | 39 | __author__ = 'petar@google.com (Petar Petrov)' 40 | 41 | 42 | class GeneratedServiceType(type): 43 | 44 | """Metaclass for service classes created at runtime from ServiceDescriptors. 45 | 46 | Implementations for all methods described in the Service class are added here 47 | by this class. We also create properties to allow getting/setting all fields 48 | in the protocol message. 49 | 50 | The protocol compiler currently uses this metaclass to create protocol service 51 | classes at runtime. Clients can also manually create their own classes at 52 | runtime, as in this example: 53 | 54 | mydescriptor = ServiceDescriptor(.....) 55 | class MyProtoService(service.Service): 56 | __metaclass__ = GeneratedServiceType 57 | DESCRIPTOR = mydescriptor 58 | myservice_instance = MyProtoService() 59 | ... 60 | """ 61 | 62 | _DESCRIPTOR_KEY = 'DESCRIPTOR' 63 | 64 | def __init__(cls, name, bases, dictionary): 65 | """Creates a message service class. 66 | 67 | Args: 68 | name: Name of the class (ignored, but required by the metaclass 69 | protocol). 70 | bases: Base classes of the class being constructed. 71 | dictionary: The class dictionary of the class being constructed. 72 | dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object 73 | describing this protocol service type. 74 | """ 75 | # Don't do anything if this class doesn't have a descriptor. This happens 76 | # when a service class is subclassed. 77 | if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary: 78 | return 79 | descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY] 80 | service_builder = _ServiceBuilder(descriptor) 81 | service_builder.BuildService(cls) 82 | 83 | 84 | class GeneratedServiceStubType(GeneratedServiceType): 85 | 86 | """Metaclass for service stubs created at runtime from ServiceDescriptors. 87 | 88 | This class has similar responsibilities as GeneratedServiceType, except that 89 | it creates the service stub classes. 90 | """ 91 | 92 | _DESCRIPTOR_KEY = 'DESCRIPTOR' 93 | 94 | def __init__(cls, name, bases, dictionary): 95 | """Creates a message service stub class. 96 | 97 | Args: 98 | name: Name of the class (ignored, here). 99 | bases: Base classes of the class being constructed. 100 | dictionary: The class dictionary of the class being constructed. 101 | dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object 102 | describing this protocol service type. 103 | """ 104 | super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary) 105 | # Don't do anything if this class doesn't have a descriptor. This happens 106 | # when a service stub is subclassed. 107 | if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary: 108 | return 109 | descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY] 110 | service_stub_builder = _ServiceStubBuilder(descriptor) 111 | service_stub_builder.BuildServiceStub(cls) 112 | 113 | 114 | class _ServiceBuilder(object): 115 | 116 | """This class constructs a protocol service class using a service descriptor. 117 | 118 | Given a service descriptor, this class constructs a class that represents 119 | the specified service descriptor. 
One service builder instance constructs 120 | exactly one service class. That means all instances of that class share the 121 | same builder. 122 | """ 123 | 124 | def __init__(self, service_descriptor): 125 | """Initializes an instance of the service class builder. 126 | 127 | Args: 128 | service_descriptor: ServiceDescriptor to use when constructing the 129 | service class. 130 | """ 131 | self.descriptor = service_descriptor 132 | 133 | def BuildService(self, cls): 134 | """Constructs the service class. 135 | 136 | Args: 137 | cls: The class that will be constructed. 138 | """ 139 | 140 | # CallMethod needs to operate with an instance of the Service class. This 141 | # internal wrapper function exists only to be able to pass the service 142 | # instance to the method that does the real CallMethod work. 143 | def _WrapCallMethod(srvc, method_descriptor, 144 | rpc_controller, request, callback): 145 | return self._CallMethod(srvc, method_descriptor, 146 | rpc_controller, request, callback) 147 | self.cls = cls 148 | cls.CallMethod = _WrapCallMethod 149 | cls.GetDescriptor = staticmethod(lambda: self.descriptor) 150 | cls.GetDescriptor.__doc__ = "Returns the service descriptor." 151 | cls.GetRequestClass = self._GetRequestClass 152 | cls.GetResponseClass = self._GetResponseClass 153 | for method in self.descriptor.methods: 154 | setattr(cls, method.name, self._GenerateNonImplementedMethod(method)) 155 | 156 | def _CallMethod(self, srvc, method_descriptor, 157 | rpc_controller, request, callback): 158 | """Calls the method described by a given method descriptor. 159 | 160 | Args: 161 | srvc: Instance of the service for which this method is called. 162 | method_descriptor: Descriptor that represent the method to call. 163 | rpc_controller: RPC controller to use for this method's execution. 164 | request: Request protocol message. 165 | callback: A callback to invoke after the method has completed. 166 | """ 167 | if method_descriptor.containing_service != self.descriptor: 168 | raise RuntimeError( 169 | 'CallMethod() given method descriptor for wrong service type.') 170 | method = getattr(srvc, method_descriptor.name) 171 | return method(rpc_controller, request, callback) 172 | 173 | def _GetRequestClass(self, method_descriptor): 174 | """Returns the class of the request protocol message. 175 | 176 | Args: 177 | method_descriptor: Descriptor of the method for which to return the 178 | request protocol message class. 179 | 180 | Returns: 181 | A class that represents the input protocol message of the specified 182 | method. 183 | """ 184 | if method_descriptor.containing_service != self.descriptor: 185 | raise RuntimeError( 186 | 'GetRequestClass() given method descriptor for wrong service type.') 187 | return method_descriptor.input_type._concrete_class 188 | 189 | def _GetResponseClass(self, method_descriptor): 190 | """Returns the class of the response protocol message. 191 | 192 | Args: 193 | method_descriptor: Descriptor of the method for which to return the 194 | response protocol message class. 195 | 196 | Returns: 197 | A class that represents the output protocol message of the specified 198 | method. 199 | """ 200 | if method_descriptor.containing_service != self.descriptor: 201 | raise RuntimeError( 202 | 'GetResponseClass() given method descriptor for wrong service type.') 203 | return method_descriptor.output_type._concrete_class 204 | 205 | def _GenerateNonImplementedMethod(self, method): 206 | """Generates and returns a method that can be set for a service methods. 
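The generated method simply fails the call: it reports the missing implementation through the RpcController and invokes the callback with None.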
207 | 208 | Args: 209 | method: Descriptor of the service method for which a method is to be 210 | generated. 211 | 212 | Returns: 213 | A method that can be added to the service class. 214 | """ 215 | return lambda inst, rpc_controller, request, callback: ( 216 | self._NonImplementedMethod(method.name, rpc_controller, callback)) 217 | 218 | def _NonImplementedMethod(self, method_name, rpc_controller, callback): 219 | """The body of all methods in the generated service class. 220 | 221 | Args: 222 | method_name: Name of the method being executed. 223 | rpc_controller: RPC controller used to execute this method. 224 | callback: A callback which will be invoked when the method finishes. 225 | """ 226 | rpc_controller.SetFailed('Method %s not implemented.' % method_name) 227 | callback(None) 228 | 229 | 230 | class _ServiceStubBuilder(object): 231 | 232 | """Constructs a protocol service stub class using a service descriptor. 233 | 234 | Given a service descriptor, this class constructs a suitable stub class. 235 | A stub is just a type-safe wrapper around an RpcChannel which emulates a 236 | local implementation of the service. 237 | 238 | One service stub builder instance constructs exactly one class. It means all 239 | instances of that class share the same service stub builder. 240 | """ 241 | 242 | def __init__(self, service_descriptor): 243 | """Initializes an instance of the service stub class builder. 244 | 245 | Args: 246 | service_descriptor: ServiceDescriptor to use when constructing the 247 | stub class. 248 | """ 249 | self.descriptor = service_descriptor 250 | 251 | def BuildServiceStub(self, cls): 252 | """Constructs the stub class. 253 | 254 | Args: 255 | cls: The class that will be constructed. 256 | """ 257 | 258 | def _ServiceStubInit(stub, rpc_channel): 259 | stub.rpc_channel = rpc_channel 260 | self.cls = cls 261 | cls.__init__ = _ServiceStubInit 262 | for method in self.descriptor.methods: 263 | setattr(cls, method.name, self._GenerateStubMethod(method)) 264 | 265 | def _GenerateStubMethod(self, method): 266 | return (lambda inst, rpc_controller, request, callback=None: 267 | self._StubMethod(inst, method, rpc_controller, request, callback)) 268 | 269 | def _StubMethod(self, stub, method_descriptor, 270 | rpc_controller, request, callback): 271 | """The body of all service methods in the generated stub class. 272 | 273 | Args: 274 | stub: Stub instance. 275 | method_descriptor: Descriptor of the invoked method. 276 | rpc_controller: Rpc controller to execute the method. 277 | request: Request protocol message. 278 | callback: A callback to execute when the method finishes. 279 | Returns: 280 | Response message (in case of blocking call). 281 | """ 282 | return stub.rpc_channel.CallMethod( 283 | method_descriptor, rpc_controller, request, 284 | method_descriptor.output_type._concrete_class, callback) 285 | -------------------------------------------------------------------------------- /pb2.4/google/protobuf/text_format.py: -------------------------------------------------------------------------------- 1 | # Protocol Buffers - Google's data interchange format 2 | # Copyright 2008 Google Inc. All rights reserved. 3 | # http://code.google.com/p/protobuf/ 4 | # 5 | # Redistribution and use in source and binary forms, with or without 6 | # modification, are permitted provided that the following conditions are 7 | # met: 8 | # 9 | # * Redistributions of source code must retain the above copyright 10 | # notice, this list of conditions and the following disclaimer. 
11 | # * Redistributions in binary form must reproduce the above 12 | # copyright notice, this list of conditions and the following disclaimer 13 | # in the documentation and/or other materials provided with the 14 | # distribution. 15 | # * Neither the name of Google Inc. nor the names of its 16 | # contributors may be used to endorse or promote products derived from 17 | # this software without specific prior written permission. 18 | # 19 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 22 | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 23 | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 24 | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 25 | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 27 | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | 31 | """Contains routines for printing protocol messages in text format.""" 32 | 33 | __author__ = 'kenton@google.com (Kenton Varda)' 34 | 35 | import cStringIO 36 | import re 37 | 38 | from collections import deque 39 | from google.protobuf.internal import type_checkers 40 | from google.protobuf import descriptor 41 | 42 | __all__ = [ 'MessageToString', 'PrintMessage', 'PrintField', 43 | 'PrintFieldValue', 'Merge' ] 44 | 45 | 46 | # Infinity and NaN are not explicitly supported by Python pre-2.6, and 47 | # float('inf') does not work on Windows (pre-2.6). 48 | _INFINITY = 1e10000 # overflows, thus will actually be infinity. 49 | _NAN = _INFINITY * 0 50 | 51 | 52 | class ParseError(Exception): 53 | """Thrown in case of ASCII parsing error.""" 54 | 55 | 56 | def MessageToString(message, as_utf8=False, as_one_line=False): 57 | out = cStringIO.StringIO() 58 | PrintMessage(message, out, as_utf8=as_utf8, as_one_line=as_one_line) 59 | result = out.getvalue() 60 | out.close() 61 | if as_one_line: 62 | return result.rstrip() 63 | return result 64 | 65 | 66 | def PrintMessage(message, out, indent=0, as_utf8=False, as_one_line=False): 67 | for field, value in message.ListFields(): 68 | if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: 69 | for element in value: 70 | PrintField(field, element, out, indent, as_utf8, as_one_line) 71 | else: 72 | PrintField(field, value, out, indent, as_utf8, as_one_line) 73 | 74 | 75 | def PrintField(field, value, out, indent=0, as_utf8=False, as_one_line=False): 76 | """Print a single field name/value pair. For repeated fields, the value 77 | should be a single element.""" 78 | 79 | out.write(' ' * indent); 80 | if field.is_extension: 81 | out.write('[') 82 | if (field.containing_type.GetOptions().message_set_wire_format and 83 | field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and 84 | field.message_type == field.extension_scope and 85 | field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL): 86 | out.write(field.message_type.full_name) 87 | else: 88 | out.write(field.full_name) 89 | out.write(']') 90 | elif field.type == descriptor.FieldDescriptor.TYPE_GROUP: 91 | # For groups, use the capitalized name. 
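    # (A group's field name is the lowercased form of its type name, so printing
    # the type name here lets Merge below map the token back to the right field.)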
92 | out.write(field.message_type.name) 93 | else: 94 | out.write(field.name) 95 | 96 | if field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE: 97 | # The colon is optional in this case, but our cross-language golden files 98 | # don't include it. 99 | out.write(': ') 100 | 101 | PrintFieldValue(field, value, out, indent, as_utf8, as_one_line) 102 | if as_one_line: 103 | out.write(' ') 104 | else: 105 | out.write('\n') 106 | 107 | 108 | def PrintFieldValue(field, value, out, indent=0, 109 | as_utf8=False, as_one_line=False): 110 | """Print a single field value (not including name). For repeated fields, 111 | the value should be a single element.""" 112 | 113 | if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: 114 | if as_one_line: 115 | out.write(' { ') 116 | PrintMessage(value, out, indent, as_utf8, as_one_line) 117 | out.write('}') 118 | else: 119 | out.write(' {\n') 120 | PrintMessage(value, out, indent + 2, as_utf8, as_one_line) 121 | out.write(' ' * indent + '}') 122 | elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: 123 | out.write(field.enum_type.values_by_number[value].name) 124 | elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: 125 | out.write('\"') 126 | if type(value) is unicode: 127 | out.write(_CEscape(value.encode('utf-8'), as_utf8)) 128 | else: 129 | out.write(_CEscape(value, as_utf8)) 130 | out.write('\"') 131 | elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: 132 | if value: 133 | out.write("true") 134 | else: 135 | out.write("false") 136 | else: 137 | out.write(str(value)) 138 | 139 | 140 | def Merge(text, message): 141 | """Merges an ASCII representation of a protocol message into a message. 142 | 143 | Args: 144 | text: Message ASCII representation. 145 | message: A protocol buffer message to merge into. 146 | 147 | Raises: 148 | ParseError: On ASCII parsing problems. 149 | """ 150 | tokenizer = _Tokenizer(text) 151 | while not tokenizer.AtEnd(): 152 | _MergeField(tokenizer, message) 153 | 154 | 155 | def _MergeField(tokenizer, message): 156 | """Merges a single protocol message field into a message. 157 | 158 | Args: 159 | tokenizer: A tokenizer to parse the field name and values. 160 | message: A protocol message to record the data. 161 | 162 | Raises: 163 | ParseError: In case of ASCII parsing problems. 164 | """ 165 | message_descriptor = message.DESCRIPTOR 166 | if tokenizer.TryConsume('['): 167 | name = [tokenizer.ConsumeIdentifier()] 168 | while tokenizer.TryConsume('.'): 169 | name.append(tokenizer.ConsumeIdentifier()) 170 | name = '.'.join(name) 171 | 172 | if not message_descriptor.is_extendable: 173 | raise tokenizer.ParseErrorPreviousToken( 174 | 'Message type "%s" does not have extensions.' % 175 | message_descriptor.full_name) 176 | field = message.Extensions._FindExtensionByName(name) 177 | if not field: 178 | raise tokenizer.ParseErrorPreviousToken( 179 | 'Extension "%s" not registered.' % name) 180 | elif message_descriptor != field.containing_type: 181 | raise tokenizer.ParseErrorPreviousToken( 182 | 'Extension "%s" does not extend message type "%s".' % ( 183 | name, message_descriptor.full_name)) 184 | tokenizer.Consume(']') 185 | else: 186 | name = tokenizer.ConsumeIdentifier() 187 | field = message_descriptor.fields_by_name.get(name, None) 188 | 189 | # Group names are expected to be capitalized as they appear in the 190 | # .proto file, which actually matches their type names, not their field 191 | # names. 
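    # If the straight lookup failed, retry with the lowercased name and keep the
    # result only if it really is a group whose type name matches the token.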
192 | if not field: 193 | field = message_descriptor.fields_by_name.get(name.lower(), None) 194 | if field and field.type != descriptor.FieldDescriptor.TYPE_GROUP: 195 | field = None 196 | 197 | if (field and field.type == descriptor.FieldDescriptor.TYPE_GROUP and 198 | field.message_type.name != name): 199 | field = None 200 | 201 | if not field: 202 | raise tokenizer.ParseErrorPreviousToken( 203 | 'Message type "%s" has no field named "%s".' % ( 204 | message_descriptor.full_name, name)) 205 | 206 | if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: 207 | tokenizer.TryConsume(':') 208 | 209 | if tokenizer.TryConsume('<'): 210 | end_token = '>' 211 | else: 212 | tokenizer.Consume('{') 213 | end_token = '}' 214 | 215 | if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: 216 | if field.is_extension: 217 | sub_message = message.Extensions[field].add() 218 | else: 219 | sub_message = getattr(message, field.name).add() 220 | else: 221 | if field.is_extension: 222 | sub_message = message.Extensions[field] 223 | else: 224 | sub_message = getattr(message, field.name) 225 | sub_message.SetInParent() 226 | 227 | while not tokenizer.TryConsume(end_token): 228 | if tokenizer.AtEnd(): 229 | raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token)) 230 | _MergeField(tokenizer, sub_message) 231 | else: 232 | _MergeScalarField(tokenizer, message, field) 233 | 234 | 235 | def _MergeScalarField(tokenizer, message, field): 236 | """Merges a single protocol message scalar field into a message. 237 | 238 | Args: 239 | tokenizer: A tokenizer to parse the field value. 240 | message: A protocol message to record the data. 241 | field: The descriptor of the field to be merged. 242 | 243 | Raises: 244 | ParseError: In case of ASCII parsing problems. 245 | RuntimeError: On runtime errors. 246 | """ 247 | tokenizer.Consume(':') 248 | value = None 249 | 250 | if field.type in (descriptor.FieldDescriptor.TYPE_INT32, 251 | descriptor.FieldDescriptor.TYPE_SINT32, 252 | descriptor.FieldDescriptor.TYPE_SFIXED32): 253 | value = tokenizer.ConsumeInt32() 254 | elif field.type in (descriptor.FieldDescriptor.TYPE_INT64, 255 | descriptor.FieldDescriptor.TYPE_SINT64, 256 | descriptor.FieldDescriptor.TYPE_SFIXED64): 257 | value = tokenizer.ConsumeInt64() 258 | elif field.type in (descriptor.FieldDescriptor.TYPE_UINT32, 259 | descriptor.FieldDescriptor.TYPE_FIXED32): 260 | value = tokenizer.ConsumeUint32() 261 | elif field.type in (descriptor.FieldDescriptor.TYPE_UINT64, 262 | descriptor.FieldDescriptor.TYPE_FIXED64): 263 | value = tokenizer.ConsumeUint64() 264 | elif field.type in (descriptor.FieldDescriptor.TYPE_FLOAT, 265 | descriptor.FieldDescriptor.TYPE_DOUBLE): 266 | value = tokenizer.ConsumeFloat() 267 | elif field.type == descriptor.FieldDescriptor.TYPE_BOOL: 268 | value = tokenizer.ConsumeBool() 269 | elif field.type == descriptor.FieldDescriptor.TYPE_STRING: 270 | value = tokenizer.ConsumeString() 271 | elif field.type == descriptor.FieldDescriptor.TYPE_BYTES: 272 | value = tokenizer.ConsumeByteString() 273 | elif field.type == descriptor.FieldDescriptor.TYPE_ENUM: 274 | # Enum can be specified by a number (the enum value), or by 275 | # a string literal (the enum name). 276 | enum_descriptor = field.enum_type 277 | if tokenizer.LookingAtInteger(): 278 | number = tokenizer.ConsumeInt32() 279 | enum_value = enum_descriptor.values_by_number.get(number, None) 280 | if enum_value is None: 281 | raise tokenizer.ParseErrorPreviousToken( 282 | 'Enum type "%s" has no value with number %d.' 
% ( 283 | enum_descriptor.full_name, number)) 284 | else: 285 | identifier = tokenizer.ConsumeIdentifier() 286 | enum_value = enum_descriptor.values_by_name.get(identifier, None) 287 | if enum_value is None: 288 | raise tokenizer.ParseErrorPreviousToken( 289 | 'Enum type "%s" has no value named %s.' % ( 290 | enum_descriptor.full_name, identifier)) 291 | value = enum_value.number 292 | else: 293 | raise RuntimeError('Unknown field type %d' % field.type) 294 | 295 | if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: 296 | if field.is_extension: 297 | message.Extensions[field].append(value) 298 | else: 299 | getattr(message, field.name).append(value) 300 | else: 301 | if field.is_extension: 302 | message.Extensions[field] = value 303 | else: 304 | setattr(message, field.name, value) 305 | 306 | 307 | class _Tokenizer(object): 308 | """Protocol buffer ASCII representation tokenizer. 309 | 310 | This class handles the lower level string parsing by splitting it into 311 | meaningful tokens. 312 | 313 | It was directly ported from the Java protocol buffer API. 314 | """ 315 | 316 | _WHITESPACE = re.compile('(\\s|(#.*$))+', re.MULTILINE) 317 | _TOKEN = re.compile( 318 | '[a-zA-Z_][0-9a-zA-Z_+-]*|' # an identifier 319 | '[0-9+-][0-9a-zA-Z_.+-]*|' # a number 320 | '\"([^\"\n\\\\]|\\\\.)*(\"|\\\\?$)|' # a double-quoted string 321 | '\'([^\'\n\\\\]|\\\\.)*(\'|\\\\?$)') # a single-quoted string 322 | _IDENTIFIER = re.compile('\w+') 323 | _INTEGER_CHECKERS = [type_checkers.Uint32ValueChecker(), 324 | type_checkers.Int32ValueChecker(), 325 | type_checkers.Uint64ValueChecker(), 326 | type_checkers.Int64ValueChecker()] 327 | _FLOAT_INFINITY = re.compile('-?inf(inity)?f?', re.IGNORECASE) 328 | _FLOAT_NAN = re.compile("nanf?", re.IGNORECASE) 329 | 330 | def __init__(self, text_message): 331 | self._text_message = text_message 332 | 333 | self._position = 0 334 | self._line = -1 335 | self._column = 0 336 | self._token_start = None 337 | self.token = '' 338 | self._lines = deque(text_message.split('\n')) 339 | self._current_line = '' 340 | self._previous_line = 0 341 | self._previous_column = 0 342 | self._SkipWhitespace() 343 | self.NextToken() 344 | 345 | def AtEnd(self): 346 | """Checks the end of the text was reached. 347 | 348 | Returns: 349 | True iff the end was reached. 350 | """ 351 | return self.token == '' 352 | 353 | def _PopLine(self): 354 | while len(self._current_line) <= self._column: 355 | if not self._lines: 356 | self._current_line = '' 357 | return 358 | self._line += 1 359 | self._column = 0 360 | self._current_line = self._lines.popleft() 361 | 362 | def _SkipWhitespace(self): 363 | while True: 364 | self._PopLine() 365 | match = self._WHITESPACE.match(self._current_line, self._column) 366 | if not match: 367 | break 368 | length = len(match.group(0)) 369 | self._column += length 370 | 371 | def TryConsume(self, token): 372 | """Tries to consume a given piece of text. 373 | 374 | Args: 375 | token: Text to consume. 376 | 377 | Returns: 378 | True iff the text was consumed. 379 | """ 380 | if self.token == token: 381 | self.NextToken() 382 | return True 383 | return False 384 | 385 | def Consume(self, token): 386 | """Consumes a piece of text. 387 | 388 | Args: 389 | token: Text to consume. 390 | 391 | Raises: 392 | ParseError: If the text couldn't be consumed. 393 | """ 394 | if not self.TryConsume(token): 395 | raise self._ParseError('Expected "%s".' % token) 396 | 397 | def LookingAtInteger(self): 398 | """Checks if the current token is an integer. 
399 | 400 | Returns: 401 | True iff the current token is an integer. 402 | """ 403 | if not self.token: 404 | return False 405 | c = self.token[0] 406 | return (c >= '0' and c <= '9') or c == '-' or c == '+' 407 | 408 | def ConsumeIdentifier(self): 409 | """Consumes protocol message field identifier. 410 | 411 | Returns: 412 | Identifier string. 413 | 414 | Raises: 415 | ParseError: If an identifier couldn't be consumed. 416 | """ 417 | result = self.token 418 | if not self._IDENTIFIER.match(result): 419 | raise self._ParseError('Expected identifier.') 420 | self.NextToken() 421 | return result 422 | 423 | def ConsumeInt32(self): 424 | """Consumes a signed 32bit integer number. 425 | 426 | Returns: 427 | The integer parsed. 428 | 429 | Raises: 430 | ParseError: If a signed 32bit integer couldn't be consumed. 431 | """ 432 | try: 433 | result = self._ParseInteger(self.token, is_signed=True, is_long=False) 434 | except ValueError, e: 435 | raise self._IntegerParseError(e) 436 | self.NextToken() 437 | return result 438 | 439 | def ConsumeUint32(self): 440 | """Consumes an unsigned 32bit integer number. 441 | 442 | Returns: 443 | The integer parsed. 444 | 445 | Raises: 446 | ParseError: If an unsigned 32bit integer couldn't be consumed. 447 | """ 448 | try: 449 | result = self._ParseInteger(self.token, is_signed=False, is_long=False) 450 | except ValueError, e: 451 | raise self._IntegerParseError(e) 452 | self.NextToken() 453 | return result 454 | 455 | def ConsumeInt64(self): 456 | """Consumes a signed 64bit integer number. 457 | 458 | Returns: 459 | The integer parsed. 460 | 461 | Raises: 462 | ParseError: If a signed 64bit integer couldn't be consumed. 463 | """ 464 | try: 465 | result = self._ParseInteger(self.token, is_signed=True, is_long=True) 466 | except ValueError, e: 467 | raise self._IntegerParseError(e) 468 | self.NextToken() 469 | return result 470 | 471 | def ConsumeUint64(self): 472 | """Consumes an unsigned 64bit integer number. 473 | 474 | Returns: 475 | The integer parsed. 476 | 477 | Raises: 478 | ParseError: If an unsigned 64bit integer couldn't be consumed. 479 | """ 480 | try: 481 | result = self._ParseInteger(self.token, is_signed=False, is_long=True) 482 | except ValueError, e: 483 | raise self._IntegerParseError(e) 484 | self.NextToken() 485 | return result 486 | 487 | def ConsumeFloat(self): 488 | """Consumes an floating point number. 489 | 490 | Returns: 491 | The number parsed. 492 | 493 | Raises: 494 | ParseError: If a floating point number couldn't be consumed. 495 | """ 496 | text = self.token 497 | if self._FLOAT_INFINITY.match(text): 498 | self.NextToken() 499 | if text.startswith('-'): 500 | return -_INFINITY 501 | return _INFINITY 502 | 503 | if self._FLOAT_NAN.match(text): 504 | self.NextToken() 505 | return _NAN 506 | 507 | try: 508 | result = float(text) 509 | except ValueError, e: 510 | raise self._FloatParseError(e) 511 | self.NextToken() 512 | return result 513 | 514 | def ConsumeBool(self): 515 | """Consumes a boolean value. 516 | 517 | Returns: 518 | The bool parsed. 519 | 520 | Raises: 521 | ParseError: If a boolean value couldn't be consumed. 522 | """ 523 | if self.token in ('true', 't', '1'): 524 | self.NextToken() 525 | return True 526 | elif self.token in ('false', 'f', '0'): 527 | self.NextToken() 528 | return False 529 | else: 530 | raise self._ParseError('Expected "true" or "false".') 531 | 532 | def ConsumeString(self): 533 | """Consumes a string value. 534 | 535 | Returns: 536 | The string parsed. 
537 | 538 | Raises: 539 | ParseError: If a string value couldn't be consumed. 540 | """ 541 | bytes = self.ConsumeByteString() 542 | try: 543 | return unicode(bytes, 'utf-8') 544 | except UnicodeDecodeError, e: 545 | raise self._StringParseError(e) 546 | 547 | def ConsumeByteString(self): 548 | """Consumes a byte array value. 549 | 550 | Returns: 551 | The array parsed (as a string). 552 | 553 | Raises: 554 | ParseError: If a byte array value couldn't be consumed. 555 | """ 556 | list = [self._ConsumeSingleByteString()] 557 | while len(self.token) > 0 and self.token[0] in ('\'', '"'): 558 | list.append(self._ConsumeSingleByteString()) 559 | return "".join(list) 560 | 561 | def _ConsumeSingleByteString(self): 562 | """Consume one token of a string literal. 563 | 564 | String literals (whether bytes or text) can come in multiple adjacent 565 | tokens which are automatically concatenated, like in C or Python. This 566 | method only consumes one token. 567 | """ 568 | text = self.token 569 | if len(text) < 1 or text[0] not in ('\'', '"'): 570 | raise self._ParseError('Exptected string.') 571 | 572 | if len(text) < 2 or text[-1] != text[0]: 573 | raise self._ParseError('String missing ending quote.') 574 | 575 | try: 576 | result = _CUnescape(text[1:-1]) 577 | except ValueError, e: 578 | raise self._ParseError(str(e)) 579 | self.NextToken() 580 | return result 581 | 582 | def _ParseInteger(self, text, is_signed=False, is_long=False): 583 | """Parses an integer. 584 | 585 | Args: 586 | text: The text to parse. 587 | is_signed: True if a signed integer must be parsed. 588 | is_long: True if a long integer must be parsed. 589 | 590 | Returns: 591 | The integer value. 592 | 593 | Raises: 594 | ValueError: Thrown Iff the text is not a valid integer. 595 | """ 596 | pos = 0 597 | if text.startswith('-'): 598 | pos += 1 599 | 600 | base = 10 601 | if text.startswith('0x', pos) or text.startswith('0X', pos): 602 | base = 16 603 | elif text.startswith('0', pos): 604 | base = 8 605 | 606 | # Do the actual parsing. Exception handling is propagated to caller. 607 | result = int(text, base) 608 | 609 | # Check if the integer is sane. Exceptions handled by callers. 610 | checker = self._INTEGER_CHECKERS[2 * int(is_long) + int(is_signed)] 611 | checker.CheckValue(result) 612 | return result 613 | 614 | def ParseErrorPreviousToken(self, message): 615 | """Creates and *returns* a ParseError for the previously read token. 616 | 617 | Args: 618 | message: A message to set for the exception. 619 | 620 | Returns: 621 | A ParseError instance. 
622 | """ 623 | return ParseError('%d:%d : %s' % ( 624 | self._previous_line + 1, self._previous_column + 1, message)) 625 | 626 | def _ParseError(self, message): 627 | """Creates and *returns* a ParseError for the current token.""" 628 | return ParseError('%d:%d : %s' % ( 629 | self._line + 1, self._column - len(self.token) + 1, message)) 630 | 631 | def _IntegerParseError(self, e): 632 | return self._ParseError('Couldn\'t parse integer: ' + str(e)) 633 | 634 | def _FloatParseError(self, e): 635 | return self._ParseError('Couldn\'t parse number: ' + str(e)) 636 | 637 | def _StringParseError(self, e): 638 | return self._ParseError('Couldn\'t parse string: ' + str(e)) 639 | 640 | def NextToken(self): 641 | """Reads the next meaningful token.""" 642 | self._previous_line = self._line 643 | self._previous_column = self._column 644 | 645 | self._column += len(self.token) 646 | self._SkipWhitespace() 647 | 648 | if not self._lines and len(self._current_line) <= self._column: 649 | self.token = '' 650 | return 651 | 652 | match = self._TOKEN.match(self._current_line, self._column) 653 | if match: 654 | token = match.group(0) 655 | self.token = token 656 | else: 657 | self.token = self._current_line[self._column] 658 | 659 | 660 | # text.encode('string_escape') does not seem to satisfy our needs as it 661 | # encodes unprintable characters using two-digit hex escapes whereas our 662 | # C++ unescaping function allows hex escapes to be any length. So, 663 | # "\0011".encode('string_escape') ends up being "\\x011", which will be 664 | # decoded in C++ as a single-character string with char code 0x11. 665 | def _CEscape(text, as_utf8): 666 | def escape(c): 667 | o = ord(c) 668 | if o == 10: return r"\n" # optional escape 669 | if o == 13: return r"\r" # optional escape 670 | if o == 9: return r"\t" # optional escape 671 | if o == 39: return r"\'" # optional escape 672 | 673 | if o == 34: return r'\"' # necessary escape 674 | if o == 92: return r"\\" # necessary escape 675 | 676 | # necessary escapes 677 | if not as_utf8 and (o >= 127 or o < 32): return "\\%03o" % o 678 | return c 679 | return "".join([escape(c) for c in text]) 680 | 681 | 682 | _CUNESCAPE_HEX = re.compile('\\\\x([0-9a-fA-F]{2}|[0-9a-fA-F])') 683 | 684 | 685 | def _CUnescape(text): 686 | def ReplaceHex(m): 687 | return chr(int(m.group(0)[2:], 16)) 688 | # This is required because the 'string_escape' encoding doesn't 689 | # allow single-digit hex escapes (like '\xf'). 690 | result = _CUNESCAPE_HEX.sub(ReplaceHex, text) 691 | return result.decode('string_escape') 692 | -------------------------------------------------------------------------------- /pb2.4/run_google24.py: -------------------------------------------------------------------------------- 1 | 2 | import time 3 | import connexiopb 4 | from connexio_pb2 import * 5 | 6 | msg = "CiAKBUF0YXNoGghBdGFtdXJhZCINSGV6cmV0a3VsaXlldioUChJhdGFteXJhdEBnbWFpbC5jb20q\nFAoSYXRhbXVyYWRAY29ubmV4LmlvKhQKEmF0YW11cmFkQGdtYWlsLmNvbTIOCgwrOTkzNjc2NDI2\nNDIyDgoMKzk5MzEyMjcwMjAzYh50aGlzIGlzIG5vdGUgZmllbGQgZm9yIGNvbnRhY3Q=\n".decode("base64") 7 | 8 | start = time.time() 9 | for i in range(5000): 10 | c = Contact() 11 | c.ParseFromString(msg) 12 | end = time.time() 13 | 14 | print end-start 15 | print c 16 | 17 | -------------------------------------------------------------------------------- /pb_types.py: -------------------------------------------------------------------------------- 1 | # Automatically generated code. 
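# The MSG_* constants are message-type identifiers passed as the first argument
# to ctest.decode() / ctest.PBMsg() (see run_capi.py and run_lazy.py); the other
# constants mirror the PhoneType and EmailType enum values from test.proto.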
2 | 3 | MSG_Contact = 32 4 | MSG_Name = 33 5 | MSG_Date = 34 6 | MSG_Phone = 35 7 | MSG_Email = 36 8 | MSG_Job = 37 9 | UNKNOWN = 1 10 | HOME_PHONE = 2 11 | HOME_FAX = 3 12 | HOME_MOBILE = 4 13 | WORK_PHONE = 5 14 | WORK_FAX = 6 15 | WORK_MOBILE = 7 16 | PAGER = 8 17 | ASSISTANT = 9 18 | PERSONAL = 1 19 | BUSINESS = 2 20 | OTHER_EMAIL = 3 21 | -------------------------------------------------------------------------------- /run_capi.py: -------------------------------------------------------------------------------- 1 | 2 | import time 3 | import ctest 4 | from pb_types import * 5 | 6 | msg = "Ch8KB0F0YXNoa2EaBUF0YXNoIg1IZXpyZXRrdWxpeWV2KhYKEmF0YW11cmFkQGNvbm5leC5pbxAC\nKhYKEmF0YW15cmF0QGdtYWlsLmNvbRABMgoKBjI3MDIwMxACMhIKDis5OTMgNjcgNjQyNjQyEAdS\nJAoJY29ubmV4LmlvEhBDVE8gJiBDby1Gb3VuZGVyKgUI2g8QAWIXdGVzdCBub3RlIGZyb20gc29t\nZSBndXk=\n".decode("base64") 7 | 8 | start = time.time() 9 | for i in range(5000): 10 | c = ctest.decode(MSG_Contact, msg) 11 | print "Our pure c parser: ", time.time()-start 12 | 13 | print "Parse done!" 14 | print c["name"]["display_name"], " ", c["name"]["first"], " ", c["name"]["last"] 15 | 16 | print "Emails: " 17 | for e in c["email"]: 18 | print " * ", e["email"] 19 | print "Phones: " 20 | for p in c["phone"]: 21 | print " * ", p["display_number"] 22 | 23 | print "Note: ", c["note"] 24 | 25 | print c 26 | 27 | -------------------------------------------------------------------------------- /run_lazy.py: -------------------------------------------------------------------------------- 1 | 2 | import time 3 | 4 | import ctest 5 | from pb_types import * 6 | 7 | 8 | msg = "CiAKBUF0YXNoGghBdGFtdXJhZCINSGV6cmV0a3VsaXlldioUChJhdGFteXJhdEBnbWFpbC5jb20q\nFAoSYXRhbXVyYWRAY29ubmV4LmlvKhQKEmF0YW11cmFkQGdtYWlsLmNvbTIOCgwrOTkzNjc2NDI2\nNDIyDgoMKzk5MzEyMjcwMjAzYh50aGlzIGlzIG5vdGUgZmllbGQgZm9yIGNvbnRhY3Q=\n".decode("base64") 9 | 10 | msg = "Ch8KB0F0YXNoa2EaBUF0YXNoIg1IZXpyZXRrdWxpeWV2KhYKEmF0YW11cmFkQGNvbm5leC5pbxAC\nKhYKEmF0YW15cmF0QGdtYWlsLmNvbRABMgoKBjI3MDIwMxACMhIKDis5OTMgNjcgNjQyNjQyEAdS\nJAoJY29ubmV4LmlvEhBDVE8gJiBDby1Gb3VuZGVyKgUI2g8QAWIXdGVzdCBub3RlIGZyb20gc29t\nZSBndXk=\n".decode("base64") 11 | 12 | start = time.time() 13 | phones = [] 14 | for i in range(5000): 15 | a = ctest.PBMsg(MSG_Contact, msg) 16 | for p in a.phone: 17 | phones.append(p.display_number) 18 | print len(phones) 19 | print "Our lazy c parser: ", time.time()-start 20 | 21 | 22 | print a.name.display_name, " ", a.name.first, " ", a.name.last 23 | 24 | print "Emails (first access):" 25 | print a.email 26 | 27 | print "Emails: " 28 | for e in a.email: 29 | print " * ", e.email, e._type 30 | 31 | print "Phones: " 32 | for p in a.phone: 33 | print " * ", p.display_number, p._type 34 | 35 | print "Note:" 36 | print a.note 37 | 38 | print "Job: " 39 | for j in a.job: 40 | print j.company, " - ", j.position, " [", j.startdate.year, "/", j.startdate.month, "]" 41 | -------------------------------------------------------------------------------- /test.pb2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/connexio/cypb/dc3d5ae866f7cfcdc3e313ecced0e8fa912542e1/test.pb2 -------------------------------------------------------------------------------- /test.proto: -------------------------------------------------------------------------------- 1 | 2 | message Contact { 3 | 4 | optional Name name = 1; 5 | 6 | // 2 is reserved for photo 7 | optional Date birthday = 3; 8 | optional Date anniversary = 4; 9 | 10 | // contact info 11 | repeated Email email = 
5; 12 | repeated Phone phone = 6; 13 | 14 | repeated Job job = 10; 15 | 16 | optional string note = 12; 17 | } 18 | 19 | message Name { 20 | // Raw - unstructured 21 | optional string display_name = 1; // Display / formatted name 22 | 23 | // Structured info - parts of name 24 | optional string prefix = 2; 25 | optional string first = 3; 26 | optional string last = 4; 27 | optional string middle = 5; 28 | optional string suffix = 6; 29 | 30 | // nickname 31 | optional string nickname = 7; 32 | } 33 | 34 | // Date field. 35 | // For birthdays - year is optional, for job/education history - day is optional 36 | message Date { 37 | optional int32 year = 1; 38 | optional int32 month = 2; 39 | optional int32 day = 3; 40 | } 41 | 42 | enum PhoneType { 43 | UNKNOWN = 1; 44 | HOME_PHONE = 2; 45 | HOME_FAX = 3; 46 | HOME_MOBILE = 4; 47 | WORK_PHONE = 5; 48 | WORK_FAX = 6; 49 | WORK_MOBILE = 7; 50 | PAGER = 8; 51 | ASSISTANT = 9; 52 | } 53 | 54 | message Phone { 55 | required string display_number = 1; 56 | 57 | optional PhoneType _type = 2[default = UNKNOWN]; 58 | 59 | // structured phone number 60 | optional string country_code = 3; 61 | optional string area_code = 4; 62 | optional string number = 5; 63 | 64 | // custom label 65 | optional string label = 6; 66 | } 67 | 68 | enum EmailType { 69 | PERSONAL = 1; 70 | BUSINESS = 2; 71 | OTHER_EMAIL = 3; 72 | } 73 | 74 | message Email { 75 | required string email = 1; 76 | optional EmailType _type = 2; 77 | optional string label = 3; 78 | } 79 | 80 | message Job { 81 | required string company = 1; 82 | optional string position = 2; 83 | optional string department = 3; 84 | optional string description = 4; 85 | 86 | optional Date startdate = 5; 87 | optional Date enddate = 6; 88 | } 89 | 90 | --------------------------------------------------------------------------------