Revision indexing in progress... (symbol navigation in revisions will be accurate after indexed)
Showing first 13 files as there are too many
-
-
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Descriptors essentially contain exactly the information found in a .proto
file, in types that make this information accessible in Python.
"""

__author__ = '[email protected] (Will Robinson)'


from google.protobuf.internal import api_implementation


# When the C++ implementation is active, bind the matching C extension module
# for the configured implementation version.
if api_implementation.Type() == 'cpp':
  if api_implementation.Version() == 2:
    from google.protobuf.internal.cpp import _message
  else:
    from google.protobuf.internal import cpp_message


class Error(Exception):
  """Base error for this module."""


class TypeTransformationError(Error):
  """Error transforming between python proto type and corresponding C++ type."""


class DescriptorBase(object):

  """Descriptors base class.

  This class is the base of all descriptor classes. It provides common options
  related functionality.

  Attributes:
    has_options: True if the descriptor has non-default options. Usually it
        is not necessary to read this -- just call GetOptions() which will
        happily return the default instance. However, it's sometimes useful
        for efficiency, and also useful inside the protobuf implementation to
        avoid some bootstrapping issues.
  """

  def __init__(self, options, options_class_name):
    """Initialize the descriptor given its options message and the name of the
    class of the options message. The name of the class is required in case
    the options message is None and has to be created.
    """
    self._options = options
    self._options_class_name = options_class_name

    # Does this descriptor have non-default options?
    self.has_options = options is not None

  def _SetOptions(self, options, options_class_name):
    """Sets the descriptor's options.

    This function is used in generated proto2 files to update descriptor
    options. It must not be used outside proto2.
    """
    self._options = options
    self._options_class_name = options_class_name

    # Does this descriptor have non-default options?
    self.has_options = options is not None

  def GetOptions(self):
    """Retrieves descriptor options.

    This method returns the options set or creates the default options for the
    descriptor.
    """
    if self._options:
      return self._options
    # Imported here to avoid a bootstrapping cycle: descriptor_pb2 is itself
    # generated code that imports this module.
    from google.protobuf import descriptor_pb2
    try:
      options_class = getattr(descriptor_pb2, self._options_class_name)
    except AttributeError:
      raise RuntimeError('Unknown options class name %s!' %
                         (self._options_class_name))
    self._options = options_class()
    return self._options


class _NestedDescriptorBase(DescriptorBase):
  """Common class for descriptors that can be nested."""

  def __init__(self, options, options_class_name, name, full_name,
               file, containing_type, serialized_start=None,
               serialized_end=None):
    """Constructor.

    Args:
      options: Protocol message options or None
        to use default message options.
      options_class_name: (str) The class name of the above options.

      name: (str) Name of this protocol message type.
      full_name: (str) Fully-qualified name of this protocol message type,
        which will include protocol "package" name and the name of any
        enclosing types.
      file: (FileDescriptor) Reference to file info.
      containing_type: if provided, this is a nested descriptor, with this
        descriptor as parent, otherwise None.
      serialized_start: The start index (inclusive) in block in the
        file.serialized_pb that describes this descriptor.
      serialized_end: The end index (exclusive) in block in the
        file.serialized_pb that describes this descriptor.
    """
    super(_NestedDescriptorBase, self).__init__(
        options, options_class_name)

    self.name = name
    # TODO(falk): Add function to calculate full_name instead of having it in
    # memory?
    self.full_name = full_name
    self.file = file
    self.containing_type = containing_type

    self._serialized_start = serialized_start
    self._serialized_end = serialized_end

  def GetTopLevelContainingType(self):
    """Returns the root if this is a nested type, or itself if it's the root."""
    desc = self
    while desc.containing_type is not None:
      desc = desc.containing_type
    return desc

  def CopyToProto(self, proto):
    """Copies this to the matching proto in descriptor_pb2.

    Args:
      proto: An empty proto instance from descriptor_pb2.

    Raises:
      Error: If self couldn't be serialized, due to too few constructor
        arguments.
    """
    if (self.file is not None and
        self._serialized_start is not None and
        self._serialized_end is not None):
      proto.ParseFromString(self.file.serialized_pb[
          self._serialized_start:self._serialized_end])
    else:
      raise Error('Descriptor does not contain serialization.')


class Descriptor(_NestedDescriptorBase):

  """Descriptor for a protocol message type.

  A Descriptor instance has the following attributes:

    name: (str) Name of this protocol message type.
    full_name: (str) Fully-qualified name of this protocol message type,
      which will include protocol "package" name and the name of any
      enclosing types.

    containing_type: (Descriptor) Reference to the descriptor of the
      type containing us, or None if this is top-level.

    fields: (list of FieldDescriptors) Field descriptors for all
      fields in this type.
    fields_by_number: (dict int -> FieldDescriptor) Same FieldDescriptor
      objects as in |fields|, but indexed by "number" attribute in each
      FieldDescriptor.
    fields_by_name: (dict str -> FieldDescriptor) Same FieldDescriptor
      objects as in |fields|, but indexed by "name" attribute in each
      FieldDescriptor.

    nested_types: (list of Descriptors) Descriptor references
      for all protocol message types nested within this one.
    nested_types_by_name: (dict str -> Descriptor) Same Descriptor
      objects as in |nested_types|, but indexed by "name" attribute
      in each Descriptor.

    enum_types: (list of EnumDescriptors) EnumDescriptor references
      for all enums contained within this type.
    enum_types_by_name: (dict str -> EnumDescriptor) Same EnumDescriptor
      objects as in |enum_types|, but indexed by "name" attribute
      in each EnumDescriptor.
    enum_values_by_name: (dict str -> EnumValueDescriptor) Dict mapping
      from enum value name to EnumValueDescriptor for that value.

    extensions: (list of FieldDescriptor) All extensions defined directly
      within this message type (NOT within a nested type).
    extensions_by_name: (dict, string -> FieldDescriptor) Same FieldDescriptor
      objects as |extensions|, but indexed by "name" attribute of each
      FieldDescriptor.

    is_extendable: Does this type define any extension ranges?

    options: (descriptor_pb2.MessageOptions) Protocol message options or None
      to use default message options.

    file: (FileDescriptor) Reference to file descriptor.
  """

  def __init__(self, name, full_name, filename, containing_type, fields,
               nested_types, enum_types, extensions, options=None,
               is_extendable=True, extension_ranges=None, file=None,
               serialized_start=None, serialized_end=None):
    """Arguments to __init__() are as described in the description
    of Descriptor fields above.

    Note that filename is an obsolete argument, that is not used anymore.
    Please use file.name to access this as an attribute.
    """
    # BUG FIX: previously passed serialized_end=serialized_start here, which
    # gave the base class an empty/corrupt serialized range.
    super(Descriptor, self).__init__(
        options, 'MessageOptions', name, full_name, file,
        containing_type, serialized_start=serialized_start,
        serialized_end=serialized_end)

    # We have fields in addition to fields_by_name and fields_by_number,
    # so that:
    # 1. Clients can index fields by "order in which they're listed."
    # 2. Clients can easily iterate over all fields with the terse
    #    syntax: for f in descriptor.fields: ...
    self.fields = fields
    for field in self.fields:
      field.containing_type = self
    self.fields_by_number = dict((f.number, f) for f in fields)
    self.fields_by_name = dict((f.name, f) for f in fields)

    self.nested_types = nested_types
    self.nested_types_by_name = dict((t.name, t) for t in nested_types)

    self.enum_types = enum_types
    for enum_type in self.enum_types:
      enum_type.containing_type = self
    self.enum_types_by_name = dict((t.name, t) for t in enum_types)
    self.enum_values_by_name = dict(
        (v.name, v) for t in enum_types for v in t.values)

    self.extensions = extensions
    for extension in self.extensions:
      extension.extension_scope = self
    self.extensions_by_name = dict((f.name, f) for f in extensions)
    self.is_extendable = is_extendable
    self.extension_ranges = extension_ranges

    self._serialized_start = serialized_start
    self._serialized_end = serialized_end

  def EnumValueName(self, enum, value):
    """Returns the string name of an enum value.

    This is just a small helper method to simplify a common operation.

    Args:
      enum: string name of the Enum.
      value: int, value of the enum.

    Returns:
      string name of the enum value.

    Raises:
      KeyError if either the Enum doesn't exist or the value is not a valid
        value for the enum.
    """
    return self.enum_types_by_name[enum].values_by_number[value].name

  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.DescriptorProto.

    Args:
      proto: An empty descriptor_pb2.DescriptorProto.
    """
    # This function is overridden to give a better doc comment.
    super(Descriptor, self).CopyToProto(proto)


# TODO(robinson): We should have aggressive checking here,
# for example:
#   * If you specify a repeated field, you should not be allowed
#     to specify a default value.
#   * [Other examples here as needed].
#
# TODO(robinson): for this and other *Descriptor classes, we
# might also want to lock things down aggressively (e.g.,
# prevent clients from setting the attributes). Having
# stronger invariants here in general will reduce the number
# of runtime checks we must do in reflection.py...
class FieldDescriptor(DescriptorBase):

  """Descriptor for a single field in a .proto file.

  A FieldDescriptor instance has the following attributes:

    name: (str) Name of this field, exactly as it appears in .proto.
    full_name: (str) Name of this field, including containing scope. This is
      particularly relevant for extensions.
    index: (int) Dense, 0-indexed index giving the order that this
      field textually appears within its message in the .proto file.
    number: (int) Tag number declared for this field in the .proto file.

    type: (One of the TYPE_* constants below) Declared type.
    cpp_type: (One of the CPPTYPE_* constants below) C++ type used to
      represent this field.

    label: (One of the LABEL_* constants below) Tells whether this
      field is optional, required, or repeated.
    has_default_value: (bool) True if this field has a default value defined,
      otherwise false.
    default_value: (Varies) Default value of this field. Only
      meaningful for non-repeated scalar fields. Repeated fields
      should always set this to [], and non-repeated composite
      fields should always set this to None.

    containing_type: (Descriptor) Descriptor of the protocol message
      type that contains this field. Set by the Descriptor constructor
      if we're passed into one.
      Somewhat confusingly, for extension fields, this is the
      descriptor of the EXTENDED message, not the descriptor
      of the message containing this field. (See is_extension and
      extension_scope below).
    message_type: (Descriptor) If a composite field, a descriptor
      of the message type contained in this field. Otherwise, this is None.
    enum_type: (EnumDescriptor) If this field contains an enum, a
      descriptor of that enum. Otherwise, this is None.

    is_extension: True iff this describes an extension field.
    extension_scope: (Descriptor) Only meaningful if is_extension is True.
      Gives the message that immediately contains this extension field.
      Will be None iff we're a top-level (file-level) extension field.

    options: (descriptor_pb2.FieldOptions) Protocol message field options or
      None to use default field options.
  """

  # Must be consistent with C++ FieldDescriptor::Type enum in
  # descriptor.h.
  #
  # TODO(robinson): Find a way to eliminate this repetition.
  TYPE_DOUBLE = 1
  TYPE_FLOAT = 2
  TYPE_INT64 = 3
  TYPE_UINT64 = 4
  TYPE_INT32 = 5
  TYPE_FIXED64 = 6
  TYPE_FIXED32 = 7
  TYPE_BOOL = 8
  TYPE_STRING = 9
  TYPE_GROUP = 10
  TYPE_MESSAGE = 11
  TYPE_BYTES = 12
  TYPE_UINT32 = 13
  TYPE_ENUM = 14
  TYPE_SFIXED32 = 15
  TYPE_SFIXED64 = 16
  TYPE_SINT32 = 17
  TYPE_SINT64 = 18
  MAX_TYPE = 18

  # Must be consistent with C++ FieldDescriptor::CppType enum in
  # descriptor.h.
  #
  # TODO(robinson): Find a way to eliminate this repetition.
  CPPTYPE_INT32 = 1
  CPPTYPE_INT64 = 2
  CPPTYPE_UINT32 = 3
  CPPTYPE_UINT64 = 4
  CPPTYPE_DOUBLE = 5
  CPPTYPE_FLOAT = 6
  CPPTYPE_BOOL = 7
  CPPTYPE_ENUM = 8
  CPPTYPE_STRING = 9
  CPPTYPE_MESSAGE = 10
  MAX_CPPTYPE = 10

  _PYTHON_TO_CPP_PROTO_TYPE_MAP = {
      TYPE_DOUBLE: CPPTYPE_DOUBLE,
      TYPE_FLOAT: CPPTYPE_FLOAT,
      TYPE_ENUM: CPPTYPE_ENUM,
      TYPE_INT64: CPPTYPE_INT64,
      TYPE_SINT64: CPPTYPE_INT64,
      TYPE_SFIXED64: CPPTYPE_INT64,
      TYPE_UINT64: CPPTYPE_UINT64,
      TYPE_FIXED64: CPPTYPE_UINT64,
      TYPE_INT32: CPPTYPE_INT32,
      TYPE_SFIXED32: CPPTYPE_INT32,
      TYPE_SINT32: CPPTYPE_INT32,
      TYPE_UINT32: CPPTYPE_UINT32,
      TYPE_FIXED32: CPPTYPE_UINT32,
      TYPE_BYTES: CPPTYPE_STRING,
      TYPE_STRING: CPPTYPE_STRING,
      TYPE_BOOL: CPPTYPE_BOOL,
      TYPE_MESSAGE: CPPTYPE_MESSAGE,
      TYPE_GROUP: CPPTYPE_MESSAGE
      }

  # Must be consistent with C++ FieldDescriptor::Label enum in
  # descriptor.h.
  #
  # TODO(robinson): Find a way to eliminate this repetition.
  LABEL_OPTIONAL = 1
  LABEL_REQUIRED = 2
  LABEL_REPEATED = 3
  MAX_LABEL = 3

  def __init__(self, name, full_name, index, number, type, cpp_type, label,
               default_value, message_type, enum_type, containing_type,
               is_extension, extension_scope, options=None,
               has_default_value=True):
    """The arguments are as described in the description of FieldDescriptor
    attributes above.

    Note that containing_type may be None, and may be set later if necessary
    (to deal with circular references between message types, for example).
    Likewise for extension_scope.
    """
    super(FieldDescriptor, self).__init__(options, 'FieldOptions')
    self.name = name
    self.full_name = full_name
    self.index = index
    self.number = number
    self.type = type
    self.cpp_type = cpp_type
    self.label = label
    self.has_default_value = has_default_value
    self.default_value = default_value
    self.containing_type = containing_type
    self.message_type = message_type
    self.enum_type = enum_type
    self.is_extension = is_extension
    self.extension_scope = extension_scope
    # With the C++ implementation, mirror this field with the corresponding
    # C++ descriptor looked up by fully-qualified name.
    if api_implementation.Type() == 'cpp':
      if is_extension:
        if api_implementation.Version() == 2:
          self._cdescriptor = _message.GetExtensionDescriptor(full_name)
        else:
          self._cdescriptor = cpp_message.GetExtensionDescriptor(full_name)
      else:
        if api_implementation.Version() == 2:
          self._cdescriptor = _message.GetFieldDescriptor(full_name)
        else:
          self._cdescriptor = cpp_message.GetFieldDescriptor(full_name)
    else:
      self._cdescriptor = None

  @staticmethod
  def ProtoTypeToCppProtoType(proto_type):
    """Converts from a Python proto type to a C++ Proto Type.

    The Python ProtocolBuffer classes specify both the 'Python' datatype and the
    'C++' datatype - and they're not the same. This helper method should
    translate from one to another.

    Args:
      proto_type: the Python proto type (descriptor.FieldDescriptor.TYPE_*)
    Returns:
      descriptor.FieldDescriptor.CPPTYPE_*, the C++ type.
    Raises:
      TypeTransformationError: when the Python proto type isn't known.
    """
    try:
      return FieldDescriptor._PYTHON_TO_CPP_PROTO_TYPE_MAP[proto_type]
    except KeyError:
      raise TypeTransformationError('Unknown proto_type: %s' % proto_type)


class EnumDescriptor(_NestedDescriptorBase):

  """Descriptor for an enum defined in a .proto file.

  An EnumDescriptor instance has the following attributes:

    name: (str) Name of the enum type.
    full_name: (str) Full name of the type, including package name
      and any enclosing type(s).

    values: (list of EnumValueDescriptors) List of the values
      in this enum.
    values_by_name: (dict str -> EnumValueDescriptor) Same as |values|,
      but indexed by the "name" field of each EnumValueDescriptor.
    values_by_number: (dict int -> EnumValueDescriptor) Same as |values|,
      but indexed by the "number" field of each EnumValueDescriptor.
    containing_type: (Descriptor) Descriptor of the immediate containing
      type of this enum, or None if this is an enum defined at the
      top level in a .proto file. Set by Descriptor's constructor
      if we're passed into one.
    file: (FileDescriptor) Reference to file descriptor.
    options: (descriptor_pb2.EnumOptions) Enum options message or
      None to use default enum options.
  """

  def __init__(self, name, full_name, filename, values,
               containing_type=None, options=None, file=None,
               serialized_start=None, serialized_end=None):
    """Arguments are as described in the attribute description above.

    Note that filename is an obsolete argument, that is not used anymore.
    Please use file.name to access this as an attribute.
    """
    # BUG FIX: previously passed serialized_end=serialized_start here, which
    # gave the base class an empty/corrupt serialized range.
    super(EnumDescriptor, self).__init__(
        options, 'EnumOptions', name, full_name, file,
        containing_type, serialized_start=serialized_start,
        serialized_end=serialized_end)

    self.values = values
    for value in self.values:
      value.type = self
    self.values_by_name = dict((v.name, v) for v in values)
    self.values_by_number = dict((v.number, v) for v in values)

    self._serialized_start = serialized_start
    self._serialized_end = serialized_end

  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.EnumDescriptorProto.

    Args:
      proto: An empty descriptor_pb2.EnumDescriptorProto.
    """
    # This function is overridden to give a better doc comment.
    super(EnumDescriptor, self).CopyToProto(proto)


class EnumValueDescriptor(DescriptorBase):

  """Descriptor for a single value within an enum.

    name: (str) Name of this value.
    index: (int) Dense, 0-indexed index giving the order that this
      value appears textually within its enum in the .proto file.
    number: (int) Actual number assigned to this enum value.
    type: (EnumDescriptor) EnumDescriptor to which this value
      belongs. Set by EnumDescriptor's constructor if we're
      passed into one.
    options: (descriptor_pb2.EnumValueOptions) Enum value options message or
      None to use default enum value options.
  """

  def __init__(self, name, index, number, type=None, options=None):
    """Arguments are as described in the attribute description above."""
    super(EnumValueDescriptor, self).__init__(options, 'EnumValueOptions')
    self.name = name
    self.index = index
    self.number = number
    self.type = type


class ServiceDescriptor(_NestedDescriptorBase):

  """Descriptor for a service.

    name: (str) Name of the service.
    full_name: (str) Full name of the service, including package name.
    index: (int) 0-indexed index giving the order that this service's
      definition appears within the .proto file.
    methods: (list of MethodDescriptor) List of methods provided by this
      service.
    options: (descriptor_pb2.ServiceOptions) Service options message or
      None to use default service options.
    file: (FileDescriptor) Reference to file info.
  """

  def __init__(self, name, full_name, index, methods, options=None, file=None,
               serialized_start=None, serialized_end=None):
    super(ServiceDescriptor, self).__init__(
        options, 'ServiceOptions', name, full_name, file,
        None, serialized_start=serialized_start,
        serialized_end=serialized_end)
    self.index = index
    self.methods = methods
    # Set the containing service for each method in this service.
    for method in self.methods:
      method.containing_service = self

  def FindMethodByName(self, name):
    """Searches for the specified method, and returns its descriptor."""
    for method in self.methods:
      if name == method.name:
        return method
    return None

  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.ServiceDescriptorProto.

    Args:
      proto: An empty descriptor_pb2.ServiceDescriptorProto.
    """
    # This function is overridden to give a better doc comment.
    super(ServiceDescriptor, self).CopyToProto(proto)


class MethodDescriptor(DescriptorBase):

  """Descriptor for a method in a service.

    name: (str) Name of the method within the service.
    full_name: (str) Full name of method.
    index: (int) 0-indexed index of the method inside the service.
    containing_service: (ServiceDescriptor) The service that contains this
      method.
    input_type: The descriptor of the message that this method accepts.
    output_type: The descriptor of the message that this method returns.
    options: (descriptor_pb2.MethodOptions) Method options message or
      None to use default method options.
  """

  def __init__(self, name, full_name, index, containing_service,
               input_type, output_type, options=None):
    """The arguments are as described in the description of MethodDescriptor
    attributes above.

    Note that containing_service may be None, and may be set later if necessary.
    """
    super(MethodDescriptor, self).__init__(options, 'MethodOptions')
    self.name = name
    self.full_name = full_name
    self.index = index
    self.containing_service = containing_service
    self.input_type = input_type
    self.output_type = output_type


class FileDescriptor(DescriptorBase):
  """Descriptor for a file. Mimics the descriptor_pb2.FileDescriptorProto.

  name: name of file, relative to root of source tree.
  package: name of the package
  serialized_pb: (str) Byte string of serialized
    descriptor_pb2.FileDescriptorProto.
  """

  def __init__(self, name, package, options=None, serialized_pb=None):
    """Constructor."""
    super(FileDescriptor, self).__init__(options, 'FileOptions')

    self.message_types_by_name = {}
    self.name = name
    self.package = package
    self.serialized_pb = serialized_pb
    # With the C++ implementation, register this file's serialized descriptor
    # in the C++ descriptor pool so its types are available there too.
    if (api_implementation.Type() == 'cpp' and
        self.serialized_pb is not None):
      if api_implementation.Version() == 2:
        _message.BuildFile(self.serialized_pb)
      else:
        cpp_message.BuildFile(self.serialized_pb)

  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.FileDescriptorProto.

    Args:
      proto: An empty descriptor_pb2.FileDescriptorProto.
    """
    proto.ParseFromString(self.serialized_pb)


def _ParseOptions(message, string):
  """Parses serialized options.

  This helper function is used to parse serialized options in generated
  proto2 files. It must not be used outside proto2.
  """
  message.ParseFromString(string)
  return message


def MakeDescriptor(desc_proto, package=''):
  """Make a protobuf Descriptor given a DescriptorProto protobuf.

  Args:
    desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
    package: Optional package name for the new message Descriptor (string).

  Returns:
    A Descriptor for protobuf messages.
  """
  full_message_name = [desc_proto.name]
  if package: full_message_name.insert(0, package)
  fields = []
  for field_proto in desc_proto.field:
    full_name = '.'.join(full_message_name + [field_proto.name])
    field = FieldDescriptor(
        field_proto.name, full_name, field_proto.number - 1,
        field_proto.number, field_proto.type,
        FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type),
        field_proto.label, None, None, None, None, False, None,
        has_default_value=False)
    fields.append(field)

  desc_name = '.'.join(full_message_name)
  return Descriptor(desc_proto.name, desc_name, None, None, fields,
                    [], [], [])
1 - # Protocol Buffers - Google's data interchange format 2 - # Copyright 2008 Google Inc. All rights reserved. 3 - # http://code.google.com/p/protobuf/ 4 - # 5 - # Redistribution and use in source and binary forms, with or without 6 - # modification, are permitted provided that the following conditions are 7 - # met: 8 - # 9 - # * Redistributions of source code must retain the above copyright 10 - # notice, this list of conditions and the following disclaimer. 11 - # * Redistributions in binary form must reproduce the above 12 - # copyright notice, this list of conditions and the following disclaimer 13 - # in the documentation and/or other materials provided with the 14 - # distribution. 15 - # * Neither the name of Google Inc. nor the names of its 16 - # contributors may be used to endorse or promote products derived from 17 - # this software without specific prior written permission. 18 - # 19 - # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 - # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 - # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 22 - # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 23 - # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 24 - # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 25 - # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 - # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 27 - # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 - # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 - # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
30 - 31 - """Provides a container for DescriptorProtos.""" 32 - 33 - __author__ = '[email protected] (Matt Toia)' 34 - 35 - 36 - class DescriptorDatabase(object): 37 - """A container accepting FileDescriptorProtos and maps DescriptorProtos.""" 38 - 39 - def __init__(self): 40 - self._file_desc_protos_by_file = {} 41 - self._file_desc_protos_by_symbol = {} 42 - 43 - def Add(self, file_desc_proto): 44 - """Adds the FileDescriptorProto and its types to this database. 45 - 46 - Args: 47 - file_desc_proto: The FileDescriptorProto to add. 48 - """ 49 - 50 - self._file_desc_protos_by_file[file_desc_proto.name] = file_desc_proto 51 - package = file_desc_proto.package 52 - for message in file_desc_proto.message_type: 53 - self._file_desc_protos_by_symbol.update( 54 - (name, file_desc_proto) for name in _ExtractSymbols(message, package)) 55 - for enum in file_desc_proto.enum_type: 56 - self._file_desc_protos_by_symbol[ 57 - '.'.join((package, enum.name))] = file_desc_proto 58 - 59 - def FindFileByName(self, name): 60 - """Finds the file descriptor proto by file name. 61 - 62 - Typically the file name is a relative path ending to a .proto file. The 63 - proto with the given name will have to have been added to this database 64 - using the Add method or else an error will be raised. 65 - 66 - Args: 67 - name: The file name to find. 68 - 69 - Returns: 70 - The file descriptor proto matching the name. 71 - 72 - Raises: 73 - KeyError if no file by the given name was added. 74 - """ 75 - 76 - return self._file_desc_protos_by_file[name] 77 - 78 - def FindFileContainingSymbol(self, symbol): 79 - """Finds the file descriptor proto containing the specified symbol. 80 - 81 - The symbol should be a fully qualified name including the file descriptor's 82 - package and any containing messages. 
Some examples: 83 - 84 - 'some.package.name.Message' 85 - 'some.package.name.Message.NestedEnum' 86 - 87 - The file descriptor proto containing the specified symbol must be added to 88 - this database using the Add method or else an error will be raised. 89 - 90 - Args: 91 - symbol: The fully qualified symbol name. 92 - 93 - Returns: 94 - The file descriptor proto containing the symbol. 95 - 96 - Raises: 97 - KeyError if no file contains the specified symbol. 98 - """ 99 - 100 - return self._file_desc_protos_by_symbol[symbol] 101 - 102 - 103 - def _ExtractSymbols(desc_proto, package): 104 - """Pulls out all the symbols from a descriptor proto. 105 - 106 - Args: 107 - desc_proto: The proto to extract symbols from. 108 - package: The package containing the descriptor type. 109 - 110 - Yields: 111 - The fully qualified name found in the descriptor. 112 - """ 113 - 114 - message_name = '.'.join((package, desc_proto.name)) 115 - yield message_name 116 - for nested_type in desc_proto.nested_type: 117 - for symbol in _ExtractSymbols(nested_type, message_name): 118 - yield symbol 119 - for enum_type in desc_proto.enum_type: 120 - yield '.'.join((message_name, enum_type.name)) 121 - -
-
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Provides DescriptorPool to use as a container for proto2 descriptors.

The DescriptorPool is used in conjunction with a DescriptorDatabase to maintain
a collection of protocol buffer descriptors for use when dynamically creating
message types at runtime.

For most applications protocol buffers should be used via modules generated by
the protocol buffer compiler tool. This should only be used when the type of
protocol buffers used in an application or library cannot be predetermined.

Below is a straightforward example on how to use this class:

  pool = DescriptorPool()
  file_descriptor_protos = [ ... ]
  for file_descriptor_proto in file_descriptor_protos:
    pool.Add(file_descriptor_proto)
  my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType')

The message descriptor can be used in conjunction with the message_factory
module in order to create a protocol buffer class that can be encoded and
decoded.
"""

__author__ = '[email protected] (Matt Toia)'

from google.protobuf import descriptor_pb2
from google.protobuf import descriptor
from google.protobuf import descriptor_database


class DescriptorPool(object):
  """A collection of protobufs dynamically constructed by descriptor protos."""

  def __init__(self, descriptor_db=None):
    """Initializes a Pool of proto buffs.

    The descriptor_db argument to the constructor is provided to allow
    specialized file descriptor proto lookup code to be triggered on demand. An
    example would be an implementation which will read and compile a file
    specified in a call to FindFileByName() and not require the call to Add()
    at all. Results from this database will be cached internally here as well.

    Args:
      descriptor_db: A secondary source of file descriptors.
    """

    self._internal_db = descriptor_database.DescriptorDatabase()
    self._descriptor_db = descriptor_db
    # Caches keyed by fully qualified name (messages/enums) or file name;
    # filled lazily by _ConvertFileProtoToFileDescriptor().
    self._descriptors = {}
    self._enum_descriptors = {}
    self._file_descriptors = {}

  def Add(self, file_desc_proto):
    """Adds the FileDescriptorProto and its types to this pool.

    Args:
      file_desc_proto: The FileDescriptorProto to add.
    """

    self._internal_db.Add(file_desc_proto)

  def FindFileByName(self, file_name):
    """Gets a FileDescriptor by file name.

    Args:
      file_name: The path to the file to get a descriptor for.

    Returns:
      A FileDescriptor for the named file.

    Raises:
      KeyError: if the file can not be found in the pool.
    """

    try:
      file_proto = self._internal_db.FindFileByName(file_name)
    except KeyError as error:
      # Fall back to the secondary database, if one was provided.
      if self._descriptor_db:
        file_proto = self._descriptor_db.FindFileByName(file_name)
      else:
        raise error
    if not file_proto:
      raise KeyError('Cannot find a file named %s' % file_name)
    return self._ConvertFileProtoToFileDescriptor(file_proto)

  def FindFileContainingSymbol(self, symbol):
    """Gets the FileDescriptor for the file containing the specified symbol.

    Args:
      symbol: The name of the symbol to search for.

    Returns:
      A FileDescriptor that contains the specified symbol.

    Raises:
      KeyError: if the file can not be found in the pool.
    """

    try:
      file_proto = self._internal_db.FindFileContainingSymbol(symbol)
    except KeyError as error:
      # Fall back to the secondary database, if one was provided.
      if self._descriptor_db:
        file_proto = self._descriptor_db.FindFileContainingSymbol(symbol)
      else:
        raise error
    if not file_proto:
      raise KeyError('Cannot find a file containing %s' % symbol)
    return self._ConvertFileProtoToFileDescriptor(file_proto)

  def FindMessageTypeByName(self, full_name):
    """Loads the named descriptor from the pool.

    Args:
      full_name: The full name of the descriptor to load.

    Returns:
      The descriptor for the named type.
    """

    full_name = full_name.lstrip('.')  # fix inconsistent qualified name formats
    if full_name not in self._descriptors:
      # Converting the containing file populates self._descriptors.
      self.FindFileContainingSymbol(full_name)
    return self._descriptors[full_name]

  def FindEnumTypeByName(self, full_name):
    """Loads the named enum descriptor from the pool.

    Args:
      full_name: The full name of the enum descriptor to load.

    Returns:
      The enum descriptor for the named type.
    """

    full_name = full_name.lstrip('.')  # fix inconsistent qualified name formats
    if full_name not in self._enum_descriptors:
      # Converting the containing file populates self._enum_descriptors.
      self.FindFileContainingSymbol(full_name)
    return self._enum_descriptors[full_name]

  def _ConvertFileProtoToFileDescriptor(self, file_proto):
    """Creates a FileDescriptor from a proto or returns a cached copy.

    This method also has the side effect of loading all the symbols found in
    the file into the appropriate dictionaries in the pool.

    Args:
      file_proto: The proto to convert.

    Returns:
      A FileDescriptor matching the passed in proto.
    """

    if file_proto.name not in self._file_descriptors:
      file_descriptor = descriptor.FileDescriptor(
          name=file_proto.name,
          package=file_proto.package,
          options=file_proto.options,
          serialized_pb=file_proto.SerializeToString())
      # Scope maps both fully qualified ('.pkg.Msg') and package-stripped
      # ('Msg') symbol names to their descriptors, so type_name references in
      # either form can be resolved while building this file.
      scope = {}
      dependencies = list(self._GetDeps(file_proto))

      for dependency in dependencies:
        dep_desc = self.FindFileByName(dependency.name)
        dep_proto = descriptor_pb2.FileDescriptorProto.FromString(
            dep_desc.serialized_pb)
        package = '.' + dep_proto.package
        package_prefix = package + '.'

        def _strip_package(symbol):
          if symbol.startswith(package_prefix):
            return symbol[len(package_prefix):]
          return symbol

        symbols = list(self._ExtractSymbols(dep_proto.message_type, package))
        scope.update(symbols)
        scope.update((_strip_package(k), v) for k, v in symbols)

        symbols = list(self._ExtractEnums(dep_proto.enum_type, package))
        scope.update(symbols)
        scope.update((_strip_package(k), v) for k, v in symbols)

      for message_type in file_proto.message_type:
        message_desc = self._ConvertMessageDescriptor(
            message_type, file_proto.package, file_descriptor, scope)
        file_descriptor.message_types_by_name[message_desc.name] = message_desc
      for enum_type in file_proto.enum_type:
        self._ConvertEnumDescriptor(enum_type, file_proto.package,
                                    file_descriptor, None, scope)
      # Field type resolution must happen after every type in the file (and
      # its dependencies) has been registered in scope.
      for desc_proto in self._ExtractMessages(file_proto.message_type):
        self._SetFieldTypes(desc_proto, scope)

      for desc_proto in file_proto.message_type:
        desc = scope[desc_proto.name]
        file_descriptor.message_types_by_name[desc_proto.name] = desc
      self.Add(file_proto)
      self._file_descriptors[file_proto.name] = file_descriptor

    return self._file_descriptors[file_proto.name]

  def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None,
                                scope=None):
    """Adds the proto to the pool in the specified package.

    Args:
      desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
      package: The package the proto should be located in.
      file_desc: The file containing this message.
      scope: Dict mapping short and full symbols to message and enum types.

    Returns:
      The added descriptor.
    """

    if package:
      desc_name = '.'.join((package, desc_proto.name))
    else:
      desc_name = desc_proto.name

    if file_desc is None:
      file_name = None
    else:
      file_name = file_desc.name

    if scope is None:
      scope = {}

    nested = [
        self._ConvertMessageDescriptor(nested, desc_name, file_desc, scope)
        for nested in desc_proto.nested_type]
    enums = [
        self._ConvertEnumDescriptor(enum, desc_name, file_desc, None, scope)
        for enum in desc_proto.enum_type]
    fields = [self._MakeFieldDescriptor(field, desc_name, index)
              for index, field in enumerate(desc_proto.field)]
    # BUG FIX: this previously called _MakeFieldDescriptor(extension,
    # desc_name, True), which passed True as the *index* argument and left
    # is_extension at its default of False. Pass the real index and mark the
    # descriptor as an extension.
    extensions = [
        self._MakeFieldDescriptor(extension, desc_name, index,
                                  is_extension=True)
        for index, extension in enumerate(desc_proto.extension)]
    extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range]
    if extension_ranges:
      is_extendable = True
    else:
      is_extendable = False
    desc = descriptor.Descriptor(
        name=desc_proto.name,
        full_name=desc_name,
        filename=file_name,
        containing_type=None,
        fields=fields,
        nested_types=nested,
        enum_types=enums,
        extensions=extensions,
        options=desc_proto.options,
        is_extendable=is_extendable,
        extension_ranges=extension_ranges,
        file=file_desc,
        serialized_start=None,
        serialized_end=None)
    for nested in desc.nested_types:
      nested.containing_type = desc
    for enum in desc.enum_types:
      enum.containing_type = desc
    scope[desc_proto.name] = desc
    scope['.' + desc_name] = desc
    self._descriptors[desc_name] = desc
    return desc

  def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None,
                             containing_type=None, scope=None):
    """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf.

    Args:
      enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message.
      package: Optional package name for the new message EnumDescriptor.
      file_desc: The file containing the enum descriptor.
      containing_type: The type containing this enum.
      scope: Scope containing available types.

    Returns:
      The added descriptor
    """

    if package:
      enum_name = '.'.join((package, enum_proto.name))
    else:
      enum_name = enum_proto.name

    if file_desc is None:
      file_name = None
    else:
      file_name = file_desc.name

    values = [self._MakeEnumValueDescriptor(value, index)
              for index, value in enumerate(enum_proto.value)]
    desc = descriptor.EnumDescriptor(name=enum_proto.name,
                                     full_name=enum_name,
                                     filename=file_name,
                                     file=file_desc,
                                     values=values,
                                     containing_type=containing_type,
                                     options=enum_proto.options)
    scope[enum_proto.name] = desc
    scope['.%s' % enum_name] = desc
    self._enum_descriptors[enum_name] = desc
    return desc

  def _MakeFieldDescriptor(self, field_proto, message_name, index,
                           is_extension=False):
    """Creates a field descriptor from a FieldDescriptorProto.

    For message and enum type fields, this method will do a look up
    in the pool for the appropriate descriptor for that type. If it
    is unavailable, it will fall back to the _source function to
    create it. If this type is still unavailable, construction will
    fail.

    Args:
      field_proto: The proto describing the field.
      message_name: The name of the containing message.
      index: Index of the field
      is_extension: Indication that this field is for an extension.

    Returns:
      An initialized FieldDescriptor object
    """

    if message_name:
      full_name = '.'.join((message_name, field_proto.name))
    else:
      full_name = field_proto.name

    # type/cpp_type/message_type/enum_type and default values are filled in
    # later by _SetFieldTypes(), once every referenced type is in scope.
    return descriptor.FieldDescriptor(
        name=field_proto.name,
        full_name=full_name,
        index=index,
        number=field_proto.number,
        type=field_proto.type,
        cpp_type=None,
        message_type=None,
        enum_type=None,
        containing_type=None,
        label=field_proto.label,
        has_default_value=False,
        default_value=None,
        is_extension=is_extension,
        extension_scope=None,
        options=field_proto.options)

  def _SetFieldTypes(self, desc_proto, scope):
    """Sets the field's type, cpp_type, message_type and enum_type.

    Args:
      desc_proto: The message descriptor to update.
      scope: Enclosing scope of available types.
    """

    desc = scope[desc_proto.name]
    for field_proto, field_desc in zip(desc_proto.field, desc.fields):
      if field_proto.type_name:
        type_name = field_proto.type_name
        if type_name not in scope:
          # Scope holds both short and fully qualified names; retry with the
          # fully qualified spelling.
          type_name = '.' + type_name
        # Renamed from 'desc' to avoid shadowing the containing message's
        # descriptor bound above.
        field_type_desc = scope[type_name]
      else:
        field_type_desc = None

      if not field_proto.HasField('type'):
        if isinstance(field_type_desc, descriptor.Descriptor):
          field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE
        else:
          field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM

      field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType(
          field_proto.type)

      if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE
          or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP):
        field_desc.message_type = field_type_desc

      if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
        field_desc.enum_type = field_type_desc

      # BUG FIX: these assignments previously wrote 'field_desc.has_default',
      # but the FieldDescriptor attribute (see the constructor call in
      # _MakeFieldDescriptor above) is 'has_default_value', so the real
      # attribute was never updated.
      if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED:
        field_desc.has_default_value = False
        field_desc.default_value = []
      elif field_proto.HasField('default_value'):
        field_desc.has_default_value = True
        if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
            field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
          field_desc.default_value = float(field_proto.default_value)
        elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
          field_desc.default_value = field_proto.default_value
        elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
          field_desc.default_value = field_proto.default_value.lower() == 'true'
        elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
          field_desc.default_value = field_desc.enum_type.values_by_name[
              field_proto.default_value].index
        else:
          field_desc.default_value = int(field_proto.default_value)
      else:
        field_desc.has_default_value = False
        field_desc.default_value = None

      field_desc.type = field_proto.type

    for nested_type in desc_proto.nested_type:
      self._SetFieldTypes(nested_type, scope)

  def _MakeEnumValueDescriptor(self, value_proto, index):
    """Creates a enum value descriptor object from a enum value proto.

    Args:
      value_proto: The proto describing the enum value.
      index: The index of the enum value.

    Returns:
      An initialized EnumValueDescriptor object.
    """

    return descriptor.EnumValueDescriptor(
        name=value_proto.name,
        index=index,
        number=value_proto.number,
        options=value_proto.options,
        type=None)

  def _ExtractSymbols(self, desc_protos, package):
    """Pulls out all the symbols from descriptor protos.

    Args:
      desc_protos: The protos to extract symbols from.
      package: The package containing the descriptor type.
    Yields:
      A two element tuple of the type name and descriptor object.
    """

    for desc_proto in desc_protos:
      if package:
        message_name = '.'.join((package, desc_proto.name))
      else:
        message_name = desc_proto.name
      message_desc = self.FindMessageTypeByName(message_name)
      yield (message_name, message_desc)
      for symbol in self._ExtractSymbols(desc_proto.nested_type, message_name):
        yield symbol
      for symbol in self._ExtractEnums(desc_proto.enum_type, message_name):
        yield symbol

  def _ExtractEnums(self, enum_protos, package):
    """Pulls out all the symbols from enum protos.

    Args:
      enum_protos: The protos to extract symbols from.
      package: The package containing the enum type.

    Yields:
      A two element tuple of the type name and enum descriptor object.
    """

    for enum_proto in enum_protos:
      if package:
        enum_name = '.'.join((package, enum_proto.name))
      else:
        enum_name = enum_proto.name
      enum_desc = self.FindEnumTypeByName(enum_name)
      yield (enum_name, enum_desc)

  def _ExtractMessages(self, desc_protos):
    """Pulls out all the message protos from descriptos.

    Args:
      desc_protos: The protos to extract symbols from.

    Yields:
      Descriptor protos.
    """

    for desc_proto in desc_protos:
      yield desc_proto
      for message in self._ExtractMessages(desc_proto.nested_type):
        yield message

  def _GetDeps(self, file_proto):
    """Recursively finds dependencies for file protos.

    Args:
      file_proto: The proto to get dependencies from.

    Yields:
      Each direct and indirect dependency.
    """

    for dependency in file_proto.dependency:
      dep_desc = self.FindFileByName(dependency)
      dep_proto = descriptor_pb2.FileDescriptorProto.FromString(
          dep_desc.serialized_pb)
      yield dep_proto
      for parent_dep in self._GetDeps(dep_proto):
        yield parent_dep
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""
This module is the central entity that determines which implementation of the
API is used.
"""

__author__ = '[email protected] (Petar Petrov)'

import os

# PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION selects which implementation of the
# Python API is used. Only 'python' and 'cpp' are meaningful values; any value
# other than 'python' selects the C++ implementation. (Automatic detection of
# the C extension could be performed here instead, by attempting to import
# cpp_message and falling back to 'python' on ImportError.)
_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION',
                                 'python')
if _implementation_type != 'python':
  _implementation_type = 'cpp'


# PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION switches between the two
# 'cpp' implementations. Only '1' and '2' are accepted; anything else is an
# error rather than being silently ignored.
_implementation_version_str = os.getenv(
    'PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION',
    '1')
if _implementation_version_str not in ('1', '2'):
  raise ValueError(
      "unsupported PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION: '" +
      _implementation_version_str + "' (supported versions: 1, 2)"
      )
_implementation_version = int(_implementation_version_str)


# Usage of these accessors is discouraged: clients shouldn't care which
# implementation of the API is in use, and there is no guarantee that
# differences between the APIs will be maintained.
def Type():
  """Returns the implementation type in use: 'python' or 'cpp'."""
  return _implementation_type


def Version():
  """Returns the 'cpp' implementation version in use: 1 or 2."""
  return _implementation_version
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Contains container classes to represent different protocol buffer types.

This file defines container classes which represent categories of protocol
buffer field types which need extra maintenance. Currently these categories
are:
  - Repeated scalar fields - These are all repeated fields which aren't
    composite (e.g. they are of simple types like int32, string, etc).
  - Repeated composite fields - Repeated fields which are composite. This
    includes groups and nested messages.
"""

__author__ = '[email protected] (Petar Petrov)'


class BaseContainer(object):

  """Base container class."""

  # Minimizes memory usage and disallows assignment to other attributes.
  __slots__ = ['_message_listener', '_values']

  def __init__(self, message_listener):
    """
    Args:
      message_listener: A MessageListener implementation.
        The RepeatedScalarFieldContainer will call this object's
        Modified() method when it is modified.
    """
    self._message_listener = message_listener
    self._values = []

  def __getitem__(self, key):
    """Retrieves item by the specified key."""
    return self._values[key]

  def __len__(self):
    """Returns the number of elements in the container."""
    return len(self._values)

  def __ne__(self, other):
    """Checks if another instance isn't equal to this one."""
    # Defined in terms of __eq__, which each concrete subclass provides.
    return not self == other

  def __hash__(self):
    # Containers are mutable, so they must never be used as dict keys.
    raise TypeError('unhashable object')

  def __repr__(self):
    return repr(self._values)

  def sort(self, *args, **kwargs):
    """Sorts the underlying values, accepting list.sort() arguments."""
    # Continue to support the old sort_function keyword argument by
    # forwarding it to list.sort() as cmp.
    if 'sort_function' in kwargs:
      kwargs['cmp'] = kwargs.pop('sort_function')
    self._values.sort(*args, **kwargs)


class RepeatedScalarFieldContainer(BaseContainer):

  """Simple, type-checked, list-like container for holding repeated scalars."""

  # Disallows assignment to other attributes.
  __slots__ = ['_type_checker']

  def __init__(self, message_listener, type_checker):
    """
    Args:
      message_listener: A MessageListener implementation.
        The RepeatedScalarFieldContainer will call this object's
        Modified() method when it is modified.
      type_checker: A type_checkers.ValueChecker instance to run on elements
        inserted into this container.
    """
    super(RepeatedScalarFieldContainer, self).__init__(message_listener)
    self._type_checker = type_checker

  def append(self, value):
    """Appends an item to the list. Similar to list.append()."""
    self._type_checker.CheckValue(value)
    self._values.append(value)
    listener = self._message_listener
    if not listener.dirty:
      listener.Modified()

  def insert(self, key, value):
    """Inserts the item at the specified position. Similar to list.insert()."""
    self._type_checker.CheckValue(value)
    self._values.insert(key, value)
    listener = self._message_listener
    if not listener.dirty:
      listener.Modified()

  def extend(self, elem_seq):
    """Extends by appending the given sequence. Similar to list.extend()."""
    if not elem_seq:
      return

    # Validate every element before mutating, so a failed check leaves the
    # container untouched.
    checked_values = []
    for item in elem_seq:
      self._type_checker.CheckValue(item)
      checked_values.append(item)
    self._values.extend(checked_values)
    self._message_listener.Modified()

  def MergeFrom(self, other):
    """Appends the contents of another repeated field of the same type to this
    one. We do not check the types of the individual fields.
    """
    self._values.extend(other._values)
    self._message_listener.Modified()

  def remove(self, elem):
    """Removes an item from the list. Similar to list.remove()."""
    self._values.remove(elem)
    self._message_listener.Modified()

  def __setitem__(self, key, value):
    """Sets the item on the specified position."""
    self._type_checker.CheckValue(value)
    self._values[key] = value
    self._message_listener.Modified()

  def __getslice__(self, start, stop):
    """Retrieves the subset of items from between the specified indices."""
    return self._values[start:stop]

  def __setslice__(self, start, stop, values):
    """Sets the subset of items from between the specified indices."""
    checked_values = []
    for value in values:
      self._type_checker.CheckValue(value)
      checked_values.append(value)
    self._values[start:stop] = checked_values
    self._message_listener.Modified()

  def __delitem__(self, key):
    """Deletes the item at the specified position."""
    del self._values[key]
    self._message_listener.Modified()

  def __delslice__(self, start, stop):
    """Deletes the subset of items from between the specified indices."""
    del self._values[start:stop]
    self._message_listener.Modified()

  def __eq__(self, other):
    """Compares the current instance with another one."""
    if self is other:
      return True
    if isinstance(other, self.__class__):
      # Fast path: another container of the same type; compare backing lists.
      return other._values == self._values
    # We are presumably comparing against some other sequence type.
    return other == self._values


class RepeatedCompositeFieldContainer(BaseContainer):

  """Simple, list-like container for holding repeated composite fields."""

  # Disallows assignment to other attributes.
  __slots__ = ['_message_descriptor']

  def __init__(self, message_listener, message_descriptor):
    """
    Note that we pass in a descriptor instead of the generated directly,
    since at the time we construct a _RepeatedCompositeFieldContainer we
    haven't yet necessarily initialized the type that will be contained in the
    container.

    Args:
      message_listener: A MessageListener implementation.
        The RepeatedCompositeFieldContainer will call this object's
        Modified() method when it is modified.
      message_descriptor: A Descriptor instance describing the protocol type
        that should be present in this container. We'll use the
        _concrete_class field of this descriptor when the client calls add().
    """
    super(RepeatedCompositeFieldContainer, self).__init__(message_listener)
    self._message_descriptor = message_descriptor

  def add(self, **kwargs):
    """Adds a new element at the end of the list and returns it. Keyword
    arguments may be used to initialize the element.
    """
    element = self._message_descriptor._concrete_class(**kwargs)
    element._SetListener(self._message_listener)
    self._values.append(element)
    listener = self._message_listener
    if not listener.dirty:
      listener.Modified()
    return element

  def extend(self, elem_seq):
    """Extends by appending the given sequence of elements of the same type
    as this one, copying each individual message.
    """
    element_class = self._message_descriptor._concrete_class
    listener = self._message_listener
    for source_message in elem_seq:
      new_message = element_class()
      new_message._SetListener(listener)
      new_message.MergeFrom(source_message)
      self._values.append(new_message)
    listener.Modified()

  def MergeFrom(self, other):
    """Appends the contents of another repeated field of the same type to this
    one, copying each individual message.
    """
    self.extend(other._values)

  def remove(self, elem):
    """Removes an item from the list. Similar to list.remove()."""
    self._values.remove(elem)
    self._message_listener.Modified()

  def __getslice__(self, start, stop):
    """Retrieves the subset of items from between the specified indices."""
    return self._values[start:stop]

  def __delitem__(self, key):
    """Deletes the item at the specified position."""
    del self._values[key]
    self._message_listener.Modified()

  def __delslice__(self, start, stop):
    """Deletes the subset of items from between the specified indices."""
    del self._values[start:stop]
    self._message_listener.Modified()

  def __eq__(self, other):
    """Compares the current instance with another one."""
    if self is other:
      return True
    if not isinstance(other, self.__class__):
      raise TypeError('Can only compare repeated composite fields against '
                      'other repeated composite fields.')
    return self._values == other._values
1 - # Protocol Buffers - Google's data interchange format 2 - # Copyright 2008 Google Inc. All rights reserved. 3 - # http://code.google.com/p/protobuf/ 4 - # 5 - # Redistribution and use in source and binary forms, with or without 6 - # modification, are permitted provided that the following conditions are 7 - # met: 8 - # 9 - # * Redistributions of source code must retain the above copyright 10 - # notice, this list of conditions and the following disclaimer. 11 - # * Redistributions in binary form must reproduce the above 12 - # copyright notice, this list of conditions and the following disclaimer 13 - # in the documentation and/or other materials provided with the 14 - # distribution. 15 - # * Neither the name of Google Inc. nor the names of its 16 - # contributors may be used to endorse or promote products derived from 17 - # this software without specific prior written permission. 18 - # 19 - # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 - # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 - # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 22 - # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 23 - # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 24 - # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 25 - # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 - # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 27 - # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 - # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 - # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 - 31 - """Contains helper functions used to create protocol message classes from 32 - Descriptor objects at runtime backed by the protocol buffer C++ API. 
33 - """ 34 - 35 - __author__ = '[email protected] (Petar Petrov)' 36 - 37 - import copy_reg 38 - import operator 39 - from google.protobuf.internal import _net_proto2___python 40 - from google.protobuf.internal import enum_type_wrapper 41 - from google.protobuf import message 42 - 43 - 44 - _LABEL_REPEATED = _net_proto2___python.LABEL_REPEATED 45 - _LABEL_OPTIONAL = _net_proto2___python.LABEL_OPTIONAL 46 - _CPPTYPE_MESSAGE = _net_proto2___python.CPPTYPE_MESSAGE 47 - _TYPE_MESSAGE = _net_proto2___python.TYPE_MESSAGE 48 - 49 - 50 - def GetDescriptorPool(): 51 - """Creates a new DescriptorPool C++ object.""" 52 - return _net_proto2___python.NewCDescriptorPool() 53 - 54 - 55 - _pool = GetDescriptorPool() 56 - 57 - 58 - def GetFieldDescriptor(full_field_name): 59 - """Searches for a field descriptor given a full field name.""" 60 - return _pool.FindFieldByName(full_field_name) 61 - 62 - 63 - def BuildFile(content): 64 - """Registers a new proto file in the underlying C++ descriptor pool.""" 65 - _net_proto2___python.BuildFile(content) 66 - 67 - 68 - def GetExtensionDescriptor(full_extension_name): 69 - """Searches for extension descriptor given a full field name.""" 70 - return _pool.FindExtensionByName(full_extension_name) 71 - 72 - 73 - def NewCMessage(full_message_name): 74 - """Creates a new C++ protocol message by its name.""" 75 - return _net_proto2___python.NewCMessage(full_message_name) 76 - 77 - 78 - def ScalarProperty(cdescriptor): 79 - """Returns a scalar property for the given descriptor.""" 80 - 81 - def Getter(self): 82 - return self._cmsg.GetScalar(cdescriptor) 83 - 84 - def Setter(self, value): 85 - self._cmsg.SetScalar(cdescriptor, value) 86 - 87 - return property(Getter, Setter) 88 - 89 - 90 - def CompositeProperty(cdescriptor, message_type): 91 - """Returns a Python property the given composite field.""" 92 - 93 - def Getter(self): 94 - sub_message = self._composite_fields.get(cdescriptor.name, None) 95 - if sub_message is None: 96 - cmessage = 
self._cmsg.NewSubMessage(cdescriptor) 97 - sub_message = message_type._concrete_class(__cmessage=cmessage) 98 - self._composite_fields[cdescriptor.name] = sub_message 99 - return sub_message 100 - 101 - return property(Getter) 102 - 103 - 104 - class RepeatedScalarContainer(object): 105 - """Container for repeated scalar fields.""" 106 - 107 - __slots__ = ['_message', '_cfield_descriptor', '_cmsg'] 108 - 109 - def __init__(self, msg, cfield_descriptor): 110 - self._message = msg 111 - self._cmsg = msg._cmsg 112 - self._cfield_descriptor = cfield_descriptor 113 - 114 - def append(self, value): 115 - self._cmsg.AddRepeatedScalar( 116 - self._cfield_descriptor, value) 117 - 118 - def extend(self, sequence): 119 - for element in sequence: 120 - self.append(element) 121 - 122 - def insert(self, key, value): 123 - values = self[slice(None, None, None)] 124 - values.insert(key, value) 125 - self._cmsg.AssignRepeatedScalar(self._cfield_descriptor, values) 126 - 127 - def remove(self, value): 128 - values = self[slice(None, None, None)] 129 - values.remove(value) 130 - self._cmsg.AssignRepeatedScalar(self._cfield_descriptor, values) 131 - 132 - def __setitem__(self, key, value): 133 - values = self[slice(None, None, None)] 134 - values[key] = value 135 - self._cmsg.AssignRepeatedScalar(self._cfield_descriptor, values) 136 - 137 - def __getitem__(self, key): 138 - return self._cmsg.GetRepeatedScalar(self._cfield_descriptor, key) 139 - 140 - def __delitem__(self, key): 141 - self._cmsg.DeleteRepeatedField(self._cfield_descriptor, key) 142 - 143 - def __len__(self): 144 - return len(self[slice(None, None, None)]) 145 - 146 - def __eq__(self, other): 147 - if self is other: 148 - return True 149 - if not operator.isSequenceType(other): 150 - raise TypeError( 151 - 'Can only compare repeated scalar fields against sequences.') 152 - # We are presumably comparing against some other sequence type. 
153 - return other == self[slice(None, None, None)] 154 - 155 - def __ne__(self, other): 156 - return not self == other 157 - 158 - def __hash__(self): 159 - raise TypeError('unhashable object') 160 - 161 - def sort(self, *args, **kwargs): 162 - # Maintain compatibility with the previous interface. 163 - if 'sort_function' in kwargs: 164 - kwargs['cmp'] = kwargs.pop('sort_function') 165 - self._cmsg.AssignRepeatedScalar(self._cfield_descriptor, 166 - sorted(self, *args, **kwargs)) 167 - 168 - 169 - def RepeatedScalarProperty(cdescriptor): 170 - """Returns a Python property the given repeated scalar field.""" 171 - 172 - def Getter(self): 173 - container = self._composite_fields.get(cdescriptor.name, None) 174 - if container is None: 175 - container = RepeatedScalarContainer(self, cdescriptor) 176 - self._composite_fields[cdescriptor.name] = container 177 - return container 178 - 179 - def Setter(self, new_value): 180 - raise AttributeError('Assignment not allowed to repeated field ' 181 - '"%s" in protocol message object.' % cdescriptor.name) 182 - 183 - doc = 'Magic attribute generated for "%s" proto field.' % cdescriptor.name 184 - return property(Getter, Setter, doc=doc) 185 - 186 - 187 - class RepeatedCompositeContainer(object): 188 - """Container for repeated composite fields.""" 189 - 190 - __slots__ = ['_message', '_subclass', '_cfield_descriptor', '_cmsg'] 191 - 192 - def __init__(self, msg, cfield_descriptor, subclass): 193 - self._message = msg 194 - self._cmsg = msg._cmsg 195 - self._subclass = subclass 196 - self._cfield_descriptor = cfield_descriptor 197 - 198 - def add(self, **kwargs): 199 - cmessage = self._cmsg.AddMessage(self._cfield_descriptor) 200 - return self._subclass(__cmessage=cmessage, __owner=self._message, **kwargs) 201 - 202 - def extend(self, elem_seq): 203 - """Extends by appending the given sequence of elements of the same type 204 - as this one, copying each individual message. 
205 - """ 206 - for message in elem_seq: 207 - self.add().MergeFrom(message) 208 - 209 - def remove(self, value): 210 - # TODO(protocol-devel): This is inefficient as it needs to generate a 211 - # message pointer for each message only to do index(). Move this to a C++ 212 - # extension function. 213 - self.__delitem__(self[slice(None, None, None)].index(value)) 214 - 215 - def MergeFrom(self, other): 216 - for message in other[:]: 217 - self.add().MergeFrom(message) 218 - 219 - def __getitem__(self, key): 220 - cmessages = self._cmsg.GetRepeatedMessage( 221 - self._cfield_descriptor, key) 222 - subclass = self._subclass 223 - if not isinstance(cmessages, list): 224 - return subclass(__cmessage=cmessages, __owner=self._message) 225 - 226 - return [subclass(__cmessage=m, __owner=self._message) for m in cmessages] 227 - 228 - def __delitem__(self, key): 229 - self._cmsg.DeleteRepeatedField( 230 - self._cfield_descriptor, key) 231 - 232 - def __len__(self): 233 - return self._cmsg.FieldLength(self._cfield_descriptor) 234 - 235 - def __eq__(self, other): 236 - """Compares the current instance with another one.""" 237 - if self is other: 238 - return True 239 - if not isinstance(other, self.__class__): 240 - raise TypeError('Can only compare repeated composite fields against ' 241 - 'other repeated composite fields.') 242 - messages = self[slice(None, None, None)] 243 - other_messages = other[slice(None, None, None)] 244 - return messages == other_messages 245 - 246 - def __hash__(self): 247 - raise TypeError('unhashable object') 248 - 249 - def sort(self, cmp=None, key=None, reverse=False, **kwargs): 250 - # Maintain compatibility with the old interface. 251 - if cmp is None and 'sort_function' in kwargs: 252 - cmp = kwargs.pop('sort_function') 253 - 254 - # The cmp function, if provided, is passed the results of the key function, 255 - # so we only need to wrap one of them. 
256 - if key is None: 257 - index_key = self.__getitem__ 258 - else: 259 - index_key = lambda i: key(self[i]) 260 - 261 - # Sort the list of current indexes by the underlying object. 262 - indexes = range(len(self)) 263 - indexes.sort(cmp=cmp, key=index_key, reverse=reverse) 264 - 265 - # Apply the transposition. 266 - for dest, src in enumerate(indexes): 267 - if dest == src: 268 - continue 269 - self._cmsg.SwapRepeatedFieldElements(self._cfield_descriptor, dest, src) 270 - # Don't swap the same value twice. 271 - indexes[src] = src 272 - 273 - 274 - def RepeatedCompositeProperty(cdescriptor, message_type): 275 - """Returns a Python property for the given repeated composite field.""" 276 - 277 - def Getter(self): 278 - container = self._composite_fields.get(cdescriptor.name, None) 279 - if container is None: 280 - container = RepeatedCompositeContainer( 281 - self, cdescriptor, message_type._concrete_class) 282 - self._composite_fields[cdescriptor.name] = container 283 - return container 284 - 285 - def Setter(self, new_value): 286 - raise AttributeError('Assignment not allowed to repeated field ' 287 - '"%s" in protocol message object.' % cdescriptor.name) 288 - 289 - doc = 'Magic attribute generated for "%s" proto field.' % cdescriptor.name 290 - return property(Getter, Setter, doc=doc) 291 - 292 - 293 - class ExtensionDict(object): 294 - """Extension dictionary added to each protocol message.""" 295 - 296 - def __init__(self, msg): 297 - self._message = msg 298 - self._cmsg = msg._cmsg 299 - self._values = {} 300 - 301 - def __setitem__(self, extension, value): 302 - from google.protobuf import descriptor 303 - if not isinstance(extension, descriptor.FieldDescriptor): 304 - raise KeyError('Bad extension %r.' % (extension,)) 305 - cdescriptor = extension._cdescriptor 306 - if (cdescriptor.label != _LABEL_OPTIONAL or 307 - cdescriptor.cpp_type == _CPPTYPE_MESSAGE): 308 - raise TypeError('Extension %r is repeated and/or a composite type.' 
% ( 309 - extension.full_name,)) 310 - self._cmsg.SetScalar(cdescriptor, value) 311 - self._values[extension] = value 312 - 313 - def __getitem__(self, extension): 314 - from google.protobuf import descriptor 315 - if not isinstance(extension, descriptor.FieldDescriptor): 316 - raise KeyError('Bad extension %r.' % (extension,)) 317 - 318 - cdescriptor = extension._cdescriptor 319 - if (cdescriptor.label != _LABEL_REPEATED and 320 - cdescriptor.cpp_type != _CPPTYPE_MESSAGE): 321 - return self._cmsg.GetScalar(cdescriptor) 322 - 323 - ext = self._values.get(extension, None) 324 - if ext is not None: 325 - return ext 326 - 327 - ext = self._CreateNewHandle(extension) 328 - self._values[extension] = ext 329 - return ext 330 - 331 - def ClearExtension(self, extension): 332 - from google.protobuf import descriptor 333 - if not isinstance(extension, descriptor.FieldDescriptor): 334 - raise KeyError('Bad extension %r.' % (extension,)) 335 - self._cmsg.ClearFieldByDescriptor(extension._cdescriptor) 336 - if extension in self._values: 337 - del self._values[extension] 338 - 339 - def HasExtension(self, extension): 340 - from google.protobuf import descriptor 341 - if not isinstance(extension, descriptor.FieldDescriptor): 342 - raise KeyError('Bad extension %r.' % (extension,)) 343 - return self._cmsg.HasFieldByDescriptor(extension._cdescriptor) 344 - 345 - def _FindExtensionByName(self, name): 346 - """Tries to find a known extension with the specified name. 347 - 348 - Args: 349 - name: Extension full name. 350 - 351 - Returns: 352 - Extension field descriptor. 
353 - """ 354 - return self._message._extensions_by_name.get(name, None) 355 - 356 - def _CreateNewHandle(self, extension): 357 - cdescriptor = extension._cdescriptor 358 - if (cdescriptor.label != _LABEL_REPEATED and 359 - cdescriptor.cpp_type == _CPPTYPE_MESSAGE): 360 - cmessage = self._cmsg.NewSubMessage(cdescriptor) 361 - return extension.message_type._concrete_class(__cmessage=cmessage) 362 - 363 - if cdescriptor.label == _LABEL_REPEATED: 364 - if cdescriptor.cpp_type == _CPPTYPE_MESSAGE: 365 - return RepeatedCompositeContainer( 366 - self._message, cdescriptor, extension.message_type._concrete_class) 367 - else: 368 - return RepeatedScalarContainer(self._message, cdescriptor) 369 - # This shouldn't happen! 370 - assert False 371 - return None 372 - 373 - 374 - def NewMessage(bases, message_descriptor, dictionary): 375 - """Creates a new protocol message *class*.""" 376 - _AddClassAttributesForNestedExtensions(message_descriptor, dictionary) 377 - _AddEnumValues(message_descriptor, dictionary) 378 - _AddDescriptors(message_descriptor, dictionary) 379 - return bases 380 - 381 - 382 - def InitMessage(message_descriptor, cls): 383 - """Constructs a new message instance (called before instance's __init__).""" 384 - cls._extensions_by_name = {} 385 - _AddInitMethod(message_descriptor, cls) 386 - _AddMessageMethods(message_descriptor, cls) 387 - _AddPropertiesForExtensions(message_descriptor, cls) 388 - copy_reg.pickle(cls, lambda obj: (cls, (), obj.__getstate__())) 389 - 390 - 391 - def _AddDescriptors(message_descriptor, dictionary): 392 - """Sets up a new protocol message class dictionary. 393 - 394 - Args: 395 - message_descriptor: A Descriptor instance describing this message type. 396 - dictionary: Class dictionary to which we'll add a '__slots__' entry. 
397 - """ 398 - dictionary['__descriptors'] = {} 399 - for field in message_descriptor.fields: 400 - dictionary['__descriptors'][field.name] = GetFieldDescriptor( 401 - field.full_name) 402 - 403 - dictionary['__slots__'] = list(dictionary['__descriptors'].iterkeys()) + [ 404 - '_cmsg', '_owner', '_composite_fields', 'Extensions', '_HACK_REFCOUNTS'] 405 - 406 - 407 - def _AddEnumValues(message_descriptor, dictionary): 408 - """Sets class-level attributes for all enum fields defined in this message. 409 - 410 - Args: 411 - message_descriptor: Descriptor object for this message type. 412 - dictionary: Class dictionary that should be populated. 413 - """ 414 - for enum_type in message_descriptor.enum_types: 415 - dictionary[enum_type.name] = enum_type_wrapper.EnumTypeWrapper(enum_type) 416 - for enum_value in enum_type.values: 417 - dictionary[enum_value.name] = enum_value.number 418 - 419 - 420 - def _AddClassAttributesForNestedExtensions(message_descriptor, dictionary): 421 - """Adds class attributes for the nested extensions.""" 422 - extension_dict = message_descriptor.extensions_by_name 423 - for extension_name, extension_field in extension_dict.iteritems(): 424 - assert extension_name not in dictionary 425 - dictionary[extension_name] = extension_field 426 - 427 - 428 - def _AddInitMethod(message_descriptor, cls): 429 - """Adds an __init__ method to cls.""" 430 - 431 - # Create and attach message field properties to the message class. 432 - # This can be done just once per message class, since property setters and 433 - # getters are passed the message instance. 434 - # This makes message instantiation extremely fast, and at the same time it 435 - # doesn't require the creation of property objects for each message instance, 436 - # which saves a lot of memory. 
437 - for field in message_descriptor.fields: 438 - field_cdescriptor = cls.__descriptors[field.name] 439 - if field.label == _LABEL_REPEATED: 440 - if field.cpp_type == _CPPTYPE_MESSAGE: 441 - value = RepeatedCompositeProperty(field_cdescriptor, field.message_type) 442 - else: 443 - value = RepeatedScalarProperty(field_cdescriptor) 444 - elif field.cpp_type == _CPPTYPE_MESSAGE: 445 - value = CompositeProperty(field_cdescriptor, field.message_type) 446 - else: 447 - value = ScalarProperty(field_cdescriptor) 448 - setattr(cls, field.name, value) 449 - 450 - # Attach a constant with the field number. 451 - constant_name = field.name.upper() + '_FIELD_NUMBER' 452 - setattr(cls, constant_name, field.number) 453 - 454 - def Init(self, **kwargs): 455 - """Message constructor.""" 456 - cmessage = kwargs.pop('__cmessage', None) 457 - if cmessage: 458 - self._cmsg = cmessage 459 - else: 460 - self._cmsg = NewCMessage(message_descriptor.full_name) 461 - 462 - # Keep a reference to the owner, as the owner keeps a reference to the 463 - # underlying protocol buffer message. 464 - owner = kwargs.pop('__owner', None) 465 - if owner: 466 - self._owner = owner 467 - 468 - if message_descriptor.is_extendable: 469 - self.Extensions = ExtensionDict(self) 470 - else: 471 - # Reference counting in the C++ code is broken and depends on 472 - # the Extensions reference to keep this object alive during unit 473 - # tests (see b/4856052). Remove this once b/4945904 is fixed. 474 - self._HACK_REFCOUNTS = self 475 - self._composite_fields = {} 476 - 477 - for field_name, field_value in kwargs.iteritems(): 478 - field_cdescriptor = self.__descriptors.get(field_name, None) 479 - if not field_cdescriptor: 480 - raise ValueError('Protocol message has no "%s" field.' 
% field_name) 481 - if field_cdescriptor.label == _LABEL_REPEATED: 482 - if field_cdescriptor.cpp_type == _CPPTYPE_MESSAGE: 483 - field_name = getattr(self, field_name) 484 - for val in field_value: 485 - field_name.add().MergeFrom(val) 486 - else: 487 - getattr(self, field_name).extend(field_value) 488 - elif field_cdescriptor.cpp_type == _CPPTYPE_MESSAGE: 489 - getattr(self, field_name).MergeFrom(field_value) 490 - else: 491 - setattr(self, field_name, field_value) 492 - 493 - Init.__module__ = None 494 - Init.__doc__ = None 495 - cls.__init__ = Init 496 - 497 - 498 - def _IsMessageSetExtension(field): 499 - """Checks if a field is a message set extension.""" 500 - return (field.is_extension and 501 - field.containing_type.has_options and 502 - field.containing_type.GetOptions().message_set_wire_format and 503 - field.type == _TYPE_MESSAGE and 504 - field.message_type == field.extension_scope and 505 - field.label == _LABEL_OPTIONAL) 506 - 507 - 508 - def _AddMessageMethods(message_descriptor, cls): 509 - """Adds the methods to a protocol message class.""" 510 - if message_descriptor.is_extendable: 511 - 512 - def ClearExtension(self, extension): 513 - self.Extensions.ClearExtension(extension) 514 - 515 - def HasExtension(self, extension): 516 - return self.Extensions.HasExtension(extension) 517 - 518 - def HasField(self, field_name): 519 - return self._cmsg.HasField(field_name) 520 - 521 - def ClearField(self, field_name): 522 - child_cmessage = None 523 - if field_name in self._composite_fields: 524 - child_field = self._composite_fields[field_name] 525 - del self._composite_fields[field_name] 526 - 527 - child_cdescriptor = self.__descriptors[field_name] 528 - # TODO(anuraag): Support clearing repeated message fields as well. 
529 - if (child_cdescriptor.label != _LABEL_REPEATED and 530 - child_cdescriptor.cpp_type == _CPPTYPE_MESSAGE): 531 - child_field._owner = None 532 - child_cmessage = child_field._cmsg 533 - 534 - if child_cmessage is not None: 535 - self._cmsg.ClearField(field_name, child_cmessage) 536 - else: 537 - self._cmsg.ClearField(field_name) 538 - 539 - def Clear(self): 540 - cmessages_to_release = [] 541 - for field_name, child_field in self._composite_fields.iteritems(): 542 - child_cdescriptor = self.__descriptors[field_name] 543 - # TODO(anuraag): Support clearing repeated message fields as well. 544 - if (child_cdescriptor.label != _LABEL_REPEATED and 545 - child_cdescriptor.cpp_type == _CPPTYPE_MESSAGE): 546 - child_field._owner = None 547 - cmessages_to_release.append((child_cdescriptor, child_field._cmsg)) 548 - self._composite_fields.clear() 549 - self._cmsg.Clear(cmessages_to_release) 550 - 551 - def IsInitialized(self, errors=None): 552 - if self._cmsg.IsInitialized(): 553 - return True 554 - if errors is not None: 555 - errors.extend(self.FindInitializationErrors()); 556 - return False 557 - 558 - def SerializeToString(self): 559 - if not self.IsInitialized(): 560 - raise message.EncodeError( 561 - 'Message %s is missing required fields: %s' % ( 562 - self._cmsg.full_name, ','.join(self.FindInitializationErrors()))) 563 - return self._cmsg.SerializeToString() 564 - 565 - def SerializePartialToString(self): 566 - return self._cmsg.SerializePartialToString() 567 - 568 - def ParseFromString(self, serialized): 569 - self.Clear() 570 - self.MergeFromString(serialized) 571 - 572 - def MergeFromString(self, serialized): 573 - byte_size = self._cmsg.MergeFromString(serialized) 574 - if byte_size < 0: 575 - raise message.DecodeError('Unable to merge from string.') 576 - return byte_size 577 - 578 - def MergeFrom(self, msg): 579 - if not isinstance(msg, cls): 580 - raise TypeError( 581 - "Parameter to MergeFrom() must be instance of same class: " 582 - "expected %s got 
%s." % (cls.__name__, type(msg).__name__)) 583 - self._cmsg.MergeFrom(msg._cmsg) 584 - 585 - def CopyFrom(self, msg): 586 - self._cmsg.CopyFrom(msg._cmsg) 587 - 588 - def ByteSize(self): 589 - return self._cmsg.ByteSize() 590 - 591 - def SetInParent(self): 592 - return self._cmsg.SetInParent() 593 - 594 - def ListFields(self): 595 - all_fields = [] 596 - field_list = self._cmsg.ListFields() 597 - fields_by_name = cls.DESCRIPTOR.fields_by_name 598 - for is_extension, field_name in field_list: 599 - if is_extension: 600 - extension = cls._extensions_by_name[field_name] 601 - all_fields.append((extension, self.Extensions[extension])) 602 - else: 603 - field_descriptor = fields_by_name[field_name] 604 - all_fields.append( 605 - (field_descriptor, getattr(self, field_name))) 606 - all_fields.sort(key=lambda item: item[0].number) 607 - return all_fields 608 - 609 - def FindInitializationErrors(self): 610 - return self._cmsg.FindInitializationErrors() 611 - 612 - def __str__(self): 613 - return self._cmsg.DebugString() 614 - 615 - def __eq__(self, other): 616 - if self is other: 617 - return True 618 - if not isinstance(other, self.__class__): 619 - return False 620 - return self.ListFields() == other.ListFields() 621 - 622 - def __ne__(self, other): 623 - return not self == other 624 - 625 - def __hash__(self): 626 - raise TypeError('unhashable object') 627 - 628 - def __unicode__(self): 629 - # Lazy import to prevent circular import when text_format imports this file. 630 - from google.protobuf import text_format 631 - return text_format.MessageToString(self, as_utf8=True).decode('utf-8') 632 - 633 - # Attach the local methods to the message class. 
634 - for key, value in locals().copy().iteritems(): 635 - if key not in ('key', 'value', '__builtins__', '__name__', '__doc__'): 636 - setattr(cls, key, value) 637 - 638 - # Static methods: 639 - 640 - def RegisterExtension(extension_handle): 641 - extension_handle.containing_type = cls.DESCRIPTOR 642 - cls._extensions_by_name[extension_handle.full_name] = extension_handle 643 - 644 - if _IsMessageSetExtension(extension_handle): 645 - # MessageSet extension. Also register under type name. 646 - cls._extensions_by_name[ 647 - extension_handle.message_type.full_name] = extension_handle 648 - cls.RegisterExtension = staticmethod(RegisterExtension) 649 - 650 - def FromString(string): 651 - msg = cls() 652 - msg.MergeFromString(string) 653 - return msg 654 - cls.FromString = staticmethod(FromString) 655 - 656 - 657 - 658 - def _AddPropertiesForExtensions(message_descriptor, cls): 659 - """Adds properties for all fields in this protocol message type.""" 660 - extension_dict = message_descriptor.extensions_by_name 661 - for extension_name, extension_field in extension_dict.iteritems(): 662 - constant_name = extension_name.upper() + '_FIELD_NUMBER' 663 - setattr(cls, constant_name, extension_field.number) 664 - -
1 - # Protocol Buffers - Google's data interchange format 2 - # Copyright 2008 Google Inc. All rights reserved. 3 - # http://code.google.com/p/protobuf/ 4 - # 5 - # Redistribution and use in source and binary forms, with or without 6 - # modification, are permitted provided that the following conditions are 7 - # met: 8 - # 9 - # * Redistributions of source code must retain the above copyright 10 - # notice, this list of conditions and the following disclaimer. 11 - # * Redistributions in binary form must reproduce the above 12 - # copyright notice, this list of conditions and the following disclaimer 13 - # in the documentation and/or other materials provided with the 14 - # distribution. 15 - # * Neither the name of Google Inc. nor the names of its 16 - # contributors may be used to endorse or promote products derived from 17 - # this software without specific prior written permission. 18 - # 19 - # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 - # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 - # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 22 - # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 23 - # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 24 - # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 25 - # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 - # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 27 - # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 - # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 - # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 - 31 - """Code for decoding protocol buffer primitives. 32 - 33 - This code is very similar to encoder.py -- read the docs for that module first. 
34 - 35 - A "decoder" is a function with the signature: 36 - Decode(buffer, pos, end, message, field_dict) 37 - The arguments are: 38 - buffer: The string containing the encoded message. 39 - pos: The current position in the string. 40 - end: The position in the string where the current message ends. May be 41 - less than len(buffer) if we're reading a sub-message. 42 - message: The message object into which we're parsing. 43 - field_dict: message._fields (avoids a hashtable lookup). 44 - The decoder reads the field and stores it into field_dict, returning the new 45 - buffer position. A decoder for a repeated field may proactively decode all of 46 - the elements of that field, if they appear consecutively. 47 - 48 - Note that decoders may throw any of the following: 49 - IndexError: Indicates a truncated message. 50 - struct.error: Unpacking of a fixed-width field failed. 51 - message.DecodeError: Other errors. 52 - 53 - Decoders are expected to raise an exception if they are called with pos > end. 54 - This allows callers to be lax about bounds checking: it's fineto read past 55 - "end" as long as you are sure that someone else will notice and throw an 56 - exception later on. 57 - 58 - Something up the call stack is expected to catch IndexError and struct.error 59 - and convert them to message.DecodeError. 60 - 61 - Decoders are constructed using decoder constructors with the signature: 62 - MakeDecoder(field_number, is_repeated, is_packed, key, new_default) 63 - The arguments are: 64 - field_number: The field number of the field we want to decode. 65 - is_repeated: Is the field a repeated field? (bool) 66 - is_packed: Is the field a packed field? (bool) 67 - key: The key to use when looking up the field within field_dict. 68 - (This is actually the FieldDescriptor but nothing in this 69 - file should depend on that.) 70 - new_default: A function which takes a message object as a parameter and 71 - returns a new instance of the default value for this field. 
(This is called for repeated fields and sub-messages, when an
instance does not already exist.)

As with encoders, we define a decoder constructor for every type of field.
Then, for every field of every message class we construct an actual decoder.
That decoder goes into a dict indexed by tag, so when we decode a message
we repeatedly read a tag, look up the corresponding decoder, and invoke it.
"""

__author__ = '[email protected] (Kenton Varda)'

import struct
from google.protobuf.internal import encoder
from google.protobuf.internal import wire_format
from google.protobuf import message


# This will overflow and thus become IEEE-754 "infinity".  We would use
# "float('inf')" but it doesn't work on Windows pre-Python-2.6.
_POS_INF = 1e10000
_NEG_INF = -_POS_INF
_NAN = _POS_INF * 0


# This is not for optimization, but rather to avoid conflicts with local
# variables named "message".
_DecodeError = message.DecodeError


def _VarintDecoder(mask):
  """Return a decoder for a basic varint value (does not include tag).

  Decoded values will be bitwise-anded with the given mask before being
  returned, e.g. to limit them to 32 bits.  The returned decoder does not
  take the usual "end" parameter -- the caller is expected to do bounds checking
  after the fact (often the caller can defer such checking until later).  The
  decoder returns a (value, new_pos) pair.
  """

  # Bind ord() to a local: this closure runs in the innermost decode loop.
  local_ord = ord
  def DecodeVarint(buffer, pos):
    result = 0
    shift = 0
    while 1:
      b = local_ord(buffer[pos])
      # Each byte contributes its low 7 bits, least-significant group first.
      result |= ((b & 0x7f) << shift)
      pos += 1
      if not (b & 0x80):
        # High bit clear: this was the final byte of the varint.
        result &= mask
        return (result, pos)
      shift += 7
      if shift >= 64:
        raise _DecodeError('Too many bytes when decoding varint.')
  return DecodeVarint


def _SignedVarintDecoder(mask):
  """Like _VarintDecoder() but decodes signed values."""

  local_ord = ord
  def DecodeVarint(buffer, pos):
    result = 0
    shift = 0
    while 1:
      b = local_ord(buffer[pos])
      result |= ((b & 0x7f) << shift)
      pos += 1
      if not (b & 0x80):
        if result > 0x7fffffffffffffff:
          # Top bit of the raw 64-bit value is set: the value is negative.
          # Convert to a Python negative int and sign-extend past the mask.
          result -= (1 << 64)
          result |= ~mask
        else:
          result &= mask
        return (result, pos)
      shift += 7
      if shift >= 64:
        raise _DecodeError('Too many bytes when decoding varint.')
  return DecodeVarint


_DecodeVarint = _VarintDecoder((1 << 64) - 1)
_DecodeSignedVarint = _SignedVarintDecoder((1 << 64) - 1)

# Use these versions for values which must be limited to 32 bits.
_DecodeVarint32 = _VarintDecoder((1 << 32) - 1)
_DecodeSignedVarint32 = _SignedVarintDecoder((1 << 32) - 1)


def ReadTag(buffer, pos):
  """Read a tag from the buffer, and return a (tag_bytes, new_pos) tuple.

  We return the raw bytes of the tag rather than decoding them.  The raw
  bytes can then be used to look up the proper decoder.  This effectively allows
  us to trade some work that would be done in pure-python (decoding a varint)
  for work that is done in C (searching for a byte string in a hash table).
  In a low-level language it would be much cheaper to decode the varint and
  use that, but not in Python.
  """

  start = pos
  # Skip over the varint: continuation bytes have the high bit set.
  while ord(buffer[pos]) & 0x80:
    pos += 1
  pos += 1
  return (buffer[start:pos], pos)


# --------------------------------------------------------------------


def _SimpleDecoder(wire_type, decode_value):
  """Return a constructor for a decoder for fields of a particular type.

  Args:
      wire_type:  The field's wire type.
      decode_value:  A function which decodes an individual value, e.g.
        _DecodeVarint()
  """

  def SpecificDecoder(field_number, is_repeated, is_packed, key, new_default):
    if is_packed:
      local_DecodeVarint = _DecodeVarint
      def DecodePackedField(buffer, pos, end, message, field_dict):
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        # A packed field is length-delimited: read the byte length, then
        # decode values back-to-back until the computed endpoint is reached.
        (endpoint, pos) = local_DecodeVarint(buffer, pos)
        endpoint += pos
        if endpoint > end:
          raise _DecodeError('Truncated message.')
        while pos < endpoint:
          (element, pos) = decode_value(buffer, pos)
          value.append(element)
        if pos > endpoint:
          del value[-1]   # Discard corrupt value.
          raise _DecodeError('Packed element was truncated.')
        return pos
      return DecodePackedField
    elif is_repeated:
      tag_bytes = encoder.TagBytes(field_number, wire_type)
      tag_len = len(tag_bytes)
      def DecodeRepeatedField(buffer, pos, end, message, field_dict):
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        while 1:
          (element, new_pos) = decode_value(buffer, pos)
          value.append(element)
          # Predict that the next tag is another copy of the same repeated
          # field.
          pos = new_pos + tag_len
          if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
            # Prediction failed.  Return.
            # (decode_value does no bounds checking, so verify here that the
            # last element did not run off the end of the buffer.)
            if new_pos > end:
              raise _DecodeError('Truncated message.')
            return new_pos
      return DecodeRepeatedField
    else:
      def DecodeField(buffer, pos, end, message, field_dict):
        (field_dict[key], pos) = decode_value(buffer, pos)
        if pos > end:
          del field_dict[key]  # Discard corrupt value.
          raise _DecodeError('Truncated message.')
        return pos
      return DecodeField

  return SpecificDecoder


def _ModifiedDecoder(wire_type, decode_value, modify_value):
  """Like SimpleDecoder but additionally invokes modify_value on every value
  before storing it.  Usually modify_value is ZigZagDecode.
  """

  # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but
  # not enough to make a significant difference.

  def InnerDecode(buffer, pos):
    (result, new_pos) = decode_value(buffer, pos)
    return (modify_value(result), new_pos)
  return _SimpleDecoder(wire_type, InnerDecode)


def _StructPackDecoder(wire_type, format):
  """Return a constructor for a decoder for a fixed-width field.

  Args:
      wire_type:  The field's wire type.
      format:  The format string to pass to struct.unpack().
  """

  value_size = struct.calcsize(format)
  local_unpack = struct.unpack

  # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but
  # not enough to make a significant difference.

  # Note that we expect someone up-stack to catch struct.error and convert
  # it to _DecodeError -- this way we don't have to set up exception-
  # handling blocks every time we parse one value.

  def InnerDecode(buffer, pos):
    new_pos = pos + value_size
    result = local_unpack(format, buffer[pos:new_pos])[0]
    return (result, new_pos)
  return _SimpleDecoder(wire_type, InnerDecode)


def _FloatDecoder():
  """Returns a decoder for a float field.

  This code works around a bug in struct.unpack for non-finite 32-bit
  floating-point values.
  """

  local_unpack = struct.unpack

  def InnerDecode(buffer, pos):
    # We expect a 32-bit value in little-endian byte order.  Bit 1 is the sign
    # bit, bits 2-9 represent the exponent, and bits 10-32 are the significand.
    new_pos = pos + 4
    float_bytes = buffer[pos:new_pos]

    # If this value has all its exponent bits set, then it's non-finite.
    # In Python 2.4, struct.unpack will convert it to a finite 64-bit value.
    # To avoid that, we parse it specially.
    if ((float_bytes[3] in '\x7F\xFF')
        and (float_bytes[2] >= '\x80')):
      # If at least one significand bit is set...
      if float_bytes[0:3] != '\x00\x00\x80':
        return (_NAN, new_pos)
      # If sign bit is set...
      if float_bytes[3] == '\xFF':
        return (_NEG_INF, new_pos)
      return (_POS_INF, new_pos)

    # Note that we expect someone up-stack to catch struct.error and convert
    # it to _DecodeError -- this way we don't have to set up exception-
    # handling blocks every time we parse one value.
    result = local_unpack('<f', float_bytes)[0]
    return (result, new_pos)
  return _SimpleDecoder(wire_format.WIRETYPE_FIXED32, InnerDecode)


def _DoubleDecoder():
  """Returns a decoder for a double field.

  This code works around a bug in struct.unpack for not-a-number.
  """

  local_unpack = struct.unpack

  def InnerDecode(buffer, pos):
    # We expect a 64-bit value in little-endian byte order.  Bit 1 is the sign
    # bit, bits 2-12 represent the exponent, and bits 13-64 are the significand.
    new_pos = pos + 8
    double_bytes = buffer[pos:new_pos]

    # If this value has all its exponent bits set and at least one significand
    # bit set, it's not a number.  In Python 2.4, struct.unpack will treat it
    # as inf or -inf.  To avoid that, we treat it specially.
    if ((double_bytes[7] in '\x7F\xFF')
        and (double_bytes[6] >= '\xF0')
        and (double_bytes[0:7] != '\x00\x00\x00\x00\x00\x00\xF0')):
      return (_NAN, new_pos)

    # Note that we expect someone up-stack to catch struct.error and convert
    # it to _DecodeError -- this way we don't have to set up exception-
    # handling blocks every time we parse one value.
    result = local_unpack('<d', double_bytes)[0]
    return (result, new_pos)
  return _SimpleDecoder(wire_format.WIRETYPE_FIXED64, InnerDecode)


# --------------------------------------------------------------------


Int32Decoder = EnumDecoder = _SimpleDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeSignedVarint32)

Int64Decoder = _SimpleDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeSignedVarint)

UInt32Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint32)
UInt64Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint)

SInt32Decoder = _ModifiedDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeVarint32, wire_format.ZigZagDecode)
SInt64Decoder = _ModifiedDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeVarint, wire_format.ZigZagDecode)

# Note that Python conveniently guarantees that when using the '<' prefix on
# formats, they will also have the same size across all platforms (as opposed
# to without the prefix, where their sizes depend on the C compiler's basic
# type sizes).
Fixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<I')
Fixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<Q')
SFixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<i')
SFixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<q')
FloatDecoder = _FloatDecoder()
DoubleDecoder = _DoubleDecoder()

BoolDecoder = _ModifiedDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeVarint, bool)


def StringDecoder(field_number, is_repeated, is_packed, key, new_default):
  """Returns a decoder for a string field.

  The returned decoder reads a length-delimited value, decodes it as UTF-8,
  and stores the resulting unicode object in field_dict[key] (appending it,
  for a repeated field).
  """

  local_DecodeVarint = _DecodeVarint
  local_unicode = unicode

  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        (size, pos) = local_DecodeVarint(buffer, pos)
        new_pos = pos + size
        if new_pos > end:
          raise _DecodeError('Truncated string.')
        value.append(local_unicode(buffer[pos:new_pos], 'utf-8'))
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated string.')
      field_dict[key] = local_unicode(buffer[pos:new_pos], 'utf-8')
      return new_pos
    return DecodeField


def BytesDecoder(field_number, is_repeated, is_packed, key, new_default):
  """Returns a decoder for a bytes field.

  Like StringDecoder, but stores the raw (undecoded) byte slice.
  """

  local_DecodeVarint = _DecodeVarint

  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        (size, pos) = local_DecodeVarint(buffer, pos)
        new_pos = pos + size
        if new_pos > end:
          raise _DecodeError('Truncated string.')
        value.append(buffer[pos:new_pos])
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated string.')
      field_dict[key] = buffer[pos:new_pos]
      return new_pos
    return DecodeField


def GroupDecoder(field_number, is_repeated, is_packed, key, new_default):
  """Returns a decoder for a group field.

  A group is delimited by START_GROUP / END_GROUP tags rather than by an
  explicit length, so the sub-message parses until it hits the end tag.
  """

  end_tag_bytes = encoder.TagBytes(field_number,
                                   wire_format.WIRETYPE_END_GROUP)
  end_tag_len = len(end_tag_bytes)

  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_START_GROUP)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        # Read sub-message.
        pos = value.add()._InternalParse(buffer, pos, end)
        # Read end tag.
        new_pos = pos+end_tag_len
        if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
          raise _DecodeError('Missing group end tag.')
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      # Read sub-message.
      pos = value._InternalParse(buffer, pos, end)
      # Read end tag.
      new_pos = pos+end_tag_len
      if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
        raise _DecodeError('Missing group end tag.')
      return new_pos
    return DecodeField


def MessageDecoder(field_number, is_repeated, is_packed, key, new_default):
  """Returns a decoder for a message field.

  The returned decoder reads a length prefix, then delegates the delimited
  byte range to the sub-message's _InternalParse().
  """

  local_DecodeVarint = _DecodeVarint

  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        # Read length.
        (size, pos) = local_DecodeVarint(buffer, pos)
        new_pos = pos + size
        if new_pos > end:
          raise _DecodeError('Truncated message.')
        # Read sub-message.
        if value.add()._InternalParse(buffer, pos, new_pos) != new_pos:
          # The only reason _InternalParse would return early is if it
          # encountered an end-group tag.
          raise _DecodeError('Unexpected end-group tag.')
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      # Read length.
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated message.')
      # Read sub-message.
      if value._InternalParse(buffer, pos, new_pos) != new_pos:
        # The only reason _InternalParse would return early is if it
        # encountered an end-group tag.
        raise _DecodeError('Unexpected end-group tag.')
      return new_pos
    return DecodeField


# --------------------------------------------------------------------

MESSAGE_SET_ITEM_TAG = encoder.TagBytes(1, wire_format.WIRETYPE_START_GROUP)

def MessageSetItemDecoder(extensions_by_number):
  """Returns a decoder for a MessageSet item.

  The parameter is the _extensions_by_number map for the message class.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """

  type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT)
  message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)
  item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP)

  local_ReadTag = ReadTag
  local_DecodeVarint = _DecodeVarint
  local_SkipField = SkipField

  def DecodeItem(buffer, pos, end, message, field_dict):
    message_set_item_start = pos
    type_id = -1
    message_start = -1
    message_end = -1

    # Technically, type_id and message can appear in any order, so we need
    # a little loop here.
    while 1:
      (tag_bytes, pos) = local_ReadTag(buffer, pos)
      if tag_bytes == type_id_tag_bytes:
        (type_id, pos) = local_DecodeVarint(buffer, pos)
      elif tag_bytes == message_tag_bytes:
        # Record where the message payload lives; it is parsed below, once
        # type_id is also known.
        (size, message_start) = local_DecodeVarint(buffer, pos)
        pos = message_end = message_start + size
      elif tag_bytes == item_end_tag_bytes:
        break
      else:
        # Fixed: call the local binding made above rather than the global
        # SkipField, which the local exists to avoid re-looking-up.
        pos = local_SkipField(buffer, pos, end, tag_bytes)
        if pos == -1:
          raise _DecodeError('Missing group end tag.')

    if pos > end:
      raise _DecodeError('Truncated message.')

    if type_id == -1:
      raise _DecodeError('MessageSet item missing type_id.')
    if message_start == -1:
      raise _DecodeError('MessageSet item missing message.')

    extension = extensions_by_number.get(type_id)
    if extension is not None:
      value = field_dict.get(extension)
      if value is None:
        value = field_dict.setdefault(
            extension, extension.message_type._concrete_class())
      if value._InternalParse(buffer, message_start, message_end) != message_end:
        # The only reason _InternalParse would return early is if it
        # encountered an end-group tag.
        raise _DecodeError('Unexpected end-group tag.')
    else:
      # Unknown type_id: preserve the whole raw item as an unknown field.
      if not message._unknown_fields:
        message._unknown_fields = []
      message._unknown_fields.append((MESSAGE_SET_ITEM_TAG,
                                      buffer[message_set_item_start:pos]))

    return pos

  return DecodeItem

# --------------------------------------------------------------------
# Optimization is not as heavy here because calls to SkipField() are rare,
# except for handling end-group tags.

def _SkipVarint(buffer, pos, end):
  """Skip a varint value.  Returns the new position."""

  while ord(buffer[pos]) & 0x80:
    pos += 1
  pos += 1
  if pos > end:
    raise _DecodeError('Truncated message.')
  return pos

def _SkipFixed64(buffer, pos, end):
  """Skip a fixed64 value.  Returns the new position."""

  pos += 8
  if pos > end:
    raise _DecodeError('Truncated message.')
  return pos

def _SkipLengthDelimited(buffer, pos, end):
  """Skip a length-delimited value.  Returns the new position."""

  (size, pos) = _DecodeVarint(buffer, pos)
  pos += size
  if pos > end:
    raise _DecodeError('Truncated message.')
  return pos

def _SkipGroup(buffer, pos, end):
  """Skip sub-group.  Returns the new position."""

  while 1:
    (tag_bytes, pos) = ReadTag(buffer, pos)
    new_pos = SkipField(buffer, pos, end, tag_bytes)
    if new_pos == -1:
      # Hit this group's END_GROUP tag; pos already points past it.
      return pos
    pos = new_pos

def _EndGroup(buffer, pos, end):
  """Skipping an END_GROUP tag returns -1 to tell the parent loop to break."""

  return -1

def _SkipFixed32(buffer, pos, end):
  """Skip a fixed32 value.  Returns the new position."""

  pos += 4
  if pos > end:
    raise _DecodeError('Truncated message.')
  return pos

def _RaiseInvalidWireType(buffer, pos, end):
  """Skip function for unknown wire types.  Raises an exception."""

  raise _DecodeError('Tag had invalid wire type.')

def _FieldSkipper():
  """Constructs the SkipField function."""

  # Indexed by wire type (0-7); entries 6 and 7 are unassigned wire types.
  WIRETYPE_TO_SKIPPER = [
      _SkipVarint,
      _SkipFixed64,
      _SkipLengthDelimited,
      _SkipGroup,
      _EndGroup,
      _SkipFixed32,
      _RaiseInvalidWireType,
      _RaiseInvalidWireType,
      ]

  wiretype_mask = wire_format.TAG_TYPE_MASK
  local_ord = ord

  def SkipField(buffer, pos, end, tag_bytes):
    """Skips a field with the specified tag.

    |pos| should point to the byte immediately after the tag.

    Returns:
        The new position (after the tag value), or -1 if the tag is an end-group
        tag (in which case the calling loop should break).
    """

    # The wire type is always in the first byte since varints are little-endian.
    wire_type = local_ord(tag_bytes[0]) & wiretype_mask
    return WIRETYPE_TO_SKIPPER[wire_type](buffer, pos, end)

  return SkipField

SkipField = _FieldSkipper()