// Go support for Protocol Buffers - Google's data interchange format
//
// Copyright 2016 The Go Authors. All rights reserved.
// https://github.com/golang/protobuf
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package proto

import (
	"errors"
	"fmt"
	"math"
	"reflect"
	"sort"
	"strconv"
	"strings"
	"sync"
	"sync/atomic"
	"unicode/utf8"
)

// a sizer takes a pointer to a field and the size of its tag, and computes the size of
// the encoded data.
type sizer func(pointer, int) int

// a marshaler takes a byte slice, a pointer to a field, and its tag (in wire format),
// marshals the field to the end of the slice, and returns the slice and error (if any).
type marshaler func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error)

// marshalInfo is the information used for marshaling a message.
type marshalInfo struct {
	typ          reflect.Type
	fields       []*marshalFieldInfo
	unrecognized field                      // offset of XXX_unrecognized
	extensions   field                      // offset of XXX_InternalExtensions
	v1extensions field                      // offset of XXX_extensions
	sizecache    field                      // offset of XXX_sizecache
	initialized  int32                      // 0 -- only typ is set, 1 -- fully initialized
	messageset   bool                       // uses message set wire format
	hasmarshaler bool                       // has custom marshaler
	sync.RWMutex                            // protect extElems map, also for initialization
	extElems     map[int32]*marshalElemInfo // info of extension elements
}

// marshalFieldInfo is the information used for marshaling a field of a message.
type marshalFieldInfo struct {
	field      field
	wiretag    uint64 // tag in wire format
	tagsize    int    // size of tag in wire format
	sizer      sizer
	marshaler  marshaler
	isPointer  bool
	required   bool                              // field is required
	name       string                            // name of the field, for error reporting
	oneofElems map[reflect.Type]*marshalElemInfo // info of oneof elements
}
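
// The wiretag and tagsize fields above cache the protobuf key of a field: the
// field number shifted left by 3 bits, OR'ed with the 3-bit wire type, plus
// the varint-encoded size of that key. A minimal sketch of the packing,
// assuming only what setTag below does (exampleFieldKey is an illustrative
// name, not part of this package's API):
func exampleFieldKey(fieldNum int, wt uint64) (wiretag uint64, tagsize int) {
	wiretag = uint64(fieldNum)<<3 | wt          // e.g. field 5 with WireBytes (2) -> 0x2A
	tagsize = SizeVarint(uint64(fieldNum) << 3) // 1 byte for field numbers below 16
	return wiretag, tagsize
}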

// marshalElemInfo is the information used for marshaling an extension or oneof element.
type marshalElemInfo struct {
	wiretag   uint64 // tag in wire format
	tagsize   int    // size of tag in wire format
	sizer     sizer
	marshaler marshaler
	isptr     bool // elem is pointer typed, thus interface of this type is a direct interface (extension only)
	deref     bool // dereference the pointer before operating on it; implies isptr
}

var (
	marshalInfoMap  = map[reflect.Type]*marshalInfo{}
	marshalInfoLock sync.Mutex
)

// getMarshalInfo returns the information to marshal a given type of message.
// The info it returns may not necessarily be initialized.
// t is the type of the message (NOT the pointer to it).
func getMarshalInfo(t reflect.Type) *marshalInfo {
	marshalInfoLock.Lock()
	u, ok := marshalInfoMap[t]
	if !ok {
		u = &marshalInfo{typ: t}
		marshalInfoMap[t] = u
	}
	marshalInfoLock.Unlock()
	return u
}

// Size is the entry point from generated code,
// and should be ONLY called by generated code.
// It computes the size of the encoded data of msg.
// a is a pointer to a place to store cached marshal info.
func (a *InternalMessageInfo) Size(msg Message) int {
	u := getMessageMarshalInfo(msg, a)
	ptr := toPointer(&msg)
	if ptr.isNil() {
		// We get here if msg is a typed nil ((*SomeMessage)(nil)),
		// so it satisfies the interface, and msg == nil wouldn't
		// catch it. We don't want to crash in this case.
		return 0
	}
	return u.size(ptr)
}

// Marshal is the entry point from generated code,
// and should be ONLY called by generated code.
// It marshals msg to the end of b.
// a is a pointer to a place to store cached marshal info.
func (a *InternalMessageInfo) Marshal(b []byte, msg Message, deterministic bool) ([]byte, error) {
	u := getMessageMarshalInfo(msg, a)
	ptr := toPointer(&msg)
	if ptr.isNil() {
		// We get here if msg is a typed nil ((*SomeMessage)(nil)),
		// so it satisfies the interface, and msg == nil wouldn't
		// catch it. We don't want to crash in this case.
		return b, ErrNil
	}
	return u.marshal(b, ptr, deterministic)
}

func getMessageMarshalInfo(msg interface{}, a *InternalMessageInfo) *marshalInfo {
	// u := a.marshal, but atomically.
	// We use an atomic here to ensure memory consistency.
	u := atomicLoadMarshalInfo(&a.marshal)
	if u == nil {
		// Get marshal information from type of message.
		t := reflect.ValueOf(msg).Type()
		if t.Kind() != reflect.Ptr {
			panic(fmt.Sprintf("cannot handle non-pointer message type %v", t))
		}
		u = getMarshalInfo(t.Elem())
		// Store it in the cache for later users.
		// a.marshal = u, but atomically.
		atomicStoreMarshalInfo(&a.marshal, u)
	}
	return u
}
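
// A typed nil such as (*SomeMessage)(nil) stored in the Message interface is
// not equal to a nil interface, which is why Size and Marshal above test
// ptr.isNil() rather than msg == nil. A minimal illustration of that Go
// behavior (exampleTypedNil is an illustrative name, not part of this package):
func exampleTypedNil() bool {
	type msg struct{}     // stand-in for a generated message type
	var p *msg            // typed nil pointer
	var i interface{} = p // interface now holds (*msg)(nil)
	return i != nil       // true: the interface value itself is non-nil
}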

// size is the main function to compute the size of the encoded data of a message.
// ptr is the pointer to the message.
func (u *marshalInfo) size(ptr pointer) int {
	if atomic.LoadInt32(&u.initialized) == 0 {
		u.computeMarshalInfo()
	}

	// If the message can marshal itself, let it do it, for compatibility.
	// NOTE: This is not efficient.
	if u.hasmarshaler {
		m := ptr.asPointerTo(u.typ).Interface().(Marshaler)
		b, _ := m.Marshal()
		return len(b)
	}

	n := 0
	for _, f := range u.fields {
		if f.isPointer && ptr.offset(f.field).getPointer().isNil() {
			// nil pointer always marshals to nothing
			continue
		}
		n += f.sizer(ptr.offset(f.field), f.tagsize)
	}
	if u.extensions.IsValid() {
		e := ptr.offset(u.extensions).toExtensions()
		if u.messageset {
			n += u.sizeMessageSet(e)
		} else {
			n += u.sizeExtensions(e)
		}
	}
	if u.v1extensions.IsValid() {
		m := *ptr.offset(u.v1extensions).toOldExtensions()
		n += u.sizeV1Extensions(m)
	}
	if u.unrecognized.IsValid() {
		s := *ptr.offset(u.unrecognized).toBytes()
		n += len(s)
	}
	// cache the result for use in marshal
	if u.sizecache.IsValid() {
		atomic.StoreInt32(ptr.offset(u.sizecache).toInt32(), int32(n))
	}
	return n
}

// cachedsize gets the size from the cache. If there is no cache (i.e. the message is not generated),
// fall back to computing the size.
func (u *marshalInfo) cachedsize(ptr pointer) int {
	if u.sizecache.IsValid() {
		return int(atomic.LoadInt32(ptr.offset(u.sizecache).toInt32()))
	}
	return u.size(ptr)
}
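
// cachedsize exists so that embedded messages can be length-prefixed cheaply:
// size() walks the message tree once and stores each generated submessage's
// size in its XXX_sizecache field, and the message-field marshalers later
// reuse that number as the varint length prefix. A sketch of the per-field
// arithmetic they share (exampleEmbeddedFieldSize is an illustrative name,
// not part of this package):
func exampleEmbeddedFieldSize(siz, tagsize int) int {
	// tag bytes + varint length prefix + message body;
	// e.g. a 300-byte submessage on field 1 costs 1 + 2 + 300 = 303 bytes.
	return tagsize + SizeVarint(uint64(siz)) + siz
}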

// marshal is the main function to marshal a message. It takes a byte slice and appends
// the encoded data to the end of the slice, and returns the slice and error (if any).
// ptr is the pointer to the message.
// If deterministic is true, maps are marshaled in deterministic order.
func (u *marshalInfo) marshal(b []byte, ptr pointer, deterministic bool) ([]byte, error) {
	if atomic.LoadInt32(&u.initialized) == 0 {
		u.computeMarshalInfo()
	}

	// If the message can marshal itself, let it do it, for compatibility.
	// NOTE: This is not efficient.
	if u.hasmarshaler {
		m := ptr.asPointerTo(u.typ).Interface().(Marshaler)
		b1, err := m.Marshal()
		b = append(b, b1...)
		return b, err
	}

	var err, errLater error
	// The old marshaler encodes extensions at the beginning.
	if u.extensions.IsValid() {
		e := ptr.offset(u.extensions).toExtensions()
		if u.messageset {
			b, err = u.appendMessageSet(b, e, deterministic)
		} else {
			b, err = u.appendExtensions(b, e, deterministic)
		}
		if err != nil {
			return b, err
		}
	}
	if u.v1extensions.IsValid() {
		m := *ptr.offset(u.v1extensions).toOldExtensions()
		b, err = u.appendV1Extensions(b, m, deterministic)
		if err != nil {
			return b, err
		}
	}
	for _, f := range u.fields {
		if f.required {
			if ptr.offset(f.field).getPointer().isNil() {
				// Required field is not set.
				// We record the error but keep going, to give a complete marshaling.
				if errLater == nil {
					errLater = &RequiredNotSetError{f.name}
				}
				continue
			}
		}
		if f.isPointer && ptr.offset(f.field).getPointer().isNil() {
			// nil pointer always marshals to nothing
			continue
		}
		b, err = f.marshaler(b, ptr.offset(f.field), f.wiretag, deterministic)
		if err != nil {
			if err1, ok := err.(*RequiredNotSetError); ok {
				// Required field in submessage is not set.
				// We record the error but keep going, to give a complete marshaling.
				if errLater == nil {
					errLater = &RequiredNotSetError{f.name + "." + err1.field}
				}
				continue
			}
			if err == errRepeatedHasNil {
				err = errors.New("proto: repeated field " + f.name + " has nil element")
			}
			if err == errInvalidUTF8 {
				if errLater == nil {
					fullName := revProtoTypes[reflect.PtrTo(u.typ)] + "." + f.name
					errLater = &invalidUTF8Error{fullName}
				}
				continue
			}
			return b, err
		}
	}
	if u.unrecognized.IsValid() {
		s := *ptr.offset(u.unrecognized).toBytes()
		b = append(b, s...)
	}
	return b, errLater
}

// computeMarshalInfo initializes the marshal info.
func (u *marshalInfo) computeMarshalInfo() {
	u.Lock()
	defer u.Unlock()
	if u.initialized != 0 { // non-atomic read is ok as it is protected by the lock
		return
	}

	t := u.typ
	u.unrecognized = invalidField
	u.extensions = invalidField
	u.v1extensions = invalidField
	u.sizecache = invalidField

	// If the message can marshal itself, let it do it, for compatibility.
	// NOTE: This is not efficient.
	if reflect.PtrTo(t).Implements(marshalerType) {
		u.hasmarshaler = true
		atomic.StoreInt32(&u.initialized, 1)
		return
	}

	// get oneof implementers
	var oneofImplementers []interface{}
	switch m := reflect.Zero(reflect.PtrTo(t)).Interface().(type) {
	case oneofFuncsIface:
		_, _, _, oneofImplementers = m.XXX_OneofFuncs()
	case oneofWrappersIface:
		oneofImplementers = m.XXX_OneofWrappers()
	}

	n := t.NumField()

	// deal with XXX fields first
	for i := 0; i < t.NumField(); i++ {
		f := t.Field(i)
		if !strings.HasPrefix(f.Name, "XXX_") {
			continue
		}
		switch f.Name {
		case "XXX_sizecache":
			u.sizecache = toField(&f)
		case "XXX_unrecognized":
			u.unrecognized = toField(&f)
		case "XXX_InternalExtensions":
			u.extensions = toField(&f)
			u.messageset = f.Tag.Get("protobuf_messageset") == "1"
		case "XXX_extensions":
			u.v1extensions = toField(&f)
		case "XXX_NoUnkeyedLiteral":
			// nothing to do
		default:
			panic("unknown XXX field: " + f.Name)
		}
		n--
	}

	// normal fields
	fields := make([]marshalFieldInfo, n) // batch allocation
	u.fields = make([]*marshalFieldInfo, 0, n)
	for i, j := 0, 0; i < t.NumField(); i++ {
		f := t.Field(i)

		if strings.HasPrefix(f.Name, "XXX_") {
			continue
		}
		field := &fields[j]
		j++
		field.name = f.Name
		u.fields = append(u.fields, field)
		if f.Tag.Get("protobuf_oneof") != "" {
			field.computeOneofFieldInfo(&f, oneofImplementers)
			continue
		}
		if f.Tag.Get("protobuf") == "" {
			// field has no tag (not in generated message), ignore it
			u.fields = u.fields[:len(u.fields)-1]
			j--
			continue
		}
		field.computeMarshalFieldInfo(&f)
	}

	// fields are marshaled in tag order on the wire.
	sort.Sort(byTag(u.fields))

	atomic.StoreInt32(&u.initialized, 1)
}

// helper for sorting fields by tag
type byTag []*marshalFieldInfo

func (a byTag) Len() int           { return len(a) }
func (a byTag) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
func (a byTag) Less(i, j int) bool { return a[i].wiretag < a[j].wiretag }
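
// computeMarshalInfo above follows a double-checked initialization pattern:
// callers test the initialized flag with an atomic load, and only the slow
// path takes the mutex, re-checks the flag, fills in the info, and publishes
// it by storing the flag last. A minimal sketch of that pattern with
// placeholder fields (exampleLazyInit is illustrative, not part of this
// package):
type exampleLazyInit struct {
	sync.Mutex
	initialized int32
	data        string
}

func (l *exampleLazyInit) get() string {
	if atomic.LoadInt32(&l.initialized) == 0 {
		l.Lock()
		if l.initialized == 0 { // non-atomic read is fine while holding the lock
			l.data = "computed once"
			atomic.StoreInt32(&l.initialized, 1)
		}
		l.Unlock()
	}
	return l.data
}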

// getExtElemInfo returns the information to marshal an extension element.
// The info it returns is initialized.
func (u *marshalInfo) getExtElemInfo(desc *ExtensionDesc) *marshalElemInfo {
	// get from cache first
	u.RLock()
	e, ok := u.extElems[desc.Field]
	u.RUnlock()
	if ok {
		return e
	}

	t := reflect.TypeOf(desc.ExtensionType) // pointer or slice to basic type or struct
	tags := strings.Split(desc.Tag, ",")
	tag, err := strconv.Atoi(tags[1])
	if err != nil {
		panic("tag is not an integer")
	}
	wt := wiretype(tags[0])
	if t.Kind() == reflect.Ptr && t.Elem().Kind() != reflect.Struct {
		t = t.Elem()
	}
	sizer, marshaler := typeMarshaler(t, tags, false, false)
	var deref bool
	if t.Kind() == reflect.Slice && t.Elem().Kind() != reflect.Uint8 {
		t = reflect.PtrTo(t)
		deref = true
	}
	e = &marshalElemInfo{
		wiretag:   uint64(tag)<<3 | wt,
		tagsize:   SizeVarint(uint64(tag) << 3),
		sizer:     sizer,
		marshaler: marshaler,
		isptr:     t.Kind() == reflect.Ptr,
		deref:     deref,
	}

	// update cache
	u.Lock()
	if u.extElems == nil {
		u.extElems = make(map[int32]*marshalElemInfo)
	}
	u.extElems[desc.Field] = e
	u.Unlock()
	return e
}

// computeMarshalFieldInfo fills in the information needed to marshal a field.
func (fi *marshalFieldInfo) computeMarshalFieldInfo(f *reflect.StructField) {
	// parse protobuf tag of the field.
	// tag has format of "bytes,49,opt,name=foo,def=hello!"
	tags := strings.Split(f.Tag.Get("protobuf"), ",")
	if tags[0] == "" {
		return
	}
	tag, err := strconv.Atoi(tags[1])
	if err != nil {
		panic("tag is not an integer")
	}
	wt := wiretype(tags[0])
	if tags[2] == "req" {
		fi.required = true
	}
	fi.setTag(f, tag, wt)
	fi.setMarshaler(f, tags)
}

func (fi *marshalFieldInfo) computeOneofFieldInfo(f *reflect.StructField, oneofImplementers []interface{}) {
	fi.field = toField(f)
	fi.wiretag = math.MaxInt32 // Use a large tag number so oneofs sort at the end. This tag will not appear on the wire.
	fi.isPointer = true
	fi.sizer, fi.marshaler = makeOneOfMarshaler(fi, f)
	fi.oneofElems = make(map[reflect.Type]*marshalElemInfo)

	ityp := f.Type // interface type
	for _, o := range oneofImplementers {
		t := reflect.TypeOf(o)
		if !t.Implements(ityp) {
			continue
		}
		sf := t.Elem().Field(0) // oneof implementer is a struct with a single field
		tags := strings.Split(sf.Tag.Get("protobuf"), ",")
		tag, err := strconv.Atoi(tags[1])
		if err != nil {
			panic("tag is not an integer")
		}
		wt := wiretype(tags[0])
		sizer, marshaler := typeMarshaler(sf.Type, tags, false, true) // oneof should not omit any zero value
		fi.oneofElems[t.Elem()] = &marshalElemInfo{
			wiretag:   uint64(tag)<<3 | wt,
			tagsize:   SizeVarint(uint64(tag) << 3),
			sizer:     sizer,
			marshaler: marshaler,
		}
	}
}

// wiretype returns the wire encoding of the type.
func wiretype(encoding string) uint64 {
	switch encoding {
	case "fixed32":
		return WireFixed32
	case "fixed64":
		return WireFixed64
	case "varint", "zigzag32", "zigzag64":
		return WireVarint
	case "bytes":
		return WireBytes
	case "group":
		return WireStartGroup
	}
	panic("unknown wire type " + encoding)
}
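
// Putting computeMarshalFieldInfo and wiretype together: a generated struct
// tag such as `protobuf:"bytes,49,opt,name=foo"` splits into encoding "bytes",
// field number 49, and the "opt" label, which yields wire type WireBytes and
// wiretag 49<<3|2 = 394. A minimal sketch of that parse (exampleParseTag is an
// illustrative name, not part of this package):
func exampleParseTag(tag string) (wiretag uint64, required bool) {
	parts := strings.Split(tag, ",") // e.g. "bytes,49,opt,name=foo"
	num, err := strconv.Atoi(parts[1])
	if err != nil {
		panic("tag is not an integer")
	}
	return uint64(num)<<3 | wiretype(parts[0]), parts[2] == "req"
}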

// setTag fills in the tag (in wire format) and its size in the info of a field.
func (fi *marshalFieldInfo) setTag(f *reflect.StructField, tag int, wt uint64) {
	fi.field = toField(f)
	fi.wiretag = uint64(tag)<<3 | wt
	fi.tagsize = SizeVarint(uint64(tag) << 3)
}

// setMarshaler fills in the sizer and marshaler in the info of a field.
func (fi *marshalFieldInfo) setMarshaler(f *reflect.StructField, tags []string) {
	switch f.Type.Kind() {
	case reflect.Map:
		// map field
		fi.isPointer = true
		fi.sizer, fi.marshaler = makeMapMarshaler(f)
		return
	case reflect.Ptr, reflect.Slice:
		fi.isPointer = true
	}
	fi.sizer, fi.marshaler = typeMarshaler(f.Type, tags, true, false)
}

// typeMarshaler returns the sizer and marshaler of a given field.
// t is the type of the field.
// tags is the generated "protobuf" tag of the field.
// If nozero is true, zero value is not marshaled to the wire.
// If oneof is true, it is a oneof field.
func typeMarshaler(t reflect.Type, tags []string, nozero, oneof bool) (sizer, marshaler) {
	encoding := tags[0]

	pointer := false
	slice := false
	if t.Kind() == reflect.Slice && t.Elem().Kind() != reflect.Uint8 {
		slice = true
		t = t.Elem()
	}
	if t.Kind() == reflect.Ptr {
		pointer = true
		t = t.Elem()
	}

	packed := false
	proto3 := false
	validateUTF8 := true
	for i := 2; i < len(tags); i++ {
		if tags[i] == "packed" {
			packed = true
		}
		if tags[i] == "proto3" {
			proto3 = true
		}
	}
	validateUTF8 = validateUTF8 && proto3

	switch t.Kind() {
	case reflect.Bool:
		if pointer {
			return sizeBoolPtr, appendBoolPtr
		}
		if slice {
			if packed {
				return sizeBoolPackedSlice, appendBoolPackedSlice
			}
			return sizeBoolSlice, appendBoolSlice
		}
		if nozero {
			return sizeBoolValueNoZero, appendBoolValueNoZero
		}
		return sizeBoolValue, appendBoolValue
	case reflect.Uint32:
		switch encoding {
		case "fixed32":
			if pointer {
				return sizeFixed32Ptr, appendFixed32Ptr
			}
			if slice {
				if packed {
					return sizeFixed32PackedSlice, appendFixed32PackedSlice
				}
				return sizeFixed32Slice, appendFixed32Slice
			}
			if nozero {
				return sizeFixed32ValueNoZero, appendFixed32ValueNoZero
			}
			return sizeFixed32Value, appendFixed32Value
		case "varint":
			if pointer {
				return sizeVarint32Ptr, appendVarint32Ptr
			}
			if slice {
				if packed {
					return sizeVarint32PackedSlice, appendVarint32PackedSlice
				}
				return sizeVarint32Slice, appendVarint32Slice
			}
			if nozero {
				return sizeVarint32ValueNoZero, appendVarint32ValueNoZero
			}
			return sizeVarint32Value, appendVarint32Value
		}
	case reflect.Int32:
		switch encoding {
		case "fixed32":
			if pointer {
				return sizeFixedS32Ptr, appendFixedS32Ptr
			}
			if slice {
				if packed {
					return sizeFixedS32PackedSlice, appendFixedS32PackedSlice
				}
				return sizeFixedS32Slice, appendFixedS32Slice
			}
			if nozero {
				return sizeFixedS32ValueNoZero, appendFixedS32ValueNoZero
			}
			return sizeFixedS32Value, appendFixedS32Value
		case "varint":
			if pointer {
				return sizeVarintS32Ptr, appendVarintS32Ptr
			}
			if slice {
				if packed {
					return sizeVarintS32PackedSlice, appendVarintS32PackedSlice
				}
				return sizeVarintS32Slice, appendVarintS32Slice
			}
			if nozero {
				return sizeVarintS32ValueNoZero, appendVarintS32ValueNoZero
			}
			return sizeVarintS32Value, appendVarintS32Value
		case "zigzag32":
			if pointer {
				return sizeZigzag32Ptr, appendZigzag32Ptr
			}
			if slice {
				if packed {
					return sizeZigzag32PackedSlice, appendZigzag32PackedSlice
				}
				return sizeZigzag32Slice, appendZigzag32Slice
			}
			if nozero {
				return sizeZigzag32ValueNoZero, appendZigzag32ValueNoZero
			}
			return sizeZigzag32Value, appendZigzag32Value
		}
	case reflect.Uint64:
		switch encoding {
		case "fixed64":
			if pointer {
				return sizeFixed64Ptr, appendFixed64Ptr
			}
			if slice {
				if packed {
					return sizeFixed64PackedSlice, appendFixed64PackedSlice
				}
				return sizeFixed64Slice, appendFixed64Slice
			}
			if nozero {
				return sizeFixed64ValueNoZero, appendFixed64ValueNoZero
			}
			return sizeFixed64Value, appendFixed64Value
		case "varint":
			if pointer {
				return sizeVarint64Ptr, appendVarint64Ptr
			}
			if slice {
				if packed {
					return sizeVarint64PackedSlice, appendVarint64PackedSlice
				}
				return sizeVarint64Slice, appendVarint64Slice
			}
			if nozero {
				return sizeVarint64ValueNoZero, appendVarint64ValueNoZero
			}
			return sizeVarint64Value, appendVarint64Value
		}
	case reflect.Int64:
		switch encoding {
		case "fixed64":
			if pointer {
				return sizeFixedS64Ptr, appendFixedS64Ptr
			}
			if slice {
				if packed {
					return sizeFixedS64PackedSlice, appendFixedS64PackedSlice
				}
				return sizeFixedS64Slice, appendFixedS64Slice
			}
			if nozero {
				return sizeFixedS64ValueNoZero, appendFixedS64ValueNoZero
			}
			return sizeFixedS64Value, appendFixedS64Value
		case "varint":
			if pointer {
				return sizeVarintS64Ptr, appendVarintS64Ptr
			}
			if slice {
				if packed {
					return sizeVarintS64PackedSlice, appendVarintS64PackedSlice
				}
				return sizeVarintS64Slice, appendVarintS64Slice
			}
			if nozero {
				return sizeVarintS64ValueNoZero, appendVarintS64ValueNoZero
			}
			return sizeVarintS64Value, appendVarintS64Value
		case "zigzag64":
			if pointer {
				return sizeZigzag64Ptr, appendZigzag64Ptr
			}
			if slice {
				if packed {
					return sizeZigzag64PackedSlice, appendZigzag64PackedSlice
				}
				return sizeZigzag64Slice, appendZigzag64Slice
			}
			if nozero {
				return sizeZigzag64ValueNoZero, appendZigzag64ValueNoZero
			}
			return sizeZigzag64Value, appendZigzag64Value
		}
	case reflect.Float32:
		if pointer {
			return sizeFloat32Ptr, appendFloat32Ptr
		}
		if slice {
			if packed {
				return sizeFloat32PackedSlice, appendFloat32PackedSlice
			}
			return sizeFloat32Slice, appendFloat32Slice
		}
		if nozero {
			return sizeFloat32ValueNoZero, appendFloat32ValueNoZero
		}
		return sizeFloat32Value, appendFloat32Value
	case reflect.Float64:
		if pointer {
			return sizeFloat64Ptr, appendFloat64Ptr
		}
		if slice {
			if packed {
				return sizeFloat64PackedSlice, appendFloat64PackedSlice
			}
			return sizeFloat64Slice, appendFloat64Slice
		}
		if nozero {
			return sizeFloat64ValueNoZero, appendFloat64ValueNoZero
		}
		return sizeFloat64Value, appendFloat64Value
	case reflect.String:
		if validateUTF8 {
			if pointer {
				return sizeStringPtr, appendUTF8StringPtr
			}
			if slice {
				return sizeStringSlice, appendUTF8StringSlice
			}
			if nozero {
				return sizeStringValueNoZero, appendUTF8StringValueNoZero
			}
			return sizeStringValue, appendUTF8StringValue
		}
		if pointer {
			return sizeStringPtr, appendStringPtr
		}
		if slice {
			return sizeStringSlice, appendStringSlice
		}
		if
nozero { 777 return sizeStringValueNoZero, appendStringValueNoZero 778 } 779 return sizeStringValue, appendStringValue 780 case reflect.Slice: 781 if slice { 782 return sizeBytesSlice, appendBytesSlice 783 } 784 if oneof { 785 // Oneof bytes field may also have "proto3" tag. 786 // We want to marshal it as a oneof field. Do this 787 // check before the proto3 check. 788 return sizeBytesOneof, appendBytesOneof 789 } 790 if proto3 { 791 return sizeBytes3, appendBytes3 792 } 793 return sizeBytes, appendBytes 794 case reflect.Struct: 795 switch encoding { 796 case "group": 797 if slice { 798 return makeGroupSliceMarshaler(getMarshalInfo(t)) 799 } 800 return makeGroupMarshaler(getMarshalInfo(t)) 801 case "bytes": 802 if slice { 803 return makeMessageSliceMarshaler(getMarshalInfo(t)) 804 } 805 return makeMessageMarshaler(getMarshalInfo(t)) 806 } 807 } 808 panic(fmt.Sprintf("unknown or mismatched type: type: %v, wire type: %v", t, encoding)) 809} 810 811// Below are functions to size/marshal a specific type of a field. 812// They are stored in the field's info, and called by function pointers. 813// They have type sizer or marshaler. 814 815func sizeFixed32Value(_ pointer, tagsize int) int { 816 return 4 + tagsize 817} 818func sizeFixed32ValueNoZero(ptr pointer, tagsize int) int { 819 v := *ptr.toUint32() 820 if v == 0 { 821 return 0 822 } 823 return 4 + tagsize 824} 825func sizeFixed32Ptr(ptr pointer, tagsize int) int { 826 p := *ptr.toUint32Ptr() 827 if p == nil { 828 return 0 829 } 830 return 4 + tagsize 831} 832func sizeFixed32Slice(ptr pointer, tagsize int) int { 833 s := *ptr.toUint32Slice() 834 return (4 + tagsize) * len(s) 835} 836func sizeFixed32PackedSlice(ptr pointer, tagsize int) int { 837 s := *ptr.toUint32Slice() 838 if len(s) == 0 { 839 return 0 840 } 841 return 4*len(s) + SizeVarint(uint64(4*len(s))) + tagsize 842} 843func sizeFixedS32Value(_ pointer, tagsize int) int { 844 return 4 + tagsize 845} 846func sizeFixedS32ValueNoZero(ptr pointer, tagsize int) int { 847 v := *ptr.toInt32() 848 if v == 0 { 849 return 0 850 } 851 return 4 + tagsize 852} 853func sizeFixedS32Ptr(ptr pointer, tagsize int) int { 854 p := ptr.getInt32Ptr() 855 if p == nil { 856 return 0 857 } 858 return 4 + tagsize 859} 860func sizeFixedS32Slice(ptr pointer, tagsize int) int { 861 s := ptr.getInt32Slice() 862 return (4 + tagsize) * len(s) 863} 864func sizeFixedS32PackedSlice(ptr pointer, tagsize int) int { 865 s := ptr.getInt32Slice() 866 if len(s) == 0 { 867 return 0 868 } 869 return 4*len(s) + SizeVarint(uint64(4*len(s))) + tagsize 870} 871func sizeFloat32Value(_ pointer, tagsize int) int { 872 return 4 + tagsize 873} 874func sizeFloat32ValueNoZero(ptr pointer, tagsize int) int { 875 v := math.Float32bits(*ptr.toFloat32()) 876 if v == 0 { 877 return 0 878 } 879 return 4 + tagsize 880} 881func sizeFloat32Ptr(ptr pointer, tagsize int) int { 882 p := *ptr.toFloat32Ptr() 883 if p == nil { 884 return 0 885 } 886 return 4 + tagsize 887} 888func sizeFloat32Slice(ptr pointer, tagsize int) int { 889 s := *ptr.toFloat32Slice() 890 return (4 + tagsize) * len(s) 891} 892func sizeFloat32PackedSlice(ptr pointer, tagsize int) int { 893 s := *ptr.toFloat32Slice() 894 if len(s) == 0 { 895 return 0 896 } 897 return 4*len(s) + SizeVarint(uint64(4*len(s))) + tagsize 898} 899func sizeFixed64Value(_ pointer, tagsize int) int { 900 return 8 + tagsize 901} 902func sizeFixed64ValueNoZero(ptr pointer, tagsize int) int { 903 v := *ptr.toUint64() 904 if v == 0 { 905 return 0 906 } 907 return 8 + tagsize 908} 909func sizeFixed64Ptr(ptr 
pointer, tagsize int) int { 910 p := *ptr.toUint64Ptr() 911 if p == nil { 912 return 0 913 } 914 return 8 + tagsize 915} 916func sizeFixed64Slice(ptr pointer, tagsize int) int { 917 s := *ptr.toUint64Slice() 918 return (8 + tagsize) * len(s) 919} 920func sizeFixed64PackedSlice(ptr pointer, tagsize int) int { 921 s := *ptr.toUint64Slice() 922 if len(s) == 0 { 923 return 0 924 } 925 return 8*len(s) + SizeVarint(uint64(8*len(s))) + tagsize 926} 927func sizeFixedS64Value(_ pointer, tagsize int) int { 928 return 8 + tagsize 929} 930func sizeFixedS64ValueNoZero(ptr pointer, tagsize int) int { 931 v := *ptr.toInt64() 932 if v == 0 { 933 return 0 934 } 935 return 8 + tagsize 936} 937func sizeFixedS64Ptr(ptr pointer, tagsize int) int { 938 p := *ptr.toInt64Ptr() 939 if p == nil { 940 return 0 941 } 942 return 8 + tagsize 943} 944func sizeFixedS64Slice(ptr pointer, tagsize int) int { 945 s := *ptr.toInt64Slice() 946 return (8 + tagsize) * len(s) 947} 948func sizeFixedS64PackedSlice(ptr pointer, tagsize int) int { 949 s := *ptr.toInt64Slice() 950 if len(s) == 0 { 951 return 0 952 } 953 return 8*len(s) + SizeVarint(uint64(8*len(s))) + tagsize 954} 955func sizeFloat64Value(_ pointer, tagsize int) int { 956 return 8 + tagsize 957} 958func sizeFloat64ValueNoZero(ptr pointer, tagsize int) int { 959 v := math.Float64bits(*ptr.toFloat64()) 960 if v == 0 { 961 return 0 962 } 963 return 8 + tagsize 964} 965func sizeFloat64Ptr(ptr pointer, tagsize int) int { 966 p := *ptr.toFloat64Ptr() 967 if p == nil { 968 return 0 969 } 970 return 8 + tagsize 971} 972func sizeFloat64Slice(ptr pointer, tagsize int) int { 973 s := *ptr.toFloat64Slice() 974 return (8 + tagsize) * len(s) 975} 976func sizeFloat64PackedSlice(ptr pointer, tagsize int) int { 977 s := *ptr.toFloat64Slice() 978 if len(s) == 0 { 979 return 0 980 } 981 return 8*len(s) + SizeVarint(uint64(8*len(s))) + tagsize 982} 983func sizeVarint32Value(ptr pointer, tagsize int) int { 984 v := *ptr.toUint32() 985 return SizeVarint(uint64(v)) + tagsize 986} 987func sizeVarint32ValueNoZero(ptr pointer, tagsize int) int { 988 v := *ptr.toUint32() 989 if v == 0 { 990 return 0 991 } 992 return SizeVarint(uint64(v)) + tagsize 993} 994func sizeVarint32Ptr(ptr pointer, tagsize int) int { 995 p := *ptr.toUint32Ptr() 996 if p == nil { 997 return 0 998 } 999 return SizeVarint(uint64(*p)) + tagsize 1000} 1001func sizeVarint32Slice(ptr pointer, tagsize int) int { 1002 s := *ptr.toUint32Slice() 1003 n := 0 1004 for _, v := range s { 1005 n += SizeVarint(uint64(v)) + tagsize 1006 } 1007 return n 1008} 1009func sizeVarint32PackedSlice(ptr pointer, tagsize int) int { 1010 s := *ptr.toUint32Slice() 1011 if len(s) == 0 { 1012 return 0 1013 } 1014 n := 0 1015 for _, v := range s { 1016 n += SizeVarint(uint64(v)) 1017 } 1018 return n + SizeVarint(uint64(n)) + tagsize 1019} 1020func sizeVarintS32Value(ptr pointer, tagsize int) int { 1021 v := *ptr.toInt32() 1022 return SizeVarint(uint64(v)) + tagsize 1023} 1024func sizeVarintS32ValueNoZero(ptr pointer, tagsize int) int { 1025 v := *ptr.toInt32() 1026 if v == 0 { 1027 return 0 1028 } 1029 return SizeVarint(uint64(v)) + tagsize 1030} 1031func sizeVarintS32Ptr(ptr pointer, tagsize int) int { 1032 p := ptr.getInt32Ptr() 1033 if p == nil { 1034 return 0 1035 } 1036 return SizeVarint(uint64(*p)) + tagsize 1037} 1038func sizeVarintS32Slice(ptr pointer, tagsize int) int { 1039 s := ptr.getInt32Slice() 1040 n := 0 1041 for _, v := range s { 1042 n += SizeVarint(uint64(v)) + tagsize 1043 } 1044 return n 1045} 1046func sizeVarintS32PackedSlice(ptr 
pointer, tagsize int) int { 1047 s := ptr.getInt32Slice() 1048 if len(s) == 0 { 1049 return 0 1050 } 1051 n := 0 1052 for _, v := range s { 1053 n += SizeVarint(uint64(v)) 1054 } 1055 return n + SizeVarint(uint64(n)) + tagsize 1056} 1057func sizeVarint64Value(ptr pointer, tagsize int) int { 1058 v := *ptr.toUint64() 1059 return SizeVarint(v) + tagsize 1060} 1061func sizeVarint64ValueNoZero(ptr pointer, tagsize int) int { 1062 v := *ptr.toUint64() 1063 if v == 0 { 1064 return 0 1065 } 1066 return SizeVarint(v) + tagsize 1067} 1068func sizeVarint64Ptr(ptr pointer, tagsize int) int { 1069 p := *ptr.toUint64Ptr() 1070 if p == nil { 1071 return 0 1072 } 1073 return SizeVarint(*p) + tagsize 1074} 1075func sizeVarint64Slice(ptr pointer, tagsize int) int { 1076 s := *ptr.toUint64Slice() 1077 n := 0 1078 for _, v := range s { 1079 n += SizeVarint(v) + tagsize 1080 } 1081 return n 1082} 1083func sizeVarint64PackedSlice(ptr pointer, tagsize int) int { 1084 s := *ptr.toUint64Slice() 1085 if len(s) == 0 { 1086 return 0 1087 } 1088 n := 0 1089 for _, v := range s { 1090 n += SizeVarint(v) 1091 } 1092 return n + SizeVarint(uint64(n)) + tagsize 1093} 1094func sizeVarintS64Value(ptr pointer, tagsize int) int { 1095 v := *ptr.toInt64() 1096 return SizeVarint(uint64(v)) + tagsize 1097} 1098func sizeVarintS64ValueNoZero(ptr pointer, tagsize int) int { 1099 v := *ptr.toInt64() 1100 if v == 0 { 1101 return 0 1102 } 1103 return SizeVarint(uint64(v)) + tagsize 1104} 1105func sizeVarintS64Ptr(ptr pointer, tagsize int) int { 1106 p := *ptr.toInt64Ptr() 1107 if p == nil { 1108 return 0 1109 } 1110 return SizeVarint(uint64(*p)) + tagsize 1111} 1112func sizeVarintS64Slice(ptr pointer, tagsize int) int { 1113 s := *ptr.toInt64Slice() 1114 n := 0 1115 for _, v := range s { 1116 n += SizeVarint(uint64(v)) + tagsize 1117 } 1118 return n 1119} 1120func sizeVarintS64PackedSlice(ptr pointer, tagsize int) int { 1121 s := *ptr.toInt64Slice() 1122 if len(s) == 0 { 1123 return 0 1124 } 1125 n := 0 1126 for _, v := range s { 1127 n += SizeVarint(uint64(v)) 1128 } 1129 return n + SizeVarint(uint64(n)) + tagsize 1130} 1131func sizeZigzag32Value(ptr pointer, tagsize int) int { 1132 v := *ptr.toInt32() 1133 return SizeVarint(uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + tagsize 1134} 1135func sizeZigzag32ValueNoZero(ptr pointer, tagsize int) int { 1136 v := *ptr.toInt32() 1137 if v == 0 { 1138 return 0 1139 } 1140 return SizeVarint(uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + tagsize 1141} 1142func sizeZigzag32Ptr(ptr pointer, tagsize int) int { 1143 p := ptr.getInt32Ptr() 1144 if p == nil { 1145 return 0 1146 } 1147 v := *p 1148 return SizeVarint(uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + tagsize 1149} 1150func sizeZigzag32Slice(ptr pointer, tagsize int) int { 1151 s := ptr.getInt32Slice() 1152 n := 0 1153 for _, v := range s { 1154 n += SizeVarint(uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + tagsize 1155 } 1156 return n 1157} 1158func sizeZigzag32PackedSlice(ptr pointer, tagsize int) int { 1159 s := ptr.getInt32Slice() 1160 if len(s) == 0 { 1161 return 0 1162 } 1163 n := 0 1164 for _, v := range s { 1165 n += SizeVarint(uint64((uint32(v) << 1) ^ uint32((int32(v) >> 31)))) 1166 } 1167 return n + SizeVarint(uint64(n)) + tagsize 1168} 1169func sizeZigzag64Value(ptr pointer, tagsize int) int { 1170 v := *ptr.toInt64() 1171 return SizeVarint(uint64(v<<1)^uint64((int64(v)>>63))) + tagsize 1172} 1173func sizeZigzag64ValueNoZero(ptr pointer, tagsize int) int { 1174 v := *ptr.toInt64() 1175 if v == 0 { 1176 return 0 1177 } 
1178 return SizeVarint(uint64(v<<1)^uint64((int64(v)>>63))) + tagsize 1179} 1180func sizeZigzag64Ptr(ptr pointer, tagsize int) int { 1181 p := *ptr.toInt64Ptr() 1182 if p == nil { 1183 return 0 1184 } 1185 v := *p 1186 return SizeVarint(uint64(v<<1)^uint64((int64(v)>>63))) + tagsize 1187} 1188func sizeZigzag64Slice(ptr pointer, tagsize int) int { 1189 s := *ptr.toInt64Slice() 1190 n := 0 1191 for _, v := range s { 1192 n += SizeVarint(uint64(v<<1)^uint64((int64(v)>>63))) + tagsize 1193 } 1194 return n 1195} 1196func sizeZigzag64PackedSlice(ptr pointer, tagsize int) int { 1197 s := *ptr.toInt64Slice() 1198 if len(s) == 0 { 1199 return 0 1200 } 1201 n := 0 1202 for _, v := range s { 1203 n += SizeVarint(uint64(v<<1) ^ uint64((int64(v) >> 63))) 1204 } 1205 return n + SizeVarint(uint64(n)) + tagsize 1206} 1207func sizeBoolValue(_ pointer, tagsize int) int { 1208 return 1 + tagsize 1209} 1210func sizeBoolValueNoZero(ptr pointer, tagsize int) int { 1211 v := *ptr.toBool() 1212 if !v { 1213 return 0 1214 } 1215 return 1 + tagsize 1216} 1217func sizeBoolPtr(ptr pointer, tagsize int) int { 1218 p := *ptr.toBoolPtr() 1219 if p == nil { 1220 return 0 1221 } 1222 return 1 + tagsize 1223} 1224func sizeBoolSlice(ptr pointer, tagsize int) int { 1225 s := *ptr.toBoolSlice() 1226 return (1 + tagsize) * len(s) 1227} 1228func sizeBoolPackedSlice(ptr pointer, tagsize int) int { 1229 s := *ptr.toBoolSlice() 1230 if len(s) == 0 { 1231 return 0 1232 } 1233 return len(s) + SizeVarint(uint64(len(s))) + tagsize 1234} 1235func sizeStringValue(ptr pointer, tagsize int) int { 1236 v := *ptr.toString() 1237 return len(v) + SizeVarint(uint64(len(v))) + tagsize 1238} 1239func sizeStringValueNoZero(ptr pointer, tagsize int) int { 1240 v := *ptr.toString() 1241 if v == "" { 1242 return 0 1243 } 1244 return len(v) + SizeVarint(uint64(len(v))) + tagsize 1245} 1246func sizeStringPtr(ptr pointer, tagsize int) int { 1247 p := *ptr.toStringPtr() 1248 if p == nil { 1249 return 0 1250 } 1251 v := *p 1252 return len(v) + SizeVarint(uint64(len(v))) + tagsize 1253} 1254func sizeStringSlice(ptr pointer, tagsize int) int { 1255 s := *ptr.toStringSlice() 1256 n := 0 1257 for _, v := range s { 1258 n += len(v) + SizeVarint(uint64(len(v))) + tagsize 1259 } 1260 return n 1261} 1262func sizeBytes(ptr pointer, tagsize int) int { 1263 v := *ptr.toBytes() 1264 if v == nil { 1265 return 0 1266 } 1267 return len(v) + SizeVarint(uint64(len(v))) + tagsize 1268} 1269func sizeBytes3(ptr pointer, tagsize int) int { 1270 v := *ptr.toBytes() 1271 if len(v) == 0 { 1272 return 0 1273 } 1274 return len(v) + SizeVarint(uint64(len(v))) + tagsize 1275} 1276func sizeBytesOneof(ptr pointer, tagsize int) int { 1277 v := *ptr.toBytes() 1278 return len(v) + SizeVarint(uint64(len(v))) + tagsize 1279} 1280func sizeBytesSlice(ptr pointer, tagsize int) int { 1281 s := *ptr.toBytesSlice() 1282 n := 0 1283 for _, v := range s { 1284 n += len(v) + SizeVarint(uint64(len(v))) + tagsize 1285 } 1286 return n 1287} 1288 1289// appendFixed32 appends an encoded fixed32 to b. 1290func appendFixed32(b []byte, v uint32) []byte { 1291 b = append(b, 1292 byte(v), 1293 byte(v>>8), 1294 byte(v>>16), 1295 byte(v>>24)) 1296 return b 1297} 1298 1299// appendFixed64 appends an encoded fixed64 to b. 1300func appendFixed64(b []byte, v uint64) []byte { 1301 b = append(b, 1302 byte(v), 1303 byte(v>>8), 1304 byte(v>>16), 1305 byte(v>>24), 1306 byte(v>>32), 1307 byte(v>>40), 1308 byte(v>>48), 1309 byte(v>>56)) 1310 return b 1311} 1312 1313// appendVarint appends an encoded varint to b. 
1314func appendVarint(b []byte, v uint64) []byte { 1315 // TODO: make 1-byte (maybe 2-byte) case inline-able, once we 1316 // have non-leaf inliner. 1317 switch { 1318 case v < 1<<7: 1319 b = append(b, byte(v)) 1320 case v < 1<<14: 1321 b = append(b, 1322 byte(v&0x7f|0x80), 1323 byte(v>>7)) 1324 case v < 1<<21: 1325 b = append(b, 1326 byte(v&0x7f|0x80), 1327 byte((v>>7)&0x7f|0x80), 1328 byte(v>>14)) 1329 case v < 1<<28: 1330 b = append(b, 1331 byte(v&0x7f|0x80), 1332 byte((v>>7)&0x7f|0x80), 1333 byte((v>>14)&0x7f|0x80), 1334 byte(v>>21)) 1335 case v < 1<<35: 1336 b = append(b, 1337 byte(v&0x7f|0x80), 1338 byte((v>>7)&0x7f|0x80), 1339 byte((v>>14)&0x7f|0x80), 1340 byte((v>>21)&0x7f|0x80), 1341 byte(v>>28)) 1342 case v < 1<<42: 1343 b = append(b, 1344 byte(v&0x7f|0x80), 1345 byte((v>>7)&0x7f|0x80), 1346 byte((v>>14)&0x7f|0x80), 1347 byte((v>>21)&0x7f|0x80), 1348 byte((v>>28)&0x7f|0x80), 1349 byte(v>>35)) 1350 case v < 1<<49: 1351 b = append(b, 1352 byte(v&0x7f|0x80), 1353 byte((v>>7)&0x7f|0x80), 1354 byte((v>>14)&0x7f|0x80), 1355 byte((v>>21)&0x7f|0x80), 1356 byte((v>>28)&0x7f|0x80), 1357 byte((v>>35)&0x7f|0x80), 1358 byte(v>>42)) 1359 case v < 1<<56: 1360 b = append(b, 1361 byte(v&0x7f|0x80), 1362 byte((v>>7)&0x7f|0x80), 1363 byte((v>>14)&0x7f|0x80), 1364 byte((v>>21)&0x7f|0x80), 1365 byte((v>>28)&0x7f|0x80), 1366 byte((v>>35)&0x7f|0x80), 1367 byte((v>>42)&0x7f|0x80), 1368 byte(v>>49)) 1369 case v < 1<<63: 1370 b = append(b, 1371 byte(v&0x7f|0x80), 1372 byte((v>>7)&0x7f|0x80), 1373 byte((v>>14)&0x7f|0x80), 1374 byte((v>>21)&0x7f|0x80), 1375 byte((v>>28)&0x7f|0x80), 1376 byte((v>>35)&0x7f|0x80), 1377 byte((v>>42)&0x7f|0x80), 1378 byte((v>>49)&0x7f|0x80), 1379 byte(v>>56)) 1380 default: 1381 b = append(b, 1382 byte(v&0x7f|0x80), 1383 byte((v>>7)&0x7f|0x80), 1384 byte((v>>14)&0x7f|0x80), 1385 byte((v>>21)&0x7f|0x80), 1386 byte((v>>28)&0x7f|0x80), 1387 byte((v>>35)&0x7f|0x80), 1388 byte((v>>42)&0x7f|0x80), 1389 byte((v>>49)&0x7f|0x80), 1390 byte((v>>56)&0x7f|0x80), 1391 1) 1392 } 1393 return b 1394} 1395 1396func appendFixed32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1397 v := *ptr.toUint32() 1398 b = appendVarint(b, wiretag) 1399 b = appendFixed32(b, v) 1400 return b, nil 1401} 1402func appendFixed32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1403 v := *ptr.toUint32() 1404 if v == 0 { 1405 return b, nil 1406 } 1407 b = appendVarint(b, wiretag) 1408 b = appendFixed32(b, v) 1409 return b, nil 1410} 1411func appendFixed32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1412 p := *ptr.toUint32Ptr() 1413 if p == nil { 1414 return b, nil 1415 } 1416 b = appendVarint(b, wiretag) 1417 b = appendFixed32(b, *p) 1418 return b, nil 1419} 1420func appendFixed32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1421 s := *ptr.toUint32Slice() 1422 for _, v := range s { 1423 b = appendVarint(b, wiretag) 1424 b = appendFixed32(b, v) 1425 } 1426 return b, nil 1427} 1428func appendFixed32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1429 s := *ptr.toUint32Slice() 1430 if len(s) == 0 { 1431 return b, nil 1432 } 1433 b = appendVarint(b, wiretag&^7|WireBytes) 1434 b = appendVarint(b, uint64(4*len(s))) 1435 for _, v := range s { 1436 b = appendFixed32(b, v) 1437 } 1438 return b, nil 1439} 1440func appendFixedS32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1441 v := *ptr.toInt32() 1442 b = appendVarint(b, wiretag) 1443 b = appendFixed32(b, uint32(v)) 1444 
return b, nil 1445} 1446func appendFixedS32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1447 v := *ptr.toInt32() 1448 if v == 0 { 1449 return b, nil 1450 } 1451 b = appendVarint(b, wiretag) 1452 b = appendFixed32(b, uint32(v)) 1453 return b, nil 1454} 1455func appendFixedS32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1456 p := ptr.getInt32Ptr() 1457 if p == nil { 1458 return b, nil 1459 } 1460 b = appendVarint(b, wiretag) 1461 b = appendFixed32(b, uint32(*p)) 1462 return b, nil 1463} 1464func appendFixedS32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1465 s := ptr.getInt32Slice() 1466 for _, v := range s { 1467 b = appendVarint(b, wiretag) 1468 b = appendFixed32(b, uint32(v)) 1469 } 1470 return b, nil 1471} 1472func appendFixedS32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1473 s := ptr.getInt32Slice() 1474 if len(s) == 0 { 1475 return b, nil 1476 } 1477 b = appendVarint(b, wiretag&^7|WireBytes) 1478 b = appendVarint(b, uint64(4*len(s))) 1479 for _, v := range s { 1480 b = appendFixed32(b, uint32(v)) 1481 } 1482 return b, nil 1483} 1484func appendFloat32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1485 v := math.Float32bits(*ptr.toFloat32()) 1486 b = appendVarint(b, wiretag) 1487 b = appendFixed32(b, v) 1488 return b, nil 1489} 1490func appendFloat32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1491 v := math.Float32bits(*ptr.toFloat32()) 1492 if v == 0 { 1493 return b, nil 1494 } 1495 b = appendVarint(b, wiretag) 1496 b = appendFixed32(b, v) 1497 return b, nil 1498} 1499func appendFloat32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1500 p := *ptr.toFloat32Ptr() 1501 if p == nil { 1502 return b, nil 1503 } 1504 b = appendVarint(b, wiretag) 1505 b = appendFixed32(b, math.Float32bits(*p)) 1506 return b, nil 1507} 1508func appendFloat32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1509 s := *ptr.toFloat32Slice() 1510 for _, v := range s { 1511 b = appendVarint(b, wiretag) 1512 b = appendFixed32(b, math.Float32bits(v)) 1513 } 1514 return b, nil 1515} 1516func appendFloat32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1517 s := *ptr.toFloat32Slice() 1518 if len(s) == 0 { 1519 return b, nil 1520 } 1521 b = appendVarint(b, wiretag&^7|WireBytes) 1522 b = appendVarint(b, uint64(4*len(s))) 1523 for _, v := range s { 1524 b = appendFixed32(b, math.Float32bits(v)) 1525 } 1526 return b, nil 1527} 1528func appendFixed64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1529 v := *ptr.toUint64() 1530 b = appendVarint(b, wiretag) 1531 b = appendFixed64(b, v) 1532 return b, nil 1533} 1534func appendFixed64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1535 v := *ptr.toUint64() 1536 if v == 0 { 1537 return b, nil 1538 } 1539 b = appendVarint(b, wiretag) 1540 b = appendFixed64(b, v) 1541 return b, nil 1542} 1543func appendFixed64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1544 p := *ptr.toUint64Ptr() 1545 if p == nil { 1546 return b, nil 1547 } 1548 b = appendVarint(b, wiretag) 1549 b = appendFixed64(b, *p) 1550 return b, nil 1551} 1552func appendFixed64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1553 s := *ptr.toUint64Slice() 1554 for _, v := range s { 1555 b = appendVarint(b, wiretag) 1556 b = appendFixed64(b, v) 1557 } 1558 return b, nil 1559} 1560func 
appendFixed64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1561 s := *ptr.toUint64Slice() 1562 if len(s) == 0 { 1563 return b, nil 1564 } 1565 b = appendVarint(b, wiretag&^7|WireBytes) 1566 b = appendVarint(b, uint64(8*len(s))) 1567 for _, v := range s { 1568 b = appendFixed64(b, v) 1569 } 1570 return b, nil 1571} 1572func appendFixedS64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1573 v := *ptr.toInt64() 1574 b = appendVarint(b, wiretag) 1575 b = appendFixed64(b, uint64(v)) 1576 return b, nil 1577} 1578func appendFixedS64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1579 v := *ptr.toInt64() 1580 if v == 0 { 1581 return b, nil 1582 } 1583 b = appendVarint(b, wiretag) 1584 b = appendFixed64(b, uint64(v)) 1585 return b, nil 1586} 1587func appendFixedS64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1588 p := *ptr.toInt64Ptr() 1589 if p == nil { 1590 return b, nil 1591 } 1592 b = appendVarint(b, wiretag) 1593 b = appendFixed64(b, uint64(*p)) 1594 return b, nil 1595} 1596func appendFixedS64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1597 s := *ptr.toInt64Slice() 1598 for _, v := range s { 1599 b = appendVarint(b, wiretag) 1600 b = appendFixed64(b, uint64(v)) 1601 } 1602 return b, nil 1603} 1604func appendFixedS64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1605 s := *ptr.toInt64Slice() 1606 if len(s) == 0 { 1607 return b, nil 1608 } 1609 b = appendVarint(b, wiretag&^7|WireBytes) 1610 b = appendVarint(b, uint64(8*len(s))) 1611 for _, v := range s { 1612 b = appendFixed64(b, uint64(v)) 1613 } 1614 return b, nil 1615} 1616func appendFloat64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1617 v := math.Float64bits(*ptr.toFloat64()) 1618 b = appendVarint(b, wiretag) 1619 b = appendFixed64(b, v) 1620 return b, nil 1621} 1622func appendFloat64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1623 v := math.Float64bits(*ptr.toFloat64()) 1624 if v == 0 { 1625 return b, nil 1626 } 1627 b = appendVarint(b, wiretag) 1628 b = appendFixed64(b, v) 1629 return b, nil 1630} 1631func appendFloat64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1632 p := *ptr.toFloat64Ptr() 1633 if p == nil { 1634 return b, nil 1635 } 1636 b = appendVarint(b, wiretag) 1637 b = appendFixed64(b, math.Float64bits(*p)) 1638 return b, nil 1639} 1640func appendFloat64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1641 s := *ptr.toFloat64Slice() 1642 for _, v := range s { 1643 b = appendVarint(b, wiretag) 1644 b = appendFixed64(b, math.Float64bits(v)) 1645 } 1646 return b, nil 1647} 1648func appendFloat64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1649 s := *ptr.toFloat64Slice() 1650 if len(s) == 0 { 1651 return b, nil 1652 } 1653 b = appendVarint(b, wiretag&^7|WireBytes) 1654 b = appendVarint(b, uint64(8*len(s))) 1655 for _, v := range s { 1656 b = appendFixed64(b, math.Float64bits(v)) 1657 } 1658 return b, nil 1659} 1660func appendVarint32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1661 v := *ptr.toUint32() 1662 b = appendVarint(b, wiretag) 1663 b = appendVarint(b, uint64(v)) 1664 return b, nil 1665} 1666func appendVarint32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1667 v := *ptr.toUint32() 1668 if v == 0 { 1669 return b, nil 1670 } 1671 b = appendVarint(b, wiretag) 1672 b = 
appendVarint(b, uint64(v)) 1673 return b, nil 1674} 1675func appendVarint32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1676 p := *ptr.toUint32Ptr() 1677 if p == nil { 1678 return b, nil 1679 } 1680 b = appendVarint(b, wiretag) 1681 b = appendVarint(b, uint64(*p)) 1682 return b, nil 1683} 1684func appendVarint32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1685 s := *ptr.toUint32Slice() 1686 for _, v := range s { 1687 b = appendVarint(b, wiretag) 1688 b = appendVarint(b, uint64(v)) 1689 } 1690 return b, nil 1691} 1692func appendVarint32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1693 s := *ptr.toUint32Slice() 1694 if len(s) == 0 { 1695 return b, nil 1696 } 1697 b = appendVarint(b, wiretag&^7|WireBytes) 1698 // compute size 1699 n := 0 1700 for _, v := range s { 1701 n += SizeVarint(uint64(v)) 1702 } 1703 b = appendVarint(b, uint64(n)) 1704 for _, v := range s { 1705 b = appendVarint(b, uint64(v)) 1706 } 1707 return b, nil 1708} 1709func appendVarintS32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1710 v := *ptr.toInt32() 1711 b = appendVarint(b, wiretag) 1712 b = appendVarint(b, uint64(v)) 1713 return b, nil 1714} 1715func appendVarintS32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1716 v := *ptr.toInt32() 1717 if v == 0 { 1718 return b, nil 1719 } 1720 b = appendVarint(b, wiretag) 1721 b = appendVarint(b, uint64(v)) 1722 return b, nil 1723} 1724func appendVarintS32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1725 p := ptr.getInt32Ptr() 1726 if p == nil { 1727 return b, nil 1728 } 1729 b = appendVarint(b, wiretag) 1730 b = appendVarint(b, uint64(*p)) 1731 return b, nil 1732} 1733func appendVarintS32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1734 s := ptr.getInt32Slice() 1735 for _, v := range s { 1736 b = appendVarint(b, wiretag) 1737 b = appendVarint(b, uint64(v)) 1738 } 1739 return b, nil 1740} 1741func appendVarintS32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1742 s := ptr.getInt32Slice() 1743 if len(s) == 0 { 1744 return b, nil 1745 } 1746 b = appendVarint(b, wiretag&^7|WireBytes) 1747 // compute size 1748 n := 0 1749 for _, v := range s { 1750 n += SizeVarint(uint64(v)) 1751 } 1752 b = appendVarint(b, uint64(n)) 1753 for _, v := range s { 1754 b = appendVarint(b, uint64(v)) 1755 } 1756 return b, nil 1757} 1758func appendVarint64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1759 v := *ptr.toUint64() 1760 b = appendVarint(b, wiretag) 1761 b = appendVarint(b, v) 1762 return b, nil 1763} 1764func appendVarint64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1765 v := *ptr.toUint64() 1766 if v == 0 { 1767 return b, nil 1768 } 1769 b = appendVarint(b, wiretag) 1770 b = appendVarint(b, v) 1771 return b, nil 1772} 1773func appendVarint64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1774 p := *ptr.toUint64Ptr() 1775 if p == nil { 1776 return b, nil 1777 } 1778 b = appendVarint(b, wiretag) 1779 b = appendVarint(b, *p) 1780 return b, nil 1781} 1782func appendVarint64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1783 s := *ptr.toUint64Slice() 1784 for _, v := range s { 1785 b = appendVarint(b, wiretag) 1786 b = appendVarint(b, v) 1787 } 1788 return b, nil 1789} 1790func appendVarint64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1791 s := 
*ptr.toUint64Slice() 1792 if len(s) == 0 { 1793 return b, nil 1794 } 1795 b = appendVarint(b, wiretag&^7|WireBytes) 1796 // compute size 1797 n := 0 1798 for _, v := range s { 1799 n += SizeVarint(v) 1800 } 1801 b = appendVarint(b, uint64(n)) 1802 for _, v := range s { 1803 b = appendVarint(b, v) 1804 } 1805 return b, nil 1806} 1807func appendVarintS64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1808 v := *ptr.toInt64() 1809 b = appendVarint(b, wiretag) 1810 b = appendVarint(b, uint64(v)) 1811 return b, nil 1812} 1813func appendVarintS64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1814 v := *ptr.toInt64() 1815 if v == 0 { 1816 return b, nil 1817 } 1818 b = appendVarint(b, wiretag) 1819 b = appendVarint(b, uint64(v)) 1820 return b, nil 1821} 1822func appendVarintS64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1823 p := *ptr.toInt64Ptr() 1824 if p == nil { 1825 return b, nil 1826 } 1827 b = appendVarint(b, wiretag) 1828 b = appendVarint(b, uint64(*p)) 1829 return b, nil 1830} 1831func appendVarintS64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1832 s := *ptr.toInt64Slice() 1833 for _, v := range s { 1834 b = appendVarint(b, wiretag) 1835 b = appendVarint(b, uint64(v)) 1836 } 1837 return b, nil 1838} 1839func appendVarintS64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1840 s := *ptr.toInt64Slice() 1841 if len(s) == 0 { 1842 return b, nil 1843 } 1844 b = appendVarint(b, wiretag&^7|WireBytes) 1845 // compute size 1846 n := 0 1847 for _, v := range s { 1848 n += SizeVarint(uint64(v)) 1849 } 1850 b = appendVarint(b, uint64(n)) 1851 for _, v := range s { 1852 b = appendVarint(b, uint64(v)) 1853 } 1854 return b, nil 1855} 1856func appendZigzag32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1857 v := *ptr.toInt32() 1858 b = appendVarint(b, wiretag) 1859 b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) 1860 return b, nil 1861} 1862func appendZigzag32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1863 v := *ptr.toInt32() 1864 if v == 0 { 1865 return b, nil 1866 } 1867 b = appendVarint(b, wiretag) 1868 b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) 1869 return b, nil 1870} 1871func appendZigzag32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1872 p := ptr.getInt32Ptr() 1873 if p == nil { 1874 return b, nil 1875 } 1876 b = appendVarint(b, wiretag) 1877 v := *p 1878 b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) 1879 return b, nil 1880} 1881func appendZigzag32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1882 s := ptr.getInt32Slice() 1883 for _, v := range s { 1884 b = appendVarint(b, wiretag) 1885 b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) 1886 } 1887 return b, nil 1888} 1889func appendZigzag32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1890 s := ptr.getInt32Slice() 1891 if len(s) == 0 { 1892 return b, nil 1893 } 1894 b = appendVarint(b, wiretag&^7|WireBytes) 1895 // compute size 1896 n := 0 1897 for _, v := range s { 1898 n += SizeVarint(uint64((uint32(v) << 1) ^ uint32((int32(v) >> 31)))) 1899 } 1900 b = appendVarint(b, uint64(n)) 1901 for _, v := range s { 1902 b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) 1903 } 1904 return b, nil 1905} 1906func appendZigzag64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 
1907 v := *ptr.toInt64() 1908 b = appendVarint(b, wiretag) 1909 b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) 1910 return b, nil 1911} 1912func appendZigzag64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1913 v := *ptr.toInt64() 1914 if v == 0 { 1915 return b, nil 1916 } 1917 b = appendVarint(b, wiretag) 1918 b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) 1919 return b, nil 1920} 1921func appendZigzag64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1922 p := *ptr.toInt64Ptr() 1923 if p == nil { 1924 return b, nil 1925 } 1926 b = appendVarint(b, wiretag) 1927 v := *p 1928 b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) 1929 return b, nil 1930} 1931func appendZigzag64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1932 s := *ptr.toInt64Slice() 1933 for _, v := range s { 1934 b = appendVarint(b, wiretag) 1935 b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) 1936 } 1937 return b, nil 1938} 1939func appendZigzag64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1940 s := *ptr.toInt64Slice() 1941 if len(s) == 0 { 1942 return b, nil 1943 } 1944 b = appendVarint(b, wiretag&^7|WireBytes) 1945 // compute size 1946 n := 0 1947 for _, v := range s { 1948 n += SizeVarint(uint64(v<<1) ^ uint64((int64(v) >> 63))) 1949 } 1950 b = appendVarint(b, uint64(n)) 1951 for _, v := range s { 1952 b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) 1953 } 1954 return b, nil 1955} 1956func appendBoolValue(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1957 v := *ptr.toBool() 1958 b = appendVarint(b, wiretag) 1959 if v { 1960 b = append(b, 1) 1961 } else { 1962 b = append(b, 0) 1963 } 1964 return b, nil 1965} 1966func appendBoolValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1967 v := *ptr.toBool() 1968 if !v { 1969 return b, nil 1970 } 1971 b = appendVarint(b, wiretag) 1972 b = append(b, 1) 1973 return b, nil 1974} 1975 1976func appendBoolPtr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1977 p := *ptr.toBoolPtr() 1978 if p == nil { 1979 return b, nil 1980 } 1981 b = appendVarint(b, wiretag) 1982 if *p { 1983 b = append(b, 1) 1984 } else { 1985 b = append(b, 0) 1986 } 1987 return b, nil 1988} 1989func appendBoolSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 1990 s := *ptr.toBoolSlice() 1991 for _, v := range s { 1992 b = appendVarint(b, wiretag) 1993 if v { 1994 b = append(b, 1) 1995 } else { 1996 b = append(b, 0) 1997 } 1998 } 1999 return b, nil 2000} 2001func appendBoolPackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 2002 s := *ptr.toBoolSlice() 2003 if len(s) == 0 { 2004 return b, nil 2005 } 2006 b = appendVarint(b, wiretag&^7|WireBytes) 2007 b = appendVarint(b, uint64(len(s))) 2008 for _, v := range s { 2009 if v { 2010 b = append(b, 1) 2011 } else { 2012 b = append(b, 0) 2013 } 2014 } 2015 return b, nil 2016} 2017func appendStringValue(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 2018 v := *ptr.toString() 2019 b = appendVarint(b, wiretag) 2020 b = appendVarint(b, uint64(len(v))) 2021 b = append(b, v...) 2022 return b, nil 2023} 2024func appendStringValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { 2025 v := *ptr.toString() 2026 if v == "" { 2027 return b, nil 2028 } 2029 b = appendVarint(b, wiretag) 2030 b = appendVarint(b, uint64(len(v))) 2031 b = append(b, v...) 

func appendStringValue(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) {
	v := *ptr.toString()
	b = appendVarint(b, wiretag)
	b = appendVarint(b, uint64(len(v)))
	b = append(b, v...)
	return b, nil
}
func appendStringValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) {
	v := *ptr.toString()
	if v == "" {
		return b, nil
	}
	b = appendVarint(b, wiretag)
	b = appendVarint(b, uint64(len(v)))
	b = append(b, v...)
	return b, nil
}
func appendStringPtr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) {
	p := *ptr.toStringPtr()
	if p == nil {
		return b, nil
	}
	v := *p
	b = appendVarint(b, wiretag)
	b = appendVarint(b, uint64(len(v)))
	b = append(b, v...)
	return b, nil
}
func appendStringSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) {
	s := *ptr.toStringSlice()
	for _, v := range s {
		b = appendVarint(b, wiretag)
		b = appendVarint(b, uint64(len(v)))
		b = append(b, v...)
	}
	return b, nil
}
func appendUTF8StringValue(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) {
	var invalidUTF8 bool
	v := *ptr.toString()
	if !utf8.ValidString(v) {
		invalidUTF8 = true
	}
	b = appendVarint(b, wiretag)
	b = appendVarint(b, uint64(len(v)))
	b = append(b, v...)
	if invalidUTF8 {
		return b, errInvalidUTF8
	}
	return b, nil
}
func appendUTF8StringValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) {
	var invalidUTF8 bool
	v := *ptr.toString()
	if v == "" {
		return b, nil
	}
	if !utf8.ValidString(v) {
		invalidUTF8 = true
	}
	b = appendVarint(b, wiretag)
	b = appendVarint(b, uint64(len(v)))
	b = append(b, v...)
	if invalidUTF8 {
		return b, errInvalidUTF8
	}
	return b, nil
}
func appendUTF8StringPtr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) {
	var invalidUTF8 bool
	p := *ptr.toStringPtr()
	if p == nil {
		return b, nil
	}
	v := *p
	if !utf8.ValidString(v) {
		invalidUTF8 = true
	}
	b = appendVarint(b, wiretag)
	b = appendVarint(b, uint64(len(v)))
	b = append(b, v...)
	if invalidUTF8 {
		return b, errInvalidUTF8
	}
	return b, nil
}
func appendUTF8StringSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) {
	var invalidUTF8 bool
	s := *ptr.toStringSlice()
	for _, v := range s {
		if !utf8.ValidString(v) {
			invalidUTF8 = true
		}
		b = appendVarint(b, wiretag)
		b = appendVarint(b, uint64(len(v)))
		b = append(b, v...)
	}
	if invalidUTF8 {
		return b, errInvalidUTF8
	}
	return b, nil
}
func appendBytes(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) {
	v := *ptr.toBytes()
	if v == nil {
		return b, nil
	}
	b = appendVarint(b, wiretag)
	b = appendVarint(b, uint64(len(v)))
	b = append(b, v...)
	return b, nil
}
func appendBytes3(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) {
	v := *ptr.toBytes()
	if len(v) == 0 {
		return b, nil
	}
	b = appendVarint(b, wiretag)
	b = appendVarint(b, uint64(len(v)))
	b = append(b, v...)
	return b, nil
}
func appendBytesOneof(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) {
	v := *ptr.toBytes()
	b = appendVarint(b, wiretag)
	b = appendVarint(b, uint64(len(v)))
	b = append(b, v...)
	return b, nil
}
func appendBytesSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) {
	s := *ptr.toBytesSlice()
	for _, v := range s {
		b = appendVarint(b, wiretag)
		b = appendVarint(b, uint64(len(v)))
		b = append(b, v...)
	}
	return b, nil
}
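
// String and bytes fields above share one layout: tag, byte-length varint,
// then the raw bytes. The appendUTF8String* variants additionally validate
// UTF-8, but note that they still write the invalid bytes and only report
// errInvalidUTF8 afterwards, so callers can treat the error as non-fatal.
// The helper below is only an illustrative sketch of that layout for a single
// string field; appendStringFieldExample is a hypothetical name, unused here.
func appendStringFieldExample(b []byte, fieldNum int, v string) []byte {
	b = appendVarint(b, uint64(fieldNum)<<3|WireBytes) // tag, wire type 2
	b = appendVarint(b, uint64(len(v)))                // length in bytes
	return append(b, v...)                             // raw UTF-8 bytes
}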

// makeGroupMarshaler returns the sizer and marshaler for a group.
// u is the marshal info of the underlying message.
func makeGroupMarshaler(u *marshalInfo) (sizer, marshaler) {
	return func(ptr pointer, tagsize int) int {
			p := ptr.getPointer()
			if p.isNil() {
				return 0
			}
			return u.size(p) + 2*tagsize
		},
		func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) {
			p := ptr.getPointer()
			if p.isNil() {
				return b, nil
			}
			var err error
			b = appendVarint(b, wiretag) // start group
			b, err = u.marshal(b, p, deterministic)
			b = appendVarint(b, wiretag+(WireEndGroup-WireStartGroup)) // end group
			return b, err
		}
}

// makeGroupSliceMarshaler returns the sizer and marshaler for a group slice.
// u is the marshal info of the underlying message.
func makeGroupSliceMarshaler(u *marshalInfo) (sizer, marshaler) {
	return func(ptr pointer, tagsize int) int {
			s := ptr.getPointerSlice()
			n := 0
			for _, v := range s {
				if v.isNil() {
					continue
				}
				n += u.size(v) + 2*tagsize
			}
			return n
		},
		func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) {
			s := ptr.getPointerSlice()
			var err error
			var nerr nonFatal
			for _, v := range s {
				if v.isNil() {
					return b, errRepeatedHasNil
				}
				b = appendVarint(b, wiretag) // start group
				b, err = u.marshal(b, v, deterministic)
				b = appendVarint(b, wiretag+(WireEndGroup-WireStartGroup)) // end group
				if !nerr.Merge(err) {
					if err == ErrNil {
						err = errRepeatedHasNil
					}
					return b, err
				}
			}
			return b, nerr.E
		}
}

// makeMessageMarshaler returns the sizer and marshaler for a message field.
// u is the marshal info of the message.
func makeMessageMarshaler(u *marshalInfo) (sizer, marshaler) {
	return func(ptr pointer, tagsize int) int {
			p := ptr.getPointer()
			if p.isNil() {
				return 0
			}
			siz := u.size(p)
			return siz + SizeVarint(uint64(siz)) + tagsize
		},
		func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) {
			p := ptr.getPointer()
			if p.isNil() {
				return b, nil
			}
			b = appendVarint(b, wiretag)
			siz := u.cachedsize(p)
			b = appendVarint(b, uint64(siz))
			return u.marshal(b, p, deterministic)
		}
}

// makeMessageSliceMarshaler returns the sizer and marshaler for a message slice.
// u is the marshal info of the message.
func makeMessageSliceMarshaler(u *marshalInfo) (sizer, marshaler) {
	return func(ptr pointer, tagsize int) int {
			s := ptr.getPointerSlice()
			n := 0
			for _, v := range s {
				if v.isNil() {
					continue
				}
				siz := u.size(v)
				n += siz + SizeVarint(uint64(siz)) + tagsize
			}
			return n
		},
		func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) {
			s := ptr.getPointerSlice()
			var err error
			var nerr nonFatal
			for _, v := range s {
				if v.isNil() {
					return b, errRepeatedHasNil
				}
				b = appendVarint(b, wiretag)
				siz := u.cachedsize(v)
				b = appendVarint(b, uint64(siz))
				b, err = u.marshal(b, v, deterministic)

				if !nerr.Merge(err) {
					if err == ErrNil {
						err = errRepeatedHasNil
					}
					return b, err
				}
			}
			return b, nerr.E
		}
}
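
// The two framings above differ only in how the nested payload is delimited:
// a message field is length-prefixed (tag, size varint, payload), while a
// (deprecated) group field is bracketed by start-group and end-group tags.
// The sketch below shows both framings for an already-encoded payload;
// frameMessageExample and frameGroupExample are hypothetical names used only
// for illustration, not by the marshaler tables.
func frameMessageExample(b []byte, fieldNum int, payload []byte) []byte {
	b = appendVarint(b, uint64(fieldNum)<<3|WireBytes)
	b = appendVarint(b, uint64(len(payload)))
	return append(b, payload...)
}

func frameGroupExample(b []byte, fieldNum int, payload []byte) []byte {
	b = appendVarint(b, uint64(fieldNum)<<3|WireStartGroup)
	b = append(b, payload...)
	return appendVarint(b, uint64(fieldNum)<<3|WireEndGroup)
}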

// makeMapMarshaler returns the sizer and marshaler for a map field.
// f is the pointer to the reflect data structure of the field.
func makeMapMarshaler(f *reflect.StructField) (sizer, marshaler) {
	// figure out key and value type
	t := f.Type
	keyType := t.Key()
	valType := t.Elem()
	keyTags := strings.Split(f.Tag.Get("protobuf_key"), ",")
	valTags := strings.Split(f.Tag.Get("protobuf_val"), ",")
	keySizer, keyMarshaler := typeMarshaler(keyType, keyTags, false, false) // don't omit zero value in map
	valSizer, valMarshaler := typeMarshaler(valType, valTags, false, false) // don't omit zero value in map
	keyWireTag := 1<<3 | wiretype(keyTags[0])
	valWireTag := 2<<3 | wiretype(valTags[0])

	// We create an interface to get the addresses of the map key and value.
	// If value is pointer-typed, the interface is a direct interface, the
	// idata itself is the value. Otherwise, the idata is the pointer to the
	// value.
	// Key cannot be pointer-typed.
	valIsPtr := valType.Kind() == reflect.Ptr

	// If value is a message with nested maps, calling
	// valSizer in marshal may be quadratic. We should use
	// cached version in marshal (but not in size).
	// If value is not message type, we don't have size cache,
	// but it cannot be nested either. Just use valSizer.
	valCachedSizer := valSizer
	if valIsPtr && valType.Elem().Kind() == reflect.Struct {
		u := getMarshalInfo(valType.Elem())
		valCachedSizer = func(ptr pointer, tagsize int) int {
			// Same as message sizer, but use cache.
			p := ptr.getPointer()
			if p.isNil() {
				return 0
			}
			siz := u.cachedsize(p)
			return siz + SizeVarint(uint64(siz)) + tagsize
		}
	}
	return func(ptr pointer, tagsize int) int {
			m := ptr.asPointerTo(t).Elem() // the map
			n := 0
			for _, k := range m.MapKeys() {
				ki := k.Interface()
				vi := m.MapIndex(k).Interface()
				kaddr := toAddrPointer(&ki, false, false)      // pointer to key
				vaddr := toAddrPointer(&vi, valIsPtr, false)   // pointer to value
				siz := keySizer(kaddr, 1) + valSizer(vaddr, 1) // tag of key = 1 (size=1), tag of val = 2 (size=1)
				n += siz + SizeVarint(uint64(siz)) + tagsize
			}
			return n
		},
		func(b []byte, ptr pointer, tag uint64, deterministic bool) ([]byte, error) {
			m := ptr.asPointerTo(t).Elem() // the map
			var err error
			keys := m.MapKeys()
			if len(keys) > 1 && deterministic {
				sort.Sort(mapKeys(keys))
			}

			var nerr nonFatal
			for _, k := range keys {
				ki := k.Interface()
				vi := m.MapIndex(k).Interface()
				kaddr := toAddrPointer(&ki, false, false)    // pointer to key
				vaddr := toAddrPointer(&vi, valIsPtr, false) // pointer to value
				b = appendVarint(b, tag)
				siz := keySizer(kaddr, 1) + valCachedSizer(vaddr, 1) // tag of key = 1 (size=1), tag of val = 2 (size=1)
				b = appendVarint(b, uint64(siz))
				b, err = keyMarshaler(b, kaddr, keyWireTag, deterministic)
				if !nerr.Merge(err) {
					return b, err
				}
				b, err = valMarshaler(b, vaddr, valWireTag, deterministic)
				if err != ErrNil && !nerr.Merge(err) { // allow nil value in map
					return b, err
				}
			}
			return b, nerr.E
		}
}
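
// On the wire each map entry is a small nested message whose key is field 1
// and whose value is field 2, which is exactly what the sizer and marshaler
// above compute. The sketch below hand-encodes one map<string,int64> entry;
// mapEntryExample is a hypothetical helper used only for illustration.
func mapEntryExample(b []byte, fieldNum int, key string, val int64) []byte {
	entry := appendVarint(nil, 1<<3|WireBytes) // key: field 1, wire type 2
	entry = appendVarint(entry, uint64(len(key)))
	entry = append(entry, key...)
	entry = appendVarint(entry, 2<<3|WireVarint) // value: field 2, varint
	entry = appendVarint(entry, uint64(val))

	b = appendVarint(b, uint64(fieldNum)<<3|WireBytes) // the map field itself
	b = appendVarint(b, uint64(len(entry)))
	return append(b, entry...)
}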

// makeOneOfMarshaler returns the sizer and marshaler for a oneof field.
// fi is the marshal info of the field.
// f is the pointer to the reflect data structure of the field.
func makeOneOfMarshaler(fi *marshalFieldInfo, f *reflect.StructField) (sizer, marshaler) {
	// Oneof field is an interface. We need to get the actual data type on the fly.
	t := f.Type
	return func(ptr pointer, _ int) int {
			p := ptr.getInterfacePointer()
			if p.isNil() {
				return 0
			}
			v := ptr.asPointerTo(t).Elem().Elem().Elem() // *interface -> interface -> *struct -> struct
			telem := v.Type()
			e := fi.oneofElems[telem]
			return e.sizer(p, e.tagsize)
		},
		func(b []byte, ptr pointer, _ uint64, deterministic bool) ([]byte, error) {
			p := ptr.getInterfacePointer()
			if p.isNil() {
				return b, nil
			}
			v := ptr.asPointerTo(t).Elem().Elem().Elem() // *interface -> interface -> *struct -> struct
			telem := v.Type()
			if telem.Field(0).Type.Kind() == reflect.Ptr && p.getPointer().isNil() {
				return b, errOneofHasNil
			}
			e := fi.oneofElems[telem]
			return e.marshaler(b, p, e.wiretag, deterministic)
		}
}

// sizeExtensions computes the size of encoded data for a XXX_InternalExtensions field.
func (u *marshalInfo) sizeExtensions(ext *XXX_InternalExtensions) int {
	m, mu := ext.extensionsRead()
	if m == nil {
		return 0
	}
	mu.Lock()

	n := 0
	for _, e := range m {
		if e.value == nil || e.desc == nil {
			// Extension is only in its encoded form.
			n += len(e.enc)
			continue
		}

		// We don't skip extensions that have an encoded form set,
		// because the extension value may have been mutated after
		// the last time this function was called.
		ei := u.getExtElemInfo(e.desc)
		v := e.value
		p := toAddrPointer(&v, ei.isptr, ei.deref)
		n += ei.sizer(p, ei.tagsize)
	}
	mu.Unlock()
	return n
}

// appendExtensions marshals a XXX_InternalExtensions field to the end of byte slice b.
func (u *marshalInfo) appendExtensions(b []byte, ext *XXX_InternalExtensions, deterministic bool) ([]byte, error) {
	m, mu := ext.extensionsRead()
	if m == nil {
		return b, nil
	}
	mu.Lock()
	defer mu.Unlock()

	var err error
	var nerr nonFatal

	// Fast-path for common cases: zero or one extensions.
	// Don't bother sorting the keys.
	if len(m) <= 1 {
		for _, e := range m {
			if e.value == nil || e.desc == nil {
				// Extension is only in its encoded form.
				b = append(b, e.enc...)
				continue
			}

			// We don't skip extensions that have an encoded form set,
			// because the extension value may have been mutated after
			// the last time this function was called.

			ei := u.getExtElemInfo(e.desc)
			v := e.value
			p := toAddrPointer(&v, ei.isptr, ei.deref)
			b, err = ei.marshaler(b, p, ei.wiretag, deterministic)
			if !nerr.Merge(err) {
				return b, err
			}
		}
		return b, nerr.E
	}

	// Sort the keys to provide a deterministic encoding.
	// Not sure this is required, but the old code does it.
	keys := make([]int, 0, len(m))
	for k := range m {
		keys = append(keys, int(k))
	}
	sort.Ints(keys)

	for _, k := range keys {
		e := m[int32(k)]
		if e.value == nil || e.desc == nil {
			// Extension is only in its encoded form.
			b = append(b, e.enc...)
			continue
		}

		// We don't skip extensions that have an encoded form set,
		// because the extension value may have been mutated after
		// the last time this function was called.

		ei := u.getExtElemInfo(e.desc)
		v := e.value
		p := toAddrPointer(&v, ei.isptr, ei.deref)
		b, err = ei.marshaler(b, p, ei.wiretag, deterministic)
		if !nerr.Merge(err) {
			return b, err
		}
	}
	return b, nerr.E
}

// message set format is:
//   message MessageSet {
//     repeated group Item = 1 {
//       required int32 type_id = 2;
//       required string message = 3;
//     };
//   }

// sizeMessageSet computes the size of encoded data for a XXX_InternalExtensions field
// in message set format (above).
func (u *marshalInfo) sizeMessageSet(ext *XXX_InternalExtensions) int {
	m, mu := ext.extensionsRead()
	if m == nil {
		return 0
	}
	mu.Lock()

	n := 0
	for id, e := range m {
		n += 2                          // start group, end group. tag = 1 (size=1)
		n += SizeVarint(uint64(id)) + 1 // type_id, tag = 2 (size=1)

		if e.value == nil || e.desc == nil {
			// Extension is only in its encoded form.
			msgWithLen := skipVarint(e.enc) // skip old tag, but leave the length varint
			siz := len(msgWithLen)
			n += siz + 1 // message, tag = 3 (size=1)
			continue
		}

		// We don't skip extensions that have an encoded form set,
		// because the extension value may have been mutated after
		// the last time this function was called.

		ei := u.getExtElemInfo(e.desc)
		v := e.value
		p := toAddrPointer(&v, ei.isptr, ei.deref)
		n += ei.sizer(p, 1) // message, tag = 3 (size=1)
	}
	mu.Unlock()
	return n
}
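
// Each message-set item therefore encodes as: a start-group tag for field 1,
// the extension's field number as type_id (field 2, varint), the extension
// payload as a length-delimited message (field 3), and an end-group tag.
// The sketch below frames an already-encoded extension payload that way;
// messageSetItemExample is a hypothetical helper, unused elsewhere in the
// package, shown only to make the framing concrete.
func messageSetItemExample(b []byte, typeID int32, msg []byte) []byte {
	b = append(b, 1<<3|WireStartGroup)
	b = append(b, 2<<3|WireVarint)
	b = appendVarint(b, uint64(typeID))
	b = append(b, 3<<3|WireBytes)
	b = appendVarint(b, uint64(len(msg)))
	b = append(b, msg...)
	return append(b, 1<<3|WireEndGroup)
}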

// appendMessageSet marshals a XXX_InternalExtensions field in message set format (above)
// to the end of byte slice b.
func (u *marshalInfo) appendMessageSet(b []byte, ext *XXX_InternalExtensions, deterministic bool) ([]byte, error) {
	m, mu := ext.extensionsRead()
	if m == nil {
		return b, nil
	}
	mu.Lock()
	defer mu.Unlock()

	var err error
	var nerr nonFatal

	// Fast-path for common cases: zero or one extensions.
	// Don't bother sorting the keys.
	if len(m) <= 1 {
		for id, e := range m {
			b = append(b, 1<<3|WireStartGroup)
			b = append(b, 2<<3|WireVarint)
			b = appendVarint(b, uint64(id))

			if e.value == nil || e.desc == nil {
				// Extension is only in its encoded form.
				msgWithLen := skipVarint(e.enc) // skip old tag, but leave the length varint
				b = append(b, 3<<3|WireBytes)
				b = append(b, msgWithLen...)
				b = append(b, 1<<3|WireEndGroup)
				continue
			}

			// We don't skip extensions that have an encoded form set,
			// because the extension value may have been mutated after
			// the last time this function was called.

			ei := u.getExtElemInfo(e.desc)
			v := e.value
			p := toAddrPointer(&v, ei.isptr, ei.deref)
			b, err = ei.marshaler(b, p, 3<<3|WireBytes, deterministic)
			if !nerr.Merge(err) {
				return b, err
			}
			b = append(b, 1<<3|WireEndGroup)
		}
		return b, nerr.E
	}

	// Sort the keys to provide a deterministic encoding.
	keys := make([]int, 0, len(m))
	for k := range m {
		keys = append(keys, int(k))
	}
	sort.Ints(keys)

	for _, id := range keys {
		e := m[int32(id)]
		b = append(b, 1<<3|WireStartGroup)
		b = append(b, 2<<3|WireVarint)
		b = appendVarint(b, uint64(id))

		if e.value == nil || e.desc == nil {
			// Extension is only in its encoded form.
			msgWithLen := skipVarint(e.enc) // skip old tag, but leave the length varint
			b = append(b, 3<<3|WireBytes)
			b = append(b, msgWithLen...)
			b = append(b, 1<<3|WireEndGroup)
			continue
		}

		// We don't skip extensions that have an encoded form set,
		// because the extension value may have been mutated after
		// the last time this function was called.

		ei := u.getExtElemInfo(e.desc)
		v := e.value
		p := toAddrPointer(&v, ei.isptr, ei.deref)
		b, err = ei.marshaler(b, p, 3<<3|WireBytes, deterministic)
		b = append(b, 1<<3|WireEndGroup)
		if !nerr.Merge(err) {
			return b, err
		}
	}
	return b, nerr.E
}

// sizeV1Extensions computes the size of encoded data for a V1-API extension field.
func (u *marshalInfo) sizeV1Extensions(m map[int32]Extension) int {
	if m == nil {
		return 0
	}

	n := 0
	for _, e := range m {
		if e.value == nil || e.desc == nil {
			// Extension is only in its encoded form.
			n += len(e.enc)
			continue
		}

		// We don't skip extensions that have an encoded form set,
		// because the extension value may have been mutated after
		// the last time this function was called.

		ei := u.getExtElemInfo(e.desc)
		v := e.value
		p := toAddrPointer(&v, ei.isptr, ei.deref)
		n += ei.sizer(p, ei.tagsize)
	}
	return n
}

// appendV1Extensions marshals a V1-API extension field to the end of byte slice b.
func (u *marshalInfo) appendV1Extensions(b []byte, m map[int32]Extension, deterministic bool) ([]byte, error) {
	if m == nil {
		return b, nil
	}

	// Sort the keys to provide a deterministic encoding.
	keys := make([]int, 0, len(m))
	for k := range m {
		keys = append(keys, int(k))
	}
	sort.Ints(keys)

	var err error
	var nerr nonFatal
	for _, k := range keys {
		e := m[int32(k)]
		if e.value == nil || e.desc == nil {
			// Extension is only in its encoded form.
			b = append(b, e.enc...)
			continue
		}

		// We don't skip extensions that have an encoded form set,
		// because the extension value may have been mutated after
		// the last time this function was called.

		ei := u.getExtElemInfo(e.desc)
		v := e.value
		p := toAddrPointer(&v, ei.isptr, ei.deref)
		b, err = ei.marshaler(b, p, ei.wiretag, deterministic)
		if !nerr.Merge(err) {
			return b, err
		}
	}
	return b, nerr.E
}

// newMarshaler is the interface representing objects that can marshal themselves.
//
// This exists to support protoc-gen-go generated messages.
// The proto package will stop type-asserting to this interface in the future.
//
// DO NOT DEPEND ON THIS.
type newMarshaler interface {
	XXX_Size() int
	XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
}

// Size returns the encoded size of a protocol buffer message.
// This is the main entry point.
func Size(pb Message) int {
	if m, ok := pb.(newMarshaler); ok {
		return m.XXX_Size()
	}
	if m, ok := pb.(Marshaler); ok {
		// If the message can marshal itself, let it do it, for compatibility.
		// NOTE: This is not efficient.
		b, _ := m.Marshal()
		return len(b)
	}
	// in case somehow we didn't generate the wrapper
	if pb == nil {
		return 0
	}
	var info InternalMessageInfo
	return info.Size(pb)
}

// Marshal takes a protocol buffer message
// and encodes it into the wire format, returning the data.
// This is the main entry point.
func Marshal(pb Message) ([]byte, error) {
	if m, ok := pb.(newMarshaler); ok {
		siz := m.XXX_Size()
		b := make([]byte, 0, siz)
		return m.XXX_Marshal(b, false)
	}
	if m, ok := pb.(Marshaler); ok {
		// If the message can marshal itself, let it do it, for compatibility.
		// NOTE: This is not efficient.
		return m.Marshal()
	}
	// in case somehow we didn't generate the wrapper
	if pb == nil {
		return nil, ErrNil
	}
	var info InternalMessageInfo
	siz := info.Size(pb)
	b := make([]byte, 0, siz)
	return info.Marshal(b, pb, false)
}

// Marshal takes a protocol buffer message
// and encodes it into the wire format, writing the result to the
// Buffer.
// This is an alternative entry point. It is not necessary to use
// a Buffer for most applications.
func (p *Buffer) Marshal(pb Message) error {
	var err error
	if m, ok := pb.(newMarshaler); ok {
		siz := m.XXX_Size()
		p.grow(siz) // make sure buf has enough capacity
		p.buf, err = m.XXX_Marshal(p.buf, p.deterministic)
		return err
	}
	if m, ok := pb.(Marshaler); ok {
		// If the message can marshal itself, let it do it, for compatibility.
		// NOTE: This is not efficient.
		b, err := m.Marshal()
		p.buf = append(p.buf, b...)
		return err
	}
	// in case somehow we didn't generate the wrapper
	if pb == nil {
		return ErrNil
	}
	var info InternalMessageInfo
	siz := info.Size(pb)
	p.grow(siz) // make sure buf has enough capacity
	p.buf, err = info.Marshal(p.buf, pb, p.deterministic)
	return err
}

// grow grows the buffer's capacity, if necessary, to guarantee space for
// another n bytes. After grow(n), at least n bytes can be written to the
// buffer without another allocation.
func (p *Buffer) grow(n int) {
	need := len(p.buf) + n
	if need <= cap(p.buf) {
		return
	}
	newCap := len(p.buf) * 2
	if newCap < need {
		newCap = need
	}
	p.buf = append(make([]byte, 0, newCap), p.buf...)
}
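
// marshalWithBufferExample is an illustrative sketch (a hypothetical helper,
// not called anywhere in this package) of the two public ways to encode a
// message defined above: the one-shot Marshal, and a reusable Buffer whose
// grow method doubles capacity so repeated encodings amortize allocations.
func marshalWithBufferExample(pb Message) ([]byte, []byte, error) {
	one, err := Marshal(pb) // one-shot: sizes the message, then encodes it
	if err != nil {
		return nil, nil, err
	}
	var buf Buffer // reusable: the buffer keeps its capacity across calls
	if err := buf.Marshal(pb); err != nil {
		return nil, nil, err
	}
	return one, buf.Bytes(), nil
}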