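What follows is the Builder from the FlatBuffers Go runtime (github.com/google/flatbuffers/go). The excerpt never shows the struct itself, so here is a reconstruction inferred from the field accesses below; the field order and comments are assumptions, while the offset types (UOffsetT, SOffsetT, VOffsetT) and the Write*/Get* helpers are defined in sibling files of the runtime:

package flatbuffers

// Builder is a state machine for creating FlatBuffer objects. It writes its
// buffer back-to-front, so data is prepended, never appended.
type Builder struct {
	// Bytes gives raw access to the buffer; written data lives in
	// Bytes[head:]. Most users will want FinishedBytes() instead.
	Bytes []byte

	minalign  int        // largest alignment seen so far
	vtable    []UOffsetT // in-progress vtable for the current object
	objectEnd UOffsetT   // offset where the current object's data began
	vtables   []UOffsetT // offsets of written vtables, for deduplication
	head      UOffsetT   // write cursor, measured from the left
	nested    bool       // inside a StartObject/StartVector pair?
	finished  bool       // has Finish() been called?
}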
// NewBuilder initializes a Builder of size initialSize; the internal buffer
// grows as needed.
func NewBuilder(initialSize int) *Builder {
	b := &Builder{}
	b.Bytes = make([]byte, initialSize)
	b.head = UOffsetT(initialSize)
	b.minalign = 1
	b.vtables = make([]UOffsetT, 0, 16) // sensible default capacity

	return b
}
// Reset truncates the underlying Builder buffer to allow alloc-free reuse,
// and clears all bookkeeping state.
func (b *Builder) Reset() {
	if b.Bytes != nil {
		b.Bytes = b.Bytes[:cap(b.Bytes)]
	}

	if b.vtables != nil {
		b.vtables = b.vtables[:0]
	}

	if b.vtable != nil {
		b.vtable = b.vtable[:0]
	}

	b.head = UOffsetT(len(b.Bytes))
	b.minalign = 1
	b.nested = false
	b.finished = false
}
// FinishedBytes returns the written data as a byte slice. It panics (via
// assertFinished) if Finish has not been called.
func (b *Builder) FinishedBytes() []byte {
	b.assertFinished()
	return b.Bytes[b.Head():]
}
// StartObject initializes bookkeeping for writing a new object (table).
func (b *Builder) StartObject(numfields int) {
	b.assertNotNested()
	b.nested = true

	// Reuse the vtable scratch space if it is large enough:
	if cap(b.vtable) < numfields || b.vtable == nil {
		b.vtable = make([]UOffsetT, numfields)
	} else {
		b.vtable = b.vtable[:numfields]
		for i := 0; i < len(b.vtable); i++ {
			b.vtable[i] = 0
		}
	}

	b.objectEnd = b.Offset()
	b.minalign = 1
}
// WriteVtable serializes the vtable for the current object. Before writing,
// it searches previously written vtables for an equal one and reuses it if
// found, since vtables are often identical across objects of the same type.
//
// A vtable is laid out as <vtable size in bytes> <object size in bytes>
// <one VOffsetT per field>, and every object begins with an SOffsetT that
// points back at its vtable.
func (b *Builder) WriteVtable() (n UOffsetT) {
	// Prepend a zero scalar to the object; it is patched below with the
	// offset to the object's vtable:
	b.PrependSOffsetT(0)

	objectOffset := b.Offset()
	existingVtable := UOffsetT(0)

	// Search backwards: similar vtables are likely to be recent.
	for i := len(b.vtables) - 1; i >= 0; i-- {
		// Locate the candidate vtable:
		vt2Offset := b.vtables[i]
		vt2Start := len(b.Bytes) - int(vt2Offset)
		vt2Len := GetVOffsetT(b.Bytes[vt2Start:])

		metadata := VtableMetadataFields * SizeVOffsetT
		vt2End := vt2Start + int(vt2Len)
		vt2 := b.Bytes[vt2Start+metadata : vt2End]

		// If it equals the vtable under construction, reuse it:
		if vtableEqual(b.vtable, objectOffset, vt2) {
			existingVtable = vt2Offset
			break
		}
	}

	if existingVtable == 0 {
		// No match: write out this vtable in reverse, since
		// serialization is last-first:
		for i := len(b.vtable) - 1; i >= 0; i-- {
			var off UOffsetT
			if b.vtable[i] != 0 {
				// Forward reference to the field:
				off = objectOffset - b.vtable[i]
			}
			b.PrependVOffsetT(VOffsetT(off))
		}

		// The two metadata fields are written last: first the object
		// size, then the vtable size:
		objectSize := objectOffset - b.objectEnd
		b.PrependVOffsetT(VOffsetT(objectSize))

		vBytes := (len(b.vtable) + VtableMetadataFields) * SizeVOffsetT
		b.PrependVOffsetT(VOffsetT(vBytes))

		// Patch the offset to the new vtable into the SOffsetT already
		// allocated at the start of this object:
		objectStart := SOffsetT(len(b.Bytes)) - SOffsetT(objectOffset)
		WriteSOffsetT(b.Bytes[objectStart:],
			SOffsetT(b.Offset())-SOffsetT(objectOffset))

		// Remember this vtable for future deduplication:
		b.vtables = append(b.vtables, b.Offset())
	} else {
		// Found a duplicate vtable; write the offset to it into the
		// SOffsetT already allocated at the start of this object:
		objectStart := SOffsetT(len(b.Bytes)) - SOffsetT(objectOffset)
		b.head = UOffsetT(objectStart)

		WriteSOffsetT(b.Bytes[b.head:],
			SOffsetT(existingVtable)-SOffsetT(objectOffset))
	}

	b.vtable = b.vtable[:0]
	return objectOffset
}
// EndObject writes data necessary to finish object construction.
func (b *Builder) EndObject() UOffsetT {
	b.assertNested()
	n := b.WriteVtable()
	b.nested = false
	return n
}
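EndObject funnels every table through WriteVtable, so two structurally identical tables end up sharing one vtable. A small sketch against the published Go runtime that makes the deduplication visible; the two-field layout and values are illustrative only:

package main

import (
	"fmt"

	flatbuffers "github.com/google/flatbuffers/go"
)

// twoFieldTable writes a table with two int32 fields and returns its offset.
func twoFieldTable(b *flatbuffers.Builder, x, y int32) flatbuffers.UOffsetT {
	b.StartObject(2)
	b.PrependInt32Slot(0, x, 0)
	b.PrependInt32Slot(1, y, 0)
	return b.EndObject()
}

func main() {
	b := flatbuffers.NewBuilder(0)

	twoFieldTable(b, 1, 2)
	afterFirst := b.Offset() // covers object bytes plus its vtable

	twoFieldTable(b, 3, 4)
	afterSecond := b.Offset()

	// The second table reuses the first one's vtable, so its marginal
	// cost is smaller than the first table's:
	fmt.Println(afterFirst, afterSecond-afterFirst)
}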
// growByteBuffer doubles the size of the byte slice and copies the old data
// towards the end of the new slice, since the buffer grows downwards.
func (b *Builder) growByteBuffer() {
	if (int64(len(b.Bytes)) & int64(0xC0000000)) != 0 {
		panic("cannot grow buffer beyond 2 gigabytes")
	}
	newLen := len(b.Bytes) * 2
	if newLen == 0 {
		newLen = 1
	}

	if cap(b.Bytes) >= newLen {
		b.Bytes = b.Bytes[:newLen]
	} else {
		extension := make([]byte, newLen-len(b.Bytes))
		b.Bytes = append(b.Bytes, extension...)
	}

	middle := newLen / 2
	copy(b.Bytes[middle:], b.Bytes[:middle])
}
// Head gives the start of useful data in the underlying byte buffer.
// Note: unlike the other offsets here, this value is measured from the left.
func (b *Builder) Head() UOffsetT {
	return b.head
}

// Offset returns the offset of the written data, relative to the end of the
// buffer.
func (b *Builder) Offset() UOffsetT {
	return UOffsetT(len(b.Bytes)) - b.head
}
// Pad places n zero bytes at the current offset.
func (b *Builder) Pad(n int) {
	for i := 0; i < n; i++ {
		b.PlaceByte(0)
	}
}
// Prep prepares to write an element of `size` bytes after `additionalBytes`
// more bytes have been written, inserting padding and growing the buffer as
// needed. For example, a string needs its int32 length field aligned to
// SizeInt32 with the character data following it directly. If all that is
// needed is alignment, additionalBytes is 0.
func (b *Builder) Prep(size, additionalBytes int) {
	// Track the largest alignment seen so far:
	if size > b.minalign {
		b.minalign = size
	}
	// Find the padding needed so that `size` is properly aligned after
	// `additionalBytes`; size must be a power of two:
	alignSize := (^(len(b.Bytes) - int(b.Head()) + additionalBytes)) + 1
	alignSize &= (size - 1)

	// Reallocate the buffer if needed:
	for int(b.head) <= alignSize+size+additionalBytes {
		oldBufSize := len(b.Bytes)
		b.growByteBuffer()
		b.head += UOffsetT(len(b.Bytes) - oldBufSize)
	}
	b.Pad(alignSize)
}
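The bit trick in Prep computes the padding (-(written + additionalBytes)) mod size for power-of-two sizes, since (^x)+1 is -x in two's complement. A standalone sketch of the same arithmetic; the function name and numbers are mine, not the library's:

package main

import "fmt"

// padding mirrors Prep's alignment arithmetic: negate the bytes written so
// far (plus the bytes about to be written), then reduce modulo the
// power-of-two size by masking with size-1.
func padding(bytesWritten, additionalBytes, size int) int {
	return ((^(bytesWritten + additionalBytes)) + 1) & (size - 1)
}

func main() {
	// After 5 bytes, a uint32 needs 3 bytes of padding; after 8, none.
	fmt.Println(padding(5, 0, 4), padding(8, 0, 4)) // prints: 3 0
}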
// PrependSOffsetT prepends an SOffsetT, relative to where it will be written.
func (b *Builder) PrependSOffsetT(off SOffsetT) {
	b.Prep(SizeSOffsetT, 0) // Ensure alignment is already done.
	if !(UOffsetT(off) <= b.Offset()) {
		panic("unreachable: off <= b.Offset()")
	}
	off2 := SOffsetT(b.Offset()) - off + SOffsetT(SizeSOffsetT)
	b.PlaceSOffsetT(off2)
}
// PrependUOffsetT prepends an UOffsetT, relative to where it will be written.
func (b *Builder) PrependUOffsetT(off UOffsetT) {
	b.Prep(SizeUOffsetT, 0) // Ensure alignment is already done.
	if !(off <= b.Offset()) {
		panic("unreachable: off <= b.Offset()")
	}
	off2 := b.Offset() - off + UOffsetT(SizeUOffsetT)
	b.PlaceUOffsetT(off2)
}
// StartVector initializes bookkeeping for writing a new vector.
//
// A vector is laid out as <UOffsetT: element count> followed by the elements
// themselves.
func (b *Builder) StartVector(elemSize, numElems, alignment int) UOffsetT {
	b.assertNotNested()
	b.nested = true
	b.Prep(SizeUint32, elemSize*numElems)
	b.Prep(alignment, elemSize*numElems) // In case alignment exceeds SizeUint32.
	return b.Offset()
}
// EndVector writes data necessary to finish vector construction.
func (b *Builder) EndVector(vectorNumElems int) UOffsetT {
	b.assertNested()

	// StartVector already reserved space for the length, so place it
	// directly without another Prep:
	b.PlaceUOffsetT(UOffsetT(vectorNumElems))

	b.nested = false
	return b.Offset()
}
// CreateString writes a null-terminated string as a vector.
func (b *Builder) CreateString(s string) UOffsetT {
	b.assertNotNested()
	b.nested = true

	b.Prep(int(SizeUOffsetT), (len(s)+1)*SizeByte)
	b.PlaceByte(0) // null terminator

	l := UOffsetT(len(s))

	b.head -= l
	copy(b.Bytes[b.head:b.head+l], s)

	return b.EndVector(len(s))
}
// CreateByteString writes a byte slice as a null-terminated string.
func (b *Builder) CreateByteString(s []byte) UOffsetT {
	b.assertNotNested()
	b.nested = true

	b.Prep(int(SizeUOffsetT), (len(s)+1)*SizeByte)
	b.PlaceByte(0) // null terminator

	l := UOffsetT(len(s))

	b.head -= l
	copy(b.Bytes[b.head:b.head+l], s)

	return b.EndVector(len(s))
}
// CreateByteVector writes a ubyte vector, with no null terminator.
func (b *Builder) CreateByteVector(v []byte) UOffsetT {
	b.assertNotNested()
	b.nested = true

	b.Prep(int(SizeUOffsetT), len(v)*SizeByte)

	l := UOffsetT(len(v))

	b.head -= l
	copy(b.Bytes[b.head:b.head+l], v)

	return b.EndVector(len(v))
}
func (b *Builder) assertNested() {
	// Data that belongs inside an object is being written outside of one;
	// call StartObject or StartVector first.
	if !b.nested {
		panic("Incorrect creation order: must be inside object.")
	}
}

func (b *Builder) assertNotNested() {
	// A Table/Vector/String is being constructed while its parent table
	// is still open. Create sub-objects before starting the parent:
	// storing them inline can overflow vtable offsets and duplicates
	// vtables.
	if b.nested {
		panic("Incorrect creation order: object must not be nested.")
	}
}

func (b *Builder) assertFinished() {
	// The buffer has not been finished yet; call Finish() with the root
	// table first (or read b.Bytes directly for an unfinished buffer).
	if !b.finished {
		panic("Incorrect use of FinishedBytes(): must call 'Finish' first.")
	}
}
// PrependBoolSlot prepends a bool onto the object at vtable slot `o`. If
// value `x` equals default `d`, the slot stays zero and nothing is written.
func (b *Builder) PrependBoolSlot(o int, x, d bool) {
	val := byte(0)
	if x {
		val = 1
	}
	def := byte(0)
	if d {
		def = 1
	}
	b.PrependByteSlot(o, val, def)
}
// Each of the following Prepend...Slot methods prepends a value of the named
// type onto the object at vtable slot `o`. If value `x` equals default `d`,
// the slot stays zero and nothing is written, so readers fall back to the
// schema default.

func (b *Builder) PrependByteSlot(o int, x, d byte) {
	if x != d {
		b.PrependByte(x)
		b.Slot(o)
	}
}

func (b *Builder) PrependUint8Slot(o int, x, d uint8) {
	if x != d {
		b.PrependUint8(x)
		b.Slot(o)
	}
}

func (b *Builder) PrependUint16Slot(o int, x, d uint16) {
	if x != d {
		b.PrependUint16(x)
		b.Slot(o)
	}
}

func (b *Builder) PrependUint32Slot(o int, x, d uint32) {
	if x != d {
		b.PrependUint32(x)
		b.Slot(o)
	}
}

func (b *Builder) PrependUint64Slot(o int, x, d uint64) {
	if x != d {
		b.PrependUint64(x)
		b.Slot(o)
	}
}

func (b *Builder) PrependInt8Slot(o int, x, d int8) {
	if x != d {
		b.PrependInt8(x)
		b.Slot(o)
	}
}

func (b *Builder) PrependInt16Slot(o int, x, d int16) {
	if x != d {
		b.PrependInt16(x)
		b.Slot(o)
	}
}

func (b *Builder) PrependInt32Slot(o int, x, d int32) {
	if x != d {
		b.PrependInt32(x)
		b.Slot(o)
	}
}

func (b *Builder) PrependInt64Slot(o int, x, d int64) {
	if x != d {
		b.PrependInt64(x)
		b.Slot(o)
	}
}

func (b *Builder) PrependFloat32Slot(o int, x, d float32) {
	if x != d {
		b.PrependFloat32(x)
		b.Slot(o)
	}
}

func (b *Builder) PrependFloat64Slot(o int, x, d float64) {
	if x != d {
		b.PrependFloat64(x)
		b.Slot(o)
	}
}

func (b *Builder) PrependUOffsetTSlot(o int, x, d UOffsetT) {
	if x != d {
		b.PrependUOffsetT(x)
		b.Slot(o)
	}
}
// PrependStructSlot prepends a struct onto the object at vtable slot
// `voffset`. Structs are stored inline, so nothing additional is written
// here; in generated code, `d` is always 0.
func (b *Builder) PrependStructSlot(voffset int, x, d UOffsetT) {
	if x != d {
		b.assertNested()
		if x != b.Offset() {
			panic("inline data write outside of object")
		}
		b.Slot(voffset)
	}
}
// Slot sets the vtable entry `slotnum` to the current offset in the buffer.
func (b *Builder) Slot(slotnum int) {
	b.vtable[slotnum] = UOffsetT(b.Offset())
}
// Finish finalizes the buffer, pointing it at the given root table.
func (b *Builder) Finish(rootTable UOffsetT) {
	b.assertNotNested()
	b.Prep(b.minalign, SizeUOffsetT)
	b.PrependUOffsetT(rootTable)
	b.finished = true
}
// vtableEqual compares an unwritten vtable to a written, serialized vtable.
func vtableEqual(a []UOffsetT, objectStart UOffsetT, b []byte) bool {
	if len(a)*SizeVOffsetT != len(b) {
		return false
	}

	for i := 0; i < len(a); i++ {
		x := GetVOffsetT(b[i*SizeVOffsetT : (i+1)*SizeVOffsetT])

		// Skip entries where both sides indicate a default value:
		if x == 0 && a[i] == 0 {
			continue
		}

		y := SOffsetT(objectStart) - SOffsetT(a[i])
		if SOffsetT(x) != y {
			return false
		}
	}
	return true
}
// Each of the following Prepend methods prepends a scalar of the named type
// to the Builder buffer, aligning and checking for space first.

func (b *Builder) PrependBool(x bool) {
	b.Prep(SizeBool, 0)
	b.PlaceBool(x)
}

func (b *Builder) PrependUint8(x uint8) {
	b.Prep(SizeUint8, 0)
	b.PlaceUint8(x)
}

func (b *Builder) PrependUint16(x uint16) {
	b.Prep(SizeUint16, 0)
	b.PlaceUint16(x)
}

func (b *Builder) PrependUint32(x uint32) {
	b.Prep(SizeUint32, 0)
	b.PlaceUint32(x)
}

func (b *Builder) PrependUint64(x uint64) {
	b.Prep(SizeUint64, 0)
	b.PlaceUint64(x)
}

func (b *Builder) PrependInt8(x int8) {
	b.Prep(SizeInt8, 0)
	b.PlaceInt8(x)
}

func (b *Builder) PrependInt16(x int16) {
	b.Prep(SizeInt16, 0)
	b.PlaceInt16(x)
}

func (b *Builder) PrependInt32(x int32) {
	b.Prep(SizeInt32, 0)
	b.PlaceInt32(x)
}

func (b *Builder) PrependInt64(x int64) {
	b.Prep(SizeInt64, 0)
	b.PlaceInt64(x)
}

func (b *Builder) PrependFloat32(x float32) {
	b.Prep(SizeFloat32, 0)
	b.PlaceFloat32(x)
}

func (b *Builder) PrependFloat64(x float64) {
	b.Prep(SizeFloat64, 0)
	b.PlaceFloat64(x)
}

func (b *Builder) PrependByte(x byte) {
	b.Prep(SizeByte, 0)
	b.PlaceByte(x)
}

func (b *Builder) PrependVOffsetT(x VOffsetT) {
	b.Prep(SizeVOffsetT, 0)
	b.PlaceVOffsetT(x)
}
// Each of the following Place methods prepends a scalar of the named type to
// the Builder buffer, without checking for space.

func (b *Builder) PlaceBool(x bool) {
	b.head -= UOffsetT(SizeBool)
	WriteBool(b.Bytes[b.head:], x)
}

func (b *Builder) PlaceUint8(x uint8) {
	b.head -= UOffsetT(SizeUint8)
	WriteUint8(b.Bytes[b.head:], x)
}

func (b *Builder) PlaceUint16(x uint16) {
	b.head -= UOffsetT(SizeUint16)
	WriteUint16(b.Bytes[b.head:], x)
}

func (b *Builder) PlaceUint32(x uint32) {
	b.head -= UOffsetT(SizeUint32)
	WriteUint32(b.Bytes[b.head:], x)
}

func (b *Builder) PlaceUint64(x uint64) {
	b.head -= UOffsetT(SizeUint64)
	WriteUint64(b.Bytes[b.head:], x)
}

func (b *Builder) PlaceInt8(x int8) {
	b.head -= UOffsetT(SizeInt8)
	WriteInt8(b.Bytes[b.head:], x)
}

func (b *Builder) PlaceInt16(x int16) {
	b.head -= UOffsetT(SizeInt16)
	WriteInt16(b.Bytes[b.head:], x)
}

func (b *Builder) PlaceInt32(x int32) {
	b.head -= UOffsetT(SizeInt32)
	WriteInt32(b.Bytes[b.head:], x)
}

func (b *Builder) PlaceInt64(x int64) {
	b.head -= UOffsetT(SizeInt64)
	WriteInt64(b.Bytes[b.head:], x)
}

func (b *Builder) PlaceFloat32(x float32) {
	b.head -= UOffsetT(SizeFloat32)
	WriteFloat32(b.Bytes[b.head:], x)
}

func (b *Builder) PlaceFloat64(x float64) {
	b.head -= UOffsetT(SizeFloat64)
	WriteFloat64(b.Bytes[b.head:], x)
}

func (b *Builder) PlaceByte(x byte) {
	b.head -= UOffsetT(SizeByte)
	WriteByte(b.Bytes[b.head:], x)
}

func (b *Builder) PlaceVOffsetT(x VOffsetT) {
	b.head -= UOffsetT(SizeVOffsetT)
	WriteVOffsetT(b.Bytes[b.head:], x)
}

func (b *Builder) PlaceSOffsetT(x SOffsetT) {
	b.head -= UOffsetT(SizeSOffsetT)
	WriteSOffsetT(b.Bytes[b.head:], x)
}

func (b *Builder) PlaceUOffsetT(x UOffsetT) {
	b.head -= UOffsetT(SizeUOffsetT)
	WriteUOffsetT(b.Bytes[b.head:], x)
}
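Taken together, a buffer is built leaves-first and sealed with Finish. A minimal end-to-end sketch against the published Go runtime; the slot numbers and values are illustrative, where real code would use flatc-generated builders:

package main

import (
	"fmt"

	flatbuffers "github.com/google/flatbuffers/go"
)

func main() {
	b := flatbuffers.NewBuilder(0)

	// Non-inline data (strings, vectors) must be created before the
	// object that references it:
	name := b.CreateString("hello")

	b.StartObject(2)                  // table with two vtable slots
	b.PrependUOffsetTSlot(0, name, 0) // slot 0: offset to the string
	b.PrependInt32Slot(1, 42, 0)      // slot 1: scalar, default 0
	root := b.EndObject()

	b.Finish(root)
	fmt.Printf("%d finished bytes\n", len(b.FinishedBytes()))
}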