# chrome_models/19/libtextclassifier3/Model_/EmbeddingPruningMask.py
# automatically generated by the FlatBuffers compiler, do not modify
# namespace: Model_
import flatbuffers
from flatbuffers.compat import import_numpy
np = import_numpy()
class EmbeddingPruningMask(object):
    __slots__ = ['_tab']

    @classmethod
    def GetRootAsEmbeddingPruningMask(cls, buf, offset):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = EmbeddingPruningMask()
        x.Init(buf, n + offset)
        return x

    @classmethod
    def EmbeddingPruningMaskBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x43\x32\x20", size_prefixed=size_prefixed)

    # EmbeddingPruningMask
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

    # EmbeddingPruningMask
    def Enabled(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
        return False

    # EmbeddingPruningMask
    def PruningMask(self, j):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            a = self._tab.Vector(o)
            return self._tab.Get(flatbuffers.number_types.Uint64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
        return 0

    # EmbeddingPruningMask
    def PruningMaskAsNumpy(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint64Flags, o)
        return 0

    # EmbeddingPruningMask
    def PruningMaskLength(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        if o != 0:
            return self._tab.VectorLen(o)
        return 0

    # EmbeddingPruningMask
    def PruningMaskIsNone(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
        return o == 0

    # EmbeddingPruningMask
    def FullNumBuckets(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
        return 0

    # EmbeddingPruningMask
    def PrunedRowBucketId(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
        return 0
def EmbeddingPruningMaskStart(builder): builder.StartObject(4)
def EmbeddingPruningMaskAddEnabled(builder, enabled): builder.PrependBoolSlot(0, enabled, 0)
def EmbeddingPruningMaskAddPruningMask(builder, pruningMask): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(pruningMask), 0)
def EmbeddingPruningMaskStartPruningMaskVector(builder, numElems): return builder.StartVector(8, numElems, 8)
def EmbeddingPruningMaskAddFullNumBuckets(builder, fullNumBuckets): builder.PrependInt32Slot(2, fullNumBuckets, 0)
def EmbeddingPruningMaskAddPrunedRowBucketId(builder, prunedRowBucketId): builder.PrependInt32Slot(3, prunedRowBucketId, 0)
def EmbeddingPruningMaskEnd(builder): return builder.EndObject()
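
# ---------------------------------------------------------------------------
# Usage sketch (not part of the flatc output above): builds a small
# EmbeddingPruningMask with the module-level helpers and reads it back through
# the class accessors. Field values are illustrative only, and it assumes a
# `flatbuffers` Python package that matches this generated 1.x-style API.
if __name__ == '__main__':
    builder = flatbuffers.Builder(0)

    # Vector elements must be prepended in reverse order.
    EmbeddingPruningMaskStartPruningMaskVector(builder, 2)
    builder.PrependUint64(0xFFFFFFFFFFFFFFFF)
    builder.PrependUint64(0x0000000000000001)
    # flatbuffers 1.x takes the element count here; 2.x+ uses EndVector().
    mask_vector = builder.EndVector(2)

    EmbeddingPruningMaskStart(builder)
    EmbeddingPruningMaskAddEnabled(builder, True)
    EmbeddingPruningMaskAddPruningMask(builder, mask_vector)
    EmbeddingPruningMaskAddFullNumBuckets(builder, 128)
    EmbeddingPruningMaskAddPrunedRowBucketId(builder, 0)
    builder.Finish(EmbeddingPruningMaskEnd(builder))

    mask = EmbeddingPruningMask.GetRootAsEmbeddingPruningMask(builder.Output(), 0)
    print(mask.Enabled(), mask.PruningMaskLength(), mask.FullNumBuckets())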